from __future__ import annotations

import rich
from pydantic import BaseModel

import openai
from openai import OpenAI


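# `pydantic_function_tool()` below turns this model's JSON schema into the
# tool's `parameters`, and the streaming helper parses the returned arguments
# back into `GetWeather` instances.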
class GetWeather(BaseModel):
    city: str
    country: str


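# The client reads the API key from the OPENAI_API_KEY environment variable by default.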
client = OpenAI()


with client.chat.completions.stream(
    model="gpt-4o-2024-08-06",
    messages=[
        {
            "role": "user",
            "content": "What's the weather like in SF and New York?",
        },
    ],
    tools=[
        # because we pass a Pydantic model via `pydantic_function_tool()` and
        # stream with `.stream()`, the returned tool call arguments are
        # automatically parsed into this `GetWeather` type
        openai.pydantic_function_tool(GetWeather, name="get_weather"),
    ],
    parallel_tool_calls=True,
) as stream:
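    # Iterating the stream yields typed events; we only print the incremental
    # tool-call argument deltas and the final `...arguments.done` event.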
    for event in stream:
        if event.type in ("tool_calls.function.arguments.delta", "tool_calls.function.arguments.done"):
            rich.get_console().print(event, width=80)

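# After the stream is exhausted, the fully accumulated completion (including
# the parsed tool calls) is available via `get_final_completion()`.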
print("----\n")
rich.print(stream.get_final_completion())
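
# A rough sketch of reading the parsed arguments back out, assuming the final
# completion only contains `get_weather` function tool calls; the structured
# output helpers attach the validated model as `parsed_arguments`.
completion = stream.get_final_completion()
for tool_call in completion.choices[0].message.tool_calls or []:
    rich.print(tool_call.function.parsed_arguments)  # a `GetWeather` instance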