Commit c4db321b
Author: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date:   2024-01-04 10:27:18
Parent: a47375b

feat: add `None` default value to nullable response properties (#1043)
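
The change is mechanical: every nullable response property gains an explicit `= None` default. A minimal sketch of why this matters, assuming Pydantic v2 semantics and using hypothetical `Before`/`After` models that are not part of the SDK:

```python
# Sketch only (not from this commit): under Pydantic v2, an Optional
# annotation alone makes a field nullable but still *required*, so nullable
# response properties also need an explicit `None` default.
from typing import Optional

from pydantic import BaseModel, ValidationError


class Before(BaseModel):
    output: Optional[str]          # may be None, but must be provided


class After(BaseModel):
    output: Optional[str] = None   # may be None and may be omitted


try:
    Before()                       # raises: Field required
except ValidationError as exc:
    print(exc)

print(After())                     # output=None
```
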
src/openai/types/beta/threads/runs/function_tool_call.py
@@ -15,7 +15,7 @@ class Function(BaseModel):
     name: str
     """The name of the function."""
 
-    output: Optional[str]
+    output: Optional[str] = None
     """The output of the function.
 
     This will be `null` if the outputs have not been
src/openai/types/beta/threads/runs/run_step.py
@@ -33,31 +33,31 @@ class RunStep(BaseModel):
     associated with the run step.
     """
 
-    cancelled_at: Optional[int]
+    cancelled_at: Optional[int] = None
     """The Unix timestamp (in seconds) for when the run step was cancelled."""
 
-    completed_at: Optional[int]
+    completed_at: Optional[int] = None
     """The Unix timestamp (in seconds) for when the run step completed."""
 
     created_at: int
     """The Unix timestamp (in seconds) for when the run step was created."""
 
-    expired_at: Optional[int]
+    expired_at: Optional[int] = None
     """The Unix timestamp (in seconds) for when the run step expired.
 
     A step is considered expired if the parent run is expired.
     """
 
-    failed_at: Optional[int]
+    failed_at: Optional[int] = None
     """The Unix timestamp (in seconds) for when the run step failed."""
 
-    last_error: Optional[LastError]
+    last_error: Optional[LastError] = None
     """The last error associated with this run step.
 
     Will be `null` if there are no errors.
     """
 
-    metadata: Optional[builtins.object]
+    metadata: Optional[builtins.object] = None
     """Set of 16 key-value pairs that can be attached to an object.
 
     This can be useful for storing additional information about the object in a
src/openai/types/beta/threads/run.py
@@ -72,10 +72,10 @@ class Run(BaseModel):
     execution of this run.
     """
 
-    cancelled_at: Optional[int]
+    cancelled_at: Optional[int] = None
     """The Unix timestamp (in seconds) for when the run was cancelled."""
 
-    completed_at: Optional[int]
+    completed_at: Optional[int] = None
     """The Unix timestamp (in seconds) for when the run was completed."""
 
     created_at: int
@@ -84,7 +84,7 @@ class Run(BaseModel):
     expires_at: int
     """The Unix timestamp (in seconds) for when the run will expire."""
 
-    failed_at: Optional[int]
+    failed_at: Optional[int] = None
     """The Unix timestamp (in seconds) for when the run failed."""
 
     file_ids: List[str]
@@ -101,10 +101,10 @@ class Run(BaseModel):
     this run.
     """
 
-    last_error: Optional[LastError]
+    last_error: Optional[LastError] = None
     """The last error associated with this run. Will be `null` if there are no errors."""
 
-    metadata: Optional[builtins.object]
+    metadata: Optional[builtins.object] = None
     """Set of 16 key-value pairs that can be attached to an object.
 
     This can be useful for storing additional information about the object in a
@@ -122,13 +122,13 @@ class Run(BaseModel):
     object: Literal["thread.run"]
     """The object type, which is always `thread.run`."""
 
-    required_action: Optional[RequiredAction]
+    required_action: Optional[RequiredAction] = None
     """Details on the action required to continue the run.
 
     Will be `null` if no action is required.
     """
 
-    started_at: Optional[int]
+    started_at: Optional[int] = None
     """The Unix timestamp (in seconds) for when the run was started."""
 
     status: Literal[
src/openai/types/beta/threads/thread_message.py
@@ -17,7 +17,7 @@ class ThreadMessage(BaseModel):
     id: str
     """The identifier, which can be referenced in API endpoints."""
 
-    assistant_id: Optional[str]
+    assistant_id: Optional[str] = None
     """
     If applicable, the ID of the
     [assistant](https://platform.openai.com/docs/api-reference/assistants) that
@@ -37,7 +37,7 @@ class ThreadMessage(BaseModel):
     that can access files. A maximum of 10 files can be attached to a message.
     """
 
-    metadata: Optional[builtins.object]
+    metadata: Optional[builtins.object] = None
     """Set of 16 key-value pairs that can be attached to an object.
 
     This can be useful for storing additional information about the object in a
@@ -51,7 +51,7 @@ class ThreadMessage(BaseModel):
     role: Literal["user", "assistant"]
     """The entity that produced the message. One of `user` or `assistant`."""
 
-    run_id: Optional[str]
+    run_id: Optional[str] = None
     """
     If applicable, the ID of the
     [run](https://platform.openai.com/docs/api-reference/runs) associated with the
src/openai/types/beta/assistant.py
@@ -37,7 +37,7 @@ class Assistant(BaseModel):
     created_at: int
     """The Unix timestamp (in seconds) for when the assistant was created."""
 
-    description: Optional[str]
+    description: Optional[str] = None
     """The description of the assistant. The maximum length is 512 characters."""
 
     file_ids: List[str]
@@ -47,13 +47,13 @@ class Assistant(BaseModel):
     assistant. Files are ordered by their creation date in ascending order.
     """
 
-    instructions: Optional[str]
+    instructions: Optional[str] = None
     """The system instructions that the assistant uses.
 
     The maximum length is 32768 characters.
     """
 
-    metadata: Optional[builtins.object]
+    metadata: Optional[builtins.object] = None
     """Set of 16 key-value pairs that can be attached to an object.
 
     This can be useful for storing additional information about the object in a
@@ -71,7 +71,7 @@ class Assistant(BaseModel):
     descriptions of them.
     """
 
-    name: Optional[str]
+    name: Optional[str] = None
     """The name of the assistant. The maximum length is 256 characters."""
 
     object: Literal["assistant"]
src/openai/types/beta/thread.py
@@ -16,7 +16,7 @@ class Thread(BaseModel):
     created_at: int
     """The Unix timestamp (in seconds) for when the thread was created."""
 
-    metadata: Optional[builtins.object]
+    metadata: Optional[builtins.object] = None
     """Set of 16 key-value pairs that can be attached to an object.
 
     This can be useful for storing additional information about the object in a
src/openai/types/chat/chat_completion.py
@@ -12,7 +12,7 @@ __all__ = ["ChatCompletion", "Choice", "ChoiceLogprobs"]
 
 
 class ChoiceLogprobs(BaseModel):
-    content: Optional[List[ChatCompletionTokenLogprob]]
+    content: Optional[List[ChatCompletionTokenLogprob]] = None
     """A list of message content tokens with log probability information."""
 
 
@@ -30,7 +30,7 @@ class Choice(BaseModel):
     index: int
     """The index of the choice in the list of choices."""
 
-    logprobs: Optional[ChoiceLogprobs]
+    logprobs: Optional[ChoiceLogprobs] = None
     """Log probability information for the choice."""
 
     message: ChatCompletionMessage
src/openai/types/chat/chat_completion_chunk.py
@@ -73,7 +73,7 @@ class ChoiceDelta(BaseModel):
 
 
 class ChoiceLogprobs(BaseModel):
-    content: Optional[List[ChatCompletionTokenLogprob]]
+    content: Optional[List[ChatCompletionTokenLogprob]] = None
     """A list of message content tokens with log probability information."""
 
 
@@ -81,7 +81,7 @@ class Choice(BaseModel):
     delta: ChoiceDelta
     """A chat completion delta generated by streamed model responses."""
 
-    finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter", "function_call"]]
+    finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter", "function_call"]] = None
     """The reason the model stopped generating tokens.
 
     This will be `stop` if the model hit a natural stop point or a provided stop
src/openai/types/chat/chat_completion_message.py
@@ -23,7 +23,7 @@ class FunctionCall(BaseModel):
 
 
 class ChatCompletionMessage(BaseModel):
-    content: Optional[str]
+    content: Optional[str] = None
     """The contents of the message."""
 
     role: Literal["assistant"]
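
The two hunks above surface most visibly when streaming: every chunk but the last arrives with `finish_reason=None`, and an assistant message that only carries tool calls has `content=None`. A usage sketch against the public client (model name and prompt are placeholder assumptions; `OPENAI_API_KEY` is read from the environment):

```python
from openai import OpenAI

client = OpenAI()

stream = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say hello"}],
    stream=True,
)

# Every chunk except the final one arrives with finish_reason == None,
# which is exactly what the new `= None` default mirrors on the model.
for chunk in stream:
    choice = chunk.choices[0]
    if choice.delta.content:
        print(choice.delta.content, end="")
    if choice.finish_reason is not None:
        print(f"\n[finish_reason={choice.finish_reason}]")
```
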
src/openai/types/chat/chat_completion_token_logprob.py
@@ -11,7 +11,7 @@ class TopLogprob(BaseModel):
     token: str
     """The token."""
 
-    bytes: Optional[List[int]]
+    bytes: Optional[List[int]] = None
     """A list of integers representing the UTF-8 bytes representation of the token.
 
     Useful in instances where characters are represented by multiple tokens and
@@ -27,7 +27,7 @@ class ChatCompletionTokenLogprob(BaseModel):
     token: str
     """The token."""
 
-    bytes: Optional[List[int]]
+    bytes: Optional[List[int]] = None
     """A list of integers representing the UTF-8 bytes representation of the token.
 
     Useful in instances where characters are represented by multiple tokens and
src/openai/types/fine_tuning/fine_tuning_job.py
@@ -15,7 +15,7 @@ class Error(BaseModel):
     message: str
     """A human-readable error message."""
 
-    param: Optional[str]
+    param: Optional[str] = None
     """The parameter that was invalid, usually `training_file` or `validation_file`.
 
     This field will be null if the failure was not parameter-specific.
@@ -39,19 +39,19 @@ class FineTuningJob(BaseModel):
     created_at: int
     """The Unix timestamp (in seconds) for when the fine-tuning job was created."""
 
-    error: Optional[Error]
+    error: Optional[Error] = None
     """
     For fine-tuning jobs that have `failed`, this will contain more information on
     the cause of the failure.
     """
 
-    fine_tuned_model: Optional[str]
+    fine_tuned_model: Optional[str] = None
     """The name of the fine-tuned model that is being created.
 
     The value will be null if the fine-tuning job is still running.
     """
 
-    finished_at: Optional[int]
+    finished_at: Optional[int] = None
     """The Unix timestamp (in seconds) for when the fine-tuning job was finished.
 
     The value will be null if the fine-tuning job is still running.
@@ -86,7 +86,7 @@ class FineTuningJob(BaseModel):
     `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`.
     """
 
-    trained_tokens: Optional[int]
+    trained_tokens: Optional[int] = None
     """The total number of billable tokens processed by this fine-tuning job.
 
     The value will be null if the fine-tuning job is still running.
@@ -99,7 +99,7 @@ class FineTuningJob(BaseModel):
     [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents).
     """
 
-    validation_file: Optional[str]
+    validation_file: Optional[str] = None
     """The file ID used for validation.
 
     You can retrieve the validation results with the
src/openai/types/completion_choice.py
@@ -30,6 +30,6 @@ class CompletionChoice(BaseModel):
 
     index: int
 
-    logprobs: Optional[Logprobs]
+    logprobs: Optional[Logprobs] = None
 
     text: str
src/openai/types/fine_tune.py
@@ -50,7 +50,7 @@ class FineTune(BaseModel):
     created_at: int
     """The Unix timestamp (in seconds) for when the fine-tuning job was created."""
 
-    fine_tuned_model: Optional[str]
+    fine_tuned_model: Optional[str] = None
     """The name of the fine-tuned model that is being created."""
 
     hyperparams: Hyperparams
src/openai/_exceptions.py
@@ -40,8 +40,8 @@ class APIError(OpenAIError):
     If there was no response associated with this error then it will be `None`.
     """
 
-    code: Optional[str]
-    param: Optional[str]
+    code: Optional[str] = None
+    param: Optional[str] = None
     type: Optional[str]
 
     def __init__(self, message: str, request: httpx.Request, *, body: object | None) -> None:
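
The same applies when constructing response models by hand (for example in tests): nullable fields can now simply be omitted. A minimal sketch, assuming the remaining optional fields of `ChatCompletionMessage` already default to `None`:

```python
from openai.types.chat import ChatCompletionMessage

# Previously `content` had to be passed explicitly (even as None); with the
# new default, a content-less assistant message constructs without it.
msg = ChatCompletionMessage(role="assistant")
print(msg.content)  # None
```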