1# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
3from typing import List, Optional
4from typing_extensions import Literal
5
6from .._models import BaseModel
7from .batch_error import BatchError
8from .batch_usage import BatchUsage
9from .shared.metadata import Metadata
10from .batch_request_counts import BatchRequestCounts
11
12__all__ = ["Batch", "Errors"]
13
14
class Errors(BaseModel):
    """The collection of errors reported for a batch, shaped as a list object."""

    data: Optional[List[BatchError]] = None
    """The individual batch error entries, when any are present."""

    object: Optional[str] = None
    """The object type, which is always `list`."""
21
class Batch(BaseModel):
    """A batch of API requests and its lifecycle state, file references, and counts."""

    id: str

    completion_window: str
    """The time frame within which the batch should be processed."""

    created_at: int
    """The Unix timestamp (in seconds) for when the batch was created."""

    endpoint: str
    """The OpenAI API endpoint used by the batch."""

    input_file_id: str
    """The ID of the input file for the batch."""

    object: Literal["batch"]
    """The object type, which is always `batch`."""

    status: Literal[
        "validating",
        "failed",
        "in_progress",
        "finalizing",
        "completed",
        "expired",
        "cancelling",
        "cancelled",
    ]
    """The current status of the batch."""

    cancelled_at: Optional[int] = None
    """The Unix timestamp (in seconds) for when the batch was cancelled."""

    cancelling_at: Optional[int] = None
    """The Unix timestamp (in seconds) for when the batch started cancelling."""

    completed_at: Optional[int] = None
    """The Unix timestamp (in seconds) for when the batch was completed."""

    error_file_id: Optional[str] = None
    """The ID of the file containing the outputs of requests with errors."""

    errors: Optional[Errors] = None
    """The list of errors attached to this batch, if any."""

    expired_at: Optional[int] = None
    """The Unix timestamp (in seconds) for when the batch expired."""

    expires_at: Optional[int] = None
    """The Unix timestamp (in seconds) for when the batch will expire."""

    failed_at: Optional[int] = None
    """The Unix timestamp (in seconds) for when the batch failed."""

    finalizing_at: Optional[int] = None
    """The Unix timestamp (in seconds) for when the batch started finalizing."""

    in_progress_at: Optional[int] = None
    """The Unix timestamp (in seconds) for when the batch started processing."""

    metadata: Optional[Metadata] = None
    """Set of 16 key-value pairs that can be attached to an object.

    This can be useful for storing additional information about the object in a
    structured format, and querying for objects via API or the dashboard.

    Keys are strings with a maximum length of 64 characters. Values are strings with
    a maximum length of 512 characters.
    """

    model: Optional[str] = None
    """Model ID used to process the batch, like `gpt-5-2025-08-07`.

    OpenAI offers a wide range of models with different capabilities, performance
    characteristics, and price points. Refer to the
    [model guide](https://platform.openai.com/docs/models) to browse and compare
    available models.
    """

    output_file_id: Optional[str] = None
    """The ID of the file containing the outputs of successfully executed requests."""

    request_counts: Optional[BatchRequestCounts] = None
    """The request counts for different statuses within the batch."""

    usage: Optional[BatchUsage] = None
    """
    Represents token usage details including input tokens, output tokens, a
    breakdown of output tokens, and the total tokens used. Only populated on batches
    created after September 7, 2025.
    """