@@ -103,7 +103,6 @@ def parse(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         safety_identifier: str | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
         service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
@@ -204,7 +203,6 @@ def parser(raw_completion: ChatCompletion) -> ParsedChatCompletion[ResponseForma
                     "prompt_cache_key": prompt_cache_key,
                     "reasoning_effort": reasoning_effort,
                     "response_format": _type_to_response_format(response_format),
-                    "text": text,
                     "safety_identifier": safety_identifier,
                     "seed": seed,
                     "service_tier": service_tier,
@@ -267,7 +265,6 @@ def create(
         stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
         tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -459,7 +456,7 @@ def create(
               our [model distillation](https://platform.openai.com/docs/guides/distillation)
               or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-              Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+              Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
           stream: If set to true, the model response data will be streamed to the client as it is
               generated using
@@ -556,7 +553,6 @@ def create(
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
         tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -757,7 +753,7 @@ def create(
               our [model distillation](https://platform.openai.com/docs/guides/distillation)
               or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-              Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+              Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
           stream_options: Options for streaming response. Only set this when you set `stream: true`.
 
@@ -845,7 +841,6 @@ def create(
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
         tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -1046,7 +1041,7 @@ def create(
               our [model distillation](https://platform.openai.com/docs/guides/distillation)
               or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-              Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+              Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
           stream_options: Options for streaming response. Only set this when you set `stream: true`.
 
@@ -1134,7 +1129,6 @@ def create(
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
         tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -1181,7 +1175,6 @@ def create(
                     "stream": stream,
                     "stream_options": stream_options,
                     "temperature": temperature,
-                    "text": text,
                     "tool_choice": tool_choice,
                     "tools": tools,
                     "top_logprobs": top_logprobs,
@@ -1404,7 +1397,6 @@ def stream(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         safety_identifier: str | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
         service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
@@ -1475,7 +1467,6 @@ def stream(
             presence_penalty=presence_penalty,
             prompt_cache_key=prompt_cache_key,
             reasoning_effort=reasoning_effort,
-            text=text,
             safety_identifier=safety_identifier,
             seed=seed,
             service_tier=service_tier,
@@ -1548,7 +1539,6 @@ async def parse(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         safety_identifier: str | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
         service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
@@ -1649,7 +1639,6 @@ def parser(raw_completion: ChatCompletion) -> ParsedChatCompletion[ResponseForma
                     "prompt_cache_key": prompt_cache_key,
                     "reasoning_effort": reasoning_effort,
                     "response_format": _type_to_response_format(response_format),
-                    "text": text,
                     "safety_identifier": safety_identifier,
                     "seed": seed,
                     "service_tier": service_tier,
@@ -1712,7 +1701,6 @@ async def create(
         stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
         tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -1904,7 +1892,7 @@ async def create(
               our [model distillation](https://platform.openai.com/docs/guides/distillation)
               or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-              Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+              Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
           stream: If set to true, the model response data will be streamed to the client as it is
               generated using
@@ -2001,7 +1989,6 @@ async def create(
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
         tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -2202,7 +2189,7 @@ async def create(
               our [model distillation](https://platform.openai.com/docs/guides/distillation)
               or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-              Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+              Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
           stream_options: Options for streaming response. Only set this when you set `stream: true`.
 
@@ -2290,7 +2277,6 @@ async def create(
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
         tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -2491,7 +2477,7 @@ async def create(
               our [model distillation](https://platform.openai.com/docs/guides/distillation)
               or [evals](https://platform.openai.com/docs/guides/evals) products.
 
-              Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+              Supports text and image inputs. Note: image inputs over 8MB will be dropped.
 
           stream_options: Options for streaming response. Only set this when you set `stream: true`.
 
@@ -2579,7 +2565,6 @@ async def create(
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
         stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
         tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
@@ -2626,7 +2611,6 @@ async def create(
                     "stream": stream,
                     "stream_options": stream_options,
                     "temperature": temperature,
-                    "text": text,
                     "tool_choice": tool_choice,
                     "tools": tools,
                     "top_logprobs": top_logprobs,
@@ -2849,7 +2833,6 @@ def stream(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
-        text: completion_create_params.Text | NotGiven = NOT_GIVEN,
         safety_identifier: str | NotGiven = NOT_GIVEN,
         seed: Optional[int] | NotGiven = NOT_GIVEN,
         service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
@@ -2921,7 +2904,6 @@ def stream(
             presence_penalty=presence_penalty,
             prompt_cache_key=prompt_cache_key,
             reasoning_effort=reasoning_effort,
-            text=text,
             safety_identifier=safety_identifier,
             seed=seed,
             service_tier=service_tier,
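
For context, and not part of the diff above: after this change the `text` parameter no longer appears in the Chat Completions `parse`, `create`, and `stream` helpers, while structured output still flows through `response_format` (note the retained `"response_format": _type_to_response_format(response_format)` line). Below is a minimal usage sketch under that assumption, using the openai Python SDK with an API key in the environment; the `Weather` schema, model name, and prompt are illustrative only.

# Illustrative sketch, not part of this PR: calling parse() without the removed
# `text` parameter; the structured-output schema is passed via response_format.
from pydantic import BaseModel

from openai import OpenAI


class Weather(BaseModel):
    # Hypothetical schema used only for this example.
    city: str
    temperature_c: float


client = OpenAI()  # reads OPENAI_API_KEY from the environment

completion = client.chat.completions.parse(
    model="gpt-4o-2024-08-06",
    messages=[{"role": "user", "content": "What's the weather in Paris?"}],
    response_format=Weather,
)
print(completion.choices[0].message.parsed)  # a Weather instance (or None if refused)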