@@ -950,6 +950,7 @@ def stream(
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+        parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -979,6 +980,7 @@ def stream(
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+        parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -1008,6 +1010,7 @@ def stream(
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+        parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -1051,6 +1054,7 @@ def stream(
                     "tool_choice": tool_choice,
                     "stream": True,
                     "tools": tools,
+                    "parallel_tool_calls": parallel_tool_calls,
                     "truncation_strategy": truncation_strategy,
                     "top_p": top_p,
                 },
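
For context, a minimal usage sketch of the synchronous streaming helper after this change. The thread and assistant IDs are placeholders, and passing parallel_tool_calls=False is only an illustration of the new keyword; omit it to keep the API default.

from openai import OpenAI

client = OpenAI()

# Placeholder IDs for illustration only.
with client.beta.threads.runs.stream(
    thread_id="thread_abc123",
    assistant_id="asst_abc123",
    parallel_tool_calls=False,  # new keyword forwarded by this diff into the run body
) as stream:
    stream.until_done()

The same keyword is added to the async client below.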
@@ -2246,6 +2250,7 @@ def stream(
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+        parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -2275,6 +2280,7 @@ def stream(
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+        parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -2304,6 +2310,7 @@ def stream(
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+        parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -2349,6 +2356,7 @@ def stream(
                     "tool_choice": tool_choice,
                     "stream": True,
                     "tools": tools,
+                    "parallel_tool_calls": parallel_tool_calls,
                     "truncation_strategy": truncation_strategy,
                     "top_p": top_p,
                 },
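
A corresponding sketch for the async client, again with placeholder IDs and an illustrative parallel_tool_calls value rather than a required setting:

import asyncio

from openai import AsyncOpenAI

client = AsyncOpenAI()

async def main() -> None:
    # Placeholder IDs for illustration only.
    async with client.beta.threads.runs.stream(
        thread_id="thread_abc123",
        assistant_id="asst_abc123",
        parallel_tool_calls=False,  # same keyword, mirrored on the async overloads above
    ) as stream:
        await stream.until_done()

asyncio.run(main())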