
Commit bdb31a3

Authored Feb 6, 2024
fix(types): loosen most List params types to Iterable (#1129)
1 parent 4c021c0 commit bdb31a3

17 files changed · +77 −77
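
What the change means for callers: parameters that previously had to be a concrete list to satisfy the annotations now accept any iterable, such as a tuple. A minimal sketch of the effect on chat completions, assuming a standard client setup with OPENAI_API_KEY in the environment and an illustrative model name:

from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

# A tuple of messages: the parameter was annotated List[ChatCompletionMessageParam]
# before this commit and is Iterable[...] after it, so a tuple (or any other
# iterable) now fits the annotation.
messages = (
    {"role": "system", "content": "You are a terse assistant."},
    {"role": "user", "content": "Say hello."},
)

completion = client.chat.completions.create(
    model="gpt-3.5-turbo",  # illustrative model name
    messages=messages,
)
print(completion.choices[0].message.content)

The diff below only touches type annotations, so runtime behaviour is unchanged; the loosening is about what the declared parameter types accept.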
 

src/openai/resources/beta/assistants/assistants.py

+5 −5

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import List, Optional
+from typing import List, Iterable, Optional
 from typing_extensions import Literal
 
 import httpx
@@ -59,7 +59,7 @@ def create(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         name: Optional[str] | NotGiven = NOT_GIVEN,
-        tools: List[assistant_create_params.Tool] | NotGiven = NOT_GIVEN,
+        tools: Iterable[assistant_create_params.Tool] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
@@ -169,7 +169,7 @@ def update(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: str | NotGiven = NOT_GIVEN,
         name: Optional[str] | NotGiven = NOT_GIVEN,
-        tools: List[assistant_update_params.Tool] | NotGiven = NOT_GIVEN,
+        tools: Iterable[assistant_update_params.Tool] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
@@ -362,7 +362,7 @@ async def create(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         name: Optional[str] | NotGiven = NOT_GIVEN,
-        tools: List[assistant_create_params.Tool] | NotGiven = NOT_GIVEN,
+        tools: Iterable[assistant_create_params.Tool] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
@@ -472,7 +472,7 @@ async def update(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: str | NotGiven = NOT_GIVEN,
         name: Optional[str] | NotGiven = NOT_GIVEN,
-        tools: List[assistant_update_params.Tool] | NotGiven = NOT_GIVEN,
+        tools: Iterable[assistant_update_params.Tool] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,

src/openai/resources/beta/threads/runs/runs.py

+5 −5

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import List, Optional
+from typing import Iterable, Optional
 from typing_extensions import Literal
 
 import httpx
@@ -59,7 +59,7 @@ def create(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
-        tools: Optional[List[run_create_params.Tool]] | NotGiven = NOT_GIVEN,
+        tools: Optional[Iterable[run_create_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
@@ -316,7 +316,7 @@ def submit_tool_outputs(
         run_id: str,
         *,
         thread_id: str,
-        tool_outputs: List[run_submit_tool_outputs_params.ToolOutput],
+        tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput],
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
@@ -380,7 +380,7 @@ async def create(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
-        tools: Optional[List[run_create_params.Tool]] | NotGiven = NOT_GIVEN,
+        tools: Optional[Iterable[run_create_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
@@ -637,7 +637,7 @@ async def submit_tool_outputs(
         run_id: str,
         *,
         thread_id: str,
-        tool_outputs: List[run_submit_tool_outputs_params.ToolOutput],
+        tool_outputs: Iterable[run_submit_tool_outputs_params.ToolOutput],
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
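
For example, submit_tool_outputs now type-checks with any iterable of tool-output dicts, not only a list. A hedged sketch, assuming the run is waiting on the hypothetical tool call below and the result was computed elsewhere:

from openai import OpenAI

client = OpenAI()

# Hypothetical, already-computed results keyed by tool call ID.
call_results = {"call_abc123": "22°C and sunny"}

# Built as a tuple; a list is no longer the only shape the annotation accepts.
tool_outputs = tuple(
    {"tool_call_id": call_id, "output": output}
    for call_id, output in call_results.items()
)

run = client.beta.threads.runs.submit_tool_outputs(
    "run_abc123",               # placeholder run ID
    thread_id="thread_abc123",  # placeholder thread ID
    tool_outputs=tool_outputs,
)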

src/openai/resources/beta/threads/threads.py

+5 −5

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import List, Optional
+from typing import Iterable, Optional
 
 import httpx
 
@@ -65,7 +65,7 @@ def with_streaming_response(self) -> ThreadsWithStreamingResponse:
     def create(
         self,
         *,
-        messages: List[thread_create_params.Message] | NotGiven = NOT_GIVEN,
+        messages: Iterable[thread_create_params.Message] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
@@ -227,7 +227,7 @@ def create_and_run(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
-        tools: Optional[List[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
+        tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
@@ -310,7 +310,7 @@ def with_streaming_response(self) -> AsyncThreadsWithStreamingResponse:
     async def create(
         self,
         *,
-        messages: List[thread_create_params.Message] | NotGiven = NOT_GIVEN,
+        messages: Iterable[thread_create_params.Message] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
@@ -472,7 +472,7 @@ async def create_and_run(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
-        tools: Optional[List[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
+        tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
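
The thread helpers get the same loosening for messages. A sketch with placeholder content, showing a thread created from a tuple of message dicts rather than a list:

from openai import OpenAI

client = OpenAI()

# Any Iterable[thread_create_params.Message] now satisfies the annotation.
seed_messages = (
    {"role": "user", "content": "Summarize the attached notes."},
    {"role": "user", "content": "Keep it under 100 words."},
)

thread = client.beta.threads.create(messages=seed_messages)
print(thread.id)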

src/openai/resources/chat/completions.py

+25 −25

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import Dict, List, Union, Optional, overload
+from typing import Dict, List, Union, Iterable, Optional, overload
 from typing_extensions import Literal
 
 import httpx
@@ -42,7 +42,7 @@ def with_streaming_response(self) -> CompletionsWithStreamingResponse:
     def create(
         self,
         *,
-        messages: List[ChatCompletionMessageParam],
+        messages: Iterable[ChatCompletionMessageParam],
         model: Union[
             str,
             Literal[
@@ -67,7 +67,7 @@ def create(
         ],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
-        functions: List[completion_create_params.Function] | NotGiven = NOT_GIVEN,
+        functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
         logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN,
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
@@ -79,7 +79,7 @@ def create(
         stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
-        tools: List[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
+        tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
@@ -232,7 +232,7 @@ def create(
     def create(
         self,
         *,
-        messages: List[ChatCompletionMessageParam],
+        messages: Iterable[ChatCompletionMessageParam],
         model: Union[
             str,
             Literal[
@@ -258,7 +258,7 @@ def create(
         stream: Literal[True],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
-        functions: List[completion_create_params.Function] | NotGiven = NOT_GIVEN,
+        functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
         logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN,
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
@@ -269,7 +269,7 @@ def create(
         stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
-        tools: List[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
+        tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
@@ -422,7 +422,7 @@ def create(
     def create(
         self,
         *,
-        messages: List[ChatCompletionMessageParam],
+        messages: Iterable[ChatCompletionMessageParam],
         model: Union[
             str,
             Literal[
@@ -448,7 +448,7 @@ def create(
         stream: bool,
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
-        functions: List[completion_create_params.Function] | NotGiven = NOT_GIVEN,
+        functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
         logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN,
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
@@ -459,7 +459,7 @@ def create(
         stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
-        tools: List[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
+        tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
@@ -612,7 +612,7 @@ def create(
     def create(
         self,
         *,
-        messages: List[ChatCompletionMessageParam],
+        messages: Iterable[ChatCompletionMessageParam],
         model: Union[
             str,
             Literal[
@@ -637,7 +637,7 @@ def create(
         ],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
-        functions: List[completion_create_params.Function] | NotGiven = NOT_GIVEN,
+        functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
         logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN,
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
@@ -649,7 +649,7 @@ def create(
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
-        tools: List[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
+        tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
@@ -709,7 +709,7 @@ def with_streaming_response(self) -> AsyncCompletionsWithStreamingResponse:
     async def create(
         self,
         *,
-        messages: List[ChatCompletionMessageParam],
+        messages: Iterable[ChatCompletionMessageParam],
         model: Union[
             str,
             Literal[
@@ -734,7 +734,7 @@ async def create(
         ],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
-        functions: List[completion_create_params.Function] | NotGiven = NOT_GIVEN,
+        functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
         logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN,
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
@@ -746,7 +746,7 @@ async def create(
         stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
-        tools: List[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
+        tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
@@ -899,7 +899,7 @@ async def create(
     async def create(
         self,
         *,
-        messages: List[ChatCompletionMessageParam],
+        messages: Iterable[ChatCompletionMessageParam],
         model: Union[
             str,
             Literal[
@@ -925,7 +925,7 @@ async def create(
         stream: Literal[True],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
-        functions: List[completion_create_params.Function] | NotGiven = NOT_GIVEN,
+        functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
         logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN,
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
@@ -936,7 +936,7 @@ async def create(
         stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
-        tools: List[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
+        tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
@@ -1089,7 +1089,7 @@ async def create(
     async def create(
         self,
         *,
-        messages: List[ChatCompletionMessageParam],
+        messages: Iterable[ChatCompletionMessageParam],
         model: Union[
             str,
             Literal[
@@ -1115,7 +1115,7 @@ async def create(
         stream: bool,
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
-        functions: List[completion_create_params.Function] | NotGiven = NOT_GIVEN,
+        functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
         logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN,
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
@@ -1126,7 +1126,7 @@ async def create(
         stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
-        tools: List[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
+        tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
@@ -1279,7 +1279,7 @@ async def create(
     async def create(
         self,
         *,
-        messages: List[ChatCompletionMessageParam],
+        messages: Iterable[ChatCompletionMessageParam],
         model: Union[
             str,
             Literal[
@@ -1304,7 +1304,7 @@ async def create(
         ],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
-        functions: List[completion_create_params.Function] | NotGiven = NOT_GIVEN,
+        functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
         logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN,
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
@@ -1316,7 +1316,7 @@ async def create(
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
-        tools: List[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
+        tools: Iterable[ChatCompletionToolParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
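
The tools and functions parameters of chat completions are loosened the same way, so a tool list assembled as a tuple (or any other iterable) fits the annotation. A sketch with an illustrative function schema and model name:

from openai import OpenAI

client = OpenAI()

# Tool definitions as a tuple; the schema itself is made up for illustration.
tools = (
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Look up the current weather for a city.",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        },
    },
)

completion = client.chat.completions.create(
    model="gpt-3.5-turbo",  # illustrative model name
    messages=[{"role": "user", "content": "What's the weather in Paris?"}],
    tools=tools,  # Iterable[ChatCompletionToolParam] is now enough
)
print(completion.choices[0].message.tool_calls)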

src/openai/resources/completions.py

+9 −9

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import Dict, List, Union, Optional, overload
+from typing import Dict, List, Union, Iterable, Optional, overload
 from typing_extensions import Literal
 
 import httpx
@@ -36,7 +36,7 @@ def create(
         self,
         *,
         model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
-        prompt: Union[str, List[str], List[int], List[List[int]], None],
+        prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
         best_of: Optional[int] | NotGiven = NOT_GIVEN,
         echo: Optional[bool] | NotGiven = NOT_GIVEN,
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
@@ -184,7 +184,7 @@ def create(
         self,
         *,
         model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
-        prompt: Union[str, List[str], List[int], List[List[int]], None],
+        prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
         stream: Literal[True],
         best_of: Optional[int] | NotGiven = NOT_GIVEN,
         echo: Optional[bool] | NotGiven = NOT_GIVEN,
@@ -332,7 +332,7 @@ def create(
         self,
         *,
         model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
-        prompt: Union[str, List[str], List[int], List[List[int]], None],
+        prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
         stream: bool,
         best_of: Optional[int] | NotGiven = NOT_GIVEN,
         echo: Optional[bool] | NotGiven = NOT_GIVEN,
@@ -480,7 +480,7 @@ def create(
         self,
         *,
         model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
-        prompt: Union[str, List[str], List[int], List[List[int]], None],
+        prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
         best_of: Optional[int] | NotGiven = NOT_GIVEN,
         echo: Optional[bool] | NotGiven = NOT_GIVEN,
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
@@ -550,7 +550,7 @@ async def create(
         self,
         *,
         model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
-        prompt: Union[str, List[str], List[int], List[List[int]], None],
+        prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
         best_of: Optional[int] | NotGiven = NOT_GIVEN,
         echo: Optional[bool] | NotGiven = NOT_GIVEN,
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
@@ -698,7 +698,7 @@ async def create(
         self,
         *,
         model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
-        prompt: Union[str, List[str], List[int], List[List[int]], None],
+        prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
         stream: Literal[True],
         best_of: Optional[int] | NotGiven = NOT_GIVEN,
         echo: Optional[bool] | NotGiven = NOT_GIVEN,
@@ -846,7 +846,7 @@ async def create(
         self,
         *,
         model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
-        prompt: Union[str, List[str], List[int], List[List[int]], None],
+        prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
         stream: bool,
         best_of: Optional[int] | NotGiven = NOT_GIVEN,
         echo: Optional[bool] | NotGiven = NOT_GIVEN,
@@ -994,7 +994,7 @@ async def create(
         self,
         *,
         model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
-        prompt: Union[str, List[str], List[int], List[List[int]], None],
+        prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
         best_of: Optional[int] | NotGiven = NOT_GIVEN,
         echo: Optional[bool] | NotGiven = NOT_GIVEN,
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,

src/openai/resources/embeddings.py

+3 −3

@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 import base64
-from typing import List, Union, cast
+from typing import List, Union, Iterable, cast
 from typing_extensions import Literal
 
 import httpx
@@ -35,7 +35,7 @@ def with_streaming_response(self) -> EmbeddingsWithStreamingResponse:
     def create(
         self,
         *,
-        input: Union[str, List[str], List[int], List[List[int]]],
+        input: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
         model: Union[str, Literal["text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large"]],
         dimensions: int | NotGiven = NOT_GIVEN,
         encoding_format: Literal["float", "base64"] | NotGiven = NOT_GIVEN,
@@ -136,7 +136,7 @@ def with_streaming_response(self) -> AsyncEmbeddingsWithStreamingResponse:
     async def create(
         self,
         *,
-        input: Union[str, List[str], List[int], List[List[int]]],
+        input: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
         model: Union[str, Literal["text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large"]],
         dimensions: int | NotGiven = NOT_GIVEN,
         encoding_format: Literal["float", "base64"] | NotGiven = NOT_GIVEN,
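
Pre-tokenized embedding input is covered as well: nested iterables of token IDs now satisfy the annotation where List[List[int]] was required before. A minimal sketch with made-up token IDs and an illustrative model name:

from openai import OpenAI

client = OpenAI()

# Two pre-tokenized inputs, built as nested tuples rather than nested lists
# (the token IDs here are made up for illustration).
token_batches = (
    (9906, 1917),
    (9906, 11, 1917),
)

response = client.embeddings.create(
    model="text-embedding-3-small",  # illustrative model name
    input=token_batches,  # Iterable[Iterable[int]] now satisfies the annotation
)
print(len(response.data), "embeddings returned")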

src/openai/types/beta/assistant_create_params.py

+2 −2

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import List, Union, Optional
+from typing import List, Union, Iterable, Optional
 from typing_extensions import Literal, Required, TypedDict
 
 from ...types import shared_params
@@ -54,7 +54,7 @@ class AssistantCreateParams(TypedDict, total=False):
     name: Optional[str]
     """The name of the assistant. The maximum length is 256 characters."""
 
-    tools: List[Tool]
+    tools: Iterable[Tool]
     """A list of tool enabled on the assistant.
 
     There can be a maximum of 128 tools per assistant. Tools can be of types

src/openai/types/beta/assistant_update_params.py

+2 −2

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import List, Union, Optional
+from typing import List, Union, Iterable, Optional
 from typing_extensions import Literal, Required, TypedDict
 
 from ...types import shared_params
@@ -56,7 +56,7 @@ class AssistantUpdateParams(TypedDict, total=False):
     name: Optional[str]
     """The name of the assistant. The maximum length is 256 characters."""
 
-    tools: List[Tool]
+    tools: Iterable[Tool]
     """A list of tool enabled on the assistant.
 
     There can be a maximum of 128 tools per assistant. Tools can be of types

src/openai/types/beta/thread_create_and_run_params.py

+3 −3

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import List, Union, Optional
+from typing import List, Union, Iterable, Optional
 from typing_extensions import Literal, Required, TypedDict
 
 from ...types import shared_params
@@ -51,7 +51,7 @@ class ThreadCreateAndRunParams(TypedDict, total=False):
     thread: Thread
    """If no thread is provided, an empty thread will be created."""
 
-    tools: Optional[List[Tool]]
+    tools: Optional[Iterable[Tool]]
     """Override the tools the assistant can use for this run.
 
     This is useful for modifying the behavior on a per-run basis.
@@ -86,7 +86,7 @@ class ThreadMessage(TypedDict, total=False):
 
 
 class Thread(TypedDict, total=False):
-    messages: List[ThreadMessage]
+    messages: Iterable[ThreadMessage]
     """
     A list of [messages](https://platform.openai.com/docs/api-reference/messages) to
     start the thread with.

src/openai/types/beta/thread_create_params.py

+2 −2

@@ -2,14 +2,14 @@
 
 from __future__ import annotations
 
-from typing import List, Optional
+from typing import List, Iterable, Optional
 from typing_extensions import Literal, Required, TypedDict
 
 __all__ = ["ThreadCreateParams", "Message"]
 
 
 class ThreadCreateParams(TypedDict, total=False):
-    messages: List[Message]
+    messages: Iterable[Message]
     """
     A list of [messages](https://platform.openai.com/docs/api-reference/messages) to
     start the thread with.

src/openai/types/beta/threads/run_create_params.py

+2 −2

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import List, Union, Optional
+from typing import Union, Iterable, Optional
 from typing_extensions import Literal, Required, TypedDict
 
 from ....types import shared_params
@@ -54,7 +54,7 @@ class RunCreateParams(TypedDict, total=False):
     assistant will be used.
     """
 
-    tools: Optional[List[Tool]]
+    tools: Optional[Iterable[Tool]]
     """Override the tools the assistant can use for this run.
 
     This is useful for modifying the behavior on a per-run basis.

src/openai/types/beta/threads/run_submit_tool_outputs_params.py

+2 −2

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import List
+from typing import Iterable
 from typing_extensions import Required, TypedDict
 
 __all__ = ["RunSubmitToolOutputsParams", "ToolOutput"]
@@ -11,7 +11,7 @@
 class RunSubmitToolOutputsParams(TypedDict, total=False):
     thread_id: Required[str]
 
-    tool_outputs: Required[List[ToolOutput]]
+    tool_outputs: Required[Iterable[ToolOutput]]
     """A list of tools for which the outputs are being submitted."""
 
 

src/openai/types/chat/chat_completion_assistant_message_param.py

+2 −2

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import List, Optional
+from typing import Iterable, Optional
 from typing_extensions import Literal, Required, TypedDict
 
 from .chat_completion_message_tool_call_param import ChatCompletionMessageToolCallParam
@@ -47,5 +47,5 @@ class ChatCompletionAssistantMessageParam(TypedDict, total=False):
     role.
     """
 
-    tool_calls: List[ChatCompletionMessageToolCallParam]
+    tool_calls: Iterable[ChatCompletionMessageToolCallParam]
     """The tool calls generated by the model, such as function calls."""

src/openai/types/chat/chat_completion_user_message_param.py

+2 −2

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import List, Union
+from typing import Union, Iterable
 from typing_extensions import Literal, Required, TypedDict
 
 from .chat_completion_content_part_param import ChatCompletionContentPartParam
@@ -11,7 +11,7 @@
 
 
 class ChatCompletionUserMessageParam(TypedDict, total=False):
-    content: Required[Union[str, List[ChatCompletionContentPartParam]]]
+    content: Required[Union[str, Iterable[ChatCompletionContentPartParam]]]
     """The contents of the user message."""
 
     role: Required[Literal["user"]]

src/openai/types/chat/completion_create_params.py

+4 −4

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import Dict, List, Union, Optional
+from typing import Dict, List, Union, Iterable, Optional
 from typing_extensions import Literal, Required, TypedDict
 
 from ...types import shared_params
@@ -22,7 +22,7 @@
 
 
 class CompletionCreateParamsBase(TypedDict, total=False):
-    messages: Required[List[ChatCompletionMessageParam]]
+    messages: Required[Iterable[ChatCompletionMessageParam]]
     """A list of messages comprising the conversation so far.
 
     [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models).
@@ -81,7 +81,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
     functions are present.
     """
 
-    functions: List[Function]
+    functions: Iterable[Function]
     """Deprecated in favor of `tools`.
 
     A list of functions the model may generate JSON inputs for.
@@ -186,7 +186,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
     functions are present.
     """
 
-    tools: List[ChatCompletionToolParam]
+    tools: Iterable[ChatCompletionToolParam]
     """A list of tools the model may call.
 
     Currently, only functions are supported as a tool. Use this to provide a list of

src/openai/types/completion_create_params.py

+2 −2

@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import Dict, List, Union, Optional
+from typing import Dict, List, Union, Iterable, Optional
 from typing_extensions import Literal, Required, TypedDict
 
 __all__ = ["CompletionCreateParamsBase", "CompletionCreateParamsNonStreaming", "CompletionCreateParamsStreaming"]
@@ -19,7 +19,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
     descriptions of them.
     """
 
-    prompt: Required[Union[str, List[str], List[int], List[List[int]], None]]
+    prompt: Required[Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None]]
     """
     The prompt(s) to generate completions for, encoded as a string, array of
     strings, array of tokens, or array of token arrays.

src/openai/types/embedding_create_params.py

+2 −2

@@ -2,14 +2,14 @@
 
 from __future__ import annotations
 
-from typing import List, Union
+from typing import List, Union, Iterable
 from typing_extensions import Literal, Required, TypedDict
 
 __all__ = ["EmbeddingCreateParams"]
 
 
 class EmbeddingCreateParams(TypedDict, total=False):
-    input: Required[Union[str, List[str], List[int], List[List[int]]]]
+    input: Required[Union[str, List[str], Iterable[int], Iterable[Iterable[int]]]]
     """Input text to embed, encoded as a string or array of tokens.
 
     To embed multiple inputs in a single request, pass an array of strings or array
