Commit b56b357

RobertCraigie authored and stainless-app[bot] committed

chore(types): fix Metadata types

1 parent fdd5247 · commit b56b357
File tree

3 files changed: +27 −26 lines changed

- src/openai/resources/beta/chat/completions.py (+5 −4)
- src/openai/resources/beta/threads/runs/runs.py (+14 −14)
- src/openai/resources/beta/threads/threads.py (+8 −8)
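The commit replaces ad-hoc metadata annotations (Optional[Dict[str, str]] in the chat helpers, a bare Optional[object] in the Assistants helpers) with a shared Metadata alias imported from ....types.shared_params. As a minimal sketch, assuming the alias is a plain string-to-string dict alias as the annotation it replaces suggests (the real definition lives in src/openai/types/shared_params and may differ):

```python
# Sketch only: an assumed definition of the shared Metadata alias,
# mirroring the Dict[str, str] annotation this commit replaces.
from typing import Dict

from typing_extensions import TypeAlias

Metadata: TypeAlias = Dict[str, str]
```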

‎src/openai/resources/beta/chat/completions.py

+5 −4 lines changed (5 additions & 4 deletions)
```diff
@@ -28,6 +28,7 @@
 )
 from ....types.chat_model import ChatModel
 from ....lib.streaming.chat import ChatCompletionStreamManager, AsyncChatCompletionStreamManager
+from ....types.shared_params import Metadata
 from ....types.chat.chat_completion import ChatCompletion
 from ....types.chat.chat_completion_chunk import ChatCompletionChunk
 from ....types.chat.parsed_chat_completion import ParsedChatCompletion
@@ -76,7 +77,7 @@ def parse(
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         modalities: Optional[List[ChatCompletionModality]] | NotGiven = NOT_GIVEN,
         n: Optional[int] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
@@ -221,7 +222,7 @@ def stream(
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         modalities: Optional[List[ChatCompletionModality]] | NotGiven = NOT_GIVEN,
         n: Optional[int] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
@@ -351,7 +352,7 @@ async def parse(
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         modalities: Optional[List[ChatCompletionModality]] | NotGiven = NOT_GIVEN,
         n: Optional[int] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
@@ -496,7 +497,7 @@ def stream(
         logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         modalities: Optional[List[ChatCompletionModality]] | NotGiven = NOT_GIVEN,
         n: Optional[int] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
```
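With the new import, metadata on parse() and stream() is checked as a string-to-string mapping rather than an untyped dict. A hedged usage sketch follows; the model name, Pydantic schema, and metadata keys are illustrative choices, not taken from this commit:

```python
# Illustrative sketch: assumes OPENAI_API_KEY is set in the environment;
# the model name and Answer schema are example choices, not part of the diff.
from pydantic import BaseModel

from openai import OpenAI


class Answer(BaseModel):
    text: str


client = OpenAI()
completion = client.beta.chat.completions.parse(
    model="gpt-4o-2024-08-06",
    messages=[{"role": "user", "content": "Say hello."}],
    response_format=Answer,
    metadata={"request_source": "docs-example"},  # now typed as Metadata
)
print(completion.choices[0].message.parsed)
```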

‎src/openai/resources/beta/threads/runs/runs.py

+14 −14 lines changed (14 additions & 14 deletions)
```diff
@@ -771,7 +771,7 @@ def create_and_poll(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -840,7 +840,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -871,7 +871,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -902,7 +902,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -1019,7 +1019,7 @@ def stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -1050,7 +1050,7 @@ def stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -1081,7 +1081,7 @@ def stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -2144,7 +2144,7 @@ async def create_and_poll(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -2213,7 +2213,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -2244,7 +2244,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -2275,7 +2275,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -2393,7 +2393,7 @@ def stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -2424,7 +2424,7 @@ def stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -2455,7 +2455,7 @@ def stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
```
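The same pattern repeats across every run helper: metadata was previously Optional[object], so any value passed the type checker even if the API would reject it. A rough usage sketch of the polling helper touched above, with placeholder IDs that are not from the commit:

```python
# Illustrative sketch: the thread and assistant IDs are placeholders.
from openai import OpenAI

client = OpenAI()
run = client.beta.threads.runs.create_and_poll(
    thread_id="thread_abc123",   # placeholder
    assistant_id="asst_abc123",  # placeholder
    metadata={"ticket_id": "12345"},  # must now be a str -> str mapping
)
print(run.status)
```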

‎src/openai/resources/beta/threads/threads.py

+8 −8 lines changed (8 additions & 8 deletions)
```diff
@@ -734,7 +734,7 @@ def create_and_run_poll(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -790,7 +790,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -819,7 +819,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -848,7 +848,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -1581,7 +1581,7 @@ async def create_and_run_poll(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -1639,7 +1639,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -1668,7 +1668,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
@@ -1697,7 +1697,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
         max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
-        metadata: Optional[object] | NotGiven = NOT_GIVEN,
+        metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
         response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
```
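One practical effect of tightening Optional[object] to Optional[Metadata]: a static checker can now flag non-string metadata values before the request is ever sent. A hedged sketch of the kind of error this surfaces, again with a placeholder ID:

```python
# Illustrative sketch: with metadata typed as a str -> str mapping,
# a checker such as mypy or pyright reports the int value below.
from openai import OpenAI

client = OpenAI()
run = client.beta.threads.create_and_run_poll(
    assistant_id="asst_abc123",  # placeholder
    metadata={"attempt": 1},  # type error: value should be str, not int
)
```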

0 commit comments