
Commit e30bcbc

feat(api): adding support for /v1/conversations to the API
Parent: e328fb4


47 files changed: +2948 / −50 lines

.stats.yml

Lines changed: 4 additions & 4 deletions
@@ -1,4 +1,4 @@
-configured_endpoints: 111
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-7ef7a457c3bf05364e66e48c9ca34f31bfef1f6c9b7c15b1812346105e0abb16.yml
-openapi_spec_hash: a2b1f5d8fbb62175c93b0ebea9f10063
-config_hash: 4870312b04f48fd717ea4151053e7fb9
+configured_endpoints: 119
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-4bcdfe525558e67a09b32dec7a573e87b94bab47db3951eb4a86a4dafb60296c.yml
+openapi_spec_hash: 49e7e46bfe9f61b7b7a60e36840c0cd7
+config_hash: e4514526ae01126a61f9b6c14a351737

api.md

Lines changed: 49 additions & 0 deletions
@@ -751,6 +751,7 @@ from openai.types.responses import (
     ResponseContent,
     ResponseContentPartAddedEvent,
     ResponseContentPartDoneEvent,
+    ResponseConversationParam,
     ResponseCreatedEvent,
     ResponseCustomToolCall,
     ResponseCustomToolCallInputDeltaEvent,
@@ -854,6 +855,54 @@ Methods:
 
 - <code title="get /responses/{response_id}/input_items">client.responses.input_items.<a href="./src/openai/resources/responses/input_items.py">list</a>(response_id, \*\*<a href="src/openai/types/responses/input_item_list_params.py">params</a>) -> <a href="./src/openai/types/responses/response_item.py">SyncCursorPage[ResponseItem]</a></code>
 
+# Conversations
+
+Types:
+
+```python
+from openai.types.conversations import (
+    ComputerScreenshotContent,
+    ContainerFileCitationBody,
+    Conversation,
+    ConversationDeleted,
+    ConversationDeletedResource,
+    FileCitationBody,
+    InputFileContent,
+    InputImageContent,
+    InputTextContent,
+    LobProb,
+    Message,
+    OutputTextContent,
+    RefusalContent,
+    SummaryTextContent,
+    TextContent,
+    TopLogProb,
+    URLCitationBody,
+)
+```
+
+Methods:
+
+- <code title="post /conversations">client.conversations.<a href="./src/openai/resources/conversations/conversations.py">create</a>(\*\*<a href="src/openai/types/conversations/conversation_create_params.py">params</a>) -> <a href="./src/openai/types/conversations/conversation.py">Conversation</a></code>
+- <code title="get /conversations/{conversation_id}">client.conversations.<a href="./src/openai/resources/conversations/conversations.py">retrieve</a>(conversation_id) -> <a href="./src/openai/types/conversations/conversation.py">Conversation</a></code>
+- <code title="post /conversations/{conversation_id}">client.conversations.<a href="./src/openai/resources/conversations/conversations.py">update</a>(conversation_id, \*\*<a href="src/openai/types/conversations/conversation_update_params.py">params</a>) -> <a href="./src/openai/types/conversations/conversation.py">Conversation</a></code>
+- <code title="delete /conversations/{conversation_id}">client.conversations.<a href="./src/openai/resources/conversations/conversations.py">delete</a>(conversation_id) -> <a href="./src/openai/types/conversations/conversation_deleted_resource.py">ConversationDeletedResource</a></code>
+
+## Items
+
+Types:
+
+```python
+from openai.types.conversations import ConversationItem, ConversationItemList
+```
+
+Methods:
+
+- <code title="post /conversations/{conversation_id}/items">client.conversations.items.<a href="./src/openai/resources/conversations/items.py">create</a>(conversation_id, \*\*<a href="src/openai/types/conversations/item_create_params.py">params</a>) -> <a href="./src/openai/types/conversations/conversation_item_list.py">ConversationItemList</a></code>
+- <code title="get /conversations/{conversation_id}/items/{item_id}">client.conversations.items.<a href="./src/openai/resources/conversations/items.py">retrieve</a>(item_id, \*, conversation_id, \*\*<a href="src/openai/types/conversations/item_retrieve_params.py">params</a>) -> <a href="./src/openai/types/conversations/conversation_item.py">ConversationItem</a></code>
+- <code title="get /conversations/{conversation_id}/items">client.conversations.items.<a href="./src/openai/resources/conversations/items.py">list</a>(conversation_id, \*\*<a href="src/openai/types/conversations/item_list_params.py">params</a>) -> <a href="./src/openai/types/conversations/conversation_item.py">SyncConversationCursorPage[ConversationItem]</a></code>
+- <code title="delete /conversations/{conversation_id}/items/{item_id}">client.conversations.items.<a href="./src/openai/resources/conversations/items.py">delete</a>(item_id, \*, conversation_id) -> <a href="./src/openai/types/conversations/conversation.py">Conversation</a></code>
+
 # Evals
 
 Types:
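
For orientation, a minimal sketch of how the new resource could be exercised once this change lands. The method names and return types follow the api.md entries above; the keyword arguments (`metadata`, `items`) and the message payload are illustrative assumptions, not values taken from this diff.

```python
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

# Create a conversation (the metadata argument is an assumed optional param).
conversation = client.conversations.create(metadata={"topic": "demo"})

# Add an item to it (the shape of the item dict is an assumption).
client.conversations.items.create(
    conversation_id=conversation.id,
    items=[{"type": "message", "role": "user", "content": "Hello!"}],
)

# Retrieve, update, and eventually delete the conversation.
conversation = client.conversations.retrieve(conversation.id)
client.conversations.update(conversation.id, metadata={"topic": "demo-updated"})
deleted = client.conversations.delete(conversation.id)
print(deleted)
```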

src/openai/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -386,5 +386,6 @@ def _reset_client() -> None: # type: ignore[reportUnusedFunction]
     completions as completions,
     fine_tuning as fine_tuning,
     moderations as moderations,
+    conversations as conversations,
     vector_stores as vector_stores,
 )
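
This one-line re-export makes the new submodule importable from the package root, mirroring the neighboring resources in the same import block; a trivial sketch:

```python
# After this change, the submodule is importable from the top-level package,
# just like `from openai import vector_stores` and the other entries above.
from openai import conversations
```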

src/openai/_client.py

Lines changed: 38 additions & 0 deletions
@@ -51,6 +51,7 @@
     completions,
     fine_tuning,
     moderations,
+    conversations,
     vector_stores,
 )
 from .resources.files import Files, AsyncFiles
@@ -69,6 +70,7 @@
 from .resources.responses.responses import Responses, AsyncResponses
 from .resources.containers.containers import Containers, AsyncContainers
 from .resources.fine_tuning.fine_tuning import FineTuning, AsyncFineTuning
+from .resources.conversations.conversations import Conversations, AsyncConversations
 from .resources.vector_stores.vector_stores import VectorStores, AsyncVectorStores
 
 __all__ = ["Timeout", "Transport", "ProxiesTypes", "RequestOptions", "OpenAI", "AsyncOpenAI", "Client", "AsyncClient"]
@@ -254,6 +256,12 @@ def responses(self) -> Responses:
 
         return Responses(self)
 
+    @cached_property
+    def conversations(self) -> Conversations:
+        from .resources.conversations import Conversations
+
+        return Conversations(self)
+
     @cached_property
     def evals(self) -> Evals:
         from .resources.evals import Evals
@@ -573,6 +581,12 @@ def responses(self) -> AsyncResponses:
 
         return AsyncResponses(self)
 
+    @cached_property
+    def conversations(self) -> AsyncConversations:
+        from .resources.conversations import AsyncConversations
+
+        return AsyncConversations(self)
+
     @cached_property
     def evals(self) -> AsyncEvals:
         from .resources.evals import AsyncEvals
@@ -802,6 +816,12 @@ def responses(self) -> responses.ResponsesWithRawResponse:
 
         return ResponsesWithRawResponse(self._client.responses)
 
+    @cached_property
+    def conversations(self) -> conversations.ConversationsWithRawResponse:
+        from .resources.conversations import ConversationsWithRawResponse
+
+        return ConversationsWithRawResponse(self._client.conversations)
+
     @cached_property
     def evals(self) -> evals.EvalsWithRawResponse:
         from .resources.evals import EvalsWithRawResponse
@@ -905,6 +925,12 @@ def responses(self) -> responses.AsyncResponsesWithRawResponse:
 
         return AsyncResponsesWithRawResponse(self._client.responses)
 
+    @cached_property
+    def conversations(self) -> conversations.AsyncConversationsWithRawResponse:
+        from .resources.conversations import AsyncConversationsWithRawResponse
+
+        return AsyncConversationsWithRawResponse(self._client.conversations)
+
     @cached_property
     def evals(self) -> evals.AsyncEvalsWithRawResponse:
         from .resources.evals import AsyncEvalsWithRawResponse
@@ -1008,6 +1034,12 @@ def responses(self) -> responses.ResponsesWithStreamingResponse:
 
         return ResponsesWithStreamingResponse(self._client.responses)
 
+    @cached_property
+    def conversations(self) -> conversations.ConversationsWithStreamingResponse:
+        from .resources.conversations import ConversationsWithStreamingResponse
+
+        return ConversationsWithStreamingResponse(self._client.conversations)
+
     @cached_property
     def evals(self) -> evals.EvalsWithStreamingResponse:
         from .resources.evals import EvalsWithStreamingResponse
@@ -1111,6 +1143,12 @@ def responses(self) -> responses.AsyncResponsesWithStreamingResponse:
 
         return AsyncResponsesWithStreamingResponse(self._client.responses)
 
+    @cached_property
+    def conversations(self) -> conversations.AsyncConversationsWithStreamingResponse:
+        from .resources.conversations import AsyncConversationsWithStreamingResponse
+
+        return AsyncConversationsWithStreamingResponse(self._client.conversations)
+
     @cached_property
     def evals(self) -> evals.AsyncEvalsWithStreamingResponse:
         from .resources.evals import AsyncEvalsWithStreamingResponse
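
A short sketch of what these accessors expose from user code. The resource and wrapper names come from the diff above; calling create() with no arguments assumes all parameters are optional, which this diff does not show.

```python
from openai import OpenAI

client = OpenAI()

# Lazily constructed on first access via the cached_property added above.
conversation = client.conversations.create()

# The *WithRawResponse wrappers surface the underlying HTTP response as well.
raw = client.with_raw_response.conversations.create()
print(raw.headers.get("x-request-id"))
conversation = raw.parse()
```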

src/openai/_module_client.py

Lines changed: 8 additions & 0 deletions
@@ -22,6 +22,7 @@
 from .resources.responses.responses import Responses
 from .resources.containers.containers import Containers
 from .resources.fine_tuning.fine_tuning import FineTuning
+from .resources.conversations.conversations import Conversations
 from .resources.vector_stores.vector_stores import VectorStores
 
 from . import _load_client
@@ -130,6 +131,12 @@ def __load__(self) -> VectorStores:
         return _load_client().vector_stores
 
 
+class ConversationsProxy(LazyProxy["Conversations"]):
+    @override
+    def __load__(self) -> Conversations:
+        return _load_client().conversations
+
+
 chat: Chat = ChatProxy().__as_proxied__()
 beta: Beta = BetaProxy().__as_proxied__()
 files: Files = FilesProxy().__as_proxied__()
@@ -147,3 +154,4 @@ def __load__(self) -> VectorStores:
 moderations: Moderations = ModerationsProxy().__as_proxied__()
 fine_tuning: FineTuning = FineTuningProxy().__as_proxied__()
 vector_stores: VectorStores = VectorStoresProxy().__as_proxied__()
+conversations: Conversations = ConversationsProxy().__as_proxied__()
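
The proxy makes the resource reachable at module level without explicitly constructing a client, matching the other entries in this file; a minimal sketch (the no-argument create() call is an assumption about optional parameters):

```python
import openai

# ConversationsProxy defers to _load_client() on first use, so the default
# client is built lazily, just as for openai.vector_stores and the rest.
conversation = openai.conversations.create()
print(conversation.id)
```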

src/openai/pagination.py

Lines changed: 66 additions & 1 deletion
@@ -5,7 +5,14 @@
 
 from ._base_client import BasePage, PageInfo, BaseSyncPage, BaseAsyncPage
 
-__all__ = ["SyncPage", "AsyncPage", "SyncCursorPage", "AsyncCursorPage"]
+__all__ = [
+    "SyncPage",
+    "AsyncPage",
+    "SyncCursorPage",
+    "AsyncCursorPage",
+    "SyncConversationCursorPage",
+    "AsyncConversationCursorPage",
+]
 
 _T = TypeVar("_T")
 
@@ -123,3 +130,61 @@ def next_page_info(self) -> Optional[PageInfo]:
             return None
 
         return PageInfo(params={"after": item.id})
+
+
+class SyncConversationCursorPage(BaseSyncPage[_T], BasePage[_T], Generic[_T]):
+    data: List[_T]
+    has_more: Optional[bool] = None
+    last_id: Optional[str] = None
+
+    @override
+    def _get_page_items(self) -> List[_T]:
+        data = self.data
+        if not data:
+            return []
+        return data
+
+    @override
+    def has_next_page(self) -> bool:
+        has_more = self.has_more
+        if has_more is not None and has_more is False:
+            return False
+
+        return super().has_next_page()
+
+    @override
+    def next_page_info(self) -> Optional[PageInfo]:
+        last_id = self.last_id
+        if not last_id:
+            return None
+
+        return PageInfo(params={"after": last_id})
+
+
+class AsyncConversationCursorPage(BaseAsyncPage[_T], BasePage[_T], Generic[_T]):
+    data: List[_T]
+    has_more: Optional[bool] = None
+    last_id: Optional[str] = None
+
+    @override
+    def _get_page_items(self) -> List[_T]:
+        data = self.data
+        if not data:
+            return []
+        return data
+
+    @override
+    def has_next_page(self) -> bool:
+        has_more = self.has_more
+        if has_more is not None and has_more is False:
+            return False
+
+        return super().has_next_page()
+
+    @override
+    def next_page_info(self) -> Optional[PageInfo]:
+        last_id = self.last_id
+        if not last_id:
+            return None
+
+        return PageInfo(params={"after": last_id})
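
The two page classes drive auto-pagination for conversation items: iteration stops once has_more is false, and next_page_info() re-requests the same endpoint with after set to last_id. A caller-side sketch, assuming the items.list() method from api.md above; the conversation id is a placeholder:

```python
from openai import OpenAI

client = OpenAI()

page = client.conversations.items.list("conv_123")  # placeholder id

# Fields declared on SyncConversationCursorPage (see diff above).
print(page.has_more, page.last_id)

# Iterating the page auto-paginates: while has_more is truthy, the client
# fetches the next page with ?after=<last_id> as computed by next_page_info().
for item in page:
    print(item)
```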
src/openai/resources/conversations/__init__.py

Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from .items import (
+    Items,
+    AsyncItems,
+    ItemsWithRawResponse,
+    AsyncItemsWithRawResponse,
+    ItemsWithStreamingResponse,
+    AsyncItemsWithStreamingResponse,
+)
+from .conversations import (
+    Conversations,
+    AsyncConversations,
+    ConversationsWithRawResponse,
+    AsyncConversationsWithRawResponse,
+    ConversationsWithStreamingResponse,
+    AsyncConversationsWithStreamingResponse,
+)
+
+__all__ = [
+    "Items",
+    "AsyncItems",
+    "ItemsWithRawResponse",
+    "AsyncItemsWithRawResponse",
+    "ItemsWithStreamingResponse",
+    "AsyncItemsWithStreamingResponse",
+    "Conversations",
+    "AsyncConversations",
+    "ConversationsWithRawResponse",
+    "AsyncConversationsWithRawResponse",
+    "ConversationsWithStreamingResponse",
+    "AsyncConversationsWithStreamingResponse",
+]
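
These re-exports are what the lazy imports elsewhere in this commit resolve against (for example `from .resources.conversations import ConversationsWithRawResponse` in _client.py); equivalently, from outside the package:

```python
# Import the new resource classes through the package __init__ shown above.
from openai.resources.conversations import Conversations, AsyncConversations
```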
