
Commit 99cbd68

feat(api): update via SDK Studio
Parent: fc6d0b3

File tree

17 files changed: +402 −406 lines


.stats.yml

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 configured_endpoints: 106
 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-c371abef4463f174f8d35ef3da4697fae5eb221db615f9c305319196472f313b.yml
 openapi_spec_hash: d9bb62faf229c2c2875c732715e9cfd1
-config_hash: 8f38e5a8cd8426e678a65d7e3975af4a
+config_hash: 558dfd918ea253caf37c332fdf8d27cf

api.md

Lines changed: 5 additions & 6 deletions
@@ -89,7 +89,7 @@ Methods:
 
 - <code title="post /v1/openai/v1/responses">client.responses.<a href="./src/llama_stack_client/resources/responses/responses.py">create</a>(\*\*<a href="src/llama_stack_client/types/response_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/response_object.py">ResponseObject</a></code>
 - <code title="get /v1/openai/v1/responses/{response_id}">client.responses.<a href="./src/llama_stack_client/resources/responses/responses.py">retrieve</a>(response_id) -> <a href="./src/llama_stack_client/types/response_object.py">ResponseObject</a></code>
-- <code title="get /v1/openai/v1/responses">client.responses.<a href="./src/llama_stack_client/resources/responses/responses.py">list</a>(\*\*<a href="src/llama_stack_client/types/response_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/response_list_response.py">ResponseListResponse</a></code>
+- <code title="get /v1/openai/v1/responses">client.responses.<a href="./src/llama_stack_client/resources/responses/responses.py">list</a>(\*\*<a href="src/llama_stack_client/types/response_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/response_list_response.py">SyncOpenAICursorPagination[ResponseListResponse]</a></code>
 
 ## InputItems
 
@@ -290,7 +290,7 @@ Methods:
 
 - <code title="post /v1/openai/v1/chat/completions">client.chat.completions.<a href="./src/llama_stack_client/resources/chat/completions.py">create</a>(\*\*<a href="src/llama_stack_client/types/chat/completion_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/chat/completion_create_response.py">CompletionCreateResponse</a></code>
 - <code title="get /v1/openai/v1/chat/completions/{completion_id}">client.chat.completions.<a href="./src/llama_stack_client/resources/chat/completions.py">retrieve</a>(completion_id) -> <a href="./src/llama_stack_client/types/chat/completion_retrieve_response.py">CompletionRetrieveResponse</a></code>
-- <code title="get /v1/openai/v1/chat/completions">client.chat.completions.<a href="./src/llama_stack_client/resources/chat/completions.py">list</a>(\*\*<a href="src/llama_stack_client/types/chat/completion_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/chat/completion_list_response.py">CompletionListResponse</a></code>
+- <code title="get /v1/openai/v1/chat/completions">client.chat.completions.<a href="./src/llama_stack_client/resources/chat/completions.py">list</a>(\*\*<a href="src/llama_stack_client/types/chat/completion_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/chat/completion_list_response.py">SyncOpenAICursorPagination[CompletionListResponse]</a></code>
 
 # Completions
 
@@ -355,7 +355,7 @@ Methods:
 - <code title="post /v1/openai/v1/vector_stores">client.vector_stores.<a href="./src/llama_stack_client/resources/vector_stores/vector_stores.py">create</a>(\*\*<a href="src/llama_stack_client/types/vector_store_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/vector_store.py">VectorStore</a></code>
 - <code title="get /v1/openai/v1/vector_stores/{vector_store_id}">client.vector_stores.<a href="./src/llama_stack_client/resources/vector_stores/vector_stores.py">retrieve</a>(vector_store_id) -> <a href="./src/llama_stack_client/types/vector_store.py">VectorStore</a></code>
 - <code title="post /v1/openai/v1/vector_stores/{vector_store_id}">client.vector_stores.<a href="./src/llama_stack_client/resources/vector_stores/vector_stores.py">update</a>(vector_store_id, \*\*<a href="src/llama_stack_client/types/vector_store_update_params.py">params</a>) -> <a href="./src/llama_stack_client/types/vector_store.py">VectorStore</a></code>
-- <code title="get /v1/openai/v1/vector_stores">client.vector_stores.<a href="./src/llama_stack_client/resources/vector_stores/vector_stores.py">list</a>(\*\*<a href="src/llama_stack_client/types/vector_store_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/list_vector_stores_response.py">ListVectorStoresResponse</a></code>
+- <code title="get /v1/openai/v1/vector_stores">client.vector_stores.<a href="./src/llama_stack_client/resources/vector_stores/vector_stores.py">list</a>(\*\*<a href="src/llama_stack_client/types/vector_store_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/vector_store.py">SyncOpenAICursorPagination[VectorStore]</a></code>
 - <code title="delete /v1/openai/v1/vector_stores/{vector_store_id}">client.vector_stores.<a href="./src/llama_stack_client/resources/vector_stores/vector_stores.py">delete</a>(vector_store_id) -> <a href="./src/llama_stack_client/types/vector_store_delete_response.py">VectorStoreDeleteResponse</a></code>
 - <code title="post /v1/openai/v1/vector_stores/{vector_store_id}/search">client.vector_stores.<a href="./src/llama_stack_client/resources/vector_stores/vector_stores.py">search</a>(vector_store_id, \*\*<a href="src/llama_stack_client/types/vector_store_search_params.py">params</a>) -> <a href="./src/llama_stack_client/types/vector_store_search_response.py">VectorStoreSearchResponse</a></code>
 
@@ -366,7 +366,6 @@ Types:
 ```python
 from llama_stack_client.types.vector_stores import (
     VectorStoreFile,
-    FileListResponse,
     FileDeleteResponse,
     FileContentResponse,
 )
@@ -377,7 +376,7 @@ Methods:
 - <code title="post /v1/openai/v1/vector_stores/{vector_store_id}/files">client.vector_stores.files.<a href="./src/llama_stack_client/resources/vector_stores/files.py">create</a>(vector_store_id, \*\*<a href="src/llama_stack_client/types/vector_stores/file_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/vector_stores/vector_store_file.py">VectorStoreFile</a></code>
 - <code title="get /v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}">client.vector_stores.files.<a href="./src/llama_stack_client/resources/vector_stores/files.py">retrieve</a>(file_id, \*, vector_store_id) -> <a href="./src/llama_stack_client/types/vector_stores/vector_store_file.py">VectorStoreFile</a></code>
 - <code title="post /v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}">client.vector_stores.files.<a href="./src/llama_stack_client/resources/vector_stores/files.py">update</a>(file_id, \*, vector_store_id, \*\*<a href="src/llama_stack_client/types/vector_stores/file_update_params.py">params</a>) -> <a href="./src/llama_stack_client/types/vector_stores/vector_store_file.py">VectorStoreFile</a></code>
-- <code title="get /v1/openai/v1/vector_stores/{vector_store_id}/files">client.vector_stores.files.<a href="./src/llama_stack_client/resources/vector_stores/files.py">list</a>(vector_store_id, \*\*<a href="src/llama_stack_client/types/vector_stores/file_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/vector_stores/file_list_response.py">FileListResponse</a></code>
+- <code title="get /v1/openai/v1/vector_stores/{vector_store_id}/files">client.vector_stores.files.<a href="./src/llama_stack_client/resources/vector_stores/files.py">list</a>(vector_store_id, \*\*<a href="src/llama_stack_client/types/vector_stores/file_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/vector_stores/vector_store_file.py">SyncOpenAICursorPagination[VectorStoreFile]</a></code>
 - <code title="delete /v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}">client.vector_stores.files.<a href="./src/llama_stack_client/resources/vector_stores/files.py">delete</a>(file_id, \*, vector_store_id) -> <a href="./src/llama_stack_client/types/vector_stores/file_delete_response.py">FileDeleteResponse</a></code>
 - <code title="get /v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}/content">client.vector_stores.files.<a href="./src/llama_stack_client/resources/vector_stores/files.py">content</a>(file_id, \*, vector_store_id) -> <a href="./src/llama_stack_client/types/vector_stores/file_content_response.py">FileContentResponse</a></code>
 
@@ -589,6 +588,6 @@ Methods:
 
 - <code title="post /v1/openai/v1/files">client.files.<a href="./src/llama_stack_client/resources/files.py">create</a>(\*\*<a href="src/llama_stack_client/types/file_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/file.py">File</a></code>
 - <code title="get /v1/openai/v1/files/{file_id}">client.files.<a href="./src/llama_stack_client/resources/files.py">retrieve</a>(file_id) -> <a href="./src/llama_stack_client/types/file.py">File</a></code>
-- <code title="get /v1/openai/v1/files">client.files.<a href="./src/llama_stack_client/resources/files.py">list</a>(\*\*<a href="src/llama_stack_client/types/file_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/list_files_response.py">ListFilesResponse</a></code>
+- <code title="get /v1/openai/v1/files">client.files.<a href="./src/llama_stack_client/resources/files.py">list</a>(\*\*<a href="src/llama_stack_client/types/file_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/file.py">SyncOpenAICursorPagination[File]</a></code>
 - <code title="delete /v1/openai/v1/files/{file_id}">client.files.<a href="./src/llama_stack_client/resources/files.py">delete</a>(file_id) -> <a href="./src/llama_stack_client/types/delete_file_response.py">DeleteFileResponse</a></code>
 - <code title="get /v1/openai/v1/files/{file_id}/content">client.files.<a href="./src/llama_stack_client/resources/files.py">content</a>(file_id) -> object</code>
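
For callers, the api.md change means the `list` endpoints now return cursor-paginated page objects instead of flat list-response models. A minimal usage sketch, assuming the package's `LlamaStackClient` entry point and the iterable-page behaviour that Stainless-generated SDKs normally provide; the `base_url` value is a placeholder:

```python
from llama_stack_client import LlamaStackClient

# Placeholder base URL; point this at your own Llama Stack server.
client = LlamaStackClient(base_url="http://localhost:8321")

# list() now returns SyncOpenAICursorPagination[VectorStore] rather than
# ListVectorStoresResponse. Iterating the page object yields VectorStore items
# and is expected to fetch further pages automatically via the cursor.
for store in client.vector_stores.list():
    print(store.id)
```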

src/llama_stack_client/pagination.py

Lines changed: 59 additions & 1 deletion
@@ -5,7 +5,7 @@
 
 from ._base_client import BasePage, PageInfo, BaseSyncPage, BaseAsyncPage
 
-__all__ = ["SyncDatasetsIterrows", "AsyncDatasetsIterrows"]
+__all__ = ["SyncDatasetsIterrows", "AsyncDatasetsIterrows", "SyncOpenAICursorPagination", "AsyncOpenAICursorPagination"]
 
 _T = TypeVar("_T")
 
@@ -48,3 +48,61 @@ def next_page_info(self) -> Optional[PageInfo]:
             return None
 
         return PageInfo(params={"start_index": next_index})
+
+
+class SyncOpenAICursorPagination(BaseSyncPage[_T], BasePage[_T], Generic[_T]):
+    data: List[_T]
+    has_more: Optional[bool] = None
+    last_id: Optional[str] = None
+
+    @override
+    def _get_page_items(self) -> List[_T]:
+        data = self.data
+        if not data:
+            return []
+        return data
+
+    @override
+    def has_next_page(self) -> bool:
+        has_more = self.has_more
+        if has_more is not None and has_more is False:
+            return False
+
+        return super().has_next_page()
+
+    @override
+    def next_page_info(self) -> Optional[PageInfo]:
+        last_id = self.last_id
+        if not last_id:
+            return None
+
+        return PageInfo(params={"after": last_id})
+
+
+class AsyncOpenAICursorPagination(BaseAsyncPage[_T], BasePage[_T], Generic[_T]):
+    data: List[_T]
+    has_more: Optional[bool] = None
+    last_id: Optional[str] = None
+
+    @override
+    def _get_page_items(self) -> List[_T]:
+        data = self.data
+        if not data:
+            return []
+        return data
+
+    @override
+    def has_next_page(self) -> bool:
+        has_more = self.has_more
+        if has_more is not None and has_more is False:
+            return False
+
+        return super().has_next_page()
+
+    @override
+    def next_page_info(self) -> Optional[PageInfo]:
+        last_id = self.last_id
+        if not last_id:
+            return None
+
+        return PageInfo(params={"after": last_id})
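
The two new page classes implement the standard OpenAI-style cursor scheme: a page carries `data`, an optional `has_more` flag, and the `last_id` of its final item, and the next request simply repeats the call with `after=last_id`. A standalone sketch of that decision logic (illustrative only, not the SDK's internal request machinery):

```python
from typing import Optional


def next_cursor_params(has_more: Optional[bool], last_id: Optional[str]) -> Optional[dict]:
    """Mirror of the combined has_next_page()/next_page_info() behaviour above:
    return the query params for the next page, or None when iteration stops."""
    if has_more is False:  # server explicitly reported no further results
        return None
    if not last_id:        # no cursor to resume from
        return None
    return {"after": last_id}  # same shape as PageInfo(params={"after": last_id})


print(next_cursor_params(has_more=True, last_id="file-123"))   # {'after': 'file-123'}
print(next_cursor_params(has_more=False, last_id="file-123"))  # None
print(next_cursor_params(has_more=None, last_id=None))         # None
```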

src/llama_stack_client/resources/chat/completions.py

Lines changed: 12 additions & 9 deletions
@@ -18,8 +18,9 @@
     async_to_streamed_response_wrapper,
 )
 from ..._streaming import Stream, AsyncStream
+from ...pagination import SyncOpenAICursorPagination, AsyncOpenAICursorPagination
 from ...types.chat import completion_list_params, completion_create_params
-from ..._base_client import make_request_options
+from ..._base_client import AsyncPaginator, make_request_options
 from ...types.chat_completion_chunk import ChatCompletionChunk
 from ...types.chat.completion_list_response import CompletionListResponse
 from ...types.chat.completion_create_response import CompletionCreateResponse
@@ -466,7 +467,7 @@ def list(
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> CompletionListResponse:
+    ) -> SyncOpenAICursorPagination[CompletionListResponse]:
         """
         List all chat completions.
 
@@ -487,8 +488,9 @@ def list(
 
           timeout: Override the client-level default timeout for this request, in seconds
         """
-        return self._get(
+        return self._get_api_list(
             "/v1/openai/v1/chat/completions",
+            page=SyncOpenAICursorPagination[CompletionListResponse],
             options=make_request_options(
                 extra_headers=extra_headers,
                 extra_query=extra_query,
@@ -504,7 +506,7 @@ def list(
                     completion_list_params.CompletionListParams,
                 ),
             ),
-            cast_to=CompletionListResponse,
+            model=CompletionListResponse,
         )
 
 
@@ -933,7 +935,7 @@ async def retrieve(
             cast_to=CompletionRetrieveResponse,
         )
 
-    async def list(
+    def list(
         self,
         *,
         after: str | NotGiven = NOT_GIVEN,
@@ -946,7 +948,7 @@ async def list(
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> CompletionListResponse:
+    ) -> AsyncPaginator[CompletionListResponse, AsyncOpenAICursorPagination[CompletionListResponse]]:
         """
         List all chat completions.
 
@@ -967,14 +969,15 @@ async def list(
 
           timeout: Override the client-level default timeout for this request, in seconds
         """
-        return await self._get(
+        return self._get_api_list(
             "/v1/openai/v1/chat/completions",
+            page=AsyncOpenAICursorPagination[CompletionListResponse],
             options=make_request_options(
                 extra_headers=extra_headers,
                 extra_query=extra_query,
                 extra_body=extra_body,
                 timeout=timeout,
-                query=await async_maybe_transform(
+                query=maybe_transform(
                     {
                         "after": after,
                         "limit": limit,
@@ -984,7 +987,7 @@ async def list(
                     completion_list_params.CompletionListParams,
                 ),
             ),
-            cast_to=CompletionListResponse,
+            model=CompletionListResponse,
         )
 
 
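On the async side, `list()` becomes a regular (non-async) method returning an `AsyncPaginator`, which is why `await self._get(...)` turns into `self._get_api_list(...)` and `async_maybe_transform` is no longer needed for the query. A hedged usage sketch, assuming an `AsyncLlamaStackClient` entry point and the usual `async for` support on `AsyncPaginator` (the base URL is a placeholder):

```python
import asyncio

from llama_stack_client import AsyncLlamaStackClient


async def main() -> None:
    # Placeholder base URL; point this at your own Llama Stack server.
    client = AsyncLlamaStackClient(base_url="http://localhost:8321")

    # list() is not awaited directly; the returned AsyncPaginator is iterated
    # with `async for`, fetching subsequent pages lazily via the `after` cursor.
    async for completion in client.chat.completions.list(limit=20):
        print(completion)


asyncio.run(main())
```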
src/llama_stack_client/resources/files.py

Lines changed: 12 additions & 10 deletions
@@ -18,9 +18,9 @@
     async_to_raw_response_wrapper,
     async_to_streamed_response_wrapper,
 )
+from ..pagination import SyncOpenAICursorPagination, AsyncOpenAICursorPagination
 from ..types.file import File
-from .._base_client import make_request_options
-from ..types.list_files_response import ListFilesResponse
+from .._base_client import AsyncPaginator, make_request_options
 from ..types.delete_file_response import DeleteFileResponse
 
 __all__ = ["FilesResource", "AsyncFilesResource"]
@@ -144,7 +144,7 @@ def list(
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> ListFilesResponse:
+    ) -> SyncOpenAICursorPagination[File]:
         """
         Returns a list of files that belong to the user's organization.
 
@@ -170,8 +170,9 @@ def list(
 
           timeout: Override the client-level default timeout for this request, in seconds
         """
-        return self._get(
+        return self._get_api_list(
             "/v1/openai/v1/files",
+            page=SyncOpenAICursorPagination[File],
             options=make_request_options(
                 extra_headers=extra_headers,
                 extra_query=extra_query,
@@ -187,7 +188,7 @@ def list(
                     file_list_params.FileListParams,
                 ),
             ),
-            cast_to=ListFilesResponse,
+            model=File,
         )
 
     def delete(
@@ -362,7 +363,7 @@ async def retrieve(
             cast_to=File,
         )
 
-    async def list(
+    def list(
         self,
         *,
         after: str | NotGiven = NOT_GIVEN,
@@ -375,7 +376,7 @@ async def list(
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> ListFilesResponse:
+    ) -> AsyncPaginator[File, AsyncOpenAICursorPagination[File]]:
         """
         Returns a list of files that belong to the user's organization.
 
@@ -401,14 +402,15 @@ async def list(
 
           timeout: Override the client-level default timeout for this request, in seconds
         """
-        return await self._get(
+        return self._get_api_list(
             "/v1/openai/v1/files",
+            page=AsyncOpenAICursorPagination[File],
             options=make_request_options(
                 extra_headers=extra_headers,
                 extra_query=extra_query,
                 extra_body=extra_body,
                 timeout=timeout,
-                query=await async_maybe_transform(
+                query=maybe_transform(
                     {
                         "after": after,
                         "limit": limit,
@@ -418,7 +420,7 @@ async def list(
                     file_list_params.FileListParams,
                 ),
             ),
-            cast_to=ListFilesResponse,
+            model=File,
         )
 
     async def delete(
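
If automatic iteration is not wanted, the page object can also be walked manually. A sketch assuming the base page classes expose `has_next_page()` and `get_next_page()` as in other Stainless-generated SDKs (client class and base URL as above, the URL being a placeholder):

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder URL

page = client.files.list(limit=10)  # first SyncOpenAICursorPagination[File] page
while True:
    for file in page.data:          # items of the current page only
        print(file.id)              # assumes File carries an OpenAI-style `id` field
    if not page.has_next_page():    # stops when has_more is False or last_id is missing
        break
    page = page.get_next_page()     # re-issues the request with after=<last_id>
```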
