Skip to content
This repository was archived by the owner on Aug 14, 2025. It is now read-only.

Commit db99707

Browse files
feat(api): update via SDK Studio
1 parent e3508c4 commit db99707

File tree

5 files changed

+39
-4
lines changed

5 files changed

+39
-4
lines changed

.stats.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
configured_endpoints: 106
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-c371abef4463f174f8d35ef3da4697fae5eb221db615f9c305319196472f313b.yml
3-
openapi_spec_hash: d9bb62faf229c2c2875c732715e9cfd1
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-83f6df45d805a86b006e19d7e3e2decdfaba03af9b2a7bb8a5080d8801ee0838.yml
3+
openapi_spec_hash: a08671c120ecd7142115b61b5728e537
44
config_hash: e67fd054e95c1e82f78f4b834e96bb65

src/llama_stack_client/resources/responses/responses.py

Lines changed: 23 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
from __future__ import annotations
44

5-
from typing import Union, Iterable
5+
from typing import List, Union, Iterable
66
from typing_extensions import Literal, overload
77

88
import httpx
@@ -66,6 +66,7 @@ def create(
6666
*,
6767
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
6868
model: str,
69+
include: List[str] | NotGiven = NOT_GIVEN,
6970
instructions: str | NotGiven = NOT_GIVEN,
7071
max_infer_iters: int | NotGiven = NOT_GIVEN,
7172
previous_response_id: str | NotGiven = NOT_GIVEN,
@@ -89,6 +90,8 @@ def create(
8990
9091
model: The underlying LLM used for completions.
9192
93+
include: (Optional) Additional fields to include in the response.
94+
9295
previous_response_id: (Optional) if specified, the new response will be a continuation of the previous
9396
response. This can be used to easily fork-off new responses from existing
9497
responses.
@@ -112,6 +115,7 @@ def create(
112115
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
113116
model: str,
114117
stream: Literal[True],
118+
include: List[str] | NotGiven = NOT_GIVEN,
115119
instructions: str | NotGiven = NOT_GIVEN,
116120
max_infer_iters: int | NotGiven = NOT_GIVEN,
117121
previous_response_id: str | NotGiven = NOT_GIVEN,
@@ -134,6 +138,8 @@ def create(
134138
135139
model: The underlying LLM used for completions.
136140
141+
include: (Optional) Additional fields to include in the response.
142+
137143
previous_response_id: (Optional) if specified, the new response will be a continuation of the previous
138144
response. This can be used to easily fork-off new responses from existing
139145
responses.
@@ -157,6 +163,7 @@ def create(
157163
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
158164
model: str,
159165
stream: bool,
166+
include: List[str] | NotGiven = NOT_GIVEN,
160167
instructions: str | NotGiven = NOT_GIVEN,
161168
max_infer_iters: int | NotGiven = NOT_GIVEN,
162169
previous_response_id: str | NotGiven = NOT_GIVEN,
@@ -179,6 +186,8 @@ def create(
179186
180187
model: The underlying LLM used for completions.
181188
189+
include: (Optional) Additional fields to include in the response.
190+
182191
previous_response_id: (Optional) if specified, the new response will be a continuation of the previous
183192
response. This can be used to easily fork-off new responses from existing
184193
responses.
@@ -201,6 +210,7 @@ def create(
201210
*,
202211
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
203212
model: str,
213+
include: List[str] | NotGiven = NOT_GIVEN,
204214
instructions: str | NotGiven = NOT_GIVEN,
205215
max_infer_iters: int | NotGiven = NOT_GIVEN,
206216
previous_response_id: str | NotGiven = NOT_GIVEN,
@@ -222,6 +232,7 @@ def create(
222232
{
223233
"input": input,
224234
"model": model,
235+
"include": include,
225236
"instructions": instructions,
226237
"max_infer_iters": max_infer_iters,
227238
"previous_response_id": previous_response_id,
@@ -362,6 +373,7 @@ async def create(
362373
*,
363374
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
364375
model: str,
376+
include: List[str] | NotGiven = NOT_GIVEN,
365377
instructions: str | NotGiven = NOT_GIVEN,
366378
max_infer_iters: int | NotGiven = NOT_GIVEN,
367379
previous_response_id: str | NotGiven = NOT_GIVEN,
@@ -385,6 +397,8 @@ async def create(
385397
386398
model: The underlying LLM used for completions.
387399
400+
include: (Optional) Additional fields to include in the response.
401+
388402
previous_response_id: (Optional) if specified, the new response will be a continuation of the previous
389403
response. This can be used to easily fork-off new responses from existing
390404
responses.
@@ -408,6 +422,7 @@ async def create(
408422
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
409423
model: str,
410424
stream: Literal[True],
425+
include: List[str] | NotGiven = NOT_GIVEN,
411426
instructions: str | NotGiven = NOT_GIVEN,
412427
max_infer_iters: int | NotGiven = NOT_GIVEN,
413428
previous_response_id: str | NotGiven = NOT_GIVEN,
@@ -430,6 +445,8 @@ async def create(
430445
431446
model: The underlying LLM used for completions.
432447
448+
include: (Optional) Additional fields to include in the response.
449+
433450
previous_response_id: (Optional) if specified, the new response will be a continuation of the previous
434451
response. This can be used to easily fork-off new responses from existing
435452
responses.
@@ -453,6 +470,7 @@ async def create(
453470
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
454471
model: str,
455472
stream: bool,
473+
include: List[str] | NotGiven = NOT_GIVEN,
456474
instructions: str | NotGiven = NOT_GIVEN,
457475
max_infer_iters: int | NotGiven = NOT_GIVEN,
458476
previous_response_id: str | NotGiven = NOT_GIVEN,
@@ -475,6 +493,8 @@ async def create(
475493
476494
model: The underlying LLM used for completions.
477495
496+
include: (Optional) Additional fields to include in the response.
497+
478498
previous_response_id: (Optional) if specified, the new response will be a continuation of the previous
479499
response. This can be used to easily fork-off new responses from existing
480500
responses.
@@ -497,6 +517,7 @@ async def create(
497517
*,
498518
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
499519
model: str,
520+
include: List[str] | NotGiven = NOT_GIVEN,
500521
instructions: str | NotGiven = NOT_GIVEN,
501522
max_infer_iters: int | NotGiven = NOT_GIVEN,
502523
previous_response_id: str | NotGiven = NOT_GIVEN,
@@ -518,6 +539,7 @@ async def create(
518539
{
519540
"input": input,
520541
"model": model,
542+
"include": include,
521543
"instructions": instructions,
522544
"max_infer_iters": max_infer_iters,
523545
"previous_response_id": previous_response_id,

src/llama_stack_client/types/create_response.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,13 @@ class Result(BaseModel):
2222
"""
2323

2424
category_scores: Optional[Dict[str, float]] = None
25-
"""A list of the categories along with their scores as predicted by model."""
25+
"""A list of the categories along with their scores as predicted by model.
26+
27+
Required set of categories that need to be in response - violence -
28+
violence/graphic - harassment - harassment/threatening - hate -
29+
hate/threatening - illicit - illicit/violent - sexual - sexual/minors -
30+
self-harm - self-harm/intent - self-harm/instructions
31+
"""
2632

2733
user_message: Optional[str] = None
2834

src/llama_stack_client/types/response_create_params.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,9 @@ class ResponseCreateParamsBase(TypedDict, total=False):
4646
model: Required[str]
4747
"""The underlying LLM used for completions."""
4848

49+
include: List[str]
50+
"""(Optional) Additional fields to include in the response."""
51+
4952
instructions: str
5053

5154
max_infer_iters: int

tests/api_resources/test_responses.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ def test_method_create_with_all_params_overload_1(self, client: LlamaStackClient
3131
response = client.responses.create(
3232
input="string",
3333
model="model",
34+
include=["string"],
3435
instructions="instructions",
3536
max_infer_iters=0,
3637
previous_response_id="previous_response_id",
@@ -96,6 +97,7 @@ def test_method_create_with_all_params_overload_2(self, client: LlamaStackClient
9697
input="string",
9798
model="model",
9899
stream=True,
100+
include=["string"],
99101
instructions="instructions",
100102
max_infer_iters=0,
101103
previous_response_id="previous_response_id",
@@ -238,6 +240,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
238240
response = await async_client.responses.create(
239241
input="string",
240242
model="model",
243+
include=["string"],
241244
instructions="instructions",
242245
max_infer_iters=0,
243246
previous_response_id="previous_response_id",
@@ -303,6 +306,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
303306
input="string",
304307
model="model",
305308
stream=True,
309+
include=["string"],
306310
instructions="instructions",
307311
max_infer_iters=0,
308312
previous_response_id="previous_response_id",

0 commit comments

Comments (0)