Skip to content

Commit f3b87fb

Browse files
feat(api): fix completion response breakage by moving Metric into shared types and inlining ReturnType
1 parent 7501b6e commit f3b87fb

16 files changed

+51
-81
lines changed

.stats.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
configured_endpoints: 107
22
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-f252873ea1e1f38fd207331ef2621c511154d5be3f4076e59cc15754fc58eee4.yml
33
openapi_spec_hash: 10cbb4337a06a9fdd7d08612dd6044c3
4-
config_hash: 17fe64b23723fc54f2ee61c80223c3e3
4+
config_hash: 4138b0fa1be4c2807ad4ec538b8b38f4

api.md

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,19 +11,18 @@ from llama_stack_client.types import (
1111
InterleavedContent,
1212
InterleavedContentItem,
1313
Message,
14+
Metric,
1415
ParamType,
1516
QueryConfig,
1617
QueryGeneratorConfig,
1718
QueryResult,
1819
ResponseFormat,
19-
ReturnType,
2020
SafetyViolation,
2121
SamplingParams,
2222
ScoringResult,
2323
SharedTokenLogProbs,
2424
SystemMessage,
2525
ToolCall,
26-
ToolCallOrString,
2726
ToolParamDefinition,
2827
ToolResponseMessage,
2928
UserMessage,
@@ -509,7 +508,6 @@ Types:
509508
```python
510509
from llama_stack_client.types import (
511510
Event,
512-
Metric,
513511
QueryCondition,
514512
QuerySpansResponse,
515513
SpanWithStatus,

src/llama_stack_client/resources/scoring_functions.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@
2121
from .._base_client import make_request_options
2222
from ..types.scoring_fn import ScoringFn
2323
from ..types.scoring_fn_params_param import ScoringFnParamsParam
24-
from ..types.shared_params.return_type import ReturnType
2524
from ..types.scoring_function_list_response import ScoringFunctionListResponse
2625

2726
__all__ = ["ScoringFunctionsResource", "AsyncScoringFunctionsResource"]
@@ -107,7 +106,7 @@ def register(
107106
self,
108107
*,
109108
description: str,
110-
return_type: ReturnType,
109+
return_type: scoring_function_register_params.ReturnType,
111110
scoring_fn_id: str,
112111
params: ScoringFnParamsParam | NotGiven = NOT_GIVEN,
113112
provider_id: str | NotGiven = NOT_GIVEN,
@@ -243,7 +242,7 @@ async def register(
243242
self,
244243
*,
245244
description: str,
246-
return_type: ReturnType,
245+
return_type: scoring_function_register_params.ReturnType,
247246
scoring_fn_id: str,
248247
params: ScoringFnParamsParam | NotGiven = NOT_GIVEN,
249248
provider_id: str | NotGiven = NOT_GIVEN,

src/llama_stack_client/types/__init__.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,13 +7,12 @@
77
from .tool import Tool as Tool
88
from .model import Model as Model
99
from .trace import Trace as Trace
10-
from .metric import Metric as Metric
1110
from .shared import (
11+
Metric as Metric,
1212
Message as Message,
1313
Document as Document,
1414
ToolCall as ToolCall,
1515
ParamType as ParamType,
16-
ReturnType as ReturnType,
1716
AgentConfig as AgentConfig,
1817
QueryConfig as QueryConfig,
1918
QueryResult as QueryResult,
@@ -25,7 +24,6 @@
2524
SamplingParams as SamplingParams,
2625
BatchCompletion as BatchCompletion,
2726
SafetyViolation as SafetyViolation,
28-
ToolCallOrString as ToolCallOrString,
2927
CompletionMessage as CompletionMessage,
3028
InterleavedContent as InterleavedContent,
3129
SharedTokenLogProbs as SharedTokenLogProbs,

src/llama_stack_client/types/chat_completion_response_stream_chunk.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@
33
from typing import List, Optional
44
from typing_extensions import Literal
55

6-
from .metric import Metric
76
from .._models import BaseModel
7+
from .shared.metric import Metric
88
from .shared.content_delta import ContentDelta
99
from .shared.shared_token_log_probs import SharedTokenLogProbs
1010

src/llama_stack_client/types/scoring_fn.py

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,23 @@
55

66
from .._models import BaseModel
77
from .scoring_fn_params import ScoringFnParams
8-
from .shared.return_type import ReturnType
98

10-
__all__ = ["ScoringFn"]
9+
__all__ = ["ScoringFn", "ReturnType"]
10+
11+
12+
class ReturnType(BaseModel):
13+
type: Literal[
14+
"string",
15+
"number",
16+
"boolean",
17+
"array",
18+
"object",
19+
"json",
20+
"union",
21+
"chat_completion_input",
22+
"completion_input",
23+
"agent_turn_input",
24+
]
1125

1226

1327
class ScoringFn(BaseModel):

src/llama_stack_client/types/scoring_function_register_params.py

Lines changed: 19 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,11 @@
22

33
from __future__ import annotations
44

5-
from typing_extensions import Required, TypedDict
5+
from typing_extensions import Literal, Required, TypedDict
66

77
from .scoring_fn_params_param import ScoringFnParamsParam
8-
from .shared_params.return_type import ReturnType
98

10-
__all__ = ["ScoringFunctionRegisterParams"]
9+
__all__ = ["ScoringFunctionRegisterParams", "ReturnType"]
1110

1211

1312
class ScoringFunctionRegisterParams(TypedDict, total=False):
@@ -30,3 +29,20 @@ class ScoringFunctionRegisterParams(TypedDict, total=False):
3029

3130
provider_scoring_fn_id: str
3231
"""The ID of the provider scoring function to use for the scoring function."""
32+
33+
34+
class ReturnType(TypedDict, total=False):
35+
type: Required[
36+
Literal[
37+
"string",
38+
"number",
39+
"boolean",
40+
"array",
41+
"object",
42+
"json",
43+
"union",
44+
"chat_completion_input",
45+
"completion_input",
46+
"agent_turn_input",
47+
]
48+
]

src/llama_stack_client/types/shared/__init__.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
22

3+
from .metric import Metric as Metric
34
from .message import Message as Message
45
from .document import Document as Document
56
from .tool_call import ToolCall as ToolCall
67
from .param_type import ParamType as ParamType
7-
from .return_type import ReturnType as ReturnType
88
from .agent_config import AgentConfig as AgentConfig
99
from .query_config import QueryConfig as QueryConfig
1010
from .query_result import QueryResult as QueryResult
@@ -18,7 +18,6 @@
1818
from .safety_violation import SafetyViolation as SafetyViolation
1919
from .completion_message import CompletionMessage as CompletionMessage
2020
from .interleaved_content import InterleavedContent as InterleavedContent
21-
from .tool_call_or_string import ToolCallOrString as ToolCallOrString
2221
from .tool_param_definition import ToolParamDefinition as ToolParamDefinition
2322
from .tool_response_message import ToolResponseMessage as ToolResponseMessage
2423
from .query_generator_config import QueryGeneratorConfig as QueryGeneratorConfig

src/llama_stack_client/types/shared/batch_completion.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from typing import List, Optional
44
from typing_extensions import Literal
55

6-
from ..metric import Metric
6+
from .metric import Metric
77
from ..._models import BaseModel
88
from .shared_token_log_probs import SharedTokenLogProbs
99

src/llama_stack_client/types/shared/chat_completion_response.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
from typing import List, Optional
44

5-
from ..metric import Metric
5+
from .metric import Metric
66
from ..._models import BaseModel
77
from .completion_message import CompletionMessage
88
from .shared_token_log_probs import SharedTokenLogProbs

0 commit comments

Comments
 (0)