@@ -38,16 +38,7 @@ func TestUserAgentHeader(t *testing.T) {
 			},
 		}),
 	)
-	client.Inference.ChatCompletion(context.Background(), llamastackclient.InferenceChatCompletionParams{
-		Messages: []llamastackclient.MessageUnionParam{{
-			OfUser: &llamastackclient.UserMessageParam{
-				Content: llamastackclient.InterleavedContentUnionParam{
-					OfString: llamastackclient.String("string"),
-				},
-			},
-		}},
-		ModelID: "model_id",
-	})
+	client.Toolgroups.List(context.Background())
 	if userAgent != fmt.Sprintf("LlamaStackClient/Go %s", internal.PackageVersion) {
 		t.Errorf("Expected User-Agent to be correct, but got: %#v", userAgent)
 	}
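
For orientation, here is a minimal, self-contained sketch of the test shape this hunk (and the ones below) converges on: a custom RoundTripper records the outgoing request so headers can be asserted without a live server. The recordingTransport type, the import path, and the option.WithHTTPClient wiring are assumptions (WithHTTPClient is the usual Stainless escape hatch), not the repository's actual harness; only the client.Toolgroups.List call comes verbatim from the diff.

package llamastackclient_test

import (
	"context"
	"net/http"
	"testing"

	llamastackclient "github.com/llamastack/llama-stack-client-go" // import path assumed
	"github.com/llamastack/llama-stack-client-go/option"
)

// recordingTransport is a hypothetical http.RoundTripper that keeps the last
// outgoing request and answers with an empty 200, so no server is involved.
type recordingTransport struct{ lastReq *http.Request }

func (rt *recordingTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	rt.lastReq = req
	return &http.Response{StatusCode: http.StatusOK, Body: http.NoBody}, nil
}

func TestUserAgentHeaderSketch(t *testing.T) {
	rt := &recordingTransport{}
	client := llamastackclient.NewClient(
		option.WithHTTPClient(&http.Client{Transport: rt}), // assumed option, typical for Stainless-generated clients
	)
	// The cheap one-liner the diff standardizes on; the empty canned body may
	// make the call return a decode error, which a header assertion can ignore.
	client.Toolgroups.List(context.Background())
	if rt.lastReq == nil || rt.lastReq.Header.Get("User-Agent") == "" {
		t.Error("expected the client to send a User-Agent header")
	}
}
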
@@ -70,16 +61,7 @@ func TestRetryAfter(t *testing.T) {
 			},
 		}),
 	)
-	_, err := client.Inference.ChatCompletion(context.Background(), llamastackclient.InferenceChatCompletionParams{
-		Messages: []llamastackclient.MessageUnionParam{{
-			OfUser: &llamastackclient.UserMessageParam{
-				Content: llamastackclient.InterleavedContentUnionParam{
-					OfString: llamastackclient.String("string"),
-				},
-			},
-		}},
-		ModelID: "model_id",
-	})
+	_, err := client.Toolgroups.List(context.Background())
 	if err == nil {
 		t.Error("Expected there to be a cancel error")
 	}
@@ -113,16 +95,7 @@ func TestDeleteRetryCountHeader(t *testing.T) {
 		}),
 		option.WithHeaderDel("X-Stainless-Retry-Count"),
 	)
-	_, err := client.Inference.ChatCompletion(context.Background(), llamastackclient.InferenceChatCompletionParams{
-		Messages: []llamastackclient.MessageUnionParam{{
-			OfUser: &llamastackclient.UserMessageParam{
-				Content: llamastackclient.InterleavedContentUnionParam{
-					OfString: llamastackclient.String("string"),
-				},
-			},
-		}},
-		ModelID: "model_id",
-	})
+	_, err := client.Toolgroups.List(context.Background())
 	if err == nil {
 		t.Error("Expected there to be a cancel error")
 	}
@@ -151,16 +124,7 @@ func TestOverwriteRetryCountHeader(t *testing.T) {
 		}),
 		option.WithHeader("X-Stainless-Retry-Count", "42"),
 	)
-	_, err := client.Inference.ChatCompletion(context.Background(), llamastackclient.InferenceChatCompletionParams{
-		Messages: []llamastackclient.MessageUnionParam{{
-			OfUser: &llamastackclient.UserMessageParam{
-				Content: llamastackclient.InterleavedContentUnionParam{
-					OfString: llamastackclient.String("string"),
-				},
-			},
-		}},
-		ModelID: "model_id",
-	})
+	_, err := client.Toolgroups.List(context.Background())
 	if err == nil {
 		t.Error("Expected there to be a cancel error")
 	}
@@ -188,16 +152,7 @@ func TestRetryAfterMs(t *testing.T) {
 			},
 		}),
 	)
-	_, err := client.Inference.ChatCompletion(context.Background(), llamastackclient.InferenceChatCompletionParams{
-		Messages: []llamastackclient.MessageUnionParam{{
-			OfUser: &llamastackclient.UserMessageParam{
-				Content: llamastackclient.InterleavedContentUnionParam{
-					OfString: llamastackclient.String("string"),
-				},
-			},
-		}},
-		ModelID: "model_id",
-	})
+	_, err := client.Toolgroups.List(context.Background())
 	if err == nil {
 		t.Error("Expected there to be a cancel error")
 	}
@@ -219,16 +174,7 @@ func TestContextCancel(t *testing.T) {
 	)
 	cancelCtx, cancel := context.WithCancel(context.Background())
 	cancel()
-	_, err := client.Inference.ChatCompletion(cancelCtx, llamastackclient.InferenceChatCompletionParams{
-		Messages: []llamastackclient.MessageUnionParam{{
-			OfUser: &llamastackclient.UserMessageParam{
-				Content: llamastackclient.InterleavedContentUnionParam{
-					OfString: llamastackclient.String("string"),
-				},
-			},
-		}},
-		ModelID: "model_id",
-	})
+	_, err := client.Toolgroups.List(cancelCtx)
 	if err == nil {
 		t.Error("Expected there to be a cancel error")
 	}
@@ -247,16 +193,7 @@ func TestContextCancelDelay(t *testing.T) {
 	)
 	cancelCtx, cancel := context.WithTimeout(context.Background(), 2*time.Millisecond)
 	defer cancel()
-	_, err := client.Inference.ChatCompletion(cancelCtx, llamastackclient.InferenceChatCompletionParams{
-		Messages: []llamastackclient.MessageUnionParam{{
-			OfUser: &llamastackclient.UserMessageParam{
-				Content: llamastackclient.InterleavedContentUnionParam{
-					OfString: llamastackclient.String("string"),
-				},
-			},
-		}},
-		ModelID: "model_id",
-	})
+	_, err := client.Toolgroups.List(cancelCtx)
 	if err == nil {
 		t.Error("expected there to be a cancel error")
 	}
@@ -281,16 +218,7 @@ func TestContextDeadline(t *testing.T) {
 			},
 		}),
 	)
-	_, err := client.Inference.ChatCompletion(deadlineCtx, llamastackclient.InferenceChatCompletionParams{
-		Messages: []llamastackclient.MessageUnionParam{{
-			OfUser: &llamastackclient.UserMessageParam{
-				Content: llamastackclient.InterleavedContentUnionParam{
-					OfString: llamastackclient.String("string"),
-				},
-			},
-		}},
-		ModelID: "model_id",
-	})
+	_, err := client.Toolgroups.List(deadlineCtx)
 	if err == nil {
 		t.Error("expected there to be a deadline error")
 	}
@@ -334,15 +262,11 @@ func TestContextDeadlineStreaming(t *testing.T) {
 			},
 		}),
 	)
-	stream := client.Inference.ChatCompletionStreaming(deadlineCtx, llamastackclient.InferenceChatCompletionParams{
-		Messages: []llamastackclient.MessageUnionParam{{
-			OfUser: &llamastackclient.UserMessageParam{
-				Content: llamastackclient.InterleavedContentUnionParam{
-					OfString: llamastackclient.String("string"),
-				},
-			},
-		}},
-		ModelID: "model_id",
+	stream := client.Responses.NewStreaming(deadlineCtx, llamastackclient.ResponseNewParams{
+		Input: llamastackclient.ResponseNewParamsInputUnion{
+			OfString: llamastackclient.String("string"),
+		},
+		Model: "model",
 	})
 	for stream.Next() {
 		_ = stream.Current()
@@ -387,17 +311,13 @@ func TestContextDeadlineStreamingWithRequestTimeout(t *testing.T) {
 			},
 		}),
 	)
-	stream := client.Inference.ChatCompletionStreaming(
+	stream := client.Responses.NewStreaming(
 		context.Background(),
-		llamastackclient.InferenceChatCompletionParams{
-			Messages: []llamastackclient.MessageUnionParam{{
-				OfUser: &llamastackclient.UserMessageParam{
-					Content: llamastackclient.InterleavedContentUnionParam{
-						OfString: llamastackclient.String("string"),
-					},
-				},
-			}},
-			ModelID: "model_id",
+		llamastackclient.ResponseNewParams{
+			Input: llamastackclient.ResponseNewParamsInputUnion{
+				OfString: llamastackclient.String("string"),
+			},
+			Model: "model",
 		},
 		option.WithRequestTimeout(100 * time.Millisecond),
 	)
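
The last two hunks swap the streaming call the same way; below is a sketch of that shape, continuing the hypothetical test file above (it additionally needs "time" in the imports). Responses.NewStreaming, ResponseNewParams, Next, and Current are taken verbatim from the diff; Err() is the conventional Stainless stream accessor and is assumed here, as is the behavior of the canned empty response.

// TestResponsesStreamingSketch mirrors the streaming hunks against the
// recordingTransport defined earlier; everything not shown in the diff is
// an assumption, not the SDK's documented behavior.
func TestResponsesStreamingSketch(t *testing.T) {
	rt := &recordingTransport{}
	client := llamastackclient.NewClient(
		option.WithHTTPClient(&http.Client{Transport: rt}),
	)
	stream := client.Responses.NewStreaming(
		context.Background(),
		llamastackclient.ResponseNewParams{
			Input: llamastackclient.ResponseNewParamsInputUnion{
				OfString: llamastackclient.String("string"),
			},
			Model: "model",
		},
		option.WithRequestTimeout(100 * time.Millisecond),
	)
	for stream.Next() {
		_ = stream.Current() // one decoded server-sent event per iteration
	}
	// The canned 200 carries no SSE body, so the loop should exit at once;
	// any transport or decode problem is expected to surface in Err().
	if err := stream.Err(); err != nil {
		t.Logf("stream ended with: %v", err)
	}
}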