
Commit 348097d

Update code with snippets from engineer (Azure-Samples/openai#176)
1 parent 75f03a9 commit 348097d

1 file changed: +192 −35

articles/ai-foundry/foundry-models/how-to/use-chat-completions.md

Lines changed: 192 additions & 35 deletions
@@ -135,15 +135,27 @@ Microsoft Entra authentication only supports Azure OpenAI resources. Complete th
 
 ```csharp
 using OpenAI;
-using System;
+using OpenAI.Responses;
 using System.ClientModel;
 
-OpenAIClient client = new(
-    new ApiKeyCredential("{your-api-key}"),
-    new OpenAIClientOptions()
+#pragma warning disable OPENAI001
+
+string deploymentName = "my-gpt-4.1-nano-deployment"; // Your model deployment name
+OpenAIResponseClient client = new(
+    model: deploymentName,
+    credential: new ApiKeyCredential("{your-api-key}"),
+    options: new OpenAIClientOptions()
 {
     Endpoint = new("https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1/"),
-})
+});
+
+OpenAIResponse response = client.CreateResponse(
+    [
+        ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?")
+    ]);
+
+Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}");
+
 ```
 
 **Microsoft Entra authentication**:
@@ -159,21 +171,34 @@ Microsoft Entra authentication only supports Azure OpenAI resources. Complete th
 1. Use the following code to configure the OpenAI client object, specify your deployment, and generate responses.
 
 ```csharp
+using Azure.Identity;
+using OpenAI;
+using OpenAI.Responses;
+using System.ClientModel.Primitives;
+
 #pragma warning disable OPENAI001
 
 BearerTokenPolicy tokenPolicy = new(
     new DefaultAzureCredential(),
     "https://cognitiveservices.azure.com/.default");
-OpenAIClient client = new(
-    authenticationPolicy: tokenPolicy,
-    options: new OpenAIClientOptions()
-{
-    Endpoint = new("https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1/"),
-})
-
-string deploymentName = "my-gpt-4.1-nano-deployment";
-OpenAIResponseClient response = client.GetOpenAIResponseClient(deploymentName);
-
+
+string deploymentName = "my-gpt-4.1-nano-deployment"; // Your model deployment name
+OpenAIResponseClient client = new(
+    model: deploymentName,
+    authenticationPolicy: tokenPolicy,
+    options: new OpenAIClientOptions()
+{
+    Endpoint = new("https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1/"),
+});
+
+OpenAIResponse response = client.CreateResponse(
+    [
+        ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?")
+    ]);
+
+Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}");
+
+
 ```
 
 # [JavaScript](#tab/javascript)
@@ -189,9 +214,23 @@ const client = new OpenAI({
   baseURL: "https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1/",
   apiKey: "{your-api-key}"
 });
+
+// Make the API request with top-level await
+const result = await client.responses
+    .stream({
+        model: 'grok-3-mini', // Your model deployment name
+        input: 'solve 8x + 31 = 2',
+    }).finalResponse()
+
+// Print the full response
+console.log('Full response:', result);
+
+// Print just the output text from the response
+console.log('Response content:', result.output_text);
+
 ```
 
-To use the API key with environment variables set for `OPENAI_BASE_URL` and `OPENAI_API_KEY`:
+To use the API key with environment variables set for `OPENAI_BASE_URL` and `OPENAI_API_KEY`, modify the previous code by creating the client as follows:
 
 ```javascript
 import { OpenAI } from "openai";
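
The hunk truncates after the import. For context, a minimal sketch of how the environment-variable variant could continue, assuming the SDK's default reading of `OPENAI_API_KEY` and `OPENAI_BASE_URL` and the same `grok-3-mini` deployment name used in the examples above:

```javascript
import { OpenAI } from "openai";

// With OPENAI_BASE_URL and OPENAI_API_KEY set, the constructor needs no arguments;
// the SDK reads both environment variables by default.
const client = new OpenAI();

// Same request as above, against the deployed model
const result = await client.responses
    .stream({
        model: 'grok-3-mini', // Your model deployment name
        input: 'solve 8x + 31 = 2',
    }).finalResponse();

console.log('Response content:', result.output_text);
```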
@@ -214,10 +253,24 @@ import { OpenAI } from "openai";
 const tokenProvider = getBearerTokenProvider(
     new DefaultAzureCredential(),
     'https://cognitiveservices.azure.com/.default');
+
 const client = new OpenAI({
   baseURL: "https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1/",
   apiKey: tokenProvider
 });
+
+// Make the API request with top-level await
+const result = await client.responses
+    .stream({
+        model: 'grok-3-mini', // Your model deployment name
+        input: 'solve 8x + 31 = 2',
+    }).finalResponse()
+
+// Print the full response
+console.log('Full response:', result);
+
+// Print just the output text from the response
+console.log('Response content:', result.output_text);
 ```
 
 # [Go](#tab/go)
@@ -231,14 +284,30 @@ import (
     "context"
     "fmt"
 
-    "github.com/openai/openai-go/v2"
-    "github.com/openai/openai-go/v2/option"
+    "github.com/openai/openai-go/v3"
+    "github.com/openai/openai-go/v3/option"
+    "github.com/openai/openai-go/v3/responses"
 )
 
 client := openai.NewClient(
     option.WithBaseURL("https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1/"),
     option.WithAPIKey("{your-api-key}")
 )
+
+// Make a completion request
+question := "Write me a haiku about computers"
+
+resp, err := client.Responses.New(context.Background(), responses.ResponseNewParams{
+    Input: responses.ResponseNewParamsInputUnion{OfString: openai.String(question)},
+    Model: "grok-3-mini", // Use your deployed model name on Azure
+})
+
+if err != nil {
+    panic(err.Error())
+}
+
+println(resp.OutputText())
 ```
 
 To use the API key with environment variables set for `OPENAI_BASE_URL` and `OPENAI_API_KEY`:
@@ -248,8 +317,9 @@ import (
     "context"
     "fmt"
 
-    "github.com/openai/openai-go/v2"
-    "github.com/openai/openai-go/v2/option"
+    "github.com/openai/openai-go/v3"
+    "github.com/openai/openai-go/v3/option"
+    "github.com/openai/openai-go/v3/responses"
 )
 client := openai.NewClient()
 ```
@@ -272,6 +342,7 @@ import (
     "github.com/openai/openai-go/v3"
     "github.com/openai/openai-go/v3/azure"
     "github.com/openai/openai-go/v3/option"
+    "github.com/openai/openai-go/v3/responses"
 )
 
 tokenCredential, err := azidentity.NewDefaultAzureCredential(nil)
@@ -280,6 +351,22 @@ client := openai.NewClient(
     option.WithBaseURL("https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1/"),
     azure.WithTokenCredential(tokenCredential)
 )
+
+// Make a completion request
+question := "Write me a haiku about computers"
+
+resp, err := client.Responses.New(context.Background(), responses.ResponseNewParams{
+    Input: responses.ResponseNewParamsInputUnion{OfString: openai.String(question)},
+    Model: "grok-3-mini", // Use your deployed model name on Azure
+})
+
+if err != nil {
+    panic(err.Error())
+}
+
+println(resp.OutputText())
+
 ```
 
 # [Java](#tab/Java)
@@ -314,6 +401,46 @@ OpenAIClient client = OpenAIOkHttpClient.builder()
     .build();
 ```
 
+Generate responses:
+
+```java
+package com.example;
+
+import com.openai.client.OpenAIClient;
+import com.openai.client.okhttp.OpenAIOkHttpClient;
+import com.openai.models.ChatModel;
+import com.openai.models.responses.ResponseCreateParams;
+
+public class OpenAITest {
+    public static void main(String[] args) {
+        // Get API key from environment variable for security
+        String apiKey = System.getenv("OPENAI_API_KEY");
+        String resourceName = "https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1";
+        String modelDeploymentName = "grok-3-mini"; // Replace with your model deployment name
+
+        try {
+            OpenAIClient client = OpenAIOkHttpClient.builder()
+                .baseUrl(resourceName)
+                .apiKey(apiKey)
+                .build();
+
+            ResponseCreateParams.Builder paramsBuilder = ResponseCreateParams.builder()
+                .model(modelDeploymentName)
+                .input("What's the capital of France?");
+
+            ResponseCreateParams createParams = paramsBuilder.build();
+
+            client.responses().create(createParams).output().stream()
+                .flatMap(item -> item.message().stream())
+                .flatMap(message -> message.content().stream())
+                .flatMap(content -> content.outputText().stream())
+                .forEach(outputText -> System.out.println(outputText.text()));
+        } catch (Exception e) { e.printStackTrace(); }
+    }
+}
+```
+
 **Microsoft Entra authentication**:
 
 Authentication with Microsoft Entra ID requires some initial setup. First install the Azure Identity client library. For more options on how to install this library, see [Azure Identity client library for Java](https://github.com/Azure/azure-sdk-for-java/blob/main/sdk/identity/azure-identity/README.md#include-the-package).
@@ -335,15 +462,44 @@ Authentication is easiest using `DefaultAzureCredential`. It finds the best cred
 
 
 ```java
-Credential tokenCredential = BearerTokenCredential.create(
-    AuthenticationUtil.getBearerTokenSupplier(
-        new DefaultAzureCredentialBuilder().build(),
-        "https://cognitiveservices.azure.com/.default"));
-OpenAIClient client = OpenAIOkHttpClient.builder()
-    .baseUrl("https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1/")
-    .credential(tokenCredential)
-    .build();
-```
+package com.example;
+
+import com.openai.client.OpenAIClient;
+import com.openai.client.okhttp.OpenAIOkHttpClient;
+import com.openai.models.ChatModel;
+import com.openai.models.responses.ResponseCreateParams;
+// Note: also import BearerTokenCredential, AuthenticationUtil, and com.azure.identity.DefaultAzureCredentialBuilder.
+
+public class OpenAITest {
+    public static void main(String[] args) {
+
+        String resourceName = "https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1";
+        String deploymentName = "grok-3-mini"; // Replace with your model deployment name
+
+        try {
+            OpenAIClient client = OpenAIOkHttpClient.builder()
+                .baseUrl(resourceName)
+                // Set the Azure Entra ID
+                .credential(BearerTokenCredential.create(AuthenticationUtil.getBearerTokenSupplier(
+                    new DefaultAzureCredentialBuilder().build(), "https://cognitiveservices.azure.com/.default")))
+                .build();
+
+            ResponseCreateParams.Builder paramsBuilder = ResponseCreateParams.builder()
+                .model(deploymentName)
+                .input("What's the capital of France?");
+
+            ResponseCreateParams createParams = paramsBuilder.build();
+
+            client.responses().create(createParams).output().stream()
+                .flatMap(item -> item.message().stream())
+                .flatMap(message -> message.content().stream())
+                .flatMap(content -> content.outputText().stream())
+                .forEach(outputText -> System.out.println(outputText.text()));
+        } catch (Exception e) { e.printStackTrace(); }
+    }
+}
+```
+
 
 
 # [REST](#tab/rest)

@@ -442,7 +598,7 @@ using System.ClientModel;
 string keyFromEnvironment = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY");
 
 ChatClient client = new(
-    model: "grok-3-mini",
+    model: "grok-3-mini", // Replace with your model deployment name.
     credential: new ApiKeyCredential(keyFromEnvironment),
     options: new OpenAIClientOptions() {
         Endpoint = new Uri("https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1/")
@@ -521,7 +677,7 @@ const messages = [
 // Make the API request with top-level await
 const result = await client.chat.completions.create({
   messages,
-  model: 'grok-3-mini', // model deployment name
+  model: 'grok-3-mini', // Your model deployment name
   max_tokens: 100
 });
 
@@ -549,6 +705,7 @@ import { OpenAI } from "openai";
 const tokenProvider = getBearerTokenProvider(
     new DefaultAzureCredential(),
     'https://cognitiveservices.azure.com/.default');
+
 const client = new OpenAI({
   baseURL: "https://YOUR-RESOURCE-NAME.openai.azure.com/openai/v1/",
   apiKey: tokenProvider
@@ -562,7 +719,7 @@ const messages = [
 // Make the API request with top-level await
 const result = await client.chat.completions.create({
   messages,
-  model: 'grok-3-mini', // model deployment name
+  model: 'grok-3-mini', // Your model deployment name
   max_tokens: 100
 });
 
@@ -628,9 +785,9 @@ import (
     "fmt"
 
     "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
-    "github.com/openai/openai-go/v3"
-    "github.com/openai/openai-go/v3/azure"
-    "github.com/openai/openai-go/v3/option"
+    "github.com/openai/openai-go/v2"
+    "github.com/openai/openai-go/v2/azure"
+    "github.com/openai/openai-go/v2/option"
 )
 
 func main() {
