Skip to content

Commit 45e03c8

Browse files
authored
updated iOS Swift SDK for LS0.1.3 (#20)
* updated iOS Swift SDK for LS0.1.3 * README update * LocalAgents.swift update
1 parent 6df64f1 commit 45e03c8

File tree

7 files changed

+4751
-5964
lines changed

7 files changed

+4751
-5964
lines changed

README.md

Lines changed: 27 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
llama-stack-client-swift brings the inference and agents APIs of [Llama Stack](https://github.com/meta-llama/llama-stack) to iOS.
66

7-
**Update: January 27, 2025** The llama-stack-client-swift SDK version has been updated to 0.1.0, working with Llama Stack 0.1.0 ([release note](https://github.com/meta-llama/llama-stack/releases/tag/v0.1.0)).
7+
**Update: February 18, 2025** The llama-stack-client-swift SDK version has been updated to 0.1.3, working with Llama Stack 0.1.3 ([release note](https://github.com/meta-llama/llama-stack/releases/tag/v0.1.3)).
88

99
## Features
1010

@@ -21,76 +21,81 @@ For a more advanced demo using the Llama Stack Agent API and custom tool calling
2121

2222
1. Click "Xcode > File > Add Package Dependencies...".
2323

24-
2. Add this repo URL at the top right: `https://github.com/meta-llama/llama-stack-client-swift` and 0.1.0 in the Dependency Rule, then click Add Package.
24+
2. Add this repo URL at the top right: `https://github.com/meta-llama/llama-stack-client-swift` and 0.1.3 in the Dependency Rule, then click Add Package.
2525

2626
3. Select and add `llama-stack-client-swift` to your app target.
2727

2828
4. On the first build: Enable & Trust the OpenAPIGenerator extension when prompted.
2929

30-
5. Set up a remote Llama Stack distributions, assuming you have a [Fireworks](https://fireworks.ai/account/api-keys) or [Together](https://api.together.ai/) API key, which you can get easily by clicking the link:
30+
5. The quickest way to try out the demo for remote inference is using Together.ai's Llama Stack distro at https://llama-stack.together.ai - you can skip Step 6 unless you want to build your own distro.
31+
32+
6. (Optional) Set up a remote Llama Stack distribution, assuming you have a [Fireworks](https://fireworks.ai/account/api-keys) or [Together](https://api.together.ai/) API key, which you can get easily by clicking the link:
3133

3234
```
3335
conda create -n llama-stack python=3.10
3436
conda activate llama-stack
35-
pip install --no-cache llama-stack==0.1.0 llama-models==0.1.0 llama-stack-client==0.1.0
37+
pip install --no-cache llama-stack==0.1.3 llama-models==0.1.3 llama-stack-client==0.1.3
3638
```
3739

3840
Then, either:
3941
```
40-
PYPI_VERSION=0.1.0 llama stack build --template fireworks --image-type conda
42+
PYPI_VERSION=0.1.3 llama stack build --template fireworks --image-type conda
4143
export FIREWORKS_API_KEY="<your_fireworks_api_key>"
4244
llama stack run fireworks
4345
```
4446
or
4547
```
46-
PYPI_VERSION=0.1.0 llama stack build --template together --image-type conda
48+
PYPI_VERSION=0.1.3 llama stack build --template together --image-type conda
4749
export TOGETHER_API_KEY="<your_together_api_key>"
4850
llama stack run together
4951
```
5052

5153
The default port is 5000 for `llama stack run` and you can specify a different port by adding `--port <your_port>` to the end of `llama stack run fireworks|together`.
5254

53-
6. Replace the `RemoteInference` url string below with the host IP and port of the remote Llama Stack distro in Step 5:
55+
Replace the `RemoteInference` URL string below with the host IP and port of the remote Llama Stack distro in Step 6:
5456

5557
```swift
5658
import LlamaStackClient
5759

58-
let inference = RemoteInference(url: URL(string: "http://127.0.0.1:5000")!)
60+
let inference = RemoteInference(url: URL(string: "https://llama-stack.together.ai")!)
5961
```
62+
63+
7. Build and run the iOS demo.
64+
6065
Below is an example code snippet to use the Llama Stack inference API. See the iOS Demos above for complete code.
6166

6267
```swift
6368
for await chunk in try await inference.chatCompletion(
6469
request:
6570
Components.Schemas.ChatCompletionRequest(
71+
model_id: "meta-llama/Llama-3.1-8B-Instruct",
6672
messages: [
6773
.user(
6874
Components.Schemas.UserMessage(
75+
role: .user,
6976
content:
7077
.InterleavedContentItem(
7178
.text(Components.Schemas.TextContentItem(
72-
text: userInput,
73-
_type: .text
79+
_type: .text,
80+
text: userInput
7481
)
7582
)
76-
),
77-
role: .user
83+
)
7884
)
7985
)
8086
],
81-
model_id: "meta-llama/Llama-3.1-8B-Instruct",
8287
stream: true)
8388
) {
8489
switch (chunk.event.delta) {
85-
case .text(let s):
86-
message += s.text
87-
break
88-
case .image(let s):
89-
print("> \(s)")
90-
break
91-
case .tool_call(let s):
92-
print("> \(s)")
93-
break
90+
case .text(let s):
91+
message += s.text
92+
break
93+
case .image(let s):
94+
print("> \(s)")
95+
break
96+
case .tool_call(let s):
97+
print("> \(s)")
98+
break
9499
}
95100
}
96101
```

Sources/LlamaStackClient/Agents/ChatAgent.swift

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,8 @@ class ChatAgent {
2222
let session = Components.Schemas.Session(
2323
session_id: sessionId,
2424
session_name: name,
25-
started_at: Date(),
26-
turns: []
25+
turns: [],
26+
started_at: Date()
2727
)
2828

2929
sessions[sessionId] = session
@@ -82,13 +82,12 @@ class ChatAgent {
8282
continuation.yield(chunk)
8383
}
8484
let turn = Components.Schemas.Turn(
85-
input_messages: request.messages.map { $0.toAgenticSystemTurnCreateRequest() },
86-
output_attachments: [],
87-
output_message: outputMessage!,
85+
turn_id: turnId,
8886
session_id: session_id,
89-
started_at: Date(),
87+
input_messages: request.messages.map { $0.toAgenticSystemTurnCreateRequest() },
9088
steps: steps,
91-
turn_id: turnId
89+
output_message: outputMessage!,
90+
started_at: Date()
9291
)
9392
await MainActor.run {
9493
var s = self.sessions[session_id]
@@ -122,10 +121,10 @@ class ChatAgent {
122121
do {
123122
for try await chunk: Components.Schemas.ChatCompletionResponseStreamChunk in try await inferenceApi.chatCompletion(
124123
request: Components.Schemas.ChatCompletionRequest(
125-
messages: inputMessages,
126124
model_id: agentConfig.model,
127-
stream: true,
128-
tools: [] //agentConfig.client_tools
125+
messages: inputMessages,
126+
tools: [], //agentConfig.client_tools
127+
stream: true
129128
)
130129
) {
131130
continuation.yield(
@@ -134,10 +133,10 @@ class ChatAgent {
134133
payload:
135134
.step_progress(
136135
Components.Schemas.AgentTurnResponseStepProgressPayload(
137-
delta: chunk.event.delta,
138136
event_type: .step_progress,
137+
step_type: .inference,
139138
step_id: UUID().uuidString,
140-
step_type: .inference
139+
delta: chunk.event.delta
141140
)
142141
)
143142
)

Sources/LlamaStackClient/Agents/CustomTools.swift

Lines changed: 12 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -6,54 +6,53 @@ public class CustomTools {
66
// for chat completion (inference) tool calling
77
public class func getCreateEventTool() -> Components.Schemas.ToolDefinition {
88
return Components.Schemas.ToolDefinition(
9+
tool_name: Components.Schemas.ToolDefinition.tool_namePayload.case2( "create_event"),
910
description: "Create a calendar event",
1011
parameters: Components.Schemas.ToolDefinition.parametersPayload(
1112
additionalProperties: [
1213
"event_name": Components.Schemas.ToolParamDefinition(
13-
description: "The name of the meeting",
1414
param_type: "string",
15+
description: "The name of the meeting",
1516
required: true
1617
),
1718
"start": Components.Schemas.ToolParamDefinition(
18-
description: "Start date in yyyy-MM-dd HH:mm format, eg. '2024-01-01 13:00'",
1919
param_type: "string",
20+
description: "Start date in yyyy-MM-dd HH:mm format, eg. '2024-01-01 13:00'",
2021
required: true
2122
),
2223
"end": Components.Schemas.ToolParamDefinition(
23-
description: "End date in yyyy-MM-dd HH:mm format, eg. '2024-01-01 14:00'",
2424
param_type: "string",
25+
description: "End date in yyyy-MM-dd HH:mm format, eg. '2024-01-01 14:00'",
2526
required: true
26-
),
27+
)
2728
]
28-
),
29-
tool_name: Components.Schemas.ToolDefinition.tool_namePayload.case2( "create_event")
30-
29+
)
3130
)
3231
}
3332

3433
// for agent tool calling
3534
public class func getCreateEventToolForAgent() -> Components.Schemas.ToolDef {
3635
return Components.Schemas.ToolDef(
37-
description: "Create a calendar event",
38-
metadata: nil,
3936
name: "create_event",
37+
description: "Create a calendar event",
4038
parameters: [
4139
Components.Schemas.ToolParameter(
42-
description: "The name of the meeting",
4340
name: "event_name",
4441
parameter_type: "string",
42+
description: "The name of the meeting",
4543
required: true),
4644
Components.Schemas.ToolParameter(
47-
description: "Start date in yyyy-MM-dd HH:mm format, eg. '2024-01-01 13:00'",
4845
name: "start",
4946
parameter_type: "string",
47+
description: "Start date in yyyy-MM-dd HH:mm format, eg. '2024-01-01 13:00'",
5048
required: true),
5149
Components.Schemas.ToolParameter(
52-
description: "End date in yyyy-MM-dd HH:mm format, eg. '2024-01-01 14:00'",
5350
name: "end",
5451
parameter_type: "string",
52+
description: "End date in yyyy-MM-dd HH:mm format, eg. '2024-01-01 14:00'",
5553
required: true)
56-
]
54+
],
55+
metadata: nil
5756
)
5857
}
5958
}

Sources/LlamaStackClient/Agents/LocalAgents.swift

Lines changed: 6 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -17,17 +17,14 @@ public class LocalAgents: Agents {
1717
let createSystemResponse = try await create(
1818
request: Components.Schemas.CreateAgentRequest(
1919
agent_config: Components.Schemas.AgentConfig(
20-
enable_session_persistence: false,
20+
sampling_params: nil,
2121
input_shields: [],
22-
instructions: "You are a helpful assistant",
23-
max_infer_iters: 1,
22+
output_shields: [],
23+
client_tools: [ CustomTools.getCreateEventToolForAgent() ],
24+
max_infer_iters: 1,
2425
model: "Meta-Llama3.1-8B-Instruct",
25-
output_shields: []
26-
// tools: [
27-
// Components.Schemas.AgentConfig.toolsPayloadPayload.FunctionCallToolDefinition(
28-
// CustomTools.getCreateEventTool()
29-
// )
30-
// ]
26+
instructions: "You are a helpful assistant",
27+
enable_session_persistence: false
3128
)
3229
)
3330
)

Sources/LlamaStackClient/Agents/RemoteAgents.swift

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -24,13 +24,13 @@ public class RemoteAgents: Agents {
2424
let createSystemResponse = try await create(
2525
request: Components.Schemas.CreateAgentRequest(
2626
agent_config: Components.Schemas.AgentConfig(
27-
client_tools: [ CustomTools.getCreateEventToolForAgent() ],
28-
enable_session_persistence: false,
2927
input_shields: ["llama_guard"],
30-
instructions: "You are a helpful assistant",
28+
output_shields: ["llama_guard"],
29+
client_tools: [ CustomTools.getCreateEventToolForAgent() ],
3130
max_infer_iters: 1,
3231
model: "Meta-Llama3.1-8B-Instruct",
33-
output_shields: ["llama_guard"]
32+
instructions: "You are a helpful assistant",
33+
enable_session_persistence: false
3434
)
3535
)
3636
)

0 commit comments

Comments
 (0)