This repository was archived by the owner on Aug 14, 2025. It is now read-only.
Merged
Changes from all commits (24 commits)
e54ba91
chore: update SDK settings
stainless-app[bot] May 6, 2025
9c98da3
codegen metadata
stainless-app[bot] May 6, 2025
758a188
chore(internal): avoid errors for isinstance checks on proxies
stainless-app[bot] May 9, 2025
a862d55
fix(package): support direct resource imports
stainless-app[bot] May 10, 2025
2d2282b
chore(ci): upload sdks to package manager
stainless-app[bot] May 15, 2025
40d9854
chore(ci): fix installation instructions
stainless-app[bot] May 16, 2025
ab9f05c
chore(internal): codegen related update
stainless-app[bot] May 17, 2025
6f57b13
chore(docs): grammar improvements
stainless-app[bot] May 22, 2025
bcf315a
chore(docs): remove reference to rye shell
stainless-app[bot] Jun 3, 2025
60ec829
chore(docs): remove unnecessary param examples
stainless-app[bot] Jun 3, 2025
a77a9ee
feat(client): add follow_redirects request option
stainless-app[bot] Jun 3, 2025
1287a3c
chore(tests): run tests in parallel
stainless-app[bot] Jun 13, 2025
85d6bbd
fix(client): correctly parse binary response | stream
stainless-app[bot] Jun 13, 2025
b27b11b
chore(tests): add tests for httpx client instantiation & proxies
stainless-app[bot] Jun 17, 2025
218e172
chore(internal): update conftest.py
stainless-app[bot] Jun 17, 2025
c9b6347
chore(ci): enable for pull requests
stainless-app[bot] Jun 17, 2025
9b63e1b
chore(readme): update badges
stainless-app[bot] Jun 18, 2025
347a4bf
fix(tests): fix: tests which call HTTP endpoints directly with the ex…
stainless-app[bot] Jun 18, 2025
497f2a1
docs(client): fix httpx.Timeout documentation reference
stainless-app[bot] Jun 19, 2025
fdd7a07
chore: change publish docs url
stainless-app[bot] Jun 21, 2025
d78982b
feat(client): add support for aiohttp
stainless-app[bot] Jun 21, 2025
73b5705
chore(tests): skip some failing tests on the latest python versions
stainless-app[bot] Jun 24, 2025
6f3a4e2
fix(ci): release-doctor — report correct token name
stainless-app[bot] Jun 27, 2025
7bf5d0a
release: 0.1.0-alpha.1
stainless-app[bot] Jun 27, 2025
28 changes: 28 additions & 0 deletions .github/workflows/ci.yml
@@ -7,6 +7,10 @@ on:
- 'integrated/**'
- 'stl-preview-head/**'
- 'stl-preview-base/**'
pull_request:
branches-ignore:
- 'stl-preview-head/**'
- 'stl-preview-base/**'

jobs:
lint:
@@ -30,6 +34,30 @@ jobs:
- name: Run lints
run: ./scripts/lint

upload:
if: github.repository == 'stainless-sdks/llama-stack-client-python'
timeout-minutes: 10
name: upload
permissions:
contents: read
id-token: write
runs-on: depot-ubuntu-24.04
steps:
- uses: actions/checkout@v4

- name: Get GitHub OIDC Token
id: github-oidc
uses: actions/github-script@v6
with:
script: core.setOutput('github_token', await core.getIDToken());

- name: Upload tarball
env:
URL: https://pkg.stainless.com/s
AUTH: ${{ steps.github-oidc.outputs.github_token }}
SHA: ${{ github.sha }}
run: ./scripts/utils/upload-artifact.sh

test:
timeout-minutes: 10
name: test
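The `scripts/utils/upload-artifact.sh` script invoked by the upload job above is not part of this diff. As a rough, hypothetical sketch of what such a step typically does with the `URL`, `AUTH`, and `SHA` variables set in the workflow (the real script may differ):

```sh
# Hypothetical sketch only; scripts/utils/upload-artifact.sh is not shown in this diff.
# Pack the checked-out tree and upload it keyed by commit SHA,
# authenticating with the short-lived GitHub OIDC token from the previous step.
tar -czf /tmp/sdk.tar.gz --exclude .git .
curl -fsSL -X PUT "$URL/$SHA" \
  -H "Authorization: Bearer $AUTH" \
  --data-binary @/tmp/sdk.tar.gz
```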
31 changes: 31 additions & 0 deletions .github/workflows/publish-pypi.yml
@@ -0,0 +1,31 @@
# This workflow is triggered when a GitHub release is created.
# It can also be run manually to re-publish to PyPI in case it failed for some reason.
# You can run this workflow by navigating to https://www.github.com/llamastack/llama-stack-client-python/actions/workflows/publish-pypi.yml
name: Publish PyPI
on:
workflow_dispatch:

release:
types: [published]

jobs:
publish:
name: publish
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4

- name: Install Rye
run: |
curl -sSf https://rye.astral.sh/get | bash
echo "$HOME/.rye/shims" >> $GITHUB_PATH
env:
RYE_VERSION: '0.44.0'
RYE_INSTALL_OPTION: '--yes'

- name: Publish to PyPI
run: |
bash ./bin/publish-pypi
env:
PYPI_TOKEN: ${{ secrets.LLAMA_STACK_CLIENT_PYPI_TOKEN || secrets.PYPI_TOKEN }}
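The `bin/publish-pypi` script this workflow runs is likewise not included in the diff. A minimal sketch, assuming it simply builds and publishes with Rye using the token exported above:

```sh
# Hypothetical sketch; the real bin/publish-pypi may differ.
set -eux
# Build sdist and wheel into dist/, then upload with the exported token.
rye build --clean
rye publish --yes --token="$PYPI_TOKEN"
```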
21 changes: 21 additions & 0 deletions .github/workflows/release-doctor.yml
@@ -0,0 +1,21 @@
name: Release Doctor
on:
pull_request:
branches:
- main
workflow_dispatch:

jobs:
release_doctor:
name: release doctor
runs-on: ubuntu-latest
if: github.repository == 'llamastack/llama-stack-client-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next')

steps:
- uses: actions/checkout@v4

- name: Check release environment
run: |
bash ./bin/check-release-environment
env:
PYPI_TOKEN: ${{ secrets.LLAMA_STACK_CLIENT_PYPI_TOKEN || secrets.PYPI_TOKEN }}
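The `bin/check-release-environment` script that this step runs is added later in this same diff; it verifies that a `PYPI_TOKEN` secret is present before a release is attempted.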
3 changes: 3 additions & 0 deletions .release-please-manifest.json
@@ -0,0 +1,3 @@
{
".": "0.1.0-alpha.1"
}
2 changes: 1 addition & 1 deletion .stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 91
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-0e756984d87c3fd1eb96d486947b3bc2086d5afcf299e8119b6b89bbd86dbe75.yml
openapi_spec_hash: 7c519a25bb9a094d4b4bda17bb20dd88
config_hash: b83ca660b1609a8903f32e3d54b4ff00
config_hash: d1f21dfdbf5d9925eecf56b6c1fab755
42 changes: 42 additions & 0 deletions CHANGELOG.md
@@ -0,0 +1,42 @@
# Changelog

## 0.1.0-alpha.1 (2025-06-27)

Full Changelog: [v0.0.1-alpha.0...v0.1.0-alpha.1](https://github.com/llamastack/llama-stack-client-python/compare/v0.0.1-alpha.0...v0.1.0-alpha.1)

### Features

* **client:** add follow_redirects request option ([a77a9ee](https://github.com/llamastack/llama-stack-client-python/commit/a77a9eed9038782ba6b93ce0d3147ee4a6b8a3b7))
* **client:** add support for aiohttp ([d78982b](https://github.com/llamastack/llama-stack-client-python/commit/d78982b197c5e0a0fb67afcb44e9644fd8d931be))


### Bug Fixes

* **ci:** release-doctor — report correct token name ([6f3a4e2](https://github.com/llamastack/llama-stack-client-python/commit/6f3a4e24d8b357d7dc01adb0d9f736989fa9517d))
* **client:** correctly parse binary response | stream ([85d6bbd](https://github.com/llamastack/llama-stack-client-python/commit/85d6bbd97efac7509cbff0bb2d461a80d09b5e61))
* **package:** support direct resource imports ([a862d55](https://github.com/llamastack/llama-stack-client-python/commit/a862d551553aac41573306ce39480e1eb16ea3d3))
* **tests:** fix: tests which call HTTP endpoints directly with the example parameters ([347a4bf](https://github.com/llamastack/llama-stack-client-python/commit/347a4bffa920f5727a4c02eba18bd207001698b5))


### Chores

* change publish docs url ([fdd7a07](https://github.com/llamastack/llama-stack-client-python/commit/fdd7a075564ac206e91b2d06bf130c4de9473838))
* **ci:** enable for pull requests ([c9b6347](https://github.com/llamastack/llama-stack-client-python/commit/c9b6347f084acb1566b8e8283cf0bcfde7f6562c))
* **ci:** fix installation instructions ([40d9854](https://github.com/llamastack/llama-stack-client-python/commit/40d9854bd2630a471f1ca93d249e4d44b73fa864))
* **ci:** upload sdks to package manager ([2d2282b](https://github.com/llamastack/llama-stack-client-python/commit/2d2282bb49d58daef1f32fa0f1e5a356abf8df0d))
* **docs:** grammar improvements ([6f57b13](https://github.com/llamastack/llama-stack-client-python/commit/6f57b1363367de7ed5035fd1d6ba1a071eee67ba))
* **docs:** remove reference to rye shell ([bcf315a](https://github.com/llamastack/llama-stack-client-python/commit/bcf315ae00c458f89dfa3684bcc7abdb732b6c5f))
* **docs:** remove unnecessary param examples ([60ec829](https://github.com/llamastack/llama-stack-client-python/commit/60ec829e809156217cf2f911b3cac6b23a06baad))
* **internal:** avoid errors for isinstance checks on proxies ([758a188](https://github.com/llamastack/llama-stack-client-python/commit/758a188dbfaa284a13b70816689c99917a05d16c))
* **internal:** codegen related update ([ab9f05c](https://github.com/llamastack/llama-stack-client-python/commit/ab9f05cc1da5b21afceacdf9c8eb54b6e59eed01))
* **internal:** update conftest.py ([218e172](https://github.com/llamastack/llama-stack-client-python/commit/218e172c16014dad41a7c189c5620077955d6bdf))
* **readme:** update badges ([9b63e1b](https://github.com/llamastack/llama-stack-client-python/commit/9b63e1b7dbbbd7556d046a2a4224a8385bbea24c))
* **tests:** add tests for httpx client instantiation & proxies ([b27b11b](https://github.com/llamastack/llama-stack-client-python/commit/b27b11bbe0a9c5778b757733c11828d9603307ea))
* **tests:** run tests in parallel ([1287a3c](https://github.com/llamastack/llama-stack-client-python/commit/1287a3c11f668d916c8c7af534a48523e2e69140))
* **tests:** skip some failing tests on the latest python versions ([73b5705](https://github.com/llamastack/llama-stack-client-python/commit/73b57051c48d2ec42b844a288ffc9b5e3bbe6f2b))
* update SDK settings ([e54ba91](https://github.com/llamastack/llama-stack-client-python/commit/e54ba9163792ab80362a189acb825bcd00e5384b))


### Documentation

* **client:** fix httpx.Timeout documentation reference ([497f2a1](https://github.com/llamastack/llama-stack-client-python/commit/497f2a198140f73525a880497bf1c51b5749c1f3))
7 changes: 3 additions & 4 deletions CONTRIBUTING.md
@@ -17,8 +17,7 @@ $ rye sync --all-features
You can then run scripts using `rye run python script.py` or by activating the virtual environment:

```sh
$ rye shell
# or manually activate - https://docs.python.org/3/library/venv.html#how-venvs-work
# Activate the virtual environment - https://docs.python.org/3/library/venv.html#how-venvs-work
$ source .venv/bin/activate

# now you can omit the `rye run` prefix
@@ -63,7 +62,7 @@ If you’d like to use the repository from source, you can either install from g
To install via git:

```sh
$ pip install git+ssh://[email protected]/stainless-sdks/llama-stack-client-python.git
$ pip install git+ssh://[email protected]/llamastack/llama-stack-client-python.git
```

Alternatively, you can build from source and install the wheel file:
@@ -121,7 +120,7 @@ the changes aren't made through the automated pipeline, you may want to make rel

### Publish with a GitHub workflow

You can release to package managers by using [the `Publish PyPI` GitHub action](https://www.github.com/stainless-sdks/llama-stack-client-python/actions/workflows/publish-pypi.yml). This requires a setup organization or repository secret to be set up.
You can release to package managers by using [the `Publish PyPI` GitHub action](https://www.github.com/llamastack/llama-stack-client-python/actions/workflows/publish-pypi.yml). This requires an organization or repository secret to be set up.

### Publish manually

54 changes: 45 additions & 9 deletions README.md
@@ -1,6 +1,6 @@
# Llama Stack Client Python API library

[![PyPI version](https://img.shields.io/pypi/v/llama_stack_client.svg)](https://pypi.org/project/llama_stack_client/)
[![PyPI version](<https://img.shields.io/pypi/v/llama_stack_client.svg?label=pypi%20(stable)>)](https://pypi.org/project/llama_stack_client/)

The Llama Stack Client Python library provides convenient access to the Llama Stack Client REST API from any Python 3.8+
application. The library includes type definitions for all request params and response fields,
@@ -15,12 +15,12 @@ The full API of this library can be found in [api.md](api.md).
## Installation

```sh
# install from this staging repo
pip install git+ssh://[email protected]/stainless-sdks/llama-stack-client-python.git
# install from the production repo
pip install git+ssh://[email protected]/llamastack/llama-stack-client-python.git
```

> [!NOTE]
> Once this package is [published to PyPI](https://app.stainless.com/docs/guides/publish), this will become: `pip install --pre llama_stack_client`
> Once this package is [published to PyPI](https://www.stainless.com/docs/guides/publish), this will become: `pip install --pre llama_stack_client`

## Usage

@@ -71,6 +71,42 @@ asyncio.run(main())

Functionality between the synchronous and asynchronous clients is otherwise identical.

### With aiohttp

By default, the async client uses `httpx` for HTTP requests. However, for improved concurrency performance you may also use `aiohttp` as the HTTP backend.

You can enable this by installing `aiohttp`:

```sh
# install from the production repo
pip install 'llama_stack_client[aiohttp] @ git+ssh://[email protected]/llamastack/llama-stack-client-python.git'
```

Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`:

```python
import os
import asyncio
from llama_stack_client import DefaultAioHttpClient
from llama_stack_client import AsyncLlamaStackClient


async def main() -> None:
async with AsyncLlamaStackClient(
api_key=os.environ.get(
"LLAMA_STACK_CLIENT_API_KEY"
), # This is the default and can be omitted
http_client=DefaultAioHttpClient(),
) as client:
await client.datasetio.append_rows(
dataset_id="REPLACE_ME",
rows=[{"foo": True}],
)


asyncio.run(main())
```
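Under the hood, the `aiohttp` extra (declared later in this diff in `pyproject.toml` as `aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.6"]`) pulls in `httpx_aiohttp`, which provides an aiohttp-backed transport for httpx, so the rest of the client code is unchanged.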

## Using types

Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like:
@@ -99,7 +135,7 @@ response = client.inference.batch_chat_completion(
]
],
model_id="model_id",
logprobs={"top_k": 0},
logprobs={},
)
print(response.logprobs)
```
@@ -175,7 +211,7 @@ client.with_options(max_retries=5).datasetio.append_rows(
### Timeouts

By default requests time out after 1 minute. You can configure this with a `timeout` option,
which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/#fine-tuning-the-configuration) object:
which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object:

```python
from llama_stack_client import LlamaStackClient
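# The diff collapses the rest of this example; a plausible completion,
# assuming the `timeout` constructor option described above:
client = LlamaStackClient(
    # 20 seconds (default is 1 minute)
    timeout=20.0,
)
```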
@@ -248,9 +284,9 @@ datasetio = response.parse()  # get the object that `datasetio.append_rows()` wo
print(datasetio)
```

These methods return an [`APIResponse`](https://github.com/stainless-sdks/llama-stack-client-python/tree/main/src/llama_stack_client/_response.py) object.
These methods return an [`APIResponse`](https://github.com/llamastack/llama-stack-client-python/tree/main/src/llama_stack_client/_response.py) object.

The async client returns an [`AsyncAPIResponse`](https://github.com/stainless-sdks/llama-stack-client-python/tree/main/src/llama_stack_client/_response.py) with the same structure, the only difference being `await`able methods for reading the response content.
The async client returns an [`AsyncAPIResponse`](https://github.com/llamastack/llama-stack-client-python/tree/main/src/llama_stack_client/_response.py) with the same structure, the only difference being `await`able methods for reading the response content.

#### `.with_streaming_response`

@@ -357,7 +393,7 @@ This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) con

We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience.

We are keen for your feedback; please open an [issue](https://www.github.com/stainless-sdks/llama-stack-client-python/issues) with questions, bugs, or suggestions.
We are keen for your feedback; please open an [issue](https://www.github.com/llamastack/llama-stack-client-python/issues) with questions, bugs, or suggestions.

### Determining the installed version

2 changes: 1 addition & 1 deletion SECURITY.md
@@ -16,7 +16,7 @@ before making any information public.
## Reporting Non-SDK Related Security Issues

If you encounter security issues that are not directly related to SDKs but pertain to the services
or products provided by Llama Stack Client please follow the respective company's security reporting guidelines.
or products provided by Llama Stack Client, please follow the respective company's security reporting guidelines.

---

21 changes: 21 additions & 0 deletions bin/check-release-environment
@@ -0,0 +1,21 @@
#!/usr/bin/env bash

errors=()

if [ -z "${PYPI_TOKEN}" ]; then
errors+=("The PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.")
fi

lenErrors=${#errors[@]}

if [[ lenErrors -gt 0 ]]; then
echo -e "Found the following errors in the release environment:\n"

for error in "${errors[@]}"; do
echo -e "- $error\n"
done

exit 1
fi

echo "The environment is ready to push releases!"
13 changes: 8 additions & 5 deletions pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "llama_stack_client"
version = "0.0.1-alpha.0"
version = "0.1.0-alpha.1"
description = "The official Python library for the llama-stack-client API"
dynamic = ["readme"]
license = "Apache-2.0"
@@ -34,9 +34,11 @@ classifiers = [
]

[project.urls]
Homepage = "https://github.com/stainless-sdks/llama-stack-client-python"
Repository = "https://github.com/stainless-sdks/llama-stack-client-python"
Homepage = "https://github.com/llamastack/llama-stack-client-python"
Repository = "https://github.com/llamastack/llama-stack-client-python"

[project.optional-dependencies]
aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.6"]

[tool.rye]
managed = true
@@ -54,6 +56,7 @@ dev-dependencies = [
"importlib-metadata>=6.7.0",
"rich>=13.7.1",
"nest_asyncio==1.6.0",
"pytest-xdist>=3.6.1",
]

[tool.rye.scripts]
@@ -121,11 +124,11 @@ path = "README.md"
[[tool.hatch.metadata.hooks.fancy-pypi-readme.substitutions]]
# replace relative links with absolute links
pattern = '\[(.+?)\]\(((?!https?://)\S+?)\)'
replacement = '[\1](https://github.com/stainless-sdks/llama-stack-client-python/tree/main/\g<2>)'
replacement = '[\1](https://github.com/llamastack/llama-stack-client-python/tree/main/\g<2>)'

[tool.pytest.ini_options]
testpaths = ["tests"]
addopts = "--tb=short"
addopts = "--tb=short -n auto"
xfail_strict = true
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "session"
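With `pytest-xdist` added to the dev dependencies and `-n auto` appended to `addopts`, the suite now fans out across all available CPU cores; a plain invocation picks this up automatically:

```sh
# -n auto comes from addopts in pyproject.toml, so no extra flags are needed
rye run pytest
```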