Skip to content

Commit ea66d34

Browse files
committed
feat: Remove initialize() Method from LlamaStackAsLibraryClient
Signed-off-by: Mustafa Elbehery <[email protected]>
1 parent 1f07663 commit ea66d34

File tree

5 files changed

+76
-73
lines changed

5 files changed

+76
-73
lines changed

docs/source/distributions/importing_as_library.md

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@ client = LlamaStackAsLibraryClient(
1717
# provider_data is optional, but if you need to pass in any provider specific data, you can do so here.
1818
provider_data={"tavily_search_api_key": os.environ["TAVILY_SEARCH_API_KEY"]},
1919
)
20-
client.initialize()
2120
```
2221

2322
This will parse your config and set up any inline implementations and remote clients needed for your implementation.
@@ -32,5 +31,4 @@ If you've created a [custom distribution](https://llama-stack.readthedocs.io/en/
3231

3332
```python
3433
client = LlamaStackAsLibraryClient(config_path)
35-
client.initialize()
3634
```

llama_stack/core/library_client.py

Lines changed: 28 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -153,7 +153,11 @@ def __init__(
153153

154154
self.loop = asyncio.new_event_loop()
155155

156-
def initialize(self):
156+
# Automatically initialize the client
157+
self._auto_initialize()
158+
159+
def _auto_initialize(self):
160+
"""Internal method to handle automatic initialization during construction."""
157161
if in_notebook():
158162
import nest_asyncio
159163

@@ -169,6 +173,18 @@ def initialize(self):
169173
finally:
170174
asyncio.set_event_loop(None)
171175

176+
def initialize(self):
177+
"""
178+
Deprecated method for backward compatibility.
179+
180+
Initialization now happens automatically in __init__.
181+
This method is kept for backward compatibility and does nothing.
182+
183+
Returns:
184+
None: Returns None to match historical behavior
185+
"""
186+
return None
187+
172188
def _remove_root_logger_handlers(self):
173189
"""
174190
Remove all handlers from the root logger. Needed to avoid polluting the console with logs.
@@ -237,8 +253,18 @@ def __init__(
237253
self.custom_provider_registry = custom_provider_registry
238254
self.provider_data = provider_data
239255
self.route_impls: RouteImpls | None = None # Initialize to None to prevent AttributeError
256+
self._is_initialized: bool = False # Track initialization state
240257

241258
async def initialize(self) -> bool:
259+
"""
260+
Initialize the async client. Can be called multiple times safely.
261+
262+
Returns:
263+
bool: True if initialization was successful
264+
"""
265+
if self._is_initialized:
266+
return True
267+
242268
try:
243269
self.route_impls = None
244270
self.impls = await construct_stack(self.config, self.custom_provider_registry)
@@ -288,6 +314,7 @@ async def initialize(self) -> bool:
288314
console.print(yaml.dump(safe_config, indent=2))
289315

290316
self.route_impls = initialize_route_impls(self.impls)
317+
self._is_initialized = True
291318
return True
292319

293320
async def request(
@@ -298,9 +325,6 @@ async def request(
298325
stream=False,
299326
stream_cls=None,
300327
):
301-
if self.route_impls is None:
302-
raise ValueError("Client not initialized. Please call initialize() first.")
303-
304328
# Create headers with provider data if available
305329
headers = options.headers or {}
306330
if self.provider_data:

tests/integration/fixtures/common.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -246,9 +246,7 @@ def llama_stack_client(request, provider_data):
246246
provider_data=provider_data,
247247
skip_logger_removal=True,
248248
)
249-
if not client.initialize():
250-
raise RuntimeError("Initialization failed")
251-
249+
# Client is automatically initialized during construction
252250
return client
253251

254252

tests/unit/distribution/test_library_client_initialization.py

Lines changed: 46 additions & 62 deletions
Original file line numberDiff line numberDiff line change
@@ -5,86 +5,70 @@
55
# the root directory of this source tree.
66

77
"""
8-
Unit tests for LlamaStackAsLibraryClient initialization error handling.
8+
Unit tests for LlamaStackAsLibraryClient automatic initialization.
99
10-
These tests ensure that users get proper error messages when they forget to call
11-
initialize() on the library client, preventing AttributeError regressions.
10+
These tests ensure that the library client is automatically initialized
11+
and ready to use immediately after construction.
1212
"""
1313

14-
import pytest
15-
1614
from llama_stack.core.library_client import (
1715
AsyncLlamaStackAsLibraryClient,
1816
LlamaStackAsLibraryClient,
1917
)
2018

2119

22-
class TestLlamaStackAsLibraryClientInitialization:
23-
"""Test proper error handling for uninitialized library clients."""
24-
25-
@pytest.mark.parametrize(
26-
"api_call",
27-
[
28-
lambda client: client.models.list(),
29-
lambda client: client.chat.completions.create(model="test", messages=[{"role": "user", "content": "test"}]),
30-
lambda client: next(
31-
client.chat.completions.create(
32-
model="test", messages=[{"role": "user", "content": "test"}], stream=True
33-
)
34-
),
35-
],
36-
ids=["models.list", "chat.completions.create", "chat.completions.create_stream"],
37-
)
38-
def test_sync_client_proper_error_without_initialization(self, api_call):
39-
"""Test that sync client raises ValueError with helpful message when not initialized."""
20+
class TestLlamaStackAsLibraryClientAutoInitialization:
21+
"""Test automatic initialization of library clients."""
22+
23+
def test_sync_client_auto_initialization(self):
24+
"""Test that sync client is automatically initialized after construction."""
4025
client = LlamaStackAsLibraryClient("nvidia")
4126

42-
with pytest.raises(ValueError) as exc_info:
43-
api_call(client)
44-
45-
error_msg = str(exc_info.value)
46-
assert "Client not initialized" in error_msg
47-
assert "Please call initialize() first" in error_msg
48-
49-
@pytest.mark.parametrize(
50-
"api_call",
51-
[
52-
lambda client: client.models.list(),
53-
lambda client: client.chat.completions.create(model="test", messages=[{"role": "user", "content": "test"}]),
54-
],
55-
ids=["models.list", "chat.completions.create"],
56-
)
57-
async def test_async_client_proper_error_without_initialization(self, api_call):
58-
"""Test that async client raises ValueError with helpful message when not initialized."""
27+
# Client should be automatically initialized
28+
assert client.async_client._is_initialized is True
29+
assert client.async_client.route_impls is not None
30+
31+
async def test_async_client_auto_initialization(self):
32+
"""Test that async client can be initialized and works properly."""
5933
client = AsyncLlamaStackAsLibraryClient("nvidia")
6034

61-
with pytest.raises(ValueError) as exc_info:
62-
await api_call(client)
35+
# Initialize the client
36+
result = await client.initialize()
37+
assert result is True
38+
assert client._is_initialized is True
39+
assert client.route_impls is not None
40+
41+
def test_initialize_method_backward_compatibility(self):
42+
"""Test that initialize() method still works for backward compatibility."""
43+
client = LlamaStackAsLibraryClient("nvidia")
44+
45+
# initialize() should return None (historical behavior) and not cause errors
46+
result = client.initialize()
47+
assert result is None
6348

64-
error_msg = str(exc_info.value)
65-
assert "Client not initialized" in error_msg
66-
assert "Please call initialize() first" in error_msg
49+
# Multiple calls should be safe
50+
result2 = client.initialize()
51+
assert result2 is None
6752

68-
async def test_async_client_streaming_error_without_initialization(self):
69-
"""Test that async client streaming raises ValueError with helpful message when not initialized."""
53+
async def test_async_initialize_method_idempotent(self):
54+
"""Test that async initialize() method can be called multiple times safely."""
7055
client = AsyncLlamaStackAsLibraryClient("nvidia")
7156

72-
with pytest.raises(ValueError) as exc_info:
73-
stream = await client.chat.completions.create(
74-
model="test", messages=[{"role": "user", "content": "test"}], stream=True
75-
)
76-
await anext(stream)
57+
# First initialization
58+
result1 = await client.initialize()
59+
assert result1 is True
60+
assert client._is_initialized is True
7761

78-
error_msg = str(exc_info.value)
79-
assert "Client not initialized" in error_msg
80-
assert "Please call initialize() first" in error_msg
62+
# Second initialization should be safe and return True
63+
result2 = await client.initialize()
64+
assert result2 is True
65+
assert client._is_initialized is True
8166

82-
def test_route_impls_initialized_to_none(self):
83-
"""Test that route_impls is initialized to None to prevent AttributeError."""
84-
# Test sync client
67+
def test_route_impls_automatically_set(self):
68+
"""Test that route_impls is automatically set during construction."""
69+
# Test sync client - should be auto-initialized
8570
sync_client = LlamaStackAsLibraryClient("nvidia")
86-
assert sync_client.async_client.route_impls is None
71+
assert sync_client.async_client.route_impls is not None
8772

88-
# Test async client directly
89-
async_client = AsyncLlamaStackAsLibraryClient("nvidia")
90-
assert async_client.route_impls is None
73+
# Test that the async client is marked as initialized
74+
assert sync_client.async_client._is_initialized is True

tests/verifications/openai_api/fixtures/fixtures.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -137,8 +137,7 @@ def openai_client(base_url, api_key, provider):
137137
raise ValueError(f"Invalid config for Llama Stack: {provider}, it must be of the form 'stack:<config>'")
138138
config = parts[1]
139139
client = LlamaStackAsLibraryClient(config, skip_logger_removal=True)
140-
if not client.initialize():
141-
raise RuntimeError("Initialization failed")
140+
# Client is automatically initialized during construction
142141
return client
143142

144143
return OpenAI(

0 commit comments

Comments
 (0)