Skip to content

Commit 78f7be7

Browse files
committed
fix(test): update LlamaStackAsLibraryClient initialization tests after removing initialize method
The recent refactor (3778a4c) introduced automatic initialization for LlamaStackAsLibraryClient but the unit tests were expecting manual initialization and _is_initialized. This caused test failures. Changes: - Update test assertions to check route_impls is not None instead of _is_initialized - Add proper mocking in tests to avoid external provider dependencies - Maintain test coverage for automatic initialization behavior - Ensure backward compatibility testing for deprecated initialize() method Signed-off-by: Mustafa Elbehery <[email protected]>
1 parent a055e03 commit 78f7be7

File tree

4 files changed

+80
-31
lines changed

4 files changed

+80
-31
lines changed

llama_stack/core/library_client.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -139,16 +139,14 @@ class LlamaStackAsLibraryClient(LlamaStackClient):
139139
def __init__(
140140
self,
141141
config_path_or_distro_name: str,
142-
skip_logger_removal: bool = False,
143142
custom_provider_registry: ProviderRegistry | None = None,
144143
provider_data: dict[str, Any] | None = None,
145144
):
146145
super().__init__()
147146
self.async_client = AsyncLlamaStackAsLibraryClient(
148-
config_path_or_distro_name, custom_provider_registry, provider_data, skip_logger_removal
147+
config_path_or_distro_name, custom_provider_registry, provider_data
149148
)
150149
self.pool_executor = ThreadPoolExecutor(max_workers=4)
151-
self.skip_logger_removal = skip_logger_removal
152150
self.provider_data = provider_data
153151

154152
self.loop = asyncio.new_event_loop()
@@ -246,7 +244,7 @@ def _remove_root_logger_handlers(self):
246244

247245
async def initialize(self) -> bool:
248246
"""
249-
Initialize the async client. Can be called multiple times safely.
247+
Initialize the async client.
250248
251249
Returns:
252250
bool: True if initialization was successful
@@ -311,6 +309,9 @@ async def request(
311309
stream=False,
312310
stream_cls=None,
313311
):
312+
if self.route_impls is None:
313+
raise ValueError("Client not initialized. Please call initialize() first.")
314+
314315
# Create headers with provider data if available
315316
headers = options.headers or {}
316317
if self.provider_data:

tests/integration/fixtures/common.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -254,9 +254,7 @@ def instantiate_llama_stack_client(session):
254254
client = LlamaStackAsLibraryClient(
255255
config,
256256
provider_data=get_provider_data(),
257-
skip_logger_removal=True,
258257
)
259-
# Client is automatically initialized during construction
260258
return client
261259

262260

tests/integration/non_ci/responses/fixtures/fixtures.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -112,8 +112,7 @@ def openai_client(base_url, api_key, provider):
112112
if len(parts) != 2:
113113
raise ValueError(f"Invalid config for Llama Stack: {provider}, it must be of the form 'stack:<config>'")
114114
config = parts[1]
115-
client = LlamaStackAsLibraryClient(config, skip_logger_removal=True)
116-
# Client is automatically initialized during construction
115+
client = LlamaStackAsLibraryClient(config)
117116
return client
118117

119118
return OpenAI(

tests/unit/distribution/test_library_client_initialization.py

Lines changed: 74 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -15,60 +15,111 @@
1515
AsyncLlamaStackAsLibraryClient,
1616
LlamaStackAsLibraryClient,
1717
)
18+
from llama_stack.core.server.routes import RouteImpls
1819

1920

2021
class TestLlamaStackAsLibraryClientAutoInitialization:
2122
"""Test automatic initialization of library clients."""
2223

23-
def test_sync_client_auto_initialization(self):
24+
def test_sync_client_auto_initialization(self, monkeypatch):
2425
"""Test that sync client is automatically initialized after construction."""
25-
client = LlamaStackAsLibraryClient("nvidia")
26+
# Mock the stack construction to avoid dependency issues
27+
mock_impls = {}
28+
mock_route_impls = RouteImpls({})
29+
30+
async def mock_construct_stack(config, custom_provider_registry):
31+
return mock_impls
32+
33+
def mock_initialize_route_impls(impls):
34+
return mock_route_impls
35+
36+
monkeypatch.setattr("llama_stack.core.library_client.construct_stack", mock_construct_stack)
37+
monkeypatch.setattr("llama_stack.core.library_client.initialize_route_impls", mock_initialize_route_impls)
38+
39+
client = LlamaStackAsLibraryClient("ci-tests")
2640

27-
# Client should be automatically initialized
28-
assert client.async_client._is_initialized is True
2941
assert client.async_client.route_impls is not None
3042

31-
async def test_async_client_auto_initialization(self):
43+
async def test_async_client_auto_initialization(self, monkeypatch):
3244
"""Test that async client can be initialized and works properly."""
33-
client = AsyncLlamaStackAsLibraryClient("nvidia")
45+
# Mock the stack construction to avoid dependency issues
46+
mock_impls = {}
47+
mock_route_impls = RouteImpls({})
48+
49+
async def mock_construct_stack(config, custom_provider_registry):
50+
return mock_impls
51+
52+
def mock_initialize_route_impls(impls):
53+
return mock_route_impls
54+
55+
monkeypatch.setattr("llama_stack.core.library_client.construct_stack", mock_construct_stack)
56+
monkeypatch.setattr("llama_stack.core.library_client.initialize_route_impls", mock_initialize_route_impls)
57+
58+
client = AsyncLlamaStackAsLibraryClient("ci-tests")
3459

3560
# Initialize the client
3661
result = await client.initialize()
3762
assert result is True
38-
assert client._is_initialized is True
3963
assert client.route_impls is not None
4064

41-
def test_initialize_method_backward_compatibility(self):
65+
def test_initialize_method_backward_compatibility(self, monkeypatch):
4266
"""Test that initialize() method still works for backward compatibility."""
43-
client = LlamaStackAsLibraryClient("nvidia")
67+
# Mock the stack construction to avoid dependency issues
68+
mock_impls = {}
69+
mock_route_impls = RouteImpls({})
70+
71+
async def mock_construct_stack(config, custom_provider_registry):
72+
return mock_impls
73+
74+
def mock_initialize_route_impls(impls):
75+
return mock_route_impls
76+
77+
monkeypatch.setattr("llama_stack.core.library_client.construct_stack", mock_construct_stack)
78+
monkeypatch.setattr("llama_stack.core.library_client.initialize_route_impls", mock_initialize_route_impls)
79+
80+
client = LlamaStackAsLibraryClient("ci-tests")
4481

45-
# initialize() should return None (historical behavior) and not cause errors
4682
result = client.initialize()
4783
assert result is None
4884

49-
# Multiple calls should be safe
5085
result2 = client.initialize()
5186
assert result2 is None
5287

53-
async def test_async_initialize_method_idempotent(self):
88+
async def test_async_initialize_method_idempotent(self, monkeypatch):
5489
"""Test that async initialize() method can be called multiple times safely."""
55-
client = AsyncLlamaStackAsLibraryClient("nvidia")
90+
mock_impls = {}
91+
mock_route_impls = RouteImpls({})
92+
93+
async def mock_construct_stack(config, custom_provider_registry):
94+
return mock_impls
95+
96+
def mock_initialize_route_impls(impls):
97+
return mock_route_impls
98+
99+
monkeypatch.setattr("llama_stack.core.library_client.construct_stack", mock_construct_stack)
100+
monkeypatch.setattr("llama_stack.core.library_client.initialize_route_impls", mock_initialize_route_impls)
101+
102+
client = AsyncLlamaStackAsLibraryClient("ci-tests")
56103

57-
# First initialization
58104
result1 = await client.initialize()
59105
assert result1 is True
60-
assert client._is_initialized is True
61106

62-
# Second initialization should be safe and return True
63107
result2 = await client.initialize()
64108
assert result2 is True
65-
assert client._is_initialized is True
66109

67-
def test_route_impls_automatically_set(self):
110+
def test_route_impls_automatically_set(self, monkeypatch):
68111
"""Test that route_impls is automatically set during construction."""
69-
# Test sync client - should be auto-initialized
70-
sync_client = LlamaStackAsLibraryClient("nvidia")
71-
assert sync_client.async_client.route_impls is not None
112+
mock_impls = {}
113+
mock_route_impls = RouteImpls({})
114+
115+
async def mock_construct_stack(config, custom_provider_registry):
116+
return mock_impls
117+
118+
def mock_initialize_route_impls(impls):
119+
return mock_route_impls
72120

73-
# Test that the async client is marked as initialized
74-
assert sync_client.async_client._is_initialized is True
121+
monkeypatch.setattr("llama_stack.core.library_client.construct_stack", mock_construct_stack)
122+
monkeypatch.setattr("llama_stack.core.library_client.initialize_route_impls", mock_initialize_route_impls)
123+
124+
sync_client = LlamaStackAsLibraryClient("ci-tests")
125+
assert sync_client.async_client.route_impls is not None

0 commit comments

Comments
 (0)