Skip to content

Commit 68b5d24

Browse files
committed
fix(prompts): prevent IndexError when LLM provided via constructor with empty models config
- Add a check in get_task_model to handle an empty _models list gracefully.
- Return None instead of raising IndexError when no models match.
- Add comprehensive test coverage for various model configuration scenarios.

Fixes the issue where providing an LLM object directly to the LLMRails constructor would fail if the YAML config had an empty models list.
1 parent 52ac7ed commit 68b5d24

File tree

3 files changed

+95
-2
lines changed

3 files changed

+95
-2
lines changed

nemoguardrails/llm/prompts.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -139,7 +139,8 @@ def get_task_model(config: RailsConfig, task: Union[str, Task]) -> Model:
139139
if not _models:
140140
_models = [model for model in config.models if model.type == "main"]
141141

142-
return _models[0]
142+
if _models:
143+
return _models[0]
143144

144145
return None
145146

tests/test_llm_task_manager.py

Lines changed: 57 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020

2121
from nemoguardrails import RailsConfig
2222
from nemoguardrails.llm.filters import conversation_to_events
23-
from nemoguardrails.llm.prompts import get_prompt
23+
from nemoguardrails.llm.prompts import get_prompt, get_task_model
2424
from nemoguardrails.llm.taskmanager import LLMTaskManager
2525
from nemoguardrails.llm.types import Task
2626

@@ -457,3 +457,59 @@ def test_reasoning_traces_not_included_in_prompt_history():
457457
"Hi there!" in rendered_prompt
458458
or "I don't have access to real-time weather information." in rendered_prompt
459459
)
460+
461+
462+
def test_get_task_model_with_empty_models():
463+
"""Test that get_task_model returns None when models list is empty.
464+
465+
This tests the fix for the IndexError that occurred when the models list was empty.
466+
"""
467+
config = RailsConfig.parse_object({"models": []})
468+
469+
result = get_task_model(config, "main")
470+
assert result is None
471+
472+
result = get_task_model(config, Task.GENERAL)
473+
assert result is None
474+
475+
476+
def test_get_task_model_with_no_matching_models():
477+
"""Test that get_task_model returns None when no models match the requested type."""
478+
config = RailsConfig.parse_object(
479+
{
480+
"models": [
481+
{
482+
"type": "embeddings",
483+
"engine": "openai",
484+
"model": "text-embedding-ada-002",
485+
}
486+
]
487+
}
488+
)
489+
490+
result = get_task_model(config, "main")
491+
assert result is None
492+
493+
494+
def test_get_task_model_with_main_model():
495+
"""Test that get_task_model returns the main model when present."""
496+
config = RailsConfig.parse_object(
497+
{"models": [{"type": "main", "engine": "openai", "model": "gpt-3.5-turbo"}]}
498+
)
499+
500+
result = get_task_model(config, "main")
501+
assert result is not None
502+
assert result.type == "main"
503+
assert result.engine == "openai"
504+
assert result.model == "gpt-3.5-turbo"
505+
506+
507+
def test_get_task_model_fallback_to_main():
508+
"""Test that get_task_model falls back to main model when specific task model not found."""
509+
config = RailsConfig.parse_object(
510+
{"models": [{"type": "main", "engine": "openai", "model": "gpt-3.5-turbo"}]}
511+
)
512+
513+
result = get_task_model(config, "some_other_task")
514+
assert result is not None
515+
assert result.type == "main"

tests/test_llmrails.py

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -856,6 +856,42 @@ async def test_other_models_honored(mock_init, llm_config_with_multiple_models):
856856
assert any(event.get("intent") == "express greeting" for event in new_events)
857857

858858

859+
@pytest.mark.asyncio
860+
async def test_llm_constructor_with_empty_models_config():
861+
"""Test that LLMRails can be initialized with constructor LLM when config has empty models list.
862+
863+
This tests the fix for the IndexError that occurred when providing an LLM via constructor
864+
but having an empty models list in the config.
865+
"""
866+
config = RailsConfig.parse_object(
867+
{
868+
"models": [],
869+
"user_messages": {
870+
"express greeting": ["Hello!"],
871+
},
872+
"flows": [
873+
{
874+
"elements": [
875+
{"user": "express greeting"},
876+
{"bot": "express greeting"},
877+
]
878+
},
879+
],
880+
"bot_messages": {
881+
"express greeting": ["Hello! How are you?"],
882+
},
883+
}
884+
)
885+
886+
injected_llm = FakeLLM(responses=["express greeting"])
887+
llm_rails = LLMRails(config=config, llm=injected_llm)
888+
assert llm_rails.llm == injected_llm
889+
890+
events = [{"type": "UtteranceUserActionFinished", "final_transcript": "Hello!"}]
891+
new_events = await llm_rails.runtime.generate_events(events)
892+
assert any(event.get("intent") == "express greeting" for event in new_events)
893+
894+
859895
@pytest.mark.asyncio
860896
@patch(
861897
"nemoguardrails.rails.llm.llmrails.init_llm_model",

0 commit comments

Comments (0)