Skip to content

Commit 076cef5

Browse files
committed
refactor(complexity): [3.1] Reduce code complexity in model detector
- Split detectOllamaModels into smaller functions: parseOllamaResponse, convertOllamaModels - Extract ollamaResponse and ollamaModel struct definitions for better organization - Split detectCloudModels into detectOpenAIModels and detectAnthropicModels - Refactor GetDefaultModel into selectBestModel and findModelByProvider helper functions - Reduce cyclomatic complexity from 29 to acceptable levels - Maintain all existing functionality while improving code maintainability - All tests pass with 83.1% coverage Task: 3.1 - Optimize model detector code complexity Phase: Refactoring
1 parent c57c1a3 commit 076cef5

File tree

1 file changed

+94
-57
lines changed

1 file changed

+94
-57
lines changed

pkg/testing/model_detector.go

Lines changed: 94 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -89,26 +89,29 @@ func (d *ModelDetector) detectOllamaModels(ctx context.Context) ([]DetectedModel
8989
return nil, fmt.Errorf("ollama API returned status %d", resp.StatusCode)
9090
}
9191

92-
var result struct {
93-
Models []struct {
94-
Name string `json:"name"`
95-
ModifiedAt time.Time `json:"modified_at"`
96-
Size int64 `json:"size"`
97-
Details struct {
98-
Format string `json:"format"`
99-
Family string `json:"family"`
100-
ParameterSize string `json:"parameter_size"`
101-
QuantizationLevel string `json:"quantization_level"`
102-
} `json:"details"`
103-
} `json:"models"`
92+
// Parse the response
93+
result, err := d.parseOllamaResponse(resp)
94+
if err != nil {
95+
return nil, err
10496
}
10597

98+
// Convert to DetectedModel format
99+
return d.convertOllamaModels(result.Models), nil
100+
}
101+
102+
// parseOllamaResponse parses the Ollama API response
103+
func (d *ModelDetector) parseOllamaResponse(resp *http.Response) (*ollamaResponse, error) {
104+
var result ollamaResponse
106105
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
107106
return nil, fmt.Errorf("failed to decode ollama response: %w", err)
108107
}
108+
return &result, nil
109+
}
109110

111+
// convertOllamaModels converts Ollama models to DetectedModel format
112+
func (d *ModelDetector) convertOllamaModels(ollamaModels []ollamaModel) []DetectedModel {
110113
var models []DetectedModel
111-
for _, m := range result.Models {
114+
for _, m := range ollamaModels {
112115
model := DetectedModel{
113116
Name: m.Name,
114117
Provider: "ollama",
@@ -123,8 +126,25 @@ func (d *ModelDetector) detectOllamaModels(ctx context.Context) ([]DetectedModel
123126
}
124127
models = append(models, model)
125128
}
129+
return models
130+
}
126131

127-
return models, nil
132+
// ollamaResponse represents the response from Ollama API
133+
type ollamaResponse struct {
134+
Models []ollamaModel `json:"models"`
135+
}
136+
137+
// ollamaModel represents a single model from Ollama API
138+
type ollamaModel struct {
139+
Name string `json:"name"`
140+
ModifiedAt time.Time `json:"modified_at"`
141+
Size int64 `json:"size"`
142+
Details struct {
143+
Format string `json:"format"`
144+
Family string `json:"family"`
145+
ParameterSize string `json:"parameter_size"`
146+
QuantizationLevel string `json:"quantization_level"`
147+
} `json:"details"`
128148
}
129149

130150
// detectLocalModels detects other local model files
@@ -163,45 +183,49 @@ func (d *ModelDetector) detectCloudModels() []DetectedModel {
163183
var models []DetectedModel
164184

165185
// Check for OpenAI configuration
186+
models = append(models, d.detectOpenAIModels()...)
187+
188+
// Check for Anthropic configuration
189+
models = append(models, d.detectAnthropicModels()...)
190+
191+
return models
192+
}
193+
194+
// detectOpenAIModels detects OpenAI models if API key is configured
195+
func (d *ModelDetector) detectOpenAIModels() []DetectedModel {
196+
var models []DetectedModel
166197
if apiKey := getEnvAny("OPENAI_API_KEY", "AI_OPENAI_KEY"); apiKey != "" {
167-
models = append(models, DetectedModel{
168-
Name: "gpt-4",
169-
Provider: "openai",
170-
Metadata: map[string]string{
171-
"type": "cloud",
172-
"auth": "configured",
173-
},
174-
})
175-
models = append(models, DetectedModel{
176-
Name: "gpt-3.5-turbo",
177-
Provider: "openai",
178-
Metadata: map[string]string{
179-
"type": "cloud",
180-
"auth": "configured",
181-
},
182-
})
198+
openAIModels := []string{"gpt-4", "gpt-3.5-turbo"}
199+
for _, modelName := range openAIModels {
200+
models = append(models, DetectedModel{
201+
Name: modelName,
202+
Provider: "openai",
203+
Metadata: map[string]string{
204+
"type": "cloud",
205+
"auth": "configured",
206+
},
207+
})
208+
}
183209
}
210+
return models
211+
}
184212

185-
// Check for Anthropic configuration
213+
// detectAnthropicModels detects Anthropic models if API key is configured
214+
func (d *ModelDetector) detectAnthropicModels() []DetectedModel {
215+
var models []DetectedModel
186216
if apiKey := getEnvAny("ANTHROPIC_API_KEY", "AI_ANTHROPIC_KEY"); apiKey != "" {
187-
models = append(models, DetectedModel{
188-
Name: "claude-3-opus",
189-
Provider: "anthropic",
190-
Metadata: map[string]string{
191-
"type": "cloud",
192-
"auth": "configured",
193-
},
194-
})
195-
models = append(models, DetectedModel{
196-
Name: "claude-3-sonnet",
197-
Provider: "anthropic",
198-
Metadata: map[string]string{
199-
"type": "cloud",
200-
"auth": "configured",
201-
},
202-
})
217+
anthropicModels := []string{"claude-3-opus", "claude-3-sonnet"}
218+
for _, modelName := range anthropicModels {
219+
models = append(models, DetectedModel{
220+
Name: modelName,
221+
Provider: "anthropic",
222+
Metadata: map[string]string{
223+
"type": "cloud",
224+
"auth": "configured",
225+
},
226+
})
227+
}
203228
}
204-
205229
return models
206230
}
207231

@@ -216,23 +240,36 @@ func (d *ModelDetector) GetDefaultModel(ctx context.Context) (*DetectedModel, er
216240
return nil, fmt.Errorf("no models available")
217241
}
218242

243+
// Find the best model using preference order
244+
return d.selectBestModel(models), nil
245+
}
246+
247+
// selectBestModel selects the best model based on preference order
248+
func (d *ModelDetector) selectBestModel(models []DetectedModel) *DetectedModel {
219249
// Preference order: Local models first (privacy), then cloud
250+
220251
// 1. Prefer Ollama models
221-
for _, m := range models {
222-
if m.Provider == "ollama" {
223-
return &m, nil
224-
}
252+
if model := d.findModelByProvider(models, "ollama"); model != nil {
253+
return model
225254
}
226255

227256
// 2. Then local models
228-
for _, m := range models {
229-
if m.Provider == "llama.cpp" {
230-
return &m, nil
231-
}
257+
if model := d.findModelByProvider(models, "llama.cpp"); model != nil {
258+
return model
232259
}
233260

234261
// 3. Finally cloud models
235-
return &models[0], nil
262+
return &models[0]
263+
}
264+
265+
// findModelByProvider finds the first model with the specified provider
266+
func (d *ModelDetector) findModelByProvider(models []DetectedModel, provider string) *DetectedModel {
267+
for _, m := range models {
268+
if m.Provider == provider {
269+
return &m
270+
}
271+
}
272+
return nil
236273
}
237274

238275
// formatBytes formats bytes to human readable string

0 commit comments

Comments
 (0)