@@ -89,26 +89,29 @@ func (d *ModelDetector) detectOllamaModels(ctx context.Context) ([]DetectedModel, error) {
 		return nil, fmt.Errorf("ollama API returned status %d", resp.StatusCode)
 	}

-	var result struct {
-		Models []struct {
-			Name       string    `json:"name"`
-			ModifiedAt time.Time `json:"modified_at"`
-			Size       int64     `json:"size"`
-			Details    struct {
-				Format            string `json:"format"`
-				Family            string `json:"family"`
-				ParameterSize     string `json:"parameter_size"`
-				QuantizationLevel string `json:"quantization_level"`
-			} `json:"details"`
-		} `json:"models"`
+	// Parse the response
+	result, err := d.parseOllamaResponse(resp)
+	if err != nil {
+		return nil, err
 	}

+	// Convert to DetectedModel format
+	return d.convertOllamaModels(result.Models), nil
+}
+
+// parseOllamaResponse parses the Ollama API response
+func (d *ModelDetector) parseOllamaResponse(resp *http.Response) (*ollamaResponse, error) {
+	var result ollamaResponse
 	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
 		return nil, fmt.Errorf("failed to decode ollama response: %w", err)
 	}
+	return &result, nil
+}

+// convertOllamaModels converts Ollama models to DetectedModel format
+func (d *ModelDetector) convertOllamaModels(ollamaModels []ollamaModel) []DetectedModel {
 	var models []DetectedModel
-	for _, m := range result.Models {
+	for _, m := range ollamaModels {
 		model := DetectedModel{
 			Name: m.Name,
 			Provider: "ollama",
@@ -123,8 +126,25 @@ func (d *ModelDetector) detectOllamaModels(ctx context.Context) ([]DetectedModel, error) {
 		}
 		models = append(models, model)
 	}
+	return models
+}

-	return models, nil
+// ollamaResponse represents the response from Ollama API
+type ollamaResponse struct {
+	Models []ollamaModel `json:"models"`
+}
+
+// ollamaModel represents a single model from Ollama API
+type ollamaModel struct {
+	Name       string    `json:"name"`
+	ModifiedAt time.Time `json:"modified_at"`
+	Size       int64     `json:"size"`
+	Details    struct {
+		Format            string `json:"format"`
+		Family            string `json:"family"`
+		ParameterSize     string `json:"parameter_size"`
+		QuantizationLevel string `json:"quantization_level"`
+	} `json:"details"`
 }

 // detectLocalModels detects other local model files
@@ -163,45 +183,49 @@ func (d *ModelDetector) detectCloudModels() []DetectedModel {
 	var models []DetectedModel

 	// Check for OpenAI configuration
+	models = append(models, d.detectOpenAIModels()...)
+
+	// Check for Anthropic configuration
+	models = append(models, d.detectAnthropicModels()...)
+
+	return models
+}
+
+// detectOpenAIModels detects OpenAI models if API key is configured
+func (d *ModelDetector) detectOpenAIModels() []DetectedModel {
+	var models []DetectedModel
 	if apiKey := getEnvAny("OPENAI_API_KEY", "AI_OPENAI_KEY"); apiKey != "" {
-		models = append(models, DetectedModel{
-			Name: "gpt-4",
-			Provider: "openai",
-			Metadata: map[string]string{
-				"type": "cloud",
-				"auth": "configured",
-			},
-		})
-		models = append(models, DetectedModel{
-			Name: "gpt-3.5-turbo",
-			Provider: "openai",
-			Metadata: map[string]string{
-				"type": "cloud",
-				"auth": "configured",
-			},
-		})
+		openAIModels := []string{"gpt-4", "gpt-3.5-turbo"}
+		for _, modelName := range openAIModels {
+			models = append(models, DetectedModel{
+				Name: modelName,
+				Provider: "openai",
+				Metadata: map[string]string{
+					"type": "cloud",
+					"auth": "configured",
+				},
+			})
+		}
 	}
+	return models
+}

-	// Check for Anthropic configuration
+// detectAnthropicModels detects Anthropic models if API key is configured
+func (d *ModelDetector) detectAnthropicModels() []DetectedModel {
+	var models []DetectedModel
 	if apiKey := getEnvAny("ANTHROPIC_API_KEY", "AI_ANTHROPIC_KEY"); apiKey != "" {
-		models = append(models, DetectedModel{
-			Name: "claude-3-opus",
-			Provider: "anthropic",
-			Metadata: map[string]string{
-				"type": "cloud",
-				"auth": "configured",
-			},
-		})
-		models = append(models, DetectedModel{
-			Name: "claude-3-sonnet",
-			Provider: "anthropic",
-			Metadata: map[string]string{
-				"type": "cloud",
-				"auth": "configured",
-			},
-		})
+		anthropicModels := []string{"claude-3-opus", "claude-3-sonnet"}
+		for _, modelName := range anthropicModels {
+			models = append(models, DetectedModel{
+				Name: modelName,
+				Provider: "anthropic",
+				Metadata: map[string]string{
+					"type": "cloud",
+					"auth": "configured",
+				},
+			})
+		}
 	}
-
 	return models
 }
@@ -216,23 +240,36 @@ func (d *ModelDetector) GetDefaultModel(ctx context.Context) (*DetectedModel, error) {
 		return nil, fmt.Errorf("no models available")
 	}

+	// Find the best model using preference order
+	return d.selectBestModel(models), nil
+}
+
+// selectBestModel selects the best model based on preference order
+func (d *ModelDetector) selectBestModel(models []DetectedModel) *DetectedModel {
 	// Preference order: Local models first (privacy), then cloud
+
 	// 1. Prefer Ollama models
-	for _, m := range models {
-		if m.Provider == "ollama" {
-			return &m, nil
-		}
+	if model := d.findModelByProvider(models, "ollama"); model != nil {
+		return model
 	}

 	// 2. Then local models
-	for _, m := range models {
-		if m.Provider == "llama.cpp" {
-			return &m, nil
-		}
+	if model := d.findModelByProvider(models, "llama.cpp"); model != nil {
+		return model
 	}

 	// 3. Finally cloud models
-	return &models[0], nil
+	return &models[0]
+}
+
+// findModelByProvider finds the first model with the specified provider
+func (d *ModelDetector) findModelByProvider(models []DetectedModel, provider string) *DetectedModel {
+	for _, m := range models {
+		if m.Provider == provider {
+			return &m
+		}
+	}
+	return nil
 }

 // formatBytes formats bytes to human readable string
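
For reference, a minimal usage sketch of the code path touched by this change. The package name, import path, and `NewModelDetector` constructor are assumptions for illustration; only `GetDefaultModel`, `DetectedModel.Name`, and `DetectedModel.Provider` appear in the diff above.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"example.com/yourproject/detector" // assumed import path
)

func main() {
	d := detector.NewModelDetector() // assumed constructor, not shown in this diff
	model, err := d.GetDefaultModel(context.Background())
	if err != nil {
		log.Fatalf("no usable model: %v", err)
	}
	// Local providers (ollama, llama.cpp) are preferred over cloud providers.
	fmt.Printf("default model: %s (provider: %s)\n", model.Name, model.Provider)
}
```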