diff --git a/lib/ruby_llm/models.rb b/lib/ruby_llm/models.rb
index b1fa08d7..80e060ee 100644
--- a/lib/ruby_llm/models.rb
+++ b/lib/ruby_llm/models.rb
@@ -37,7 +37,7 @@ def refresh!
       end
 
       def fetch_from_providers
-        configured = Provider.configured_providers(RubyLLM.config).filter(&:remote?)
+        configured = Provider.configured_providers(RubyLLM.config)
 
         RubyLLM.logger.info "Fetching models from providers: #{configured.map(&:slug).join(', ')}"
 
@@ -47,23 +47,24 @@ def fetch_from_providers
       end
 
       def resolve(model_id, provider: nil, assume_exists: false) # rubocop:disable Metrics/PerceivedComplexity
-        assume_exists = true if provider && Provider.providers[provider.to_sym].local?
-
-        if assume_exists
+        if assume_exists || local_provider?(provider)
+          model = find_local_model(model_id, provider) if local_provider?(provider)
           raise ArgumentError, 'Provider must be specified if assume_exists is true' unless provider
 
           provider = Provider.providers[provider.to_sym] || raise(Error, "Unknown provider: #{provider.to_sym}")
-          model = Model::Info.new(
-            id: model_id,
-            name: model_id.gsub('-', ' ').capitalize,
-            provider: provider.slug,
-            capabilities: %w[function_calling streaming],
-            modalities: { input: %w[text image], output: %w[text] },
-            metadata: { warning: 'Assuming model exists, capabilities may not be accurate' }
-          )
-          if RubyLLM.config.log_assume_model_exists
-            RubyLLM.logger.warn "Assuming model '#{model_id}' exists for provider '#{provider}'. " \
-                                'Capabilities may not be accurately reflected.'
+          unless model
+            model = Model::Info.new(
+              id: model_id,
+              name: model_id.gsub('-', ' ').capitalize,
+              provider: provider.slug,
+              capabilities: %w[function_calling streaming],
+              modalities: { input: %w[text image], output: %w[text] },
+              metadata: { warning: 'Assuming model exists, capabilities may not be accurate' }
+            )
+            if RubyLLM.config.log_assume_model_exists
+              RubyLLM.logger.warn "Assuming model '#{model_id}' exists for provider '#{provider}'. " \
+                                  'Capabilities may not be accurately reflected.'
+            end
           end
         else
           model = Models.find model_id, provider
@@ -223,5 +224,15 @@ def find_without_provider(model_id)
       all.find { |m| m.id == Aliases.resolve(model_id) } ||
         raise(ModelNotFoundError, "Unknown model: #{model_id}")
     end
+
+    def self.local_provider?(provider)
+      provider && Provider.providers[provider.to_sym]&.local?
+    end
+
+    def self.find_local_model(model_id, provider)
+      Models.find(model_id, provider)
+    rescue ModelNotFoundError
+      nil
+    end
   end
 end
diff --git a/lib/ruby_llm/providers/ollama.rb b/lib/ruby_llm/providers/ollama.rb
index 2ce784a5..c292f546 100644
--- a/lib/ruby_llm/providers/ollama.rb
+++ b/lib/ruby_llm/providers/ollama.rb
@@ -7,6 +7,7 @@ module Ollama
       extend OpenAI
       extend Ollama::Chat
       extend Ollama::Media
+      extend Ollama::Models
 
       module_function
 
diff --git a/lib/ruby_llm/providers/ollama/models.rb b/lib/ruby_llm/providers/ollama/models.rb
new file mode 100644
index 00000000..9ee2aefc
--- /dev/null
+++ b/lib/ruby_llm/providers/ollama/models.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    module Ollama
+      # Models methods of the Ollama API integration
+      module Models
+        module_function
+
+        def models_url
+          'models'
+        end
+
+        def parse_list_models_response(response, slug, _capabilities)
+          Array(response.body['data']).map do |model_data|
+            Model::Info.new(
+              id: model_data['id'],
+              name: model_data['id'],
+              provider: slug,
+              family: 'ollama',
+              created_at: model_data['created'] ? Time.at(model_data['created']) : nil,
+              modalities: {
+                input: %w[text image], # Ollama models don't advertise input modalities, so we assume text and image
+                output: ['text'] # Ollama models don't expose output modalities, so we assume text
+              },
+              capabilities: %w[streaming function_calling structured_output],
+              pricing: {}, # Ollama does not provide pricing details
+              metadata: {}
+            )
+          end
+        end
+      end
+    end
+  end
+end
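
A rough usage sketch (not part of the diff) of how this change is expected to be exercised. It assumes the existing RubyLLM public API (ollama_api_base config, Models#refresh!, Models#by_provider, RubyLLM.chat); the model name and endpoint URL are illustrative only:

    # Minimal sketch, assuming a local Ollama server exposing its
    # OpenAI-compatible API at the configured ollama_api_base.
    require 'ruby_llm'

    RubyLLM.configure do |config|
      config.ollama_api_base = 'http://localhost:11434/v1'  # hypothetical local endpoint
    end

    # With Ollama::Models extended into the provider (and the remote?
    # filter removed from fetch_from_providers), refresh! should now pull
    # the locally installed models from the Ollama /models listing.
    RubyLLM.models.refresh!
    RubyLLM.models.by_provider('ollama').each { |model| puts model.id }

    # resolve no longer fabricates a Model::Info for a model the registry
    # already knows: find_local_model returns the registered entry, and the
    # assume-exists fallback only applies to unknown local models.
    chat = RubyLLM.chat(model: 'llama3.2', provider: :ollama)  # 'llama3.2' is a placeholder model id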