Allow local providers to fetch model list #294

Open · wants to merge 1 commit into main
lib/ruby_llm/models.rb (26 additions, 15 deletions)

@@ -37,7 +37,7 @@ def refresh!
     end

     def fetch_from_providers
-      configured = Provider.configured_providers(RubyLLM.config).filter(&:remote?)
+      configured = Provider.configured_providers(RubyLLM.config)

       RubyLLM.logger.info "Fetching models from providers: #{configured.map(&:slug).join(', ')}"

@@ -47,23 +47,24 @@ def fetch_from_providers
     end

     def resolve(model_id, provider: nil, assume_exists: false) # rubocop:disable Metrics/PerceivedComplexity
-      assume_exists = true if provider && Provider.providers[provider.to_sym].local?
-
-      if assume_exists
+      if assume_exists || local_provider?(provider)
+        model = find_local_model(model_id, provider) if local_provider?(provider)
         raise ArgumentError, 'Provider must be specified if assume_exists is true' unless provider

         provider = Provider.providers[provider.to_sym] || raise(Error, "Unknown provider: #{provider.to_sym}")
-        model = Model::Info.new(
-          id: model_id,
-          name: model_id.gsub('-', ' ').capitalize,
-          provider: provider.slug,
-          capabilities: %w[function_calling streaming],
-          modalities: { input: %w[text image], output: %w[text] },
-          metadata: { warning: 'Assuming model exists, capabilities may not be accurate' }
-        )
-        if RubyLLM.config.log_assume_model_exists
-          RubyLLM.logger.warn "Assuming model '#{model_id}' exists for provider '#{provider}'. " \
-                              'Capabilities may not be accurately reflected.'
+        unless model
+          model = Model::Info.new(
+            id: model_id,
+            name: model_id.gsub('-', ' ').capitalize,
+            provider: provider.slug,
+            capabilities: %w[function_calling streaming],
+            modalities: { input: %w[text image], output: %w[text] },
+            metadata: { warning: 'Assuming model exists, capabilities may not be accurate' }
+          )
+          if RubyLLM.config.log_assume_model_exists
+            RubyLLM.logger.warn "Assuming model '#{model_id}' exists for provider '#{provider}'. " \
+                                'Capabilities may not be accurately reflected.'
+          end
         end
       else
         model = Models.find model_id, provider
@@ -223,5 +224,15 @@ def find_without_provider(model_id)
       all.find { |m| m.id == Aliases.resolve(model_id) } ||
         raise(ModelNotFoundError, "Unknown model: #{model_id}")
     end
+
+    def self.local_provider?(provider)
+      provider && Provider.providers[provider.to_sym]&.local?
+    end
+
+    def self.find_local_model(model_id, provider)
+      Models.find(model_id, provider)
+    rescue ModelNotFoundError
+      nil
+    end
   end
 end
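Net effect of the models.rb changes: resolution for a local provider is now registry-first. refresh! includes local providers in the fetch, and resolve only synthesizes a placeholder Model::Info when the model is missing from the fetched list. A minimal usage sketch (the model names are hypothetical; RubyLLM.chat and RubyLLM.models.refresh! are the gem's documented entry points):

    # Local providers are no longer filtered out of the fetch, so this
    # pulls Ollama's model list alongside the remote providers'.
    RubyLLM.models.refresh!

    # Resolution consults the fetched list first (via find_local_model)...
    chat = RubyLLM.chat(model: 'llama3', provider: :ollama)

    # ...and only falls back to a synthesized Model::Info, logging the
    # 'Assuming model exists' warning, when the model isn't listed.
    chat = RubyLLM.chat(model: 'model-i-never-pulled', provider: :ollama)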
lib/ruby_llm/providers/ollama.rb (1 addition, 0 deletions)

@@ -7,6 +7,7 @@ module Ollama
     extend OpenAI
     extend Ollama::Chat
     extend Ollama::Media
+    extend Ollama::Models

     module_function
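Why a one-line extend is enough: modules extended later come earlier in the singleton ancestry, so Ollama::Models shadows any model-listing methods picked up via extend OpenAI. A quick illustrative check (the ancestry shown is assumed from the extend order above):

    RubyLLM::Providers::Ollama.singleton_class.ancestors.first(4)
    # => [#<Class:RubyLLM::Providers::Ollama>,
    #     RubyLLM::Providers::Ollama::Models,
    #     RubyLLM::Providers::Ollama::Media,
    #     RubyLLM::Providers::Ollama::Chat]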
lib/ruby_llm/providers/ollama/models.rb (35 additions, 0 deletions)

@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    module Ollama
+      # Models methods of the Ollama API integration
+      module Models
+        module_function
+
+        def models_url
+          'models'
+        end
+
+        def parse_list_models_response(response, slug, _capabilities)
+          Array(response.body['data']).map do |model_data|
+            Model::Info.new(
+              id: model_data['id'],
+              name: model_data['id'],
+              provider: slug,
+              family: 'ollama',
+              created_at: model_data['created'] ? Time.at(model_data['created']) : nil,
+              modalities: {
+                input: %w[text image], # Ollama models don't advertise input modalities, so we assume text and image
+                output: ['text'] # Ollama models don't expose output modalities, so we assume text
+              },
+              capabilities: %w[streaming function_calling structured_output],
+              pricing: {}, # Ollama does not provide pricing details
+              metadata: {}
+            )
+          end
+        end
+      end
+    end
+  end
+end
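Since the provider builds on the OpenAI-compatible API, parse_list_models_response presumably sees the shape of Ollama's /v1/models response. A minimal sketch of the mapping under that assumption (the OpenStruct stand-in and field values are illustrative, not a real server response):

    require 'ostruct'

    # Shaped like an OpenAI-compatible list response: { 'data' => [...] }.
    response = OpenStruct.new(body: {
      'data' => [
        { 'id' => 'llama3:latest', 'created' => 1_717_000_000 },
        { 'id' => 'qwen2:7b' } # no 'created' key, so created_at is nil
      ]
    })

    models = RubyLLM::Providers::Ollama::Models
             .parse_list_models_response(response, 'ollama', nil)
    models.map(&:id)        # => ["llama3:latest", "qwen2:7b"]
    models.first.created_at # => Time.at(1_717_000_000)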