Skip to content

Commit bac7301

Browse files
committed
Improve RubyLLM::Chat#with_params (crmne#265) by allowing default params to be overridden
1 parent e1e9ede commit bac7301

File tree

4 files changed

+105
-7
lines changed

4 files changed

+105
-7
lines changed

lib/ruby_llm/provider.rb

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,17 +14,19 @@ def complete(messages, tools:, temperature:, model:, connection:, params: {}, sc
1414
normalized_temperature = maybe_normalize_temperature(temperature, model)
1515

1616
payload = Utils.deep_merge(
17-
params,
1817
render_payload(
1918
messages,
2019
tools: tools,
2120
temperature: normalized_temperature,
2221
model: model,
2322
stream: block_given?,
2423
schema: schema
25-
)
24+
),
25+
params
2626
)
2727

28+
payload.compact!
29+
2830
if block_given?
2931
stream_response connection, payload, &
3032
else

lib/ruby_llm/utils.rb

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,12 +24,12 @@ def to_safe_array(item)
2424
end
2525
end
2626

27-
def deep_merge(params, payload)
28-
params.merge(payload) do |_key, params_value, payload_value|
29-
if params_value.is_a?(Hash) && payload_value.is_a?(Hash)
30-
deep_merge(params_value, payload_value)
27+
# Recursively merges +hash2+ into +hash1+ and returns a new Hash.
# On key conflicts, values from +hash2+ win unless both sides are
# Hashes, in which case they are merged recursively. Neither input
# is mutated.
def deep_merge(hash1, hash2)
  merged = hash1.dup
  hash2.each do |key, incoming|
    current = merged[key]
    merged[key] =
      if current.is_a?(Hash) && incoming.is_a?(Hash)
        deep_merge(current, incoming)
      else
        incoming
      end
  end
  merged
end

spec/fixtures/vcr_cassettes/chat_with_params_anthropic_claude-3-5-haiku-20241022_can_override_max_tokens_param_with_a_custom_value.yml

Lines changed: 81 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

spec/ruby_llm/chat_request_options_spec.rb

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,21 @@
7878
json_response = JSON.parse('{' + response.content) # rubocop:disable Style/StringConcatenation
7979
expect(json_response).to eq({ 'result' => 8 })
8080
end
81+
82+
it "#{provider}/#{model} can override max_tokens param with a custom value" do # rubocop:disable RSpec/ExampleLength
83+
chat = RubyLLM
84+
.chat(model: model, provider: provider)
85+
.with_params(max_tokens: 2)
86+
87+
chat.add_message(
88+
role: :user,
89+
content: 'Always answer with "Once upon a time"'
90+
)
91+
92+
response = chat.complete
93+
94+
expect(response.content).to eq('Once upon') # Only 2 tokens
95+
end
8196
end
8297

8398
# Providers [:openrouter, :bedrock] support a {top_k: ...} param to remove low-probability next tokens.

0 commit comments

Comments
 (0)