Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ use crate::{
ErrorCode, LLMCompleteResponse, LLMCompleteResponseMetadata, LLMErrorResponse, LLMResponse,
ModelFeatures, ResolveMediaUrls,
},
request::create_client,
request::{create_client, create_client_with_env},
RuntimeContext,
};

Expand Down Expand Up @@ -154,7 +154,7 @@ impl AnthropicClient {
allowed_metadata: properties.allowed_metadata.clone(),
},
retry_policy: client.retry_policy.clone(),
client: create_client()?,
client: create_client_with_env(ctx.env_vars())?,
properties,
})
}
Expand Down Expand Up @@ -182,7 +182,7 @@ impl AnthropicClient {
allowed_metadata: properties.allowed_metadata.clone(),
},
retry_policy: client.elem().retry_policy_id.as_ref().map(String::from),
client: create_client()?,
client: create_client_with_env(ctx.env_vars())?,
properties,
})
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -513,6 +513,7 @@ impl AwsClient {
&self,
call_stack: Vec<baml_ids::FunctionCallId>,
http_request_id: baml_ids::HttpRequestId,
env: &std::collections::HashMap<String, String>,
) -> Result<bedrock::Client> {
#[cfg(target_arch = "wasm32")]
let loader = super::wasm::load_aws_config();
Expand Down Expand Up @@ -590,7 +591,7 @@ impl AwsClient {
}

let config = loader.load().await;
let http_client = custom_http_client::client()?;
let http_client = custom_http_client::client_with_env(env)?;

let bedrock_config = aws_sdk_bedrockruntime::config::Builder::from(&config)
// To support HTTPS_PROXY https://github.com/awslabs/aws-sdk-rust/issues/169
Expand Down Expand Up @@ -820,6 +821,7 @@ impl WithStreamChat for AwsClient {
.client_anyhow(
ctx.runtime_context().call_id_stack.clone(),
ctx.http_request_id().clone(),
ctx.runtime_context().env_vars(),
)
.await
{
Expand Down Expand Up @@ -1224,6 +1226,7 @@ impl WithChat for AwsClient {
.client_anyhow(
ctx.runtime_context().call_id_stack.clone(),
ctx.http_request_id().clone(),
ctx.runtime_context().env_vars(),
)
.await
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,20 +17,20 @@ use aws_smithy_types::body::SdkBody;
#[cfg(target_arch = "wasm32")]
use {futures::channel::oneshot, wasm_bindgen_futures::spawn_local};

use crate::request::create_client;
use crate::request::{create_client, create_client_with_env};

/// Returns a wrapper around the global reqwest client.
/// [HttpClient].
#[cfg(not(target_arch = "wasm32"))] // Keep function non-WASM for now
pub fn client() -> anyhow::Result<Client> {
let client = crate::request::create_client()
pub fn client_with_env(env: &std::collections::HashMap<String, String>) -> anyhow::Result<Client> {
let client = create_client_with_env(env)
.map_err(|e| anyhow::anyhow!("failed to create base http client: {}", e))?;
Ok(Client::new(client.clone()))
}

#[cfg(target_arch = "wasm32")] // Define WASM client function
pub fn client() -> anyhow::Result<Client> {
let client = crate::request::create_client()
pub fn client_with_env(env: &std::collections::HashMap<String, String>) -> anyhow::Result<Client> {
let client = create_client_with_env(env)
.map_err(|e| anyhow::anyhow!("failed to create base http client for WASM: {}", e))?;
Ok(Client::new(client.clone()))
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ use crate::{
ErrorCode, LLMCompleteResponse, LLMCompleteResponseMetadata, LLMErrorResponse, LLMResponse,
ModelFeatures, ResolveMediaUrls,
},
request::create_client,
request::{create_client, create_client_with_env},
RuntimeContext,
};

Expand Down Expand Up @@ -142,7 +142,7 @@ impl GoogleAIClient {
allowed_metadata: properties.allowed_metadata.clone(),
},
retry_policy: client.elem().retry_policy_id.as_ref().map(String::to_owned),
client: create_client()?,
client: create_client_with_env(ctx.env_vars())?,
properties,
})
}
Expand Down Expand Up @@ -171,7 +171,7 @@ impl GoogleAIClient {
allowed_metadata: properties.allowed_metadata.clone(),
},
retry_policy: client.retry_policy.clone(),
client: create_client()?,
client: create_client_with_env(ctx.env_vars())?,
properties,
})
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ use crate::{
ErrorCode, LLMCompleteResponse, LLMCompleteResponseMetadata, LLMErrorResponse, LLMResponse,
ModelFeatures, ResolveMediaUrls,
},
request::create_client,
request::{create_client, create_client_with_env},
RuntimeContext,
};

Expand Down Expand Up @@ -483,7 +483,7 @@ impl WithStreamChat for OpenAIClient {
}

macro_rules! make_openai_client {
($client:ident, $properties:ident, $provider:expr, dynamic) => {{
($client:ident, $properties:ident, $provider:expr, dynamic, $env:expr) => {{
let resolve_pdf_urls = if $provider == "openai-responses" {
ResolveMediaUrls::Never
} else {
Expand Down Expand Up @@ -512,10 +512,10 @@ macro_rules! make_openai_client {
},
properties: $properties,
retry_policy: $client.retry_policy.clone(),
client: create_client()?,
client: create_client_with_env($env)?,
})
}};
($client:ident, $properties:ident, $provider:expr) => {{
($client:ident, $properties:ident, $provider:expr, $env:expr) => {{
let resolve_pdf_urls = if $provider == "openai-responses" {
ResolveMediaUrls::Never
} else {
Expand Down Expand Up @@ -548,7 +548,7 @@ macro_rules! make_openai_client {
.retry_policy_id
.as_ref()
.map(|s| s.to_string()),
client: create_client()?,
client: create_client_with_env($env)?,
})
}};
}
Expand All @@ -557,39 +557,39 @@ impl OpenAIClient {
pub fn new(client: &ClientWalker, ctx: &RuntimeContext) -> Result<OpenAIClient> {
let properties =
properties::resolve_properties(&client.elem().provider, client.options(), ctx)?;
make_openai_client!(client, properties, "openai")
make_openai_client!(client, properties, "openai", ctx.env_vars())
}

pub fn new_generic(client: &ClientWalker, ctx: &RuntimeContext) -> Result<OpenAIClient> {
let properties =
properties::resolve_properties(&client.elem().provider, client.options(), ctx)?;
make_openai_client!(client, properties, "openai-generic")
make_openai_client!(client, properties, "openai-generic", ctx.env_vars())
}

pub fn new_ollama(client: &ClientWalker, ctx: &RuntimeContext) -> Result<OpenAIClient> {
let properties =
properties::resolve_properties(&client.elem().provider, client.options(), ctx)?;
make_openai_client!(client, properties, "ollama")
make_openai_client!(client, properties, "ollama", ctx.env_vars())
}

pub fn new_azure(client: &ClientWalker, ctx: &RuntimeContext) -> Result<OpenAIClient> {
let properties =
properties::resolve_properties(&client.elem().provider, client.options(), ctx)?;
make_openai_client!(client, properties, "azure")
make_openai_client!(client, properties, "azure", ctx.env_vars())
}

pub fn new_responses(client: &ClientWalker, ctx: &RuntimeContext) -> Result<OpenAIClient> {
let mut properties =
properties::resolve_properties(&client.elem().provider, client.options(), ctx)?;
// Override response type for responses API
properties.client_response_type = internal_llm_client::ResponseType::OpenAIResponses;
make_openai_client!(client, properties, "openai-responses")
make_openai_client!(client, properties, "openai-responses", ctx.env_vars())
}

pub fn dynamic_new(client: &ClientProperty, ctx: &RuntimeContext) -> Result<OpenAIClient> {
let properties =
properties::resolve_properties(&client.provider, &client.unresolved_options()?, ctx)?;
make_openai_client!(client, properties, "openai", dynamic)
make_openai_client!(client, properties, "openai", dynamic, ctx.env_vars())
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Bug: OpenAI Client Macro Ignores Env Variable

The non-dynamic make_openai_client! macro variant still uses create_client() instead of create_client_with_env(). This prevents non-dynamic OpenAI clients from respecting the DANGER_ACCEPT_INVALID_CERTS environment variable, creating inconsistent behavior with dynamic clients.

Fix in Cursor Fix in Web

}

pub fn dynamic_new_generic(
Expand All @@ -598,7 +598,13 @@ impl OpenAIClient {
) -> Result<OpenAIClient> {
let properties =
properties::resolve_properties(&client.provider, &client.unresolved_options()?, ctx)?;
make_openai_client!(client, properties, "openai-generic", dynamic)
make_openai_client!(
client,
properties,
"openai-generic",
dynamic,
ctx.env_vars()
)
}

pub fn dynamic_new_ollama(
Expand All @@ -607,7 +613,7 @@ impl OpenAIClient {
) -> Result<OpenAIClient> {
let properties =
properties::resolve_properties(&client.provider, &client.unresolved_options()?, ctx)?;
make_openai_client!(client, properties, "ollama", dynamic)
make_openai_client!(client, properties, "ollama", dynamic, ctx.env_vars())
}

pub fn dynamic_new_azure(
Expand All @@ -616,7 +622,7 @@ impl OpenAIClient {
) -> Result<OpenAIClient> {
let properties =
properties::resolve_properties(&client.provider, &client.unresolved_options()?, ctx)?;
make_openai_client!(client, properties, "azure", dynamic)
make_openai_client!(client, properties, "azure", dynamic, ctx.env_vars())
}

pub fn dynamic_new_responses(
Expand All @@ -627,7 +633,13 @@ impl OpenAIClient {
properties::resolve_properties(&client.provider, &client.unresolved_options()?, ctx)?;
// Override response type for responses API
properties.client_response_type = internal_llm_client::ResponseType::OpenAIResponses;
make_openai_client!(client, properties, "openai-responses", dynamic)
make_openai_client!(
client,
properties,
"openai-responses",
dynamic,
ctx.env_vars()
)
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ use crate::{
ErrorCode, LLMCompleteResponse, LLMCompleteResponseMetadata, LLMErrorResponse, LLMResponse,
ModelFeatures, ResolveMediaUrls,
},
request::create_client,
request::{create_client, create_client_with_env},
RuntimeContext,
};

Expand Down Expand Up @@ -166,7 +166,7 @@ impl VertexClient {
allowed_metadata: properties.allowed_metadata.clone(),
},
retry_policy: client.elem().retry_policy_id.as_ref().map(String::to_owned),
client: create_client()?,
client: create_client_with_env(ctx.env_vars())?,
properties,
})
}
Expand Down Expand Up @@ -195,7 +195,7 @@ impl VertexClient {
allowed_metadata: properties.allowed_metadata.clone(),
},
retry_policy: client.retry_policy.clone(),
client: create_client()?,
client: create_client_with_env(ctx.env_vars())?,
properties,
})
}
Expand Down
26 changes: 26 additions & 0 deletions engine/baml-runtime/src/request/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,36 @@ fn builder() -> reqwest::ClientBuilder {
}
}

/// Builds a `reqwest::ClientBuilder` configured from the provided environment
/// variable map rather than the process environment.
///
/// Setting `DANGER_ACCEPT_INVALID_CERTS=1` disables TLS certificate
/// verification on native targets. On `wasm32` the setting is read but
/// ignored, because reqwest cannot disable TLS verification in the browser.
fn builder_with_env(env: &std::collections::HashMap<String, String>) -> reqwest::ClientBuilder {
    cfg_if::cfg_if! {
        if #[cfg(target_arch = "wasm32")] {
            // TLS verification cannot be disabled via reqwest on wasm;
            // intentionally read and discard the setting.
            let _ = env.get("DANGER_ACCEPT_INVALID_CERTS");
            reqwest::Client::builder()
        } else {
            let accept_invalid_certs = env
                .get("DANGER_ACCEPT_INVALID_CERTS")
                .is_some_and(|v| v.as_str() == "1");
            reqwest::Client::builder()
                .connect_timeout(Duration::from_secs(10))
                .danger_accept_invalid_certs(accept_invalid_certs)
                .http2_keep_alive_interval(Some(Duration::from_secs(10)))
                // Drop idle connections immediately so every request uses a
                // fresh connection instead of the pool.
                .pool_max_idle_per_host(0)
                .pool_idle_timeout(std::time::Duration::from_nanos(1))
        }
    }
}

/// Creates the default HTTP client from the process-wide builder settings.
///
/// # Errors
/// Returns an error if the underlying reqwest client fails to build.
pub fn create_client() -> Result<reqwest::Client> {
    let client = builder()
        .build()
        .context("Failed to create reqwest client")?;
    Ok(client)
}

/// Creates an HTTP client configured from the given environment variable map.
/// See `builder_with_env` for the settings that are recognized.
///
/// # Errors
/// Returns an error if the underlying reqwest client fails to build.
pub fn create_client_with_env(
    env: &std::collections::HashMap<String, String>,
) -> Result<reqwest::Client> {
    let built = builder_with_env(env).build();
    built.context("Failed to create reqwest client with env")
}

pub(crate) fn create_tracing_client() -> Result<reqwest::Client> {
cfg_if::cfg_if! {
if #[cfg(target_arch = "wasm32")] {
Expand Down
Loading