
Commit f59f582

Add streaming support for Responses API.
1 parent 483c84f

5 files changed: +950 −2 lines

async-openai/src/responses.rs

Lines changed: 28 additions & 2 deletions
@@ -1,13 +1,13 @@
 use crate::{
     config::Config,
     error::OpenAIError,
-    types::responses::{CreateResponse, Response},
+    types::responses::{CreateResponse, Response, ResponseStream},
     Client,
 };
 
 /// Given text input or a list of context items, the model will generate a response.
 ///
-/// Related guide: [Responses API](https://platform.openai.com/docs/guides/responses)
+/// Related guide: [Responses](https://platform.openai.com/docs/api-reference/responses)
 pub struct Responses<'c, C: Config> {
     client: &'c Client<C>,
 }
@@ -26,4 +26,30 @@ impl<'c, C: Config> Responses<'c, C> {
     pub async fn create(&self, request: CreateResponse) -> Result<Response, OpenAIError> {
         self.client.post("/responses", request).await
     }
+
+    /// Creates a model response for the given input with streaming.
+    ///
+    /// Response events will be sent as server-sent events as they become available,
+    #[crate::byot(
+        T0 = serde::Serialize,
+        R = serde::de::DeserializeOwned,
+        stream = "true",
+        where_clause = "R: std::marker::Send + 'static"
+    )]
+    #[allow(unused_mut)]
+    pub async fn create_stream(
+        &self,
+        mut request: CreateResponse,
+    ) -> Result<ResponseStream, OpenAIError> {
+        #[cfg(not(feature = "byot"))]
+        {
+            if matches!(request.stream, Some(false)) {
+                return Err(OpenAIError::InvalidArgument(
+                    "When stream is false, use Responses::create".into(),
+                ));
+            }
+            request.stream = Some(true);
+        }
+        Ok(self.client.post_stream("/responses", request).await)
+    }
 }
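
For orientation, here is a minimal sketch of how the new create_stream method might be consumed. This is not part of the commit: the CreateResponseArgs builder and its .model()/.input() setters are assumed to mirror the crate's other request builders, Client::responses() is assumed to return the Responses handle, and ResponseStream is assumed to implement futures::Stream yielding Result items with OpenAIError as the error type.

use async_openai::{types::responses::CreateResponseArgs, Client};
use futures::StreamExt;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Reads OPENAI_API_KEY from the environment by default.
    let client = Client::new();

    // Hypothetical builder; the exact request fields are not shown in this diff.
    let request = CreateResponseArgs::default()
        .model("gpt-4o-mini")
        .input("Say hello in three words.")
        .build()?;

    // create_stream sets `stream` to Some(true) before posting (see the diff
    // above), so the server replies with server-sent events.
    let mut stream = client.responses().create_stream(request).await?;

    // Drain events as they arrive; each item is assumed to be a
    // Result<_, OpenAIError> whose Ok variant implements Debug.
    while let Some(event) = stream.next().await {
        match event {
            Ok(event) => println!("{event:?}"),
            Err(err) => eprintln!("stream error: {err}"),
        }
    }

    Ok(())
}

Note the guard in the diff: an explicit stream: Some(false) is rejected with InvalidArgument rather than silently overridden, while None is upgraded to Some(true), keeping Responses::create the single entry point for non-streaming requests.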
