From bc6a8481a94cfa439130f67955732077fe027581 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 16 Jul 2025 19:06:17 +0000
Subject: [PATCH 01/10] codegen metadata

---
 .stats.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.stats.yml b/.stats.yml
index 2b9160cf6e..bc75e5c98c 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 111
 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-670ea0d2cc44f52a87dd3cadea45632953283e0636ba30788fdbdb22a232ccac.yml
 openapi_spec_hash: d8b7d38911fead545adf3e4297956410
-config_hash: 5525bda35e48ea6387c6175c4d1651fa
+config_hash: b2a4028fdbb27a08de89831ed310e244

From 58c359ff67fd6103268e4405600fd58844b6f27b Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 21 Jul 2025 12:27:19 +0000
Subject: [PATCH 02/10] fix(parsing): ignore empty metadata

---
 src/openai/_models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/openai/_models.py b/src/openai/_models.py
index f347a81dac..dee5551948 100644
--- a/src/openai/_models.py
+++ b/src/openai/_models.py
@@ -464,7 +464,7 @@ def construct_type(*, value: object, type_: object, metadata: Optional[List[Any]
         type_ = type_.__value__  # type: ignore[unreachable]
 
     # unwrap `Annotated[T, ...]` -> `T`
-    if metadata is not None:
+    if metadata is not None and len(metadata) > 0:
         meta: tuple[Any, ...] = tuple(metadata)
     elif is_annotated_type(type_):
         meta = get_args(type_)[1:]

From a3860ff8618aa2d6283088ffdeed45239e31cd77 Mon Sep 17 00:00:00 2001
From: David Meadows
Date: Mon, 21 Jul 2025 16:51:32 -0400
Subject: [PATCH 03/10] chore(internal): refactor stream event processing to
 be more future proof

---
 src/openai/_streaming.py | 23 ++++++++---------------
 1 file changed, 8 insertions(+), 15 deletions(-)

diff --git a/src/openai/_streaming.py b/src/openai/_streaming.py
index fa0a30e183..23417ce9b0 100644
--- a/src/openai/_streaming.py
+++ b/src/openai/_streaming.py
@@ -59,14 +59,9 @@ def __stream__(self) -> Iterator[_T]:
             if sse.data.startswith("[DONE]"):
                 break
 
-            if sse.event is None or (
-                sse.event.startswith("response.") or
-                sse.event.startswith("transcript.") or
-                sse.event.startswith("image_edit.") or
-                sse.event.startswith("image_generation.")
-            ):
-                data = sse.json()
-                if is_mapping(data) and data.get("error"):
+            data = sse.json()
+            if sse.event is not None and sse.event == "error":
+                if sse.event == "error" and is_mapping(data) and data.get("error"):
                     message = None
                     error = data.get("error")
                     if is_mapping(error):
@@ -79,13 +74,11 @@ def __stream__(self) -> Iterator[_T]:
                         request=self.response.request,
                         body=data["error"],
                     )
-
-                yield process_data(data=data, cast_to=cast_to, response=response)
-
+            elif sse.event is not None and sse.event.startswith("thread."):
+                # have to manually create part of the event since we don't have a full event
+                yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
             else:
-                data = sse.json()
-
-                if sse.event == "error" and is_mapping(data) and data.get("error"):
+                if is_mapping(data) and data.get("error"):
                     message = None
                     error = data.get("error")
                     if is_mapping(error):
@@ -99,7 +92,7 @@ def __stream__(self) -> Iterator[_T]:
                         body=data["error"],
                     )
 
-                yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
+                yield process_data(data=data, cast_to=cast_to, response=response)
 
         # Ensure the entire stream is consumed
         for _sse in iterator:

From 2d21186b3e54ae8e6053af9f24f8262b8966b05a Mon Sep 17 00:00:00 2001
From: David Meadows
Date: Mon, 21 Jul 2025 16:54:13 -0400
Subject: [PATCH 04/10] fixup!

---
 src/openai/_streaming.py | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/src/openai/_streaming.py b/src/openai/_streaming.py
index 23417ce9b0..cb4146968a 100644
--- a/src/openai/_streaming.py
+++ b/src/openai/_streaming.py
@@ -59,9 +59,9 @@ def __stream__(self) -> Iterator[_T]:
             if sse.data.startswith("[DONE]"):
                 break
 
-            data = sse.json()
-            if sse.event is not None and sse.event == "error":
-                if sse.event == "error" and is_mapping(data) and data.get("error"):
+            if sse.event is not None and not sse.event.startswith("thread."):
+                data = sse.json()
+                if is_mapping(data) and data.get("error"):
                     message = None
                     error = data.get("error")
                     if is_mapping(error):
@@ -74,11 +74,11 @@ def __stream__(self) -> Iterator[_T]:
                         request=self.response.request,
                         body=data["error"],
                     )
-            elif sse.event is not None and sse.event.startswith("thread."):
-                # have to manually create part of the event since we don't have a full event
-                yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
+
+                yield process_data(data=data, cast_to=cast_to, response=response)
             else:
-                if is_mapping(data) and data.get("error"):
+                data = sse.json()
+                if sse.event == "error" and is_mapping(data) and data.get("error"):
                     message = None
                     error = data.get("error")
                     if is_mapping(error):
@@ -91,8 +91,8 @@ def __stream__(self) -> Iterator[_T]:
                         request=self.response.request,
                         body=data["error"],
                     )
-
-                yield process_data(data=data, cast_to=cast_to, response=response)
+                # have to manually create part of the event since we don't have a full event
+                yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
 
         # Ensure the entire stream is consumed
         for _sse in iterator:

From 3f132011abad437c3216e00839bbd07cae33e7e8 Mon Sep 17 00:00:00 2001
From: David Meadows
Date: Mon, 21 Jul 2025 16:58:58 -0400
Subject: [PATCH 05/10] fixup!

---
 src/openai/_streaming.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/openai/_streaming.py b/src/openai/_streaming.py
index cb4146968a..89409bc00a 100644
--- a/src/openai/_streaming.py
+++ b/src/openai/_streaming.py
@@ -76,8 +76,10 @@ def __stream__(self) -> Iterator[_T]:
                     )
 
                 yield process_data(data=data, cast_to=cast_to, response=response)
+
             else:
                 data = sse.json()
+
                 if sse.event == "error" and is_mapping(data) and data.get("error"):
                     message = None
                     error = data.get("error")

From 13550da2a2871d04688f6eaa0c617cad9fb32967 Mon Sep 17 00:00:00 2001
From: David Meadows
Date: Tue, 22 Jul 2025 13:26:18 -0400
Subject: [PATCH 06/10] fixup!

---
 examples/image_stream.py | 2 +-
 src/openai/_streaming.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/image_stream.py b/examples/image_stream.py
index c188e68717..eab5932534 100644
--- a/examples/image_stream.py
+++ b/examples/image_stream.py
@@ -50,4 +50,4 @@ def main() -> None:
     try:
         main()
     except Exception as error:
-        print(f"Error generating image: {error}")
\ No newline at end of file
+        print(f"Error generating image: {error}")
diff --git a/src/openai/_streaming.py b/src/openai/_streaming.py
index 89409bc00a..7a9d960af7 100644
--- a/src/openai/_streaming.py
+++ b/src/openai/_streaming.py
@@ -76,7 +76,7 @@ def __stream__(self) -> Iterator[_T]:
                     )
 
                 yield process_data(data=data, cast_to=cast_to, response=response)
-
+
             else:
                 data = sse.json()
 

From ef161032818e0aeabfd3b8a8a464167332a840ad Mon Sep 17 00:00:00 2001
From: David Meadows
Date: Wed, 23 Jul 2025 09:56:02 -0400
Subject: [PATCH 07/10] update comment

---
 src/openai/_streaming.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/openai/_streaming.py b/src/openai/_streaming.py
index 7a9d960af7..0ba2beb6be 100644
--- a/src/openai/_streaming.py
+++ b/src/openai/_streaming.py
@@ -93,7 +93,7 @@ def __stream__(self) -> Iterator[_T]:
                         request=self.response.request,
                         body=data["error"],
                     )
-                # have to manually create part of the event since we don't have a full event
+                # we have to special case the Assistants `thread.` events since we won't have an "event" key in the data
                 yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
 
         # Ensure the entire stream is consumed

From 29c22c90fd229983355089f95d0bba9de15efedb Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 24 Jul 2025 14:24:53 +0000
Subject: [PATCH 08/10] chore(project): add settings file for vscode

---
 .gitignore            | 1 -
 .vscode/settings.json | 3 +++
 2 files changed, 3 insertions(+), 1 deletion(-)
 create mode 100644 .vscode/settings.json

diff --git a/.gitignore b/.gitignore
index 70815df7f6..55c6ca861f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,4 @@
 .prism.log
-.vscode
 _dev
 
 __pycache__
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000000..5b01030785
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,3 @@
+{
+    "python.analysis.importFormat": "relative",
+}

From 50548106b7fafea954dd43049acc1d4a805bd8fe Mon Sep 17 00:00:00 2001
From: David Meadows
Date: Fri, 25 Jul 2025 09:11:07 -0400
Subject: [PATCH 09/10] flip logic around

---
 examples/image_stream.py |  2 +-
 src/openai/_streaming.py | 30 +++++++++++++++---------------
 2 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/examples/image_stream.py b/examples/image_stream.py
index eab5932534..c188e68717 100644
--- a/examples/image_stream.py
+++ b/examples/image_stream.py
@@ -50,4 +50,4 @@ def main() -> None:
     try:
         main()
     except Exception as error:
-        print(f"Error generating image: {error}")
+        print(f"Error generating image: {error}")
\ No newline at end of file
diff --git a/src/openai/_streaming.py b/src/openai/_streaming.py
index 0ba2beb6be..f586de74ff 100644
--- a/src/openai/_streaming.py
+++ b/src/openai/_streaming.py
@@ -59,9 +59,11 @@ def __stream__(self) -> Iterator[_T]:
             if sse.data.startswith("[DONE]"):
                 break
 
-            if sse.event is not None and not sse.event.startswith("thread."):
+            # we have to special case the Assistants `thread.` events since we won't have an "event" key in the data
+            if sse.event and sse.event.startswith("thread."):
                 data = sse.json()
-                if is_mapping(data) and data.get("error"):
+
+                if sse.event == "error" and is_mapping(data) and data.get("error"):
                     message = None
                     error = data.get("error")
                     if is_mapping(error):
@@ -75,12 +77,10 @@ def __stream__(self) -> Iterator[_T]:
                         body=data["error"],
                     )
 
-                yield process_data(data=data, cast_to=cast_to, response=response)
-
+                yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
             else:
                 data = sse.json()
-
-                if sse.event == "error" and is_mapping(data) and data.get("error"):
+                if is_mapping(data) and data.get("error"):
                     message = None
                     error = data.get("error")
                     if is_mapping(error):
@@ -93,8 +93,8 @@ def __stream__(self) -> Iterator[_T]:
                         request=self.response.request,
                         body=data["error"],
                     )
-                # we have to special case the Assistants `thread.` events since we won't have an "event" key in the data
-                yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
+
+                yield process_data(data=data, cast_to=cast_to, response=response)
 
         # Ensure the entire stream is consumed
         for _sse in iterator:
@@ -161,9 +161,11 @@ async def __stream__(self) -> AsyncIterator[_T]:
             if sse.data.startswith("[DONE]"):
                 break
 
-            if sse.event is None or sse.event.startswith("response.") or sse.event.startswith("transcript."):
+            # we have to special case the Assistants `thread.` events since we won't have an "event" key in the data
+            if sse.event and sse.event.startswith("thread."):
                 data = sse.json()
-                if is_mapping(data) and data.get("error"):
+
+                if sse.event == "error" and is_mapping(data) and data.get("error"):
                     message = None
                     error = data.get("error")
                     if is_mapping(error):
@@ -177,12 +179,10 @@ async def __stream__(self) -> AsyncIterator[_T]:
                         body=data["error"],
                     )
 
-                yield process_data(data=data, cast_to=cast_to, response=response)
-
+                yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
             else:
                 data = sse.json()
-
-                if sse.event == "error" and is_mapping(data) and data.get("error"):
+                if is_mapping(data) and data.get("error"):
                     message = None
                     error = data.get("error")
                     if is_mapping(error):
@@ -196,7 +196,7 @@ async def __stream__(self) -> AsyncIterator[_T]:
                         body=data["error"],
                     )
 
-                yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
+                yield process_data(data=data, cast_to=cast_to, response=response)
 
         # Ensure the entire stream is consumed
         async for _sse in iterator:

From ab0ae43cd195243cc09bb5668e4418d12cc12098 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 30 Jul 2025 05:10:06 +0000
Subject: [PATCH 10/10] release: 1.97.2

---
 .release-please-manifest.json | 2 +-
 CHANGELOG.md                  | 9 +++++++++
 pyproject.toml                | 2 +-
 src/openai/_version.py        | 2 +-
 4 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 9cdfd7b049..1137af1259 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "1.97.1"
+  ".": "1.97.2"
 }
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0c8d06cbb6..945e224cf9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,14 @@
 # Changelog
 
+## 1.97.2 (2025-07-30)
+
+Full Changelog: [v1.97.1...v1.97.2](https://github.com/openai/openai-python/compare/v1.97.1...v1.97.2)
+
+### Chores
+
+* **client:** refactor streaming slightly to better future proof it ([71c0c74](https://github.com/openai/openai-python/commit/71c0c747132221b798e419bc5a37baf67173d34e))
+* **project:** add settings file for vscode ([29c22c9](https://github.com/openai/openai-python/commit/29c22c90fd229983355089f95d0bba9de15efedb))
+
 ## 1.97.1 (2025-07-22)
 
 Full Changelog: [v1.97.0...v1.97.1](https://github.com/openai/openai-python/compare/v1.97.0...v1.97.1)
diff --git a/pyproject.toml b/pyproject.toml
index af1366b34e..5b59053d02 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "openai"
-version = "1.97.1"
+version = "1.97.2"
 description = "The official Python library for the openai API"
 dynamic = ["readme"]
 license = "Apache-2.0"
diff --git a/src/openai/_version.py b/src/openai/_version.py
index 9073c643cc..59fb46ac23 100644
--- a/src/openai/_version.py
+++ b/src/openai/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "openai"
-__version__ = "1.97.1"  # x-release-please-version
+__version__ = "1.97.2"  # x-release-please-version
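The series above converges on a simple dispatch rule: Assistants `thread.*` SSE events are wrapped so the event name travels alongside the decoded JSON payload, every other event is processed from its payload alone, and an error payload aborts the stream. The following is a minimal, self-contained sketch of that branching for illustration only; `ServerSentEvent`, `StreamError`, and `dispatch` are simplified stand-ins invented here, not the SDK's actual classes or functions, and error handling is condensed into a single check.

from dataclasses import dataclass
from typing import Any, Optional


@dataclass
class ServerSentEvent:
    # simplified stand-in for one SSE frame: an optional event name plus its decoded JSON payload
    event: Optional[str]
    data: Any


class StreamError(Exception):
    # stand-in for the SDK's APIError, used only to keep this sketch self-contained
    pass


def dispatch(sse: ServerSentEvent) -> Any:
    # error payloads abort the stream no matter which branch handles the event
    error = sse.data.get("error") if isinstance(sse.data, dict) else None
    if error:
        message = error.get("message") if isinstance(error, dict) else None
        raise StreamError(message or "An error occurred during streaming")

    # Assistants `thread.*` events carry no "event" key inside their JSON body,
    # so the event name is re-attached to the payload before it is processed
    if sse.event and sse.event.startswith("thread."):
        return {"data": sse.data, "event": sse.event}

    # every other event type is processed from its JSON body as-is
    return sse.data


if __name__ == "__main__":
    print(dispatch(ServerSentEvent(event="thread.message.delta", data={"id": "msg_123"})))
    print(dispatch(ServerSentEvent(event="response.output_text.delta", data={"delta": "Hi"})))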