Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/openai/_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ def __init__(
self.websocket_base_url = websocket_base_url

if base_url is None:
base_url = os.environ.get("OPENAI_BASE_URL")
base_url = os.environ.get("OPENAI_BASE_URL") or None
if base_url is None:
base_url = f"https://api.openai.com/v1"

Expand Down Expand Up @@ -466,7 +466,7 @@ def __init__(
self.websocket_base_url = websocket_base_url

if base_url is None:
base_url = os.environ.get("OPENAI_BASE_URL")
base_url = os.environ.get("OPENAI_BASE_URL") or None
if base_url is None:
base_url = f"https://api.openai.com/v1"

Expand Down
8 changes: 4 additions & 4 deletions src/openai/_streaming.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def __stream__(self) -> Iterator[_T]:
if sse.event and sse.event.startswith("thread."):
data = sse.json()

if sse.event == "error" and is_mapping(data) and data.get("error"):
if sse.event == "error" and is_mapping(data) and (data.get("error") or data.get("message")):
message = None
error = data.get("error")
if is_mapping(error):
Expand All @@ -80,7 +80,7 @@ def __stream__(self) -> Iterator[_T]:
yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
else:
data = sse.json()
if is_mapping(data) and data.get("error"):
if is_mapping(data) and (data.get("error") or data.get("message")):
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P1: Avoid assuming `error` exists for top-level `message` events

This branch now triggers when a stream payload has only a top-level message, but the exception construction still reads data["error"]; for spec-compliant error events without an error object, this raises KeyError and bypasses the intended APIError path. In practice, streaming failures can still crash with an internal key lookup error instead of returning a usable API exception (the same pattern is duplicated in the async path).

Useful? React with 👍 / 👎.

message = None
error = data.get("error")
if is_mapping(error):
Expand Down Expand Up @@ -165,7 +165,7 @@ async def __stream__(self) -> AsyncIterator[_T]:
if sse.event and sse.event.startswith("thread."):
data = sse.json()

if sse.event == "error" and is_mapping(data) and data.get("error"):
if sse.event == "error" and is_mapping(data) and (data.get("error") or data.get("message")):
message = None
error = data.get("error")
if is_mapping(error):
Expand All @@ -182,7 +182,7 @@ async def __stream__(self) -> AsyncIterator[_T]:
yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
else:
data = sse.json()
if is_mapping(data) and data.get("error"):
if is_mapping(data) and (data.get("error") or data.get("message")):
message = None
error = data.get("error")
if is_mapping(error):
Expand Down
2 changes: 1 addition & 1 deletion src/openai/types/responses/response_text_delta_event.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ class ResponseTextDeltaEvent(BaseModel):
item_id: str
"""The ID of the output item that the text delta was added to."""

logprobs: List[Logprob]
logprobs: Optional[List[Logprob]] = None
"""The log probabilities of the tokens in the delta."""

output_index: int
Expand Down