Skip to content

Commit b5a7071

Browse files
committed
fix(openai): guard fallback stream finalization
1 parent 514171b commit b5a7071

2 files changed

Lines changed: 84 additions & 0 deletions

File tree

langfuse/openai.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1195,6 +1195,7 @@ def __init__(
11951195
self.response = response
11961196
self.generation = generation
11971197
self.completion_start_time: Optional[datetime] = None
1198+
self._is_finalized = False
11981199

11991200
def __iter__(self) -> Any:
12001201
try:
@@ -1230,6 +1231,10 @@ def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
12301231
pass
12311232

12321233
def _finalize(self) -> None:
1234+
if self._is_finalized:
1235+
return
1236+
1237+
self._is_finalized = True
12331238
_finalize_stream_response(
12341239
resource=self.resource,
12351240
items=self.items,
@@ -1252,6 +1257,7 @@ def __init__(
12521257
self.response = response
12531258
self.generation = generation
12541259
self.completion_start_time: Optional[datetime] = None
1260+
self._is_finalized = False
12551261

12561262
async def __aiter__(self) -> Any:
12571263
try:
@@ -1287,6 +1293,10 @@ async def __aexit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None
12871293
pass
12881294

12891295
async def _finalize(self) -> None:
1296+
if self._is_finalized:
1297+
return
1298+
1299+
self._is_finalized = True
12901300
_finalize_stream_response(
12911301
resource=self.resource,
12921302
items=self.items,

tests/unit/test_openai.py

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33

44
import pytest
55

6+
import langfuse.openai as lf_openai_module
67
from langfuse._client.attributes import LangfuseOtelSpanAttributes
78
from langfuse.openai import openai as lf_openai
89

@@ -39,6 +40,17 @@ async def _stream(self, items):
3940
yield item
4041

4142

43+
class DummyGeneration:
    """Minimal stand-in for a Langfuse generation object.

    Records how many times ``end`` is invoked so tests can assert that
    stream finalization happens exactly once.
    """

    def __init__(self) -> None:
        # Count of end() invocations observed so far.
        self.end_calls = 0

    def update(self, **kwargs):
        """Accept and ignore any update payload; return self for chaining."""
        return self

    def end(self) -> None:
        """Record one finalization call."""
        self.end_calls += 1
52+
53+
4254
def _make_chat_stream_chunks():
4355
usage = SimpleNamespace(prompt_tokens=3, completion_tokens=1, total_tokens=4)
4456

@@ -76,6 +88,24 @@ def _make_chat_stream_chunks():
7688
]
7789

7890

91+
def _make_single_chunk_stream():
92+
return SimpleNamespace(
93+
model="gpt-4o-mini",
94+
choices=[
95+
SimpleNamespace(
96+
delta=SimpleNamespace(
97+
role="assistant",
98+
content="2",
99+
function_call=None,
100+
tool_calls=None,
101+
),
102+
finish_reason="stop",
103+
)
104+
],
105+
usage=None,
106+
)
107+
108+
79109
def test_chat_completion_exports_generation_span(
80110
langfuse_memory_client, get_span, json_attr
81111
):
@@ -439,6 +469,50 @@ async def test_openai_async_stream_supports_anext(
439469
}
440470

441471

472+
def test_fallback_sync_stream_finalizes_once():
    """Draining a fallback sync stream must end the generation exactly once,
    even when iteration is attempted again after exhaustion."""
    dummy_generation = DummyGeneration()
    fake_resource = SimpleNamespace(object="Completions", type="chat")

    def one_chunk_source():
        # A bare generator stands in for the OpenAI fallback stream.
        yield _make_single_chunk_stream()

    stream = lf_openai_module.LangfuseResponseGeneratorSync(
        resource=fake_resource,
        response=one_chunk_source(),
        generation=dummy_generation,
    )

    # Drain the wrapper, then confirm it is truly exhausted.
    for _ in stream:
        pass
    with pytest.raises(StopIteration):
        next(stream)

    # Finalization must have ended the generation once and only once.
    assert dummy_generation.end_calls == 1
491+
492+
493+
@pytest.mark.asyncio
async def test_fallback_async_stream_finalizes_once():
    """Async counterpart: draining the fallback stream ends the generation once,
    and a further __anext__ raises instead of re-finalizing."""
    dummy_generation = DummyGeneration()
    fake_resource = SimpleNamespace(object="Completions", type="chat")

    async def one_chunk_source():
        # Async generator standing in for the OpenAI fallback stream.
        yield _make_single_chunk_stream()

    stream = lf_openai_module.LangfuseResponseGeneratorAsync(
        resource=fake_resource,
        response=one_chunk_source(),
        generation=dummy_generation,
    )

    # Drain the wrapper, then verify exhaustion raises.
    async for _ in stream:
        pass
    with pytest.raises(StopAsyncIteration):
        await stream.__anext__()

    # Finalization must have ended the generation once and only once.
    assert dummy_generation.end_calls == 1
514+
515+
442516
def test_embedding_exports_dimensions_and_count(
443517
langfuse_memory_client, get_span, json_attr
444518
):

0 commit comments

Comments (0)