Skip to content

Commit cb29768

Browse files
fix(client): close streams without requiring full consumption
1 parent 29874b0 commit cb29768

File tree

1 file changed

+4
-6
lines changed

1 file changed

+4
-6
lines changed

src/llama_api_client/_streaming.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -64,9 +64,8 @@ def __stream__(self) -> Iterator[_T]:
                 )
             yield process_data(data=sse.json(), cast_to=cast_to, response=response)

-        # Ensure the entire stream is consumed
-        for _sse in iterator:
-            ...
+        # As we might not fully consume the response stream, we need to close it explicitly
+        response.close()

     def __enter__(self) -> Self:
         return self
@@ -134,9 +133,8 @@ async def __stream__(self) -> AsyncIterator[_T]:
                 )
             yield process_data(data=sse.json(), cast_to=cast_to, response=response)

-        # Ensure the entire stream is consumed
-        async for _sse in iterator:
-            ...
+        # As we might not fully consume the response stream, we need to close it explicitly
+        await response.aclose()

     async def __aenter__(self) -> Self:
         return self

0 commit comments

Comments
 (0)