From 87474f7b2c72960257511dcada096718cef64a9d Mon Sep 17 00:00:00 2001
From: lvyb
Date: Wed, 12 Mar 2025 16:57:51 +0800
Subject: [PATCH] fix stream

---
 lightrag/llm/openai.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/lightrag/llm/openai.py b/lightrag/llm/openai.py
index 70aa0ceb..d2174a67 100644
--- a/lightrag/llm/openai.py
+++ b/lightrag/llm/openai.py
@@ -123,18 +123,21 @@ async def openai_complete_if_cache(
 
         async def inner():
             try:
+                _content = ''
                 async for chunk in response:
                     content = chunk.choices[0].delta.content
                     if content is None:
                         continue
                     if r"\u" in content:
                         content = safe_unicode_decode(content.encode("utf-8"))
-                    yield content
+                    _content += content
+                return _content
             except Exception as e:
                 logger.error(f"Error in stream response: {str(e)}")
                 raise
 
-        return inner()
+        response_content = await inner()
+        return response_content
     else:
         if (
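
For context, the hunk above turns the streaming branch's async generator (which yielded each delta) into a coroutine that joins the deltas and returns the complete string, so openai_complete_if_cache now returns a plain str in this branch as well. Below is a minimal, self-contained sketch of that accumulation pattern; fake_stream, collect, and demo are hypothetical names used only for illustration and are not part of LightRAG or the OpenAI SDK.

    import asyncio


    async def fake_stream():
        # Stand-in for the async iterator of chat-completion chunks; None mimics
        # chunks whose delta carries no content.
        for piece in ["Hel", "lo", None, ", wor", "ld"]:
            yield piece


    async def collect(response):
        # Mirrors the patched inner(): skip empty deltas, accumulate the rest,
        # and return one complete string instead of yielding pieces.
        _content = ""
        async for content in response:
            if content is None:
                continue
            _content += content
        return _content


    async def demo():
        text = await collect(fake_stream())
        print(text)  # prints: Hello, world


    asyncio.run(demo())

Callers that previously iterated the returned generator would, after this patch, simply await the call and receive the assembled string.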