diff --git a/lightrag/llm/openai.py b/lightrag/llm/openai.py
index 70aa0ceb..d2174a67 100644
--- a/lightrag/llm/openai.py
+++ b/lightrag/llm/openai.py
@@ -123,18 +123,21 @@ async def openai_complete_if_cache(
 
         async def inner():
             try:
+                _content = ''
                 async for chunk in response:
                     content = chunk.choices[0].delta.content
                     if content is None:
                         continue
                     if r"\u" in content:
                         content = safe_unicode_decode(content.encode("utf-8"))
-                    yield content
+                    _content += content
+                return _content
             except Exception as e:
                 logger.error(f"Error in stream response: {str(e)}")
                 raise
 
-        return inner()
+        response_content = await inner()
+        return response_content
     else:
         if (
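
The patch changes `inner()` from an async generator that yields each streamed delta into a coroutine that accumulates the deltas and returns the full message, which the caller then awaits. Below is a minimal, self-contained sketch of that accumulation pattern, assuming a stand-in async stream; `fake_stream`, `collect`, and the chunk shape are hypothetical and are not part of lightrag or the openai package.

import asyncio


async def fake_stream():
    # Hypothetical stand-in for `response` (an async iterator of streamed deltas).
    for piece in ["Hello", ", ", "world", None, "!"]:
        yield piece


async def collect(stream) -> str:
    # Mirrors the new `inner()`: skip empty deltas, concatenate the rest,
    # and return the complete text once the stream is exhausted.
    content = ""
    async for piece in stream:
        if piece is None:
            continue
        content += piece
    return content


if __name__ == "__main__":
    print(asyncio.run(collect(fake_stream())))  # -> "Hello, world!"

The design consequence is that the streaming branch no longer exposes chunks incrementally: callers receive a single string only after the stream has been fully consumed.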