Improve error handling and response consistency in streaming endpoints

• Add error message forwarding to client
• Handle stream cancellations gracefully
• Add logging for stream errors
• Ensure clean stream termination
• Add try/except in OpenAI streaming
This commit is contained in:
yangdx
2025-02-05 10:44:48 +08:00
parent ff40e61fad
commit 24effb127d
2 changed files with 39 additions and 17 deletions

View File

@@ -125,13 +125,17 @@ async def openai_complete_if_cache(
if hasattr(response, "__aiter__"):
async def inner():
async for chunk in response:
content = chunk.choices[0].delta.content
if content is None:
continue
if r"\u" in content:
content = safe_unicode_decode(content.encode("utf-8"))
yield content
try:
async for chunk in response:
content = chunk.choices[0].delta.content
if content is None:
continue
if r"\u" in content:
content = safe_unicode_decode(content.encode("utf-8"))
yield content
except Exception as e:
logger.error(f"Error in stream response: {str(e)}")
raise
return inner()
else: