From 56f82bdcd533afbdb34027c8eb7a537acd38dfc9 Mon Sep 17 00:00:00 2001 From: yangdx Date: Mon, 12 May 2025 17:37:28 +0800 Subject: [PATCH] Ensure OpenAI connection is closed after the streaming response has finished --- lightrag/llm/openai.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/lightrag/llm/openai.py b/lightrag/llm/openai.py index 690ac3f3..57f016cf 100644 --- a/lightrag/llm/openai.py +++ b/lightrag/llm/openai.py @@ -264,8 +264,17 @@ async def openai_complete_if_cache( logger.warning( f"Failed to close stream response in finally block: {close_error}" ) - # Note: We don't close the client here for streaming responses - # The client will be closed by the caller after streaming is complete + + # Close the OpenAI client here to prevent resource leaks, since the caller does not close it + try: + await openai_async_client.close() + logger.debug( + "Successfully closed OpenAI client for streaming response" + ) + except Exception as client_close_error: + logger.warning( + f"Failed to close OpenAI client in streaming finally block: {client_close_error}" + ) return inner()