From 290a4d5ec02111b66b923f8d3f1bc62168ec8f7e Mon Sep 17 00:00:00 2001
From: yangdx
Date: Thu, 6 Feb 2025 16:24:02 +0800
Subject: [PATCH] Fix linting

---
 lightrag/llm/openai.py | 24 ++++++++++++++++++++----
 1 file changed, 20 insertions(+), 4 deletions(-)

diff --git a/lightrag/llm/openai.py b/lightrag/llm/openai.py
index e0ed5fa8..4ba06d2a 100644
--- a/lightrag/llm/openai.py
+++ b/lightrag/llm/openai.py
@@ -148,7 +148,11 @@ async def openai_complete_if_cache(
 
 
 async def openai_complete(
-    prompt, system_prompt=None, history_messages=None, keyword_extraction=False, **kwargs
+    prompt,
+    system_prompt=None,
+    history_messages=None,
+    keyword_extraction=False,
+    **kwargs,
 ) -> Union[str, AsyncIterator[str]]:
     if history_messages is None:
         history_messages = []
@@ -166,7 +170,11 @@ async def openai_complete(
 
 
 async def gpt_4o_complete(
-    prompt, system_prompt=None, history_messages=None, keyword_extraction=False, **kwargs
+    prompt,
+    system_prompt=None,
+    history_messages=None,
+    keyword_extraction=False,
+    **kwargs,
 ) -> str:
     if history_messages is None:
         history_messages = []
@@ -183,7 +191,11 @@ async def gpt_4o_complete(
 
 
 async def gpt_4o_mini_complete(
-    prompt, system_prompt=None, history_messages=None, keyword_extraction=False, **kwargs
+    prompt,
+    system_prompt=None,
+    history_messages=None,
+    keyword_extraction=False,
+    **kwargs,
 ) -> str:
     if history_messages is None:
         history_messages = []
@@ -200,7 +212,11 @@ async def gpt_4o_mini_complete(
 
 
 async def nvidia_openai_complete(
-    prompt, system_prompt=None, history_messages=None, keyword_extraction=False, **kwargs
+    prompt,
+    system_prompt=None,
+    history_messages=None,
+    keyword_extraction=False,
+    **kwargs,
 ) -> str:
     if history_messages is None:
         history_messages = []
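
For reference, a minimal calling sketch (not part of the patch) illustrating that the reformatted signatures remain call-compatible; the parameters come from the diff above, while the prompt text and the assumption that lightrag.llm.openai is importable and OPENAI_API_KEY is set in the environment are illustrative only:

    import asyncio

    from lightrag.llm.openai import gpt_4o_mini_complete

    async def main() -> None:
        # Same parameters as before the lint fix; only the signature layout changed.
        answer = await gpt_4o_mini_complete(
            "Summarize what this patch changes.",
            system_prompt="You are a concise assistant.",
            history_messages=[],  # defaults to None and is replaced with [] inside the function
        )
        print(answer)

    asyncio.run(main())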