fix: Fix potential mutable default parameter issue
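
The defaults were changed from history_messages=[] to history_messages=None because a mutable default is evaluated once, at function definition time, and is then shared by every call that relies on it; any in-place mutation (for example appending conversation turns) would leak state between unrelated requests. A minimal standalone sketch of the pitfall and of the None-sentinel pattern applied in this commit (hypothetical functions, not code from this repository):

    # Hypothetical illustration: the [] default is created once and reused.
    def buggy(history_messages=[]):
        history_messages.append("turn")
        return history_messages

    # The pattern used in this commit: None sentinel, allocate a fresh list per call.
    def fixed(history_messages=None):
        if history_messages is None:
            history_messages = []
        history_messages.append("turn")
        return history_messages

    print(buggy())  # ['turn']
    print(buggy())  # ['turn', 'turn']  <- state leaked across calls
    print(fixed())  # ['turn']
    print(fixed())  # ['turn']          <- each call gets its own list
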
@@ -752,10 +752,12 @@ def create_app(args):
     async def openai_alike_model_complete(
         prompt,
         system_prompt=None,
-        history_messages=[],
+        history_messages=None,
         keyword_extraction=False,
         **kwargs,
     ) -> str:
+        if history_messages is None:
+            history_messages = []
         return await openai_complete_if_cache(
             args.llm_model,
             prompt,
@@ -769,10 +771,12 @@ def create_app(args):
     async def azure_openai_model_complete(
         prompt,
         system_prompt=None,
-        history_messages=[],
+        history_messages=None,
         keyword_extraction=False,
         **kwargs,
     ) -> str:
+        if history_messages is None:
+            history_messages = []
         return await azure_openai_complete_if_cache(
             args.llm_model,
             prompt,
@@ -89,11 +89,13 @@ async def openai_complete_if_cache(
     model,
     prompt,
     system_prompt=None,
-    history_messages=[],
+    history_messages=None,
     base_url=None,
     api_key=None,
     **kwargs,
 ) -> str:
+    if history_messages is None:
+        history_messages = []
     if api_key:
         os.environ["OPENAI_API_KEY"] = api_key
 
@@ -146,8 +148,10 @@ async def openai_complete_if_cache(
 
 
 async def openai_complete(
-    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
+    prompt, system_prompt=None, history_messages=None, keyword_extraction=False, **kwargs
 ) -> Union[str, AsyncIterator[str]]:
+    if history_messages is None:
+        history_messages = []
     keyword_extraction = kwargs.pop("keyword_extraction", None)
     if keyword_extraction:
         kwargs["response_format"] = "json"
@@ -162,8 +166,10 @@ async def openai_complete(
 
 
 async def gpt_4o_complete(
-    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
+    prompt, system_prompt=None, history_messages=None, keyword_extraction=False, **kwargs
 ) -> str:
+    if history_messages is None:
+        history_messages = []
     keyword_extraction = kwargs.pop("keyword_extraction", None)
     if keyword_extraction:
         kwargs["response_format"] = GPTKeywordExtractionFormat
@@ -177,8 +183,10 @@ async def gpt_4o_complete(
 
 
 async def gpt_4o_mini_complete(
-    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
+    prompt, system_prompt=None, history_messages=None, keyword_extraction=False, **kwargs
 ) -> str:
+    if history_messages is None:
+        history_messages = []
     keyword_extraction = kwargs.pop("keyword_extraction", None)
     if keyword_extraction:
         kwargs["response_format"] = GPTKeywordExtractionFormat
@@ -192,8 +200,10 @@ async def gpt_4o_mini_complete(
 
 
 async def nvidia_openai_complete(
-    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
+    prompt, system_prompt=None, history_messages=None, keyword_extraction=False, **kwargs
 ) -> str:
+    if history_messages is None:
+        history_messages = []
     keyword_extraction = kwargs.pop("keyword_extraction", None)
     result = await openai_complete_if_cache(
         "nvidia/llama-3.1-nemotron-70b-instruct",  # context length 128k