Fix: unexpected keyword argument error.

This commit is contained in:
Larfii
2024-12-05 14:11:43 +08:00
parent c352eb6d84
commit c31a2654f9
8 changed files with 8 additions and 8 deletions

View File

@@ -33,7 +33,7 @@ if not os.path.exists(WORKING_DIR):
async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
return await openai_complete_if_cache(
LLM_MODEL,

View File

@@ -50,7 +50,7 @@ if not os.path.exists(WORKING_DIR):
async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
return await openai_complete_if_cache(
LLM_MODEL,

View File

@@ -30,7 +30,7 @@ os.mkdir(WORKING_DIR)
async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
client = AzureOpenAI(
api_key=AZURE_OPENAI_API_KEY,

View File

@@ -12,7 +12,7 @@ if not os.path.exists(WORKING_DIR):
async def lmdeploy_model_complete(
-    prompt=None, system_prompt=None, history_messages=[], **kwargs
+    prompt=None, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
model_name = kwargs["hashing_kv"].global_config["llm_model_name"]
return await lmdeploy_model_if_cache(

View File

@@ -12,7 +12,7 @@ if not os.path.exists(WORKING_DIR):
async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
return await openai_complete_if_cache(
"solar-mini",

View File

@@ -27,7 +27,7 @@ if not os.path.exists(WORKING_DIR):
async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
return await openai_complete_if_cache(
CHATMODEL,

View File

@@ -12,7 +12,7 @@ if not os.path.exists(WORKING_DIR):
async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
return await openai_complete_if_cache(
"Qwen/Qwen2.5-7B-Instruct",