Fix: unexpected keyword argument error.
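The example LLM wrappers accept arbitrary **kwargs and forward them to the underlying completion call, so a keyword_extraction flag passed in by the library leaks through to a backend that rejects it. Naming the parameter explicitly, with a False default, absorbs it in the wrapper instead. A minimal, self-contained sketch of that failure mode and the fix; backend() is a hypothetical stand-in for the real completion call:

    import asyncio

    async def backend(prompt, temperature=0.0):
        # stand-in for the real client call, which rejects unknown kwargs
        return f"echo: {prompt}"

    async def wrapper_old(prompt, system_prompt=None, history_messages=[], **kwargs):
        # keyword_extraction stays inside **kwargs and is forwarded onward
        return await backend(prompt, **kwargs)

    async def wrapper_new(
        prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
    ):
        # the flag is bound here and never reaches backend()
        return await backend(prompt, **kwargs)

    async def main():
        print(await wrapper_new("hi", keyword_extraction=True))  # echo: hi
        try:
            await wrapper_old("hi", keyword_extraction=True)
        except TypeError as err:
            print(err)  # ... got an unexpected keyword argument 'keyword_extraction'

    asyncio.run(main())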
@@ -33,7 +33,7 @@ if not os.path.exists(WORKING_DIR):
 
 
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     return await openai_complete_if_cache(
         LLM_MODEL,
@@ -50,7 +50,7 @@ if not os.path.exists(WORKING_DIR):
 
 
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     return await openai_complete_if_cache(
         LLM_MODEL,
@@ -30,7 +30,7 @@ os.mkdir(WORKING_DIR)
 
 
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     client = AzureOpenAI(
         api_key=AZURE_OPENAI_API_KEY,
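The Azure variant builds its own client rather than going through openai_complete_if_cache. For orientation only, a hedged sketch of that pattern with the openai SDK; the endpoint, API version, and deployment name are placeholders, not values from this diff:

    from openai import AzureOpenAI

    client = AzureOpenAI(
        api_key="...",                        # placeholder credentials
        api_version="2024-02-01",             # assumed version string
        azure_endpoint="https://example.openai.azure.com",
    )
    response = client.chat.completions.create(
        model="my-deployment",                # Azure deployment name, assumed
        messages=[{"role": "user", "content": "hello"}],
    )
    print(response.choices[0].message.content)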
@@ -12,7 +12,7 @@ if not os.path.exists(WORKING_DIR):
 
 
 async def lmdeploy_model_complete(
-    prompt=None, system_prompt=None, history_messages=[], **kwargs
+    prompt=None, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     model_name = kwargs["hashing_kv"].global_config["llm_model_name"]
     return await lmdeploy_model_if_cache(
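Note that the lmdeploy wrapper also pulls library-injected state out of **kwargs (a hashing_kv object whose global_config carries the model name), which is why the signatures keep **kwargs and only pin down keyword_extraction. A small illustration with a stand-in object; the model name shown is a placeholder:

    from types import SimpleNamespace

    # stand-in for the store LightRAG passes through **kwargs
    hashing_kv = SimpleNamespace(global_config={"llm_model_name": "placeholder-model"})

    def resolve_model_name(**kwargs):
        # same lookup the wrapper above performs
        return kwargs["hashing_kv"].global_config["llm_model_name"]

    print(resolve_model_name(hashing_kv=hashing_kv, keyword_extraction=False))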
@@ -12,7 +12,7 @@ if not os.path.exists(WORKING_DIR):
 
 
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     return await openai_complete_if_cache(
         "solar-mini",
@@ -27,7 +27,7 @@ if not os.path.exists(WORKING_DIR):
 
 
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     return await openai_complete_if_cache(
         CHATMODEL,
@@ -12,7 +12,7 @@ if not os.path.exists(WORKING_DIR):
 
 
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     return await openai_complete_if_cache(
         "Qwen/Qwen2.5-7B-Instruct",
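With the fixed signatures, the wrappers plug into LightRAG as before; the new parameter only matters when the library sets it. A hedged wiring sketch, with a placeholder working directory and the embedding setup omitted as it is in these excerpts:

    from lightrag import LightRAG, QueryParam

    rag = LightRAG(
        working_dir="./workdir",          # placeholder path
        llm_model_func=llm_model_func,    # one of the fixed wrappers above
    )
    print(rag.query("What are the top themes?", param=QueryParam(mode="hybrid")))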