Fix: unexpected keyword argument error.

Larfii
2024-12-05 14:11:43 +08:00
parent c352eb6d84
commit c31a2654f9
8 changed files with 8 additions and 8 deletions
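Each file gets the same one-line change: the model function signature gains an explicit keyword_extraction=False parameter. Because these functions forward **kwargs into the underlying completion call, a keyword_extraction flag passed in by LightRAG would otherwise travel downstream and trigger the unexpected keyword argument error; naming the parameter captures it before it reaches that call. A minimal, runnable sketch of the pattern (downstream_complete is a hypothetical stand-in for openai_complete_if_cache, not the real API):

```python
import asyncio


async def downstream_complete(model: str, prompt: str, temperature: float = 0.0) -> str:
    # Hypothetical stand-in for openai_complete_if_cache: like the real
    # completion APIs, it only accepts keyword arguments it knows about.
    return f"[{model}] {prompt}"


# Before the fix: keyword_extraction lands in **kwargs and is forwarded
# downstream, where it raises the "unexpected keyword argument" TypeError.
async def llm_model_func_before(
    prompt, system_prompt=None, history_messages=[], **kwargs
) -> str:
    return await downstream_complete("solar-mini", prompt, **kwargs)


# After the fix: naming the parameter strips it from **kwargs, so the
# downstream call sees only arguments it understands.
async def llm_model_func_after(
    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
    return await downstream_complete("solar-mini", prompt, **kwargs)


async def main():
    try:
        await llm_model_func_before("hello", keyword_extraction=True)
    except TypeError as e:
        print("before the fix:", e)
    print("after the fix:", await llm_model_func_after("hello", keyword_extraction=True))


asyncio.run(main())
```

The diffs below apply this signature change to each of the eight example files.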

View File

@@ -114,7 +114,7 @@ print(rag.query("What are the top themes in this story?", param=QueryParam(mode=
 * LightRAG also supports Open AI-like chat/embeddings APIs:
 ```python
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     return await openai_complete_if_cache(
         "solar-mini",

View File

@@ -33,7 +33,7 @@ if not os.path.exists(WORKING_DIR):
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     return await openai_complete_if_cache(
         LLM_MODEL,

View File

@@ -50,7 +50,7 @@ if not os.path.exists(WORKING_DIR):
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     return await openai_complete_if_cache(
         LLM_MODEL,

View File

@@ -30,7 +30,7 @@ os.mkdir(WORKING_DIR)
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     client = AzureOpenAI(
         api_key=AZURE_OPENAI_API_KEY,

View File

@@ -12,7 +12,7 @@ if not os.path.exists(WORKING_DIR):
 async def lmdeploy_model_complete(
-    prompt=None, system_prompt=None, history_messages=[], **kwargs
+    prompt=None, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     model_name = kwargs["hashing_kv"].global_config["llm_model_name"]
     return await lmdeploy_model_if_cache(

View File

@@ -12,7 +12,7 @@ if not os.path.exists(WORKING_DIR):
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     return await openai_complete_if_cache(
         "solar-mini",

View File

@@ -27,7 +27,7 @@ if not os.path.exists(WORKING_DIR):
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     return await openai_complete_if_cache(
         CHATMODEL,

View File

@@ -12,7 +12,7 @@ if not os.path.exists(WORKING_DIR):
 async def llm_model_func(
-    prompt, system_prompt=None, history_messages=[], **kwargs
+    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
 ) -> str:
     return await openai_complete_if_cache(
         "Qwen/Qwen2.5-7B-Instruct",