Merge pull request #658 from MdNazishArmanShorthillsAI/main

Query with your custom prompts
Author: zrguo
Date: 2025-01-27 15:12:14 +08:00
Committed by: GitHub

3 changed files with 39 additions and 4 deletions

@@ -892,11 +892,13 @@ class LightRAG:
         if update_storage:
             await self._insert_done()
 
-    def query(self, query: str, param: QueryParam = QueryParam()):
+    def query(self, query: str, prompt: str = "", param: QueryParam = QueryParam()):
         loop = always_get_an_event_loop()
-        return loop.run_until_complete(self.aquery(query, param))
+        return loop.run_until_complete(self.aquery(query, prompt, param))
 
-    async def aquery(self, query: str, param: QueryParam = QueryParam()):
+    async def aquery(
+        self, query: str, prompt: str = "", param: QueryParam = QueryParam()
+    ):
         if param.mode in ["local", "global", "hybrid"]:
             response = await kg_query(
                 query,
@@ -914,6 +916,7 @@ class LightRAG:
                     global_config=asdict(self),
                     embedding_func=None,
                 ),
+                prompt=prompt,
             )
         elif param.mode == "naive":
             response = await naive_query(
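
The net effect: `query` and `aquery` now accept an optional `prompt` string that, when non-empty, replaces the default `rag_response` system-prompt template. A minimal usage sketch follows; the `LightRAG` constructor arguments are placeholders and not part of this diff.

```python
from lightrag import LightRAG, QueryParam

# Placeholder setup: working_dir (and whatever LLM/embedding arguments
# your deployment needs) are illustrative, not part of this commit.
rag = LightRAG(working_dir="./rag_storage")

# A custom template should keep the placeholders that kg_query fills in
# via str.format(), e.g. {context_data} and {response_type}.
custom_prompt = (
    "You are a terse assistant. Answer strictly from the data below.\n\n"
    "---Data---\n{context_data}\n\n"
    "Respond as: {response_type}"
)

# Passing prompt="" (the default) falls back to PROMPTS["rag_response"].
answer = rag.query(
    "Which entities are mentioned most often?",
    prompt=custom_prompt,
    param=QueryParam(mode="hybrid"),
)
print(answer)
```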

@@ -574,6 +574,7 @@ async def kg_query(
     query_param: QueryParam,
     global_config: dict,
     hashing_kv: BaseKVStorage = None,
+    prompt: str = "",
 ) -> str:
     # Handle cache
     use_model_func = global_config["llm_model_func"]
@@ -637,7 +638,7 @@ async def kg_query(
             query_param.conversation_history, query_param.history_turns
         )
 
-    sys_prompt_temp = PROMPTS["rag_response"]
+    sys_prompt_temp = prompt if prompt else PROMPTS["rag_response"]
     sys_prompt = sys_prompt_temp.format(
         context_data=context,
         response_type=query_param.response_type,
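
Because the chosen template goes straight through `str.format`, a custom prompt has a small contract worth noting. The snippet below is a plain-Python illustration of that contract, not LightRAG code.

```python
# Keyword arguments that str.format supplies but the template never
# names are simply ignored, so a template may omit {response_type}.
ok = "Context:\n{context_data}"
ok.format(context_data="...", response_type="bullets")  # fine

# The reverse is an error: a placeholder kg_query does not supply
# raises KeyError at query time.
bad = "Context: {context_data} {made_up_field}"
# bad.format(context_data="...")  # KeyError: 'made_up_field'

# Literal braces (e.g. JSON examples) must be doubled so str.format
# does not parse them as replacement fields.
json_safe = 'Reply as JSON: {{"answer": "..."}}\nData: {context_data}'
```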