Merge pull request #658 from MdNazishArmanShorthillsAI/main
Query with your custom prompts
@@ -892,11 +892,13 @@ class LightRAG:
         if update_storage:
             await self._insert_done()

-    def query(self, query: str, param: QueryParam = QueryParam()):
+    def query(self, query: str, prompt: str = "", param: QueryParam = QueryParam()):
         loop = always_get_an_event_loop()
-        return loop.run_until_complete(self.aquery(query, param))
+        return loop.run_until_complete(self.aquery(query, prompt, param))

-    async def aquery(self, query: str, param: QueryParam = QueryParam()):
+    async def aquery(
+        self, query: str, prompt: str = "", param: QueryParam = QueryParam()
+    ):
         if param.mode in ["local", "global", "hybrid"]:
             response = await kg_query(
                 query,
@@ -914,6 +916,7 @@ class LightRAG:
                     global_config=asdict(self),
                     embedding_func=None,
                 ),
+                prompt=prompt,
             )
         elif param.mode == "naive":
             response = await naive_query(
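The new parameter threads a caller-supplied system-prompt template through query()/aquery() and into kg_query (local, global and hybrid modes). A minimal usage sketch, assuming an already-built index and default constructor settings; working_dir and the example prompt text are placeholders for illustration, not part of this change:

    from lightrag import LightRAG, QueryParam

    # Constructor arguments besides working_dir omitted; library defaults assumed.
    rag = LightRAG(working_dir="./my_rag_store")
    # Assumes documents were already indexed, e.g. via rag.insert(...).

    custom_prompt = (
        "You are a terse assistant. Answer only from the knowledge below.\n"
        "---Knowledge---\n{context_data}\n"
        "Respond as: {response_type}"
    )

    # Leaving prompt as the default "" keeps the built-in PROMPTS["rag_response"] template.
    answer = rag.query(
        "What are the main themes in the corpus?",
        prompt=custom_prompt,
        param=QueryParam(mode="hybrid"),
    )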
@@ -574,6 +574,7 @@ async def kg_query(
     query_param: QueryParam,
     global_config: dict,
     hashing_kv: BaseKVStorage = None,
+    prompt: str = "",
 ) -> str:
     # Handle cache
     use_model_func = global_config["llm_model_func"]
@@ -637,7 +638,7 @@ async def kg_query(
         query_param.conversation_history, query_param.history_turns
     )

-    sys_prompt_temp = PROMPTS["rag_response"]
+    sys_prompt_temp = prompt if prompt else PROMPTS["rag_response"]
     sys_prompt = sys_prompt_temp.format(
         context_data=context,
         response_type=query_param.response_type,
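Whichever template is selected is passed straight to str.format(), so a custom prompt should reference only placeholders that kg_query supplies; context_data and response_type are visible in this hunk, and further keyword arguments may follow beyond it. A standalone sketch of the fallback behaviour, with DEFAULT_TEMPLATE standing in for PROMPTS["rag_response"]:

    # DEFAULT_TEMPLATE is an illustrative stand-in, not the library's actual template.
    DEFAULT_TEMPLATE = "---Context---\n{context_data}\n\nAnswer format: {response_type}"

    def build_sys_prompt(prompt: str, context: str, response_type: str) -> str:
        # An empty string is falsy, so a blank custom prompt falls back to the default.
        template = prompt if prompt else DEFAULT_TEMPLATE
        # Unused keyword arguments are ignored by str.format, so a custom template
        # may reference fewer placeholders than are passed here.
        return template.format(context_data=context, response_type=response_type)

    # Default path:
    print(build_sys_prompt("", "facts...", "Multiple Paragraphs"))
    # Custom path:
    print(build_sys_prompt("Use only this: {context_data}", "facts...", "Short Answer"))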