Query with your custom prompts

MdNazishArmanShorthillsAI
2025-01-27 10:32:22 +05:30
parent 2cd4fc0258
commit f0b2024667
3 changed files with 39 additions and 4 deletions


@@ -574,6 +574,7 @@ async def kg_query(
     query_param: QueryParam,
     global_config: dict,
     hashing_kv: BaseKVStorage = None,
+    prompt: str = "",
 ) -> str:
     # Handle cache
     use_model_func = global_config["llm_model_func"]
@@ -637,7 +638,7 @@ async def kg_query(
         query_param.conversation_history, query_param.history_turns
     )
-    sys_prompt_temp = PROMPTS["rag_response"]
+    sys_prompt_temp = prompt if prompt else PROMPTS["rag_response"]
     sys_prompt = sys_prompt_temp.format(
         context_data=context,
         response_type=query_param.response_type,
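
The effect of the change: a non-empty `prompt` argument replaces the default `PROMPTS["rag_response"]` template, while the empty-string default keeps the original behaviour. Below is a minimal, hypothetical usage sketch (not part of this commit). It assumes a custom template exposes the `{context_data}` and `{response_type}` placeholders that the visible `sys_prompt_temp.format(...)` call fills in; the other arguments of `kg_query` are not shown in this hunk and are only indicated schematically.

```python
# Hypothetical sketch of a template for the new `prompt` parameter.
# The placeholders mirror the keyword arguments visible in the hunk above
# (context_data, response_type); str.format() silently ignores any extra
# keyword the template does not reference.
CUSTOM_RAG_PROMPT = """You are a cautious assistant.
Answer strictly from the knowledge-graph context provided below.

---Context---
{context_data}

Answer in the requested format: {response_type}
If the context does not contain the answer, say so instead of guessing.
"""

# Passing the template overrides PROMPTS["rag_response"]; leaving `prompt`
# as the default empty string falls back to the built-in template.
# The remaining arguments of kg_query are omitted because they are not
# part of this hunk.
# response = await kg_query(
#     ...,
#     query_param=query_param,
#     global_config=global_config,
#     hashing_kv=hashing_kv,
#     prompt=CUSTOM_RAG_PROMPT,
# )
```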