From ae1c9f8d108d2856be77f6c1ad8a7ae354567bf7 Mon Sep 17 00:00:00 2001 From: yangdx Date: Thu, 8 May 2025 03:38:47 +0800 Subject: [PATCH] Add user_prompt to QueryParam --- lightrag/base.py | 5 +++++ lightrag/operate.py | 8 ++++++++ lightrag/prompt.py | 11 +++++++++-- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/lightrag/base.py b/lightrag/base.py index b1f63fa5..e8994bba 100644 --- a/lightrag/base.py +++ b/lightrag/base.py @@ -93,6 +93,11 @@ class QueryParam: This allows using different models for different query modes. """ + user_prompt: str | None = None + """User-provided prompt for the query. + If provided, this will be used instead of the default value from the prompt template. + """ + @dataclass class StorageNameSpace(ABC): diff --git a/lightrag/operate.py b/lightrag/operate.py index 0ca3747c..9ffd6111 100644 --- a/lightrag/operate.py +++ b/lightrag/operate.py @@ -925,11 +925,14 @@ async def kg_query( query_param.conversation_history, query_param.history_turns ) + # Build system prompt + user_prompt = query_param.user_prompt if query_param.user_prompt else PROMPTS["DEFAULT_USER_PROMPT"] sys_prompt_temp = system_prompt if system_prompt else PROMPTS["rag_response"] sys_prompt = sys_prompt_temp.format( context_data=context, response_type=query_param.response_type, history=history_context, + user_prompt=user_prompt, ) if query_param.only_need_prompt: @@ -1907,11 +1910,14 @@ async def naive_query( query_param.conversation_history, query_param.history_turns ) + # Build system prompt + user_prompt = query_param.user_prompt if query_param.user_prompt else PROMPTS["DEFAULT_USER_PROMPT"] sys_prompt_temp = system_prompt if system_prompt else PROMPTS["naive_rag_response"] sys_prompt = sys_prompt_temp.format( content_data=text_units_str, response_type=query_param.response_type, history=history_context, + user_prompt=user_prompt, ) if query_param.only_need_prompt: @@ -1957,6 +1963,7 @@ async def naive_query( return response +# TODO: Deprecated, use 
user_prompt in QueryParam instead async def kg_query_with_keywords( query: str, knowledge_graph_inst: BaseGraphStorage, @@ -2079,6 +2086,7 @@ async def kg_query_with_keywords( return response +# TODO: Deprecated, use user_prompt in QueryParam instead async def query_with_keywords( query: str, prompt: str, diff --git a/lightrag/prompt.py b/lightrag/prompt.py index 5616f66a..201d2c15 100644 --- a/lightrag/prompt.py +++ b/lightrag/prompt.py @@ -12,6 +12,8 @@ PROMPTS["DEFAULT_COMPLETION_DELIMITER"] = "<|COMPLETE|>" PROMPTS["DEFAULT_ENTITY_TYPES"] = ["organization", "person", "geo", "event", "category"] +PROMPTS["DEFAULT_USER_PROMPT"] = "n/a" + PROMPTS["entity_extraction"] = """---Goal--- Given a text document that is potentially relevant to this activity and a list of entity types, identify all entities of those types from the text and all relationships among the identified entities. Use {language} as output language. @@ -224,7 +226,10 @@ When handling relationships with timestamps: - Ensure the response maintains continuity with the conversation history. - List up to 5 most important reference sources at the end under "References" section. Clearly indicating whether each source is from Knowledge Graph (KG) or Vector Data (DC), and include the file path if available, in the following format: [KG/DC] file_path - If you don't know the answer, just say so. -- Do not make anything up. Do not include information not provided by the Knowledge Base.""" +- Do not make anything up. Do not include information not provided by the Knowledge Base. +- Additional user prompt: {user_prompt} + +Response:""" PROMPTS["keywords_extraction"] = """---Role--- @@ -322,8 +327,10 @@ When handling content with timestamps: - Ensure the response maintains continuity with the conversation history. - List up to 5 most important reference sources at the end under "References" section. 
Clearly indicating each source from Document Chunks(DC), and include the file path if available, in the following format: [DC] file_path - If you don't know the answer, just say so. -- Do not include information not provided by the Document Chunks.""" +- Do not include information not provided by the Document Chunks. +- Additional user prompt: {user_prompt} + +Response:""" PROMPTS[ "similarity_check"