Added system prompt support in all modes
In `README.md`, the mode comment now lists all five query modes:

@@ -171,7 +171,7 @@ rag = LightRAG(working_dir=WORKING_DIR)
 
 # Create query parameters
 query_param = QueryParam(
-    mode="hybrid",  # or other mode: "local", "global", "hybrid"
+    mode="hybrid",  # or other mode: "local", "global", "hybrid", "mix" and "naive"
 )
 
 # Example 1: Using the default system prompt
@@ -184,11 +184,20 @@ print(response_default)
 # Example 2: Using a custom prompt
 custom_prompt = """
 You are an expert assistant in environmental science. Provide detailed and structured answers with examples.
+
+---Conversation History---
+{history}
+
+---Knowledge Base---
+{context_data}
+
+---Response Rules---
+
+- Target format and length: {response_type}
 """
 response_custom = rag.query(
     "What are the primary benefits of renewable energy?",
     param=query_param,
-    prompt=custom_prompt  # Pass the custom prompt
+    system_prompt=custom_prompt  # Pass the custom prompt
 )
 print(response_custom)
 ```
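Taken together, and assuming the `rag` and `query_param` setup from the snippet above, the two README examples reduce to the following sketch (Example 1's body is elided by the diff; the `print(response_default)` visible in the hunk header shows its shape):

```python
# Example 1: default system prompt -- omitting system_prompt falls back to
# the built-in PROMPTS["rag_response"] template.
response_default = rag.query(
    "What are the primary benefits of renewable energy?",
    param=query_param,
)
print(response_default)

# Example 2: custom system prompt -- passed via the new keyword argument.
response_custom = rag.query(
    "What are the primary benefits of renewable energy?",
    param=query_param,
    system_prompt=custom_prompt,
)
print(response_custom)
```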
In the `LightRAG` class, the synchronous `query` entry point is reflowed to one parameter per line and the keyword is renamed from `prompt` to `system_prompt`:

@@ -984,7 +984,10 @@ class LightRAG:
         await self._insert_done()
 
     def query(
-        self, query: str, param: QueryParam = QueryParam(), prompt: str | None = None
+        self,
+        query: str,
+        param: QueryParam = QueryParam(),
+        system_prompt: str | None = None,
     ) -> str | Iterator[str]:
         """
         Perform a sync query.
@@ -999,13 +1002,13 @@ class LightRAG:
         """
         loop = always_get_an_event_loop()
 
-        return loop.run_until_complete(self.aquery(query, param, prompt))  # type: ignore
+        return loop.run_until_complete(self.aquery(query, param, system_prompt))  # type: ignore
 
     async def aquery(
         self,
         query: str,
         param: QueryParam = QueryParam(),
-        prompt: str | None = None,
+        system_prompt: str | None = None,
     ) -> str | AsyncIterator[str]:
         """
         Perform an async query.
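Since the sync wrapper just forwards `system_prompt` positionally into `aquery`, code already running inside an event loop can call the async entry point directly. A minimal sketch, assuming the `rag`, `query_param`, and `custom_prompt` objects from the README example:

```python
import asyncio

async def main() -> None:
    # Same call as rag.query(...), but awaited; system_prompt is optional
    # and the built-in template is used when it is omitted.
    answer = await rag.aquery(
        "What are the primary benefits of renewable energy?",
        param=query_param,
        system_prompt=custom_prompt,
    )
    print(answer)

asyncio.run(main())
```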
Inside `aquery`, each mode's call site forwards the keyword. The kg branch:

@@ -1037,7 +1040,7 @@ class LightRAG:
                     global_config=asdict(self),
                     embedding_func=self.embedding_func,
                 ),
-                prompt=prompt,
+                system_prompt=system_prompt,
             )
         elif param.mode == "naive":
             response = await naive_query(
The naive branch:

@@ -1056,6 +1059,7 @@ class LightRAG:
                     global_config=asdict(self),
                     embedding_func=self.embedding_func,
                 ),
+                system_prompt=system_prompt,
             )
         elif param.mode == "mix":
             response = await mix_kg_vector_query(
And the mix branch:

@@ -1077,6 +1081,7 @@ class LightRAG:
                     global_config=asdict(self),
                     embedding_func=self.embedding_func,
                 ),
+                system_prompt=system_prompt,
             )
         else:
             raise ValueError(f"Unknown mode {param.mode}")
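The three call sites above thread the same optional keyword through every branch of `aquery`'s mode dispatch. A toy reduction of that shape, with stub handlers standing in for the real `kg_query`/`naive_query`/`mix_kg_vector_query` (everything here is illustrative, not LightRAG's actual code):

```python
import asyncio

# Stub handlers: only the shared keyword matters for this sketch.
async def kg_query(query: str, *, system_prompt: str | None = None) -> str:
    return f"[kg] {query} (system_prompt={system_prompt!r})"

async def naive_query(query: str, *, system_prompt: str | None = None) -> str:
    return f"[naive] {query} (system_prompt={system_prompt!r})"

async def mix_kg_vector_query(query: str, *, system_prompt: str | None = None) -> str:
    return f"[mix] {query} (system_prompt={system_prompt!r})"

async def route(mode: str, query: str, system_prompt: str | None = None) -> str:
    # Mirrors the diff: every branch forwards the same system_prompt.
    if mode in ["local", "global", "hybrid"]:
        return await kg_query(query, system_prompt=system_prompt)
    elif mode == "naive":
        return await naive_query(query, system_prompt=system_prompt)
    elif mode == "mix":
        return await mix_kg_vector_query(query, system_prompt=system_prompt)
    raise ValueError(f"Unknown mode {mode}")

print(asyncio.run(route("mix", "What is soil carbon?", system_prompt="Be terse.")))
```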
On the operator side, each handler accepts the new parameter. `kg_query`'s signature:

@@ -613,7 +613,7 @@ async def kg_query(
     query_param: QueryParam,
     global_config: dict[str, str],
     hashing_kv: BaseKVStorage | None = None,
-    prompt: str | None = None,
+    system_prompt: str | None = None,
 ) -> str:
     # Handle cache
     use_model_func = global_config["llm_model_func"]
and its template lookup gains the fallback:

@@ -677,7 +677,7 @@ async def kg_query(
         query_param.conversation_history, query_param.history_turns
     )
 
-    sys_prompt_temp = prompt if prompt else PROMPTS["rag_response"]
+    sys_prompt_temp = system_prompt if system_prompt else PROMPTS["rag_response"]
     sys_prompt = sys_prompt_temp.format(
         context_data=context,
         response_type=query_param.response_type,
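A custom `system_prompt` in kg mode is still passed through `str.format`, so it should use the same placeholders as the built-in `rag_response` template, which is exactly what the README's `custom_prompt` does with `{history}`, `{context_data}`, and `{response_type}`. A sketch of the fallback-and-format pattern; the template text and the `history` argument are assumptions, since the hunk truncates the `.format(...)` call:

```python
# Stand-in for PROMPTS["rag_response"]; the real template text differs.
DEFAULT_RAG_TEMPLATE = (
    "---Conversation History---\n{history}\n\n"
    "---Knowledge Base---\n{context_data}\n\n"
    "Target format and length: {response_type}"
)

def build_kg_sys_prompt(system_prompt: str | None, context: str,
                        response_type: str, history: str) -> str:
    # A caller-supplied template wins; otherwise use the default.
    template = system_prompt if system_prompt else DEFAULT_RAG_TEMPLATE
    # str.format ignores unused keyword arguments, but an unknown
    # {placeholder} in a custom template raises KeyError, and literal braces
    # (e.g. in JSON examples) must be escaped as {{ and }}.
    return template.format(
        context_data=context,
        response_type=response_type,
        history=history,
    )
```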
`mix_kg_vector_query`'s signature:

@@ -828,6 +828,7 @@ async def mix_kg_vector_query(
     query_param: QueryParam,
     global_config: dict[str, str],
     hashing_kv: BaseKVStorage | None = None,
+    system_prompt: str | None = None,
 ) -> str | AsyncIterator[str]:
     """
     Hybrid retrieval implementation combining knowledge graph and vector search.
and its prompt construction becomes conditional:

@@ -962,15 +963,19 @@ async def mix_kg_vector_query(
         return {"kg_context": kg_context, "vector_context": vector_context}
 
     # 5. Construct hybrid prompt
-    sys_prompt = PROMPTS["mix_rag_response"].format(
-        kg_context=kg_context
-        if kg_context
-        else "No relevant knowledge graph information found",
-        vector_context=vector_context
-        if vector_context
-        else "No relevant text information found",
-        response_type=query_param.response_type,
-        history=history_context,
+    sys_prompt = (
+        system_prompt
+        if system_prompt
+        else PROMPTS["mix_rag_response"].format(
+            kg_context=kg_context
+            if kg_context
+            else "No relevant knowledge graph information found",
+            vector_context=vector_context
+            if vector_context
+            else "No relevant text information found",
+            response_type=query_param.response_type,
+            history=history_context,
+        )
     )
 
     if query_param.only_need_prompt:
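One asymmetry worth noting in this hunk: in mix mode a caller-supplied `system_prompt` is used verbatim, because the `.format(...)` call only runs on the built-in template. Placeholders such as `{kg_context}` or `{history}` in a custom prompt are therefore not filled here, unlike the kg and naive paths. A small sketch of that behavior, with a hypothetical stand-in for `PROMPTS["mix_rag_response"]`:

```python
# Hypothetical stand-in for PROMPTS["mix_rag_response"].
MIX_TEMPLATE = (
    "---Knowledge Graph---\n{kg_context}\n\n"
    "---Document Chunks---\n{vector_context}\n\n"
    "{history}\nTarget format and length: {response_type}"
)

def build_mix_sys_prompt(system_prompt: str | None, kg_context: str | None,
                         vector_context: str | None, response_type: str,
                         history: str) -> str:
    # A custom prompt short-circuits the whole .format() call; `x or y`
    # below is equivalent to the diff's `x if x else y` for these strings.
    return (
        system_prompt
        if system_prompt
        else MIX_TEMPLATE.format(
            kg_context=kg_context or "No relevant knowledge graph information found",
            vector_context=vector_context or "No relevant text information found",
            response_type=response_type,
            history=history,
        )
    )
```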
`naive_query`'s signature:

@@ -1599,6 +1604,7 @@ async def naive_query(
     query_param: QueryParam,
     global_config: dict[str, str],
     hashing_kv: BaseKVStorage | None = None,
+    system_prompt: str | None = None,
 ) -> str | AsyncIterator[str]:
     # Handle cache
     use_model_func = global_config["llm_model_func"]
and its template lookup gains the same fallback:

@@ -1651,7 +1657,7 @@ async def naive_query(
         query_param.conversation_history, query_param.history_turns
     )
 
-    sys_prompt_temp = PROMPTS["naive_rag_response"]
+    sys_prompt_temp = system_prompt if system_prompt else PROMPTS["naive_rag_response"]
     sys_prompt = sys_prompt_temp.format(
         content_data=section,
         response_type=query_param.response_type,
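A last caveat, visible in this final hunk: naive mode formats the template with `content_data` (the retrieved chunks) rather than `context_data` as in `kg_query`, so a custom `system_prompt` written for one mode will not necessarily format cleanly in the other. A sketch under that assumption (the real `naive_rag_response` template, and any further `.format` arguments beyond what the hunk shows, may differ):

```python
# Hypothetical stand-in for PROMPTS["naive_rag_response"].
NAIVE_TEMPLATE = (
    "---Document Chunks---\n{content_data}\n\n"
    "Target format and length: {response_type}"
)

def build_naive_sys_prompt(system_prompt: str | None, section: str,
                           response_type: str) -> str:
    template = system_prompt if system_prompt else NAIVE_TEMPLATE
    # A template containing {context_data} (the kg_query spelling) would
    # raise KeyError here, since only these keyword arguments are supplied.
    return template.format(content_data=section, response_type=response_type)
```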