Persist LLM cache on error

This commit is contained in:
yangdx
2025-05-03 23:00:09 +08:00
parent 2063f6c6c8
commit b9b86df786

View File

@@ -1010,6 +1010,10 @@ class LightRAG:
if not task.done():
task.cancel()
# Persist LLM cache
if self.llm_response_cache:
await self.llm_response_cache.index_done_callback()
# Update document status to failed
await self.doc_status.upsert(
{
@@ -1028,7 +1032,7 @@ class LightRAG:
}
)
# Semphore was released here
# Semaphore released, concurrency controlled by graph_db_lock in merge_nodes_and_edges instead
if file_extraction_stage_ok:
try:
@@ -1082,6 +1086,10 @@ class LightRAG:
pipeline_status["latest_message"] = error_msg
pipeline_status["history_messages"].append(error_msg)
# Persist LLM cache
if self.llm_response_cache:
await self.llm_response_cache.index_done_callback()
# Update document status to failed
await self.doc_status.upsert(
{