From bed5a97ae29eb90d61adbab350f98ee0e490f721 Mon Sep 17 00:00:00 2001 From: yangdx Date: Sun, 2 Feb 2025 03:09:06 +0800 Subject: [PATCH] Fix prompt response cache failure when is_embedding_cache_enabled is true --- lightrag/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lightrag/utils.py b/lightrag/utils.py index dfd68c72..6faa0d9e 100644 --- a/lightrag/utils.py +++ b/lightrag/utils.py @@ -523,7 +523,7 @@ async def handle_cache( mode=mode, use_llm_check=use_llm_check, llm_func=llm_model_func if use_llm_check else None, - original_prompt=prompt, + original_prompt=prompt, cache_type=cache_type, ) if best_cached_response is not None: