This commit is contained in:
zrguo
2025-02-11 11:42:46 +08:00
parent 87e6bc5c42
commit 0c3b754108
2 changed files with 3 additions and 2 deletions

View File

@@ -1,7 +1,7 @@
 import os
 from lightrag import LightRAG, QueryParam
-from lightrag.llm.openai import gpt_4o_mini_complete
+from lightrag.llm.openai import gpt_4o_mini_complete, openai_embed

 WORKING_DIR = "./dickens"
@@ -10,6 +10,7 @@ if not os.path.exists(WORKING_DIR):
 rag = LightRAG(
     working_dir=WORKING_DIR,
+    embedding_func=openai_embed,
     llm_model_func=gpt_4o_mini_complete,
     # llm_model_func=gpt_4o_complete
 )

View File

@@ -1504,7 +1504,7 @@ async def naive_query(
     use_model_func = global_config["llm_model_func"]
     args_hash = compute_args_hash(query_param.mode, query, cache_type="query")
     cached_response, quantized, min_val, max_val = await handle_cache(
-        hashing_kv, args_hash, query, "default", cache_type="query"
+        hashing_kv, args_hash, query, query_param.mode, cache_type="query"
     )
     if cached_response is not None:
         return cached_response