Merge branch 'HKUDS:main' into main

This commit is contained in:
Saifeddine ALOUI
2025-01-24 13:38:20 +01:00
committed by GitHub
3 changed files with 5 additions and 8 deletions

View File

@@ -551,10 +551,10 @@ def get_api_key_dependency(api_key: Optional[str]):
def create_app(args):
# Verify that bindings arer correctly setup
-if args.llm_binding not in ["lollms", "ollama", "openai"]:
+if args.llm_binding not in ["lollms", "ollama", "openai", "azure_openai"]:
raise Exception("llm binding not supported")
-if args.embedding_binding not in ["lollms", "ollama", "openai"]:
+if args.embedding_binding not in ["lollms", "ollama", "openai", "azure_openai"]:
raise Exception("embedding binding not supported")
# Add SSL validation

View File

@@ -469,9 +469,8 @@ class LightRAG:
error_msg = f"Failed to process document {doc_id}: {str(e)}\n{traceback.format_exc()}"
logger.error(error_msg)
continue
-finally:
-    # Ensure all indexes are updated after each document
+else:
+    # Only update index when processing succeeds
await self._insert_done()
def insert_custom_chunks(self, full_text: str, text_chunks: list[str]):

View File

@@ -479,9 +479,7 @@ async def handle_cache(hashing_kv, args_hash, prompt, mode="default"):
quantized = min_val = max_val = None
if is_embedding_cache_enabled:
# Use embedding cache
-embedding_model_func = hashing_kv.global_config[
-    "embedding_func"
-].func  # ["func"]
+embedding_model_func = hashing_kv.global_config["embedding_func"]["func"]
llm_model_func = hashing_kv.global_config.get("llm_model_func")
current_embedding = await embedding_model_func([prompt])