diff --git a/lightrag/api/lightrag_server.py b/lightrag/api/lightrag_server.py
index b25e085f..1d28c9ca 100644
--- a/lightrag/api/lightrag_server.py
+++ b/lightrag/api/lightrag_server.py
@@ -62,7 +62,7 @@ VECTOR_STORAGE = "NanoVectorDBStorage"
 
 # read config.ini
 config = configparser.ConfigParser()
-config.read("config.ini")
+config.read("config.ini", "utf-8")
 # Redis config
 redis_uri = config.get("redis", "uri", fallback=None)
 if redis_uri:
@@ -734,7 +734,8 @@ def create_app(args):
             azure_openai_embed,
         )
     if args.llm_binding_host == "openai-ollama" or args.embedding_binding == "ollama":
-        from lightrag.llm.openai import openai_complete_if_cache, openai_embed
+        from lightrag.llm.openai import openai_complete_if_cache
+        from lightrag.llm.ollama import ollama_embed
 
         async def openai_alike_model_complete(
             prompt,
diff --git a/lightrag/kg/neo4j_impl.py b/lightrag/kg/neo4j_impl.py
index 6cc88e7c..f6912aff 100644
--- a/lightrag/kg/neo4j_impl.py
+++ b/lightrag/kg/neo4j_impl.py
@@ -499,11 +499,11 @@ class Neo4JStorage(BaseGraphStorage):
 
                 # Method 2: query compatible with older Neo4j versions
                 query = """
-                MATCH (n)
-                WITH DISTINCT labels(n) AS node_labels
-                UNWIND node_labels AS label
-                RETURN DISTINCT label
-                ORDER BY label
+                    MATCH (n)
+                    WITH DISTINCT labels(n) AS node_labels
+                    UNWIND node_labels AS label
+                    RETURN DISTINCT label
+                    ORDER BY label
                 """
                 result = await session.run(query)
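
Context for the config.read change above: configparser.ConfigParser.read() accepts an optional encoding argument, and when it is omitted the file is decoded with the platform's locale-preferred encoding, which can garble non-ASCII values in config.ini on some systems. The snippet below is a minimal standalone sketch of that behavior, not LightRAG code; the [redis] section and the fallback=None lookup mirror the diff, everything else is illustrative.

import configparser

# Minimal sketch (not LightRAG code) of the encoding change in the patch above.
# Without an explicit encoding, read() uses the locale's preferred encoding, so a
# UTF-8 config.ini containing non-ASCII values (passwords, URIs) can be mis-decoded.
config = configparser.ConfigParser()
config.read("config.ini", encoding="utf-8")  # same effect as the positional "utf-8" in the patch

# Mirrors the server's lookup shown in the diff; fallback=None means this also
# runs cleanly when the file or the [redis] section is missing.
redis_uri = config.get("redis", "uri", fallback=None)
if redis_uri:
    print(f"redis uri from config.ini: {redis_uri}")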