From 1984da0fd6ee17d3f187a13e423ce13aaac9945f Mon Sep 17 00:00:00 2001
From: bingo
Date: Mon, 13 Jan 2025 07:27:30 +0000
Subject: [PATCH] add logger.debug for mongo_impl get_by_mode_and_id()

---
 lightrag/kg/mongo_impl.py | 12 +++++++-----
 lightrag/kg/neo4j_impl.py |  6 +++++-
 2 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/lightrag/kg/mongo_impl.py b/lightrag/kg/mongo_impl.py
index 5aab9c07..fbbae8c2 100644
--- a/lightrag/kg/mongo_impl.py
+++ b/lightrag/kg/mongo_impl.py
@@ -45,7 +45,9 @@ class MongoKVStorage(BaseKVStorage):
             for mode, items in data.items():
                 for k, v in tqdm_async(items.items(), desc="Upserting"):
                     key = f"{mode}_{k}"
-                    result = self._data.update_one({"_id": key}, {"$setOnInsert": v}, upsert=True)
+                    result = self._data.update_one(
+                        {"_id": key}, {"$setOnInsert": v}, upsert=True
+                    )
                     if result.upserted_id:
                         logger.debug(f"\nInserted new document with key: {key}")
                     data[mode][k]["_id"] = key
@@ -54,20 +56,20 @@
                 self._data.update_one({"_id": k}, {"$set": v}, upsert=True)
                 data[k]["_id"] = k
         return data
-
+
     async def get_by_mode_and_id(self, mode: str, id: str) -> Union[dict, None]:
         if "llm_response_cache" == self.namespace:
             res = {}
-            v = self._data.find_one({"_id": mode+"_"+id})
+            v = self._data.find_one({"_id": mode + "_" + id})
             if v:
                 res[id] = v
-                print(f"find one by:{id}")
+                logger.debug(f"llm_response_cache find one by:{id}")
                 return res
             else:
                 return None
         else:
             return None
-
+
     async def drop(self):
         """ """
         pass
diff --git a/lightrag/kg/neo4j_impl.py b/lightrag/kg/neo4j_impl.py
index 96247c05..8c2afb5d 100644
--- a/lightrag/kg/neo4j_impl.py
+++ b/lightrag/kg/neo4j_impl.py
@@ -48,7 +48,11 @@ class Neo4JStorage(BaseGraphStorage):
             URI, auth=(USERNAME, PASSWORD)
         )
         _database_name = "home database" if DATABASE is None else f"database {DATABASE}"
-        with GraphDatabase.driver(URI, auth=(USERNAME, PASSWORD), max_connection_pool_size=MAX_CONNECTION_POOL_SIZE) as _sync_driver:
+        with GraphDatabase.driver(
+            URI,
+            auth=(USERNAME, PASSWORD),
+            max_connection_pool_size=MAX_CONNECTION_POOL_SIZE,
+        ) as _sync_driver:
             try:
                 with _sync_driver.session(database=DATABASE) as session:
                     try: