diff --git a/lightrag/kg/oracle_impl.py b/lightrag/kg/oracle_impl.py
index 63c43ce0..0b21f620 100644
--- a/lightrag/kg/oracle_impl.py
+++ b/lightrag/kg/oracle_impl.py
@@ -140,8 +140,6 @@ class OracleDB:
                     await cursor.execute(sql, params)
                 except Exception as e:
                     logger.error(f"Oracle database error: {e}")
-                    print(sql)
-                    print(params)
                     raise
                 columns = [column[0].lower() for column in cursor.description]
                 if multirows:
@@ -172,8 +170,6 @@ class OracleDB:
                 await connection.commit()
             except Exception as e:
                 logger.error(f"Oracle database error: {e}")
-                print(sql)
-                print(data)
                 raise
 
 
@@ -349,9 +345,7 @@ class OracleVectorDBStorage(BaseVectorStorage):
             "top_k": top_k,
             "better_than_threshold": self.cosine_better_than_threshold,
         }
-        # print(SQL)
        results = await self.db.query(SQL, params=params, multirows=True)
-        # print("vector search result:",results)
         return results
 
     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
@@ -477,8 +471,6 @@ class OracleGraphStorage(BaseGraphStorage):
         """根据节点id检查节点是否存在"""
         SQL = SQL_TEMPLATES["has_node"]
         params = {"workspace": self.db.workspace, "node_id": node_id}
-        # print(SQL)
-        # print(self.db.workspace, node_id)
         res = await self.db.query(SQL, params)
         if res:
             # print("Node exist!",res)
@@ -494,7 +486,6 @@
             "source_node_id": source_node_id,
             "target_node_id": target_node_id,
         }
-        # print(SQL)
         res = await self.db.query(SQL, params)
         if res:
             # print("Edge exist!",res)
@@ -506,33 +497,25 @@
     async def node_degree(self, node_id: str) -> int:
         SQL = SQL_TEMPLATES["node_degree"]
         params = {"workspace": self.db.workspace, "node_id": node_id}
-        # print(SQL)
         res = await self.db.query(SQL, params)
         if res:
-            # print("Node degree",res["degree"])
             return res["degree"]
         else:
-            # print("Edge not exist!")
             return 0
 
     async def edge_degree(self, src_id: str, tgt_id: str) -> int:
         """根据源和目标节点id获取边的度"""
         degree = await self.node_degree(src_id) + await self.node_degree(tgt_id)
-        # print("Edge degree",degree)
         return degree
 
     async def get_node(self, node_id: str) -> dict[str, str] | None:
         """根据节点id获取节点数据"""
         SQL = SQL_TEMPLATES["get_node"]
         params = {"workspace": self.db.workspace, "node_id": node_id}
-        # print(self.db.workspace, node_id)
-        # print(SQL)
         res = await self.db.query(SQL, params)
         if res:
-            # print("Get node!",self.db.workspace, node_id,res)
             return res
         else:
-            # print("Can't get node!",self.db.workspace, node_id)
             return None
 
     async def get_edge(
diff --git a/lightrag/kg/postgres_impl.py b/lightrag/kg/postgres_impl.py
index b5e3e1e3..f7866e42 100644
--- a/lightrag/kg/postgres_impl.py
+++ b/lightrag/kg/postgres_impl.py
@@ -136,9 +136,9 @@ class PostgreSQLDB:
                     data = None
                 return data
             except Exception as e:
-                logger.error(f"PostgreSQL database error: {e}")
-                print(sql)
-                print(params)
+                logger.error(
+                    f"PostgreSQL database,\nsql:{sql},\nparams:{params},\nerror:{e}"
+                )
                 raise
 
     async def execute(
@@ -167,9 +167,7 @@
             else:
                 logger.error(f"Upsert error: {e}")
         except Exception as e:
-            logger.error(f"PostgreSQL database error: {e.__class__} - {e}")
-            print(sql)
-            print(data)
+            logger.error(f"PostgreSQL database,\nsql:{sql},\ndata:{data},\nerror:{e}")
             raise
 
     @staticmethod
@@ -266,9 +264,10 @@
             new_keys = set([s for s in keys if s not in exist_keys])
             return new_keys
         except Exception as e:
-            logger.error(f"PostgreSQL database error: {e}")
-            print(sql)
-            print(params)
+            logger.error(
+                f"PostgreSQL database,\nsql:{sql},\nparams:{params},\nerror:{e}"
+            )
+            raise
 
     ################ INSERT METHODS ################
     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
@@ -333,9 +332,9 @@
                 "content_vector": json.dumps(item["__vector__"].tolist()),
             }
         except Exception as e:
-            logger.error(f"Error to prepare upsert sql: {e}")
-            print(item)
-            raise e
+            logger.error(f"Error to prepare upsert,\nsql: {e}\nitem: {item}")
+            raise
+
         return upsert_sql, data
 
     def _upsert_entities(self, item: dict):
@@ -454,9 +453,10 @@
             print(f"new_keys: {new_keys}")
             return new_keys
         except Exception as e:
-            logger.error(f"PostgreSQL database error: {e}")
-            print(sql)
-            print(params)
+            logger.error(
+                f"PostgreSQL database,\nsql:{sql},\nparams:{params},\nerror:{e}"
+            )
+            raise
 
     async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         sql = "select * from LIGHTRAG_DOC_STATUS where workspace=$1 and id=$2"
diff --git a/lightrag/kg/tidb_impl.py b/lightrag/kg/tidb_impl.py
index f0e5a45b..110a404a 100644
--- a/lightrag/kg/tidb_impl.py
+++ b/lightrag/kg/tidb_impl.py
@@ -76,9 +76,7 @@ class TiDB:
             try:
                 result = conn.execute(text(sql), params)
             except Exception as e:
-                logger.error(f"Tidb database error: {e}")
-                print(sql)
-                print(params)
+                logger.error(f"Tidb database,\nsql:{sql},\nparams:{params},\nerror:{e}")
                 raise
             if multirows:
                 rows = result.all()
@@ -103,9 +101,7 @@
             else:
                 conn.execute(text(sql), parameters=data)
         except Exception as e:
-            logger.error(f"TiDB database error: {e}")
-            print(sql)
-            print(data)
+            logger.error(f"Tidb database,\nsql:{sql},\ndata:{data},\nerror:{e}")
             raise
 
 
@@ -145,8 +141,7 @@ class TiDBKVStorage(BaseKVStorage):
         try:
             await self.db.query(SQL)
         except Exception as e:
-            logger.error(f"Tidb database error: {e}")
-            print(SQL)
+            logger.error(f"Tidb database,\nsql:{SQL},\nkeys:{keys},\nerror:{e}")
         res = await self.db.query(SQL, multirows=True)
         if res:
             exist_keys = [key["id"] for key in res]
diff --git a/lightrag/llm/openai.py b/lightrag/llm/openai.py
index 024f7f52..aaaaf406 100644
--- a/lightrag/llm/openai.py
+++ b/lightrag/llm/openai.py
@@ -77,7 +77,7 @@ from lightrag.types import GPTKeywordExtractionFormat
 from lightrag.api import __api_version__
 
 import numpy as np
-from typing import Union
+from typing import Any, Union
 
 
 class InvalidResponseError(Exception):
@@ -94,13 +94,13 @@
     ),
 )
 async def openai_complete_if_cache(
-    model,
-    prompt,
-    system_prompt=None,
-    history_messages=None,
-    base_url=None,
-    api_key=None,
-    **kwargs,
+    model: str,
+    prompt: str,
+    system_prompt: str | None = None,
+    history_messages: list[dict[str, Any]] | None = None,
+    base_url: str | None = None,
+    api_key: str | None = None,
+    **kwargs: Any,
 ) -> str:
     if history_messages is None:
         history_messages = []
@@ -125,7 +125,7 @@ async def openai_complete_if_cache(
     )
     kwargs.pop("hashing_kv", None)
     kwargs.pop("keyword_extraction", None)
-    messages = []
+    messages: list[dict[str, Any]] = []
     if system_prompt:
         messages.append({"role": "system", "content": system_prompt})
     messages.extend(history_messages)
@@ -147,18 +147,18 @@
             model=model, messages=messages, **kwargs
         )
     except APIConnectionError as e:
-        logger.error(f"OpenAI API Connection Error: {str(e)}")
+        logger.error(f"OpenAI API Connection Error: {e}")
         raise
     except RateLimitError as e:
-        logger.error(f"OpenAI API Rate Limit Error: {str(e)}")
+        logger.error(f"OpenAI API Rate Limit Error: {e}")
         raise
     except APITimeoutError as e:
-        logger.error(f"OpenAI API Timeout Error: {str(e)}")
+        logger.error(f"OpenAI API Timeout Error: {e}")
         raise
     except Exception as e:
-        logger.error(f"OpenAI API Call Failed: {str(e)}")
-        logger.error(f"Model: {model}")
-        logger.error(f"Request parameters: {kwargs}")
+        logger.error(
+            f"OpenAI API Call Failed,\nModel: {model},\nParams: {kwargs}, Got: {e}"
+        )
         raise
 
     if hasattr(response, "__aiter__"):