fixed return
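Every hunk below applies the same fix: `upsert` is annotated `-> None`, but the empty-input guard previously returned `[]`, so callers could receive a list from a method whose signature promises nothing. The guard now returns nothing, and the entry log line is standardized to `Inserting {len(data)} to {self.namespace}`. Below is a minimal sketch of the corrected shape, using a hypothetical `ExampleStorage` class and Python's standard `logging` module in place of the project's own logger:

```python
import logging
from typing import Any

logger = logging.getLogger(__name__)


class ExampleStorage:
    """Hypothetical storage class, used only to illustrate the fixed pattern."""

    def __init__(self, namespace: str) -> None:
        self.namespace = namespace
        self._data: dict[str, dict[str, Any]] = {}

    async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
        # Standardized entry log used across the backends in this commit.
        logger.info(f"Inserting {len(data)} to {self.namespace}")
        if not data:
            # The signature is `-> None`, so return nothing instead of `[]`.
            return
        self._data.update(data)
```

Returning bare `return` keeps the signature honest for static checkers such as mypy, which flag a `return []` inside a function declared `-> None`.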
@@ -113,9 +113,9 @@ class ChromaVectorDBStorage(BaseVectorStorage):
             raise

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
         logger.info(f"Inserting {len(data)} to {self.namespace}")
         if not data:
             logger.warning("Empty data provided to vector DB")
-            return []
+            return

         try:
             ids = list(data.keys())
@@ -84,10 +84,9 @@ class FaissVectorDBStorage(BaseVectorStorage):
                 ...
             }
         """
-        logger.info(f"Inserting {len(data)} vectors to {self.namespace}")
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
         if not data:
-            logger.warning("You are inserting empty data to the vector DB")
-            return []
+            return

         current_time = time.time()
@@ -58,6 +58,10 @@ class JsonDocStatusStorage(DocStatusStorage):
         write_json(self._data, self._file_name)

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
+
         self._data.update(data)
         await self.index_done_callback()
@@ -43,6 +43,9 @@ class JsonKVStorage(BaseKVStorage):
         return set(keys) - set(self._data.keys())

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
         left_data = {k: v for k, v in data.items() if k not in self._data}
         self._data.update(left_data)
@@ -80,11 +80,11 @@ class MilvusVectorDBStorage(BaseVectorStorage):
         )

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
-        logger.info(f"Inserting {len(data)} vectors to {self.namespace}")
-        if not len(data):
-            logger.warning("You insert an empty data to vector DB")
-            return []
-        list_data = [
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
+
+        list_data: list[dict[str, Any]] = [
             {
                 "id": k,
                 **{k1: v1 for k1, v1 in v.items() if k1 in self.meta_fields},
@@ -113,8 +113,12 @@ class MongoKVStorage(BaseKVStorage):
         return keys - existing_ids

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
+
         if is_namespace(self.namespace, NameSpace.KV_STORE_LLM_RESPONSE_CACHE):
-            update_tasks = []
+            update_tasks: list[Any] = []
             for mode, items in data.items():
                 for k, v in items.items():
                     key = f"{mode}_{k}"
@@ -186,7 +190,10 @@ class MongoDocStatusStorage(DocStatusStorage):
         return data - existing_ids

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
-        update_tasks = []
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
+        update_tasks: list[Any] = []
         for k, v in data.items():
             data[k]["_id"] = k
             update_tasks.append(
@@ -860,10 +867,9 @@ class MongoVectorDBStorage(BaseVectorStorage):
             logger.debug("vector index already exist")

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
-        logger.debug(f"Inserting {len(data)} vectors to {self.namespace}")
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
         if not data:
-            logger.warning("You are inserting an empty data set to vector DB")
-            return []
+            return

         list_data = [
             {
@@ -50,10 +50,9 @@ class NanoVectorDBStorage(BaseVectorStorage):
         )

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
-        logger.info(f"Inserting {len(data)} vectors to {self.namespace}")
-        if not len(data):
-            logger.warning("You insert an empty data to vector DB")
-            return []
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return

         current_time = time.time()
         list_data = [
@@ -332,6 +332,10 @@ class OracleKVStorage(BaseKVStorage):

     ################ INSERT METHODS ################
     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
+
         if is_namespace(self.namespace, NameSpace.KV_STORE_TEXT_CHUNKS):
             list_data = [
                 {
@@ -353,6 +353,10 @@ class PGKVStorage(BaseKVStorage):

     ################ INSERT METHODS ################
     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
+
         if is_namespace(self.namespace, NameSpace.KV_STORE_TEXT_CHUNKS):
             pass
         elif is_namespace(self.namespace, NameSpace.KV_STORE_FULL_DOCS):
@@ -454,10 +458,10 @@ class PGVectorStorage(BaseVectorStorage):
         return upsert_sql, data

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
-        logger.info(f"Inserting {len(data)} vectors to {self.namespace}")
-        if not len(data):
-            logger.warning("You insert an empty data to vector DB")
-            return []
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
+
         current_time = time.time()
         list_data = [
             {
@@ -618,6 +622,10 @@ class PGDocStatusStorage(DocStatusStorage):
         Args:
             data: dictionary of document IDs and their status data
         """
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
+
         sql = """insert into LIGHTRAG_DOC_STATUS(workspace,id,content,content_summary,content_length,chunks_count,status)
                  values($1,$2,$3,$4,$5,$6,$7)
                  on conflict(id,workspace) do update set
@@ -93,9 +93,9 @@ class QdrantVectorDBStorage(BaseVectorStorage):
         )

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
-        if not len(data):
-            logger.warning("You insert an empty data to vector DB")
-            return []
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
         list_data = [
             {
                 "id": k,
@@ -49,6 +49,9 @@ class RedisKVStorage(BaseKVStorage):
         return set(keys) - existing_ids

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
         pipe = self._redis.pipeline()

         for k, v in data.items():
@@ -217,6 +217,9 @@ class TiDBKVStorage(BaseKVStorage):

     ################ INSERT full_doc AND chunks ################
     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
         left_data = {k: v for k, v in data.items() if k not in self._data}
         self._data.update(left_data)
         if is_namespace(self.namespace, NameSpace.KV_STORE_TEXT_CHUNKS):
@@ -324,12 +327,12 @@ class TiDBVectorDBStorage(BaseVectorStorage):

     ###### INSERT entities And relationships ######
     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
         # ignore, upsert in TiDBKVStorage already
-        if not len(data):
-            logger.warning("You insert an empty data to vector DB")
-            return []
+        logger.info(f"Inserting {len(data)} to {self.namespace}")
+        if not data:
+            return
         if is_namespace(self.namespace, NameSpace.VECTOR_STORE_CHUNKS):
-            return []
+            return

         logger.info(f"Inserting {len(data)} vectors to {self.namespace}")

         list_data = [