cleaned code
@@ -1,24 +1,26 @@
-from enum import Enum
 import os
 from dataclasses import dataclass, field
+from enum import Enum
 from typing import (
+    Any,
+    Literal,
     Optional,
     TypedDict,
-    Union,
-    Literal,
     TypeVar,
-    Any,
+    Union,
 )
 
 import numpy as np
 
 
 from .utils import EmbeddingFunc
 
-TextChunkSchema = TypedDict(
-    "TextChunkSchema",
-    {"tokens": int, "content": str, "full_doc_id": str, "chunk_order_index": int},
-)
+
+class TextChunkSchema(TypedDict):
+    tokens: int
+    content: str
+    full_doc_id: str
+    chunk_order_index: int
 
 
 T = TypeVar("T")
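Note: the functional-syntax TypedDict is replaced by the equivalent class syntax; both describe plain dicts and only matter to static type checkers. A minimal usage sketch, assuming nothing beyond the definition above (the literal values are illustrative, not from the commit):

    chunk: TextChunkSchema = {
        "tokens": 120,
        "content": "example chunk text",
        "full_doc_id": "doc-1",
        "chunk_order_index": 0,
    }
    # At runtime this is still an ordinary dict; only type checkers verify the keys.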
@@ -57,11 +59,11 @@ class StorageNameSpace:
     global_config: dict[str, Any]
 
     async def index_done_callback(self):
-        """commit the storage operations after indexing"""
+        """Commit the storage operations after indexing"""
         pass
 
     async def query_done_callback(self):
-        """commit the storage operations after querying"""
+        """Commit the storage operations after querying"""
         pass
 
 
@@ -84,14 +86,14 @@ class BaseVectorStorage(StorageNameSpace):
 class BaseKVStorage(StorageNameSpace):
     embedding_func: EmbeddingFunc
 
-    async def get_by_id(self, id: str) -> dict[str, Any]:
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         raise NotImplementedError
 
     async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
         raise NotImplementedError
 
     async def filter_keys(self, data: set[str]) -> set[str]:
-        """return un-exist keys"""
+        """Return un-exist keys"""
         raise NotImplementedError
 
     async def upsert(self, data: dict[str, Any]) -> None:
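With the widened return type, a missing id is signalled by None rather than an empty dict, so callers are expected to branch on it. A hedged sketch of the calling pattern (`kv_storage` and `handle` are hypothetical names, not part of the commit):

    # inside an async function
    doc = await kv_storage.get_by_id("doc-1")
    if doc is None:
        ...  # the id is not present in the KV store
    else:
        handle(doc)  # hypothetical downstream handler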
@@ -1,16 +1,16 @@
 import asyncio
 import os
 from dataclasses import dataclass
-from typing import Any
+from typing import Any, Union
 
-from lightrag.utils import (
-    logger,
-    load_json,
-    write_json,
-)
 from lightrag.base import (
     BaseKVStorage,
 )
+from lightrag.utils import (
+    load_json,
+    logger,
+    write_json,
+)
 
 
 @dataclass
@@ -25,8 +25,8 @@ class JsonKVStorage(BaseKVStorage):
     async def index_done_callback(self):
         write_json(self._data, self._file_name)
 
-    async def get_by_id(self, id: str) -> dict[str, Any]:
-        return self._data.get(id, {})
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
+        return self._data.get(id)
 
     async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
         return [
@@ -39,7 +39,7 @@ class JsonKVStorage(BaseKVStorage):
         ]
 
     async def filter_keys(self, data: set[str]) -> set[str]:
-        return set([s for s in data if s not in self._data])
+        return data - set(self._data.keys())
 
     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
         left_data = {k: v for k, v in data.items() if k not in self._data}
@@ -76,7 +76,7 @@ class JsonDocStatusStorage(DocStatusStorage):
 
     async def filter_keys(self, data: set[str]) -> set[str]:
         """Return keys that should be processed (not in storage or not successfully processed)"""
-        return {k for k, _ in self._data.items() if k in data}
+        return set(k for k in data if k not in self._data)
 
     async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
         result: list[dict[str, Any]] = []
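Both JSON-backed filter_keys implementations now return the incoming keys that are absent from self._data, which is what the docstring promises. A small illustration of the set semantics (values are made up):

    stored = {"doc-1": {}, "doc-2": {}}       # stands in for self._data
    incoming = {"doc-1", "doc-3"}
    missing = incoming - set(stored.keys())   # {"doc-3"}, the keys still to process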
@@ -1,8 +1,9 @@
 import os
-from tqdm.asyncio import tqdm as tqdm_async
 from dataclasses import dataclass
-import pipmaster as pm
+
 import numpy as np
+import pipmaster as pm
+from tqdm.asyncio import tqdm as tqdm_async
 
 if not pm.is_installed("pymongo"):
     pm.install("pymongo")
@@ -10,13 +11,14 @@ if not pm.is_installed("pymongo"):
 if not pm.is_installed("motor"):
     pm.install("motor")
 
-from pymongo import MongoClient
-from motor.motor_asyncio import AsyncIOMotorClient
-from typing import Any, Union, List, Tuple
+from typing import Any, List, Tuple, Union
 
-from ..utils import logger
-from ..base import BaseKVStorage, BaseGraphStorage
+from motor.motor_asyncio import AsyncIOMotorClient
+from pymongo import MongoClient
+
+from ..base import BaseGraphStorage, BaseKVStorage
 from ..namespace import NameSpace, is_namespace
+from ..utils import logger
 
 
 @dataclass
@@ -29,7 +31,7 @@ class MongoKVStorage(BaseKVStorage):
         self._data = database.get_collection(self.namespace)
         logger.info(f"Use MongoDB as KV {self.namespace}")
 
-    async def get_by_id(self, id: str) -> dict[str, Any]:
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         return self._data.find_one({"_id": id})
 
     async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
@@ -170,7 +172,6 @@ class MongoGraphStorage(BaseGraphStorage):
         But typically for a direct edge, we might just do a find_one.
         Below is a demonstration approach.
         """
-
         # We can do a single-hop graphLookup (maxDepth=0 or 1).
         # Then check if the target_node appears among the edges array.
         pipeline = [
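For readers unfamiliar with the approach the docstring describes, a single-hop $graphLookup pipeline could look roughly like the sketch below. This is only an illustration under assumed names: the collection name "graph_nodes" and the "edges"/"target" fields are placeholders, not the storage's actual schema; source_node_id and target_node_id stand for the method arguments.

    # Hypothetical aggregation: start at the source node, follow one hop of
    # outgoing edges, and collect the reachable neighbours into "reachable".
    pipeline = [
        {"$match": {"_id": source_node_id}},
        {
            "$graphLookup": {
                "from": "graph_nodes",         # assumed collection name
                "startWith": "$edges.target",  # assumed edge field
                "connectFromField": "edges.target",
                "connectToField": "_id",
                "as": "reachable",
                "maxDepth": 0,                 # single hop
            }
        },
    ]
    # has_edge would then check whether target_node_id appears in "reachable".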
@@ -1,27 +1,28 @@
-import os
+import array
 import asyncio
+import os
 
 # import html
 # import os
 from dataclasses import dataclass
 from typing import Any, Union
 
 import numpy as np
-import array
 import pipmaster as pm
 
 if not pm.is_installed("oracledb"):
     pm.install("oracledb")
 
 
-from ..utils import logger
+import oracledb
+
 from ..base import (
     BaseGraphStorage,
     BaseKVStorage,
     BaseVectorStorage,
 )
 from ..namespace import NameSpace, is_namespace
-import oracledb
+from ..utils import logger
 
 
 class OracleDB:
@@ -107,7 +108,7 @@ class OracleDB:
                         "SELECT id FROM GRAPH_TABLE (lightrag_graph MATCH (a) COLUMNS (a.id)) fetch first row only"
                     )
                 else:
-                    await self.query("SELECT 1 FROM {k}".format(k=k))
+                    await self.query(f"SELECT 1 FROM {k}")
             except Exception as e:
                 logger.error(f"Failed to check table {k} in Oracle database")
                 logger.error(f"Oracle database error: {e}")
@@ -181,8 +182,8 @@ class OracleKVStorage(BaseKVStorage):
 
     ################ QUERY METHODS ################
 
-    async def get_by_id(self, id: str) -> dict[str, Any]:
-        """get doc_full data based on id."""
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
+        """Get doc_full data based on id."""
         SQL = SQL_TEMPLATES["get_by_id_" + self.namespace]
         params = {"workspace": self.db.workspace, "id": id}
         # print("get_by_id:"+SQL)
@@ -191,7 +192,10 @@ class OracleKVStorage(BaseKVStorage):
             res = {}
             for row in array_res:
                 res[row["id"]] = row
-            return res
+            if res:
+                return res
+            else:
+                return None
         else:
             return await self.db.query(SQL, params)
 
@@ -209,7 +213,7 @@ class OracleKVStorage(BaseKVStorage):
             return None
 
     async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
-        """get doc_chunks data based on id"""
+        """Get doc_chunks data based on id"""
         SQL = SQL_TEMPLATES["get_by_ids_" + self.namespace].format(
             ids=",".join([f"'{id}'" for id in ids])
         )
@@ -4,34 +4,35 @@ import json
 import os
 import time
 from dataclasses import dataclass
-from typing import Union, List, Dict, Set, Any, Tuple
-import numpy as np
+from typing import Any, Dict, List, Set, Tuple, Union
 
+import numpy as np
 import pipmaster as pm
 
 if not pm.is_installed("asyncpg"):
     pm.install("asyncpg")
 
-import asyncpg
 import sys
-from tqdm.asyncio import tqdm as tqdm_async
+
+import asyncpg
 from tenacity import (
     retry,
     retry_if_exception_type,
     stop_after_attempt,
     wait_exponential,
 )
+from tqdm.asyncio import tqdm as tqdm_async
 
-from ..utils import logger
 from ..base import (
+    BaseGraphStorage,
     BaseKVStorage,
     BaseVectorStorage,
-    DocStatusStorage,
-    DocStatus,
     DocProcessingStatus,
-    BaseGraphStorage,
+    DocStatus,
+    DocStatusStorage,
 )
 from ..namespace import NameSpace, is_namespace
+from ..utils import logger
 
 if sys.platform.startswith("win"):
     import asyncio.windows_events
@@ -82,7 +83,7 @@ class PostgreSQLDB:
     async def check_tables(self):
         for k, v in TABLES.items():
             try:
-                await self.query("SELECT 1 FROM {k} LIMIT 1".format(k=k))
+                await self.query(f"SELECT 1 FROM {k} LIMIT 1")
             except Exception as e:
                 logger.error(f"Failed to check table {k} in PostgreSQL database")
                 logger.error(f"PostgreSQL database error: {e}")
@@ -183,7 +184,7 @@ class PGKVStorage(BaseKVStorage):
 
     ################ QUERY METHODS ################
 
-    async def get_by_id(self, id: str) -> dict[str, Any]:
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         """Get doc_full data by id."""
         sql = SQL_TEMPLATES["get_by_id_" + self.namespace]
         params = {"workspace": self.db.workspace, "id": id}
@@ -192,9 +193,10 @@ class PGKVStorage(BaseKVStorage):
             res = {}
             for row in array_res:
                 res[row["id"]] = row
-            return res
+            return res if res else None
         else:
-            return await self.db.query(sql, params)
+            response = await self.db.query(sql, params)
+            return response if response else None
 
     async def get_by_mode_and_id(self, mode: str, id: str) -> Union[dict, None]:
         """Specifically for llm_response_cache."""
@@ -435,12 +437,12 @@ class PGDocStatusStorage(DocStatusStorage):
         existed = set([element["id"] for element in result])
         return set(data) - existed
 
-    async def get_by_id(self, id: str) -> dict[str, Any]:
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         sql = "select * from LIGHTRAG_DOC_STATUS where workspace=$1 and id=$2"
         params = {"workspace": self.db.workspace, "id": id}
         result = await self.db.query(sql, params, True)
         if result is None or result == []:
-            return {}
+            return None
         else:
             return DocProcessingStatus(
                 content=result[0]["content"],
@@ -1,5 +1,5 @@
 import os
-from typing import Any
+from typing import Any, Union
 from tqdm.asyncio import tqdm as tqdm_async
 from dataclasses import dataclass
 import pipmaster as pm
@@ -21,7 +21,7 @@ class RedisKVStorage(BaseKVStorage):
         self._redis = Redis.from_url(redis_url, decode_responses=True)
         logger.info(f"Use Redis as KV {self.namespace}")
 
-    async def get_by_id(self, id):
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         data = await self._redis.get(f"{self.namespace}:{id}")
         return json.loads(data) if data else None
 
@@ -14,12 +14,12 @@ if not pm.is_installed("sqlalchemy"):
 from sqlalchemy import create_engine, text
 from tqdm import tqdm
 
-from ..base import BaseVectorStorage, BaseKVStorage, BaseGraphStorage
-from ..utils import logger
+from ..base import BaseGraphStorage, BaseKVStorage, BaseVectorStorage
 from ..namespace import NameSpace, is_namespace
+from ..utils import logger
 
 
-class TiDB(object):
+class TiDB:
     def __init__(self, config, **kwargs):
         self.host = config.get("host", None)
         self.port = config.get("port", None)
@@ -108,12 +108,12 @@ class TiDBKVStorage(BaseKVStorage):
 
     ################ QUERY METHODS ################
 
-    async def get_by_id(self, id: str) -> dict[str, Any]:
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         """Fetch doc_full data by id."""
         SQL = SQL_TEMPLATES["get_by_id_" + self.namespace]
         params = {"id": id}
-        # print("get_by_id:"+SQL)
-        return await self.db.query(SQL, params)
+        response = await self.db.query(SQL, params)
+        return response if response else None
 
     # Query by id
     async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
@@ -178,7 +178,7 @@ class TiDBKVStorage(BaseKVStorage):
                     "tokens": item["tokens"],
                     "chunk_order_index": item["chunk_order_index"],
                     "full_doc_id": item["full_doc_id"],
-                    "content_vector": f"{item['__vector__'].tolist()}",
+                    "content_vector": f'{item["__vector__"].tolist()}',
                     "workspace": self.db.workspace,
                 }
             )
@@ -222,8 +222,7 @@ class TiDBVectorDBStorage(BaseVectorStorage):
         )
 
     async def query(self, query: str, top_k: int) -> list[dict]:
-        """search from tidb vector"""
-
+        """Search from tidb vector"""
         embeddings = await self.embedding_func([query])
         embedding = embeddings[0]
 
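The query path embeds the text once and then searches the TiDB vector table with that embedding, returning up to top_k rows as dicts. A hedged usage sketch, assuming an already-initialised instance named vector_storage (a hypothetical name) and that the row keys depend on the namespace:

    # inside an async function
    results = await vector_storage.query("what does the document say about X?", top_k=5)
    for row in results:
        print(row.get("id"), row.get("content"))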
@@ -286,7 +285,7 @@ class TiDBVectorDBStorage(BaseVectorStorage):
                 "id": item["id"],
                 "name": item["entity_name"],
                 "content": item["content"],
-                "content_vector": f"{item['content_vector'].tolist()}",
+                "content_vector": f'{item["content_vector"].tolist()}',
                 "workspace": self.db.workspace,
             }
             # update entity_id if node inserted by graph_storage_instance before
@@ -308,7 +307,7 @@ class TiDBVectorDBStorage(BaseVectorStorage):
                 "source_name": item["src_id"],
                 "target_name": item["tgt_id"],
                 "content": item["content"],
-                "content_vector": f"{item['content_vector'].tolist()}",
+                "content_vector": f'{item["content_vector"].tolist()}',
                 "workspace": self.db.workspace,
             }
             # update relation_id if node inserted by graph_storage_instance before