Merge branch 'main' into graph-viewer-webui
@@ -1,16 +1,16 @@
 import asyncio
 import os
 from dataclasses import dataclass
-from typing import Any
+from typing import Any, Union

-from lightrag.utils import (
-    logger,
-    load_json,
-    write_json,
-)
 from lightrag.base import (
     BaseKVStorage,
 )
+from lightrag.utils import (
+    load_json,
+    logger,
+    write_json,
+)


 @dataclass
@@ -25,8 +25,8 @@ class JsonKVStorage(BaseKVStorage):
     async def index_done_callback(self):
         write_json(self._data, self._file_name)

-    async def get_by_id(self, id: str) -> dict[str, Any]:
-        return self._data.get(id, {})
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
+        return self._data.get(id)

     async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
         return [
@@ -38,8 +38,8 @@ class JsonKVStorage(BaseKVStorage):
             for id in ids
         ]

-    async def filter_keys(self, data: list[str]) -> set[str]:
-        return set([s for s in data if s not in self._data])
+    async def filter_keys(self, data: set[str]) -> set[str]:
+        return set(data) - set(self._data.keys())

     async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
         left_data = {k: v for k, v in data.items() if k not in self._data}
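The pattern repeated across the KV backends in this merge: `get_by_id` now returns `None` for a missing key instead of an empty dict (typed `Union[dict[str, Any], None]`), and `filter_keys` takes a `set[str]` and answers with plain set difference. A minimal sketch of the new caller contract, using an in-memory stand-in rather than a real `JsonKVStorage` instance:

```python
# Hedged sketch of the new KV contract; the class below is a stand-in,
# not LightRAG's actual storage implementation.
import asyncio
from typing import Any, Union


class InMemoryKV:
    """Stand-in mirroring the new JsonKVStorage method signatures."""

    def __init__(self) -> None:
        self._data: dict[str, dict[str, Any]] = {"doc-1": {"content": "hello"}}

    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
        # Missing keys now surface as None rather than {}.
        return self._data.get(id)

    async def filter_keys(self, data: set[str]) -> set[str]:
        # Plain set difference replaces the old list-based comprehension.
        return set(data) - set(self._data.keys())


async def main() -> None:
    kv = InMemoryKV()
    if await kv.get_by_id("doc-2") is None:  # check for None, not falsiness
        print("doc-2 not stored yet")
    print(await kv.filter_keys({"doc-1", "doc-2"}))  # -> {'doc-2'}


asyncio.run(main())
```

The `None` return makes "key absent" distinguishable from "key present with empty value", which the old `{}` default conflated.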
@@ -48,21 +48,20 @@ Usage:

 """

-import os
 from dataclasses import dataclass
+import os
 from typing import Any, Union

-from lightrag.utils import (
-    logger,
-    load_json,
-    write_json,
-)
-
 from lightrag.base import (
-    DocStatus,
     DocProcessingStatus,
+    DocStatus,
     DocStatusStorage,
 )
+from lightrag.utils import (
+    load_json,
+    logger,
+    write_json,
+)


 @dataclass
@@ -75,15 +74,17 @@ class JsonDocStatusStorage(DocStatusStorage):
         self._data: dict[str, Any] = load_json(self._file_name) or {}
         logger.info(f"Loaded document status storage with {len(self._data)} records")

-    async def filter_keys(self, data: list[str]) -> set[str]:
+    async def filter_keys(self, data: set[str]) -> set[str]:
         """Return keys that should be processed (not in storage or not successfully processed)"""
-        return set(
-            [
-                k
-                for k in data
-                if k not in self._data or self._data[k]["status"] != DocStatus.PROCESSED
-            ]
-        )
+        return set(data) - set(self._data.keys())
+
+    async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
+        result: list[dict[str, Any]] = []
+        for id in ids:
+            data = self._data.get(id, None)
+            if data:
+                result.append(data)
+        return result

     async def get_status_counts(self) -> dict[str, int]:
         """Get counts of documents in each status"""
@@ -94,11 +95,19 @@ class JsonDocStatusStorage(DocStatusStorage):

     async def get_failed_docs(self) -> dict[str, DocProcessingStatus]:
         """Get all failed documents"""
-        return {k: v for k, v in self._data.items() if v["status"] == DocStatus.FAILED}
+        return {
+            k: DocProcessingStatus(**v)
+            for k, v in self._data.items()
+            if v["status"] == DocStatus.FAILED
+        }

     async def get_pending_docs(self) -> dict[str, DocProcessingStatus]:
         """Get all pending documents"""
-        return {k: v for k, v in self._data.items() if v["status"] == DocStatus.PENDING}
+        return {
+            k: DocProcessingStatus(**v)
+            for k, v in self._data.items()
+            if v["status"] == DocStatus.PENDING
+        }

     async def index_done_callback(self):
         """Save data to file after indexing"""
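Rehydrating each stored dict with `DocProcessingStatus(**v)` makes these methods honest about their declared `dict[str, DocProcessingStatus]` return type: callers get typed attribute access instead of raw JSON dicts. A sketch of the `**`-unpacking step; the two fields here are assumptions for illustration, the real dataclass in `lightrag.base` carries more:

```python
# Hedged sketch: the real DocProcessingStatus lives in lightrag.base;
# the fields below are illustrative assumptions.
from dataclasses import dataclass


@dataclass
class DocProcessingStatus:
    content: str
    status: str


record = {"content": "some document text", "status": "failed"}
# **-unpacking maps the stored JSON dict onto dataclass fields by name.
doc = DocProcessingStatus(**record)
print(doc.status)  # attribute access instead of record["status"]
```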
@@ -113,12 +122,8 @@ class JsonDocStatusStorage(DocStatusStorage):
         self._data.update(data)
         await self.index_done_callback()

-    async def get_by_id(self, id: str) -> dict[str, Any]:
-        return self._data.get(id, {})
-
-    async def get(self, doc_id: str) -> Union[DocProcessingStatus, None]:
-        """Get document status by ID"""
-        return self._data.get(doc_id)
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
+        return self._data.get(id)

     async def delete(self, doc_ids: list[str]):
         """Delete document status by IDs"""
@@ -1,8 +1,9 @@
 import os
-from tqdm.asyncio import tqdm as tqdm_async
 from dataclasses import dataclass
-import pipmaster as pm
+
 import numpy as np
+import pipmaster as pm
+from tqdm.asyncio import tqdm as tqdm_async

 if not pm.is_installed("pymongo"):
     pm.install("pymongo")
@@ -10,13 +11,14 @@ if not pm.is_installed("pymongo"):
 if not pm.is_installed("motor"):
     pm.install("motor")

-from pymongo import MongoClient
-from motor.motor_asyncio import AsyncIOMotorClient
-from typing import Any, Union, List, Tuple
+from typing import Any, List, Tuple, Union

-from ..utils import logger
-from ..base import BaseKVStorage, BaseGraphStorage
+from motor.motor_asyncio import AsyncIOMotorClient
+from pymongo import MongoClient
+
+from ..base import BaseGraphStorage, BaseKVStorage
 from ..namespace import NameSpace, is_namespace
+from ..utils import logger


 @dataclass
@@ -29,13 +31,13 @@ class MongoKVStorage(BaseKVStorage):
         self._data = database.get_collection(self.namespace)
         logger.info(f"Use MongoDB as KV {self.namespace}")

-    async def get_by_id(self, id: str) -> dict[str, Any]:
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         return self._data.find_one({"_id": id})

     async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
         return list(self._data.find({"_id": {"$in": ids}}))

-    async def filter_keys(self, data: list[str]) -> set[str]:
+    async def filter_keys(self, data: set[str]) -> set[str]:
         existing_ids = [
             str(x["_id"]) for x in self._data.find({"_id": {"$in": data}}, {"_id": 1})
         ]
@@ -170,7 +172,6 @@ class MongoGraphStorage(BaseGraphStorage):
         But typically for a direct edge, we might just do a find_one.
         Below is a demonstration approach.
         """
-
         # We can do a single-hop graphLookup (maxDepth=0 or 1).
         # Then check if the target_node appears among the edges array.
         pipeline = [
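The pipeline truncated above is a bounded `$graphLookup`. A hedged sketch of what a single-hop edge check can look like; the collection name and `edges` schema are assumptions for illustration, not this file's actual layout:

```python
# Hedged sketch of a single-hop $graphLookup edge check. The "graph"
# collection and the edges/target field names are assumed, not the
# storage class's real schema.
source_node_id = "node-a"
target_node_id = "node-b"

pipeline = [
    {"$match": {"_id": source_node_id}},
    {
        "$graphLookup": {
            "from": "graph",                 # collection holding adjacency docs
            "startWith": "$edges.target",
            "connectFromField": "edges.target",
            "connectToField": "_id",
            "as": "reachable",
            "maxDepth": 0,                   # 0 = direct neighbours only
        }
    },
    # A direct edge exists iff the target appears among the one-hop results.
    {"$project": {"has_edge": {"$in": [target_node_id, "$reachable._id"]}}},
]
```

As the source comments note, a plain `find_one` on the edge document would also answer the direct-edge question; `$graphLookup` is the general tool that extends to multi-hop reachability by raising `maxDepth`.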
@@ -1,27 +1,28 @@
-import os
+import array
 import asyncio
+import os

 # import html
 # import os
 from dataclasses import dataclass
 from typing import Any, Union
+
 import numpy as np
-import array
 import pipmaster as pm

 if not pm.is_installed("oracledb"):
     pm.install("oracledb")

-from ..utils import logger
+
+import oracledb

 from ..base import (
     BaseGraphStorage,
     BaseKVStorage,
     BaseVectorStorage,
 )
 from ..namespace import NameSpace, is_namespace
-
-import oracledb
+from ..utils import logger


 class OracleDB:
@@ -107,7 +108,7 @@ class OracleDB:
                         "SELECT id FROM GRAPH_TABLE (lightrag_graph MATCH (a) COLUMNS (a.id)) fetch first row only"
                     )
                 else:
-                    await self.query("SELECT 1 FROM {k}".format(k=k))
+                    await self.query(f"SELECT 1 FROM {k}")
             except Exception as e:
                 logger.error(f"Failed to check table {k} in Oracle database")
                 logger.error(f"Oracle database error: {e}")
@@ -181,8 +182,8 @@ class OracleKVStorage(BaseKVStorage):

     ################ QUERY METHODS ################

-    async def get_by_id(self, id: str) -> dict[str, Any]:
-        """get doc_full data based on id."""
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
+        """Get doc_full data based on id."""
         SQL = SQL_TEMPLATES["get_by_id_" + self.namespace]
         params = {"workspace": self.db.workspace, "id": id}
         # print("get_by_id:"+SQL)
@@ -191,7 +192,10 @@ class OracleKVStorage(BaseKVStorage):
             res = {}
             for row in array_res:
                 res[row["id"]] = row
-            return res
+            if res:
+                return res
+            else:
+                return None
         else:
             return await self.db.query(SQL, params)
@@ -209,7 +213,7 @@ class OracleKVStorage(BaseKVStorage):
             return None

     async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
-        """get doc_chunks data based on id"""
+        """Get doc_chunks data based on id"""
         SQL = SQL_TEMPLATES["get_by_ids_" + self.namespace].format(
             ids=",".join([f"'{id}'" for id in ids])
         )
@@ -4,34 +4,35 @@ import json
 import os
 import time
 from dataclasses import dataclass
-from typing import Union, List, Dict, Set, Any, Tuple
-import numpy as np
+from typing import Any, Dict, List, Set, Tuple, Union
+
+import numpy as np
 import pipmaster as pm

 if not pm.is_installed("asyncpg"):
     pm.install("asyncpg")

-import asyncpg
 import sys
-from tqdm.asyncio import tqdm as tqdm_async

+import asyncpg
 from tenacity import (
     retry,
     retry_if_exception_type,
     stop_after_attempt,
     wait_exponential,
 )
+from tqdm.asyncio import tqdm as tqdm_async

-from ..utils import logger
 from ..base import (
+    BaseGraphStorage,
     BaseKVStorage,
     BaseVectorStorage,
-    DocStatusStorage,
-    DocStatus,
     DocProcessingStatus,
-    BaseGraphStorage,
+    DocStatus,
+    DocStatusStorage,
 )
 from ..namespace import NameSpace, is_namespace
+from ..utils import logger

 if sys.platform.startswith("win"):
     import asyncio.windows_events
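Among the re-sorted imports, the tenacity group is the one worth pausing on: `retry`, `retry_if_exception_type`, `stop_after_attempt`, and `wait_exponential` compose into a single decorator. A sketch with illustrative bounds; this hunk only reorders the imports, so the file's actual retry policy is not shown here:

```python
# Hedged sketch of how the four tenacity imports above compose.
# The exception type and bounds are illustrative, not the file's policy.
from tenacity import (
    retry,
    retry_if_exception_type,
    stop_after_attempt,
    wait_exponential,
)


@retry(
    stop=stop_after_attempt(3),                      # give up after 3 tries
    wait=wait_exponential(multiplier=1, max=10),     # 1s, 2s, 4s... capped at 10s
    retry=retry_if_exception_type(ConnectionError),  # retry only transient errors
)
def flaky_call() -> str:
    return "ok"
```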
@@ -82,7 +83,7 @@ class PostgreSQLDB:
     async def check_tables(self):
         for k, v in TABLES.items():
             try:
-                await self.query("SELECT 1 FROM {k} LIMIT 1".format(k=k))
+                await self.query(f"SELECT 1 FROM {k} LIMIT 1")
             except Exception as e:
                 logger.error(f"Failed to check table {k} in PostgreSQL database")
                 logger.error(f"PostgreSQL database error: {e}")
@@ -183,7 +184,7 @@ class PGKVStorage(BaseKVStorage):

     ################ QUERY METHODS ################

-    async def get_by_id(self, id: str) -> dict[str, Any]:
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         """Get doc_full data by id."""
         sql = SQL_TEMPLATES["get_by_id_" + self.namespace]
         params = {"workspace": self.db.workspace, "id": id}
@@ -192,9 +193,10 @@ class PGKVStorage(BaseKVStorage):
             res = {}
             for row in array_res:
                 res[row["id"]] = row
-            return res
+            return res if res else None
         else:
-            return await self.db.query(sql, params)
+            response = await self.db.query(sql, params)
+            return response if response else None

     async def get_by_mode_and_id(self, mode: str, id: str) -> Union[dict, None]:
         """Specifically for llm_response_cache."""
@@ -421,7 +423,7 @@ class PGDocStatusStorage(DocStatusStorage):
     def __post_init__(self):
         pass

-    async def filter_keys(self, data: list[str]) -> set[str]:
+    async def filter_keys(self, data: set[str]) -> set[str]:
         """Return keys that don't exist in storage"""
         keys = ",".join([f"'{_id}'" for _id in data])
         sql = (
@@ -435,12 +437,12 @@ class PGDocStatusStorage(DocStatusStorage):
         existed = set([element["id"] for element in result])
         return set(data) - existed

-    async def get_by_id(self, id: str) -> dict[str, Any]:
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         sql = "select * from LIGHTRAG_DOC_STATUS where workspace=$1 and id=$2"
         params = {"workspace": self.db.workspace, "id": id}
         result = await self.db.query(sql, params, True)
         if result is None or result == []:
-            return {}
+            return None
         else:
             return DocProcessingStatus(
                 content=result[0]["content"],
lightrag/kg/qdrant_impl.py (new file, 127 lines)
@@ -0,0 +1,127 @@
+import asyncio
+import os
+from tqdm.asyncio import tqdm as tqdm_async
+from dataclasses import dataclass
+import numpy as np
+import hashlib
+import uuid
+
+from ..utils import logger
+from ..base import BaseVectorStorage
+
+import pipmaster as pm
+
+if not pm.is_installed("qdrant_client"):
+    pm.install("qdrant_client")
+
+from qdrant_client import QdrantClient, models
+
+
+def compute_mdhash_id_for_qdrant(
+    content: str, prefix: str = "", style: str = "simple"
+) -> str:
+    """
+    Generate a UUID based on the content and support multiple formats.
+
+    :param content: The content used to generate the UUID.
+    :param style: The format of the UUID, optional values are "simple", "hyphenated", "urn".
+    :return: A UUID that meets the requirements of Qdrant.
+    """
+    if not content:
+        raise ValueError("Content must not be empty.")
+
+    # Use the hash value of the content to create a UUID.
+    hashed_content = hashlib.sha256((prefix + content).encode("utf-8")).digest()
+    generated_uuid = uuid.UUID(bytes=hashed_content[:16], version=4)
+
+    # Return the UUID according to the specified format.
+    if style == "simple":
+        return generated_uuid.hex
+    elif style == "hyphenated":
+        return str(generated_uuid)
+    elif style == "urn":
+        return f"urn:uuid:{generated_uuid}"
+    else:
+        raise ValueError("Invalid style. Choose from 'simple', 'hyphenated', or 'urn'.")
+
+
+@dataclass
+class QdrantVectorDBStorage(BaseVectorStorage):
+    @staticmethod
+    def create_collection_if_not_exist(
+        client: QdrantClient, collection_name: str, **kwargs
+    ):
+        if client.collection_exists(collection_name):
+            return
+        client.create_collection(collection_name, **kwargs)
+
+    def __post_init__(self):
+        self._client = QdrantClient(
+            url=os.environ.get("QDRANT_URL"),
+            api_key=os.environ.get("QDRANT_API_KEY", None),
+        )
+        self._max_batch_size = self.global_config["embedding_batch_num"]
+        QdrantVectorDBStorage.create_collection_if_not_exist(
+            self._client,
+            self.namespace,
+            vectors_config=models.VectorParams(
+                size=self.embedding_func.embedding_dim, distance=models.Distance.COSINE
+            ),
+        )
+
+    async def upsert(self, data: dict[str, dict]):
+        logger.info(f"Inserting {len(data)} vectors to {self.namespace}")
+        if not len(data):
+            logger.warning("You insert an empty data to vector DB")
+            return []
+        list_data = [
+            {
+                "id": k,
+                **{k1: v1 for k1, v1 in v.items() if k1 in self.meta_fields},
+            }
+            for k, v in data.items()
+        ]
+        contents = [v["content"] for v in data.values()]
+        batches = [
+            contents[i : i + self._max_batch_size]
+            for i in range(0, len(contents), self._max_batch_size)
+        ]
+
+        async def wrapped_task(batch):
+            result = await self.embedding_func(batch)
+            pbar.update(1)
+            return result
+
+        embedding_tasks = [wrapped_task(batch) for batch in batches]
+        pbar = tqdm_async(
+            total=len(embedding_tasks), desc="Generating embeddings", unit="batch"
+        )
+        embeddings_list = await asyncio.gather(*embedding_tasks)
+
+        embeddings = np.concatenate(embeddings_list)
+
+        list_points = []
+        for i, d in enumerate(list_data):
+            list_points.append(
+                models.PointStruct(
+                    id=compute_mdhash_id_for_qdrant(d["id"]),
+                    vector=embeddings[i],
+                    payload=d,
+                )
+            )
+
+        results = self._client.upsert(
+            collection_name=self.namespace, points=list_points, wait=True
+        )
+        return results
+
+    async def query(self, query, top_k=5):
+        embedding = await self.embedding_func([query])
+        results = self._client.search(
+            collection_name=self.namespace,
+            query_vector=embedding[0],
+            limit=top_k,
+            with_payload=True,
+        )
+        logger.debug(f"query result: {results}")
+        return [{**dp.payload, "id": dp.id, "distance": dp.score} for dp in results]
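Qdrant accepts only unsigned integers or UUIDs as point IDs, which is why the new file derives a deterministic UUID from a SHA-256 of the content instead of reusing LightRAG's string hash IDs. A standalone check of that determinism, restating just the "simple" path of the helper above:

```python
# Deterministic: identical content always maps to the same Qdrant point ID,
# so re-upserting the same chunk overwrites one point instead of duplicating it.
import hashlib
import uuid


def content_to_point_id(content: str, prefix: str = "") -> str:
    # Same logic as compute_mdhash_id_for_qdrant's "simple" branch.
    hashed = hashlib.sha256((prefix + content).encode("utf-8")).digest()
    return uuid.UUID(bytes=hashed[:16], version=4).hex


a = content_to_point_id("hello world")
b = content_to_point_id("hello world")
assert a == b
print(a)  # 32-char hex UUID, valid as a Qdrant point ID
```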
@@ -1,5 +1,5 @@
 import os
-from typing import Any
+from typing import Any, Union
 from tqdm.asyncio import tqdm as tqdm_async
 from dataclasses import dataclass
 import pipmaster as pm
@@ -21,7 +21,7 @@ class RedisKVStorage(BaseKVStorage):
         self._redis = Redis.from_url(redis_url, decode_responses=True)
         logger.info(f"Use Redis as KV {self.namespace}")

-    async def get_by_id(self, id):
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         data = await self._redis.get(f"{self.namespace}:{id}")
         return json.loads(data) if data else None
@@ -32,7 +32,7 @@ class RedisKVStorage(BaseKVStorage):
         results = await pipe.execute()
         return [json.loads(result) if result else None for result in results]

-    async def filter_keys(self, data: list[str]) -> set[str]:
+    async def filter_keys(self, data: set[str]) -> set[str]:
         pipe = self._redis.pipeline()
         for key in data:
             pipe.exists(f"{self.namespace}:{key}")
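The Redis hunk is truncated after queuing one EXISTS per key; the remainder pairs the pipeline results back with the keys. A hedged sketch of the full shape, written against `redis.asyncio` as a free function rather than the actual `RedisKVStorage` method:

```python
# Hedged sketch of set-based filter_keys over a Redis pipeline;
# the real implementation is a RedisKVStorage method truncated above.
from redis.asyncio import Redis


async def filter_keys(redis: Redis, namespace: str, data: set[str]) -> set[str]:
    pipe = redis.pipeline()
    keys = list(data)                  # fix an iteration order so results line up
    for key in keys:
        pipe.exists(f"{namespace}:{key}")
    results = await pipe.execute()     # one round trip for all EXISTS calls
    # Keep only the keys whose EXISTS returned 0, i.e. not yet stored.
    return {key for key, found in zip(keys, results) if not found}
```

Batching through the pipeline keeps this a single network round trip regardless of how many keys are checked.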
@@ -14,12 +14,12 @@ if not pm.is_installed("sqlalchemy"):
 from sqlalchemy import create_engine, text
 from tqdm import tqdm

-from ..base import BaseVectorStorage, BaseKVStorage, BaseGraphStorage
-from ..utils import logger
+from ..base import BaseGraphStorage, BaseKVStorage, BaseVectorStorage
 from ..namespace import NameSpace, is_namespace
+from ..utils import logger


-class TiDB(object):
+class TiDB:
     def __init__(self, config, **kwargs):
         self.host = config.get("host", None)
         self.port = config.get("port", None)
@@ -108,12 +108,12 @@ class TiDBKVStorage(BaseKVStorage):

     ################ QUERY METHODS ################

-    async def get_by_id(self, id: str) -> dict[str, Any]:
+    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
         """Fetch doc_full data by id."""
         SQL = SQL_TEMPLATES["get_by_id_" + self.namespace]
         params = {"id": id}
         # print("get_by_id:"+SQL)
-        return await self.db.query(SQL, params)
+        response = await self.db.query(SQL, params)
+        return response if response else None

     # Query by id
     async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
@@ -178,7 +178,7 @@ class TiDBKVStorage(BaseKVStorage):
                     "tokens": item["tokens"],
                     "chunk_order_index": item["chunk_order_index"],
                     "full_doc_id": item["full_doc_id"],
-                    "content_vector": f"{item['__vector__'].tolist()}",
+                    "content_vector": f'{item["__vector__"].tolist()}',
                     "workspace": self.db.workspace,
                 }
             )
@@ -222,8 +222,7 @@ class TiDBVectorDBStorage(BaseVectorStorage):
         )

     async def query(self, query: str, top_k: int) -> list[dict]:
-        """search from tidb vector"""
-
+        """Search from tidb vector"""
         embeddings = await self.embedding_func([query])
         embedding = embeddings[0]
@@ -286,7 +285,7 @@ class TiDBVectorDBStorage(BaseVectorStorage):
                 "id": item["id"],
                 "name": item["entity_name"],
                 "content": item["content"],
-                "content_vector": f"{item['content_vector'].tolist()}",
+                "content_vector": f'{item["content_vector"].tolist()}',
                 "workspace": self.db.workspace,
             }
             # update entity_id if node inserted by graph_storage_instance before
@@ -308,7 +307,7 @@ class TiDBVectorDBStorage(BaseVectorStorage):
                 "source_name": item["src_id"],
                 "target_name": item["tgt_id"],
                 "content": item["content"],
-                "content_vector": f"{item['content_vector'].tolist()}",
+                "content_vector": f'{item["content_vector"].tolist()}',
                 "workspace": self.db.workspace,
             }
             # update relation_id if node inserted by graph_storage_instance before