Merge pull request #539 from ShanGor/main

Added PostgreSQL implementation
This commit is contained in:
zrguo
2025-01-04 22:13:06 +08:00
committed by GitHub
5 changed files with 1452 additions and 13 deletions

View File

@@ -0,0 +1,114 @@
import asyncio
import logging
import os
import time
from dotenv import load_dotenv
from lightrag import LightRAG, QueryParam
from lightrag.kg.postgres_impl import PostgreSQLDB
from lightrag.llm import ollama_embedding, zhipu_complete
from lightrag.utils import EmbeddingFunc
# Load DB credentials / ROOT_DIR from a .env file in the working directory.
load_dotenv()
# NOTE(review): if ROOT_DIR is unset this becomes "None/dickens-pg" — confirm
# the variable is required by deployment docs.
ROOT_DIR = os.environ.get("ROOT_DIR")
WORKING_DIR = f"{ROOT_DIR}/dickens-pg"

logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO)

if not os.path.exists(WORKING_DIR):
    os.mkdir(WORKING_DIR)

# AGE: name of the Apache AGE graph the PGGraphStorage backend will use.
os.environ["AGE_GRAPH_NAME"] = "dickens"

# Single shared PostgreSQL handle; main() wires it into every storage layer
# so all of them reuse one connection pool.
postgres_db = PostgreSQLDB(
    config={
        "host": "localhost",
        "port": 15432,
        "user": "rag",
        "password": "rag",
        "database": "rag",
    }
)
async def main():
    """Run an end-to-end LightRAG demo backed by PostgreSQL.

    Initializes the shared PostgreSQL pool, points every LightRAG storage
    layer at it, ingests ``book.txt``, then runs and times one query per
    retrieval mode (naive / local / global / hybrid).
    """
    await postgres_db.initdb()
    # Check if PostgreSQL DB tables exist; if not, tables will be created.
    await postgres_db.check_tables()

    rag = LightRAG(
        working_dir=WORKING_DIR,
        llm_model_func=zhipu_complete,
        llm_model_name="glm-4-flashx",
        llm_model_max_async=4,
        llm_model_max_token_size=32768,
        embedding_func=EmbeddingFunc(
            embedding_dim=768,
            max_token_size=8192,
            func=lambda texts: ollama_embedding(
                texts, embed_model="nomic-embed-text", host="http://localhost:11434"
            ),
        ),
        kv_storage="PGKVStorage",
        doc_status_storage="PGDocStatusStorage",
        graph_storage="PGGraphStorage",
        vector_storage="PGVectorStorage",
    )

    # Set the KV/vector/graph storage's `db` property, so all operations
    # use the same connection pool.
    for storage in (
        rag.doc_status,
        rag.full_docs,
        rag.text_chunks,
        rag.llm_response_cache,
        rag.key_string_value_json_storage_cls,
        rag.chunks_vdb,
        rag.relationships_vdb,
        rag.entities_vdb,
        rag.graph_storage_cls,
        rag.chunk_entity_relation_graph,
    ):
        storage.db = postgres_db

    # add embedding_func for graph database, it's deleted in commit
    # 5661d76860436f7bf5aef2e50d9ee4a59660146c
    rag.chunk_entity_relation_graph.embedding_func = rag.embedding_func

    with open(f"{ROOT_DIR}/book.txt", "r", encoding="utf-8") as f:
        await rag.ainsert(f.read())

    print("==== Trying to test the rag queries ====")
    question = "What are the top themes in this story?"
    for mode in ("naive", "local", "global", "hybrid"):
        label = mode.capitalize()
        print(f"**** Start {label} Query ****")
        # BUGFIX: the timer is now restarted per mode — the original never
        # reset start_time before the hybrid query, so its reported time
        # included the whole global query as well.
        start_time = time.time()
        print(await rag.aquery(question, param=QueryParam(mode=mode)))
        # BUGFIX: the global-query message was missing the " seconds" suffix.
        print(f"{label} Query Time: {time.time() - start_time} seconds")


if __name__ == "__main__":
    asyncio.run(main())

1183
lightrag/kg/postgres_impl.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,125 @@
import asyncio
import asyncpg
import sys
import os
import psycopg
from psycopg_pool import AsyncConnectionPool
from lightrag.kg.postgres_impl import PostgreSQLDB, PGGraphStorage
# Connection settings for the local test PostgreSQL (with Apache AGE) instance.
DB = "rag"
USER = "rag"
PASSWORD = "rag"
HOST = "localhost"
PORT = "15432"

# Name of the AGE graph that PGGraphStorage operates on.
os.environ["AGE_GRAPH_NAME"] = "dickens"

# On Windows, psycopg/asyncpg require the selector event loop rather than
# the default proactor loop.
if sys.platform.startswith("win"):
    import asyncio.windows_events

    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
async def get_pool():
    """Build and return an asyncpg connection pool for the test database."""
    dsn = f"postgres://{USER}:{PASSWORD}@{HOST}:{PORT}/{DB}"
    pool = await asyncpg.create_pool(
        dsn,
        min_size=10,
        max_size=10,
        max_queries=5000,
        max_inactive_connection_lifetime=300.0,
    )
    return pool
async def main1():
    """Create the 'dickens-2' AGE graph through a psycopg connection pool.

    An "already exists" error (InvalidSchemaName / UniqueViolation) is
    treated as success and rolled back. The connection is always returned
    to the pool and the pool is always closed — the original leaked both
    (getconn() without putconn(), and a no-op ``finally: pass``).
    """
    connection_string = (
        f"dbname='{DB}' user='{USER}' password='{PASSWORD}' host='{HOST}' port={PORT}"
    )
    pool = AsyncConnectionPool(connection_string, open=False)
    await pool.open()
    try:
        conn = await pool.getconn(timeout=10)
        try:
            async with conn.cursor() as curs:
                try:
                    # AGE functions live in ag_catalog; put it on the path.
                    await curs.execute('SET search_path = ag_catalog, "$user", public')
                    await curs.execute("SELECT create_graph('dickens-2')")
                    await conn.commit()
                    print("create_graph success")
                except (
                    psycopg.errors.InvalidSchemaName,
                    psycopg.errors.UniqueViolation,
                ):
                    print("create_graph already exists")
                    await conn.rollback()
        finally:
            # Return the connection to the pool (was leaked before).
            await pool.putconn(conn)
    finally:
        await pool.close()
# Shared LightRAG PostgreSQL handle used by the AGE helper coroutines below.
db = PostgreSQLDB(
    config={
        "host": "localhost",
        "port": 15432,
        "user": "rag",
        "password": "rag",
        "database": "rag",
    }
)
async def query_with_age():
    """Look up a single node in the AGE graph via PGGraphStorage and print it."""
    await db.initdb()

    storage = PGGraphStorage(
        namespace="chunk_entity_relation",
        global_config={},
        embedding_func=None,
    )
    storage.db = db

    node = await storage.get_node('"CHRISTMAS-TIME"')
    print("Node is: ", node)
async def create_edge_with_age():
    """Upsert two nodes and an edge between them, then read the edge back.

    Node identifiers are stored with embedded double quotes (AGE label
    convention used throughout this script), so lookups must use the same
    quoted form.
    """
    await db.initdb()

    graph = PGGraphStorage(
        namespace="chunk_entity_relation",
        global_config={},
        embedding_func=None,
    )
    graph.db = db

    await graph.upsert_node('"THE CRATCHITS"', {"hello": "world"})
    await graph.upsert_node('"THE GIRLS"', {"world": "hello"})
    await graph.upsert_edge(
        '"THE CRATCHITS"',
        '"THE GIRLS"',
        edge_data={
            "weight": 7.0,
            "description": '"The girls are part of the Cratchits family, contributing to their collective efforts and shared experiences.',
            "keywords": '"family, collective effort"',
            "source_id": "chunk-1d4b58de5429cd1261370c231c8673e8",
        },
    )

    # BUGFIX: the first argument previously lacked the embedded quotes
    # ("THE CRATCHITS" instead of '"THE CRATCHITS"'), so it could not match
    # the node id upserted above.
    res = await graph.get_edge('"THE CRATCHITS"', '"THE GIRLS"')
    print("Edge is: ", res)
async def main():
    """Query the AGE graph directly via asyncpg, creating the graph if missing."""
    conn_pool = await get_pool()
    cypher_sql = r"SELECT * FROM ag_catalog.cypher('dickens', $$ MATCH (n:帅哥) RETURN n $$) AS (n ag_catalog.agtype)"

    async with conn_pool.acquire() as conn:
        try:
            await conn.execute(
                """SET search_path = ag_catalog, "$user", public;select create_graph('dickens')"""
            )
        except asyncpg.exceptions.InvalidSchemaNameError:
            # Graph was created by an earlier run; nothing to do.
            print("create_graph already exists")

        records = await conn.fetch(cypher_sql)
        print("row is: ", records)

        # Sanity-check the connection with plain SQL.
        record = await conn.fetchrow("select '100'::int + 200 as result")
        print(record)  # <Record result=300>


if __name__ == "__main__":
    asyncio.run(query_with_age())

View File

@@ -85,8 +85,12 @@ ChromaVectorDBStorage = lazy_external_import(".kg.chroma_impl", "ChromaVectorDBS
TiDBKVStorage = lazy_external_import(".kg.tidb_impl", "TiDBKVStorage") TiDBKVStorage = lazy_external_import(".kg.tidb_impl", "TiDBKVStorage")
TiDBVectorDBStorage = lazy_external_import(".kg.tidb_impl", "TiDBVectorDBStorage") TiDBVectorDBStorage = lazy_external_import(".kg.tidb_impl", "TiDBVectorDBStorage")
TiDBGraphStorage = lazy_external_import(".kg.tidb_impl", "TiDBGraphStorage") TiDBGraphStorage = lazy_external_import(".kg.tidb_impl", "TiDBGraphStorage")
PGKVStorage = lazy_external_import(".kg.postgres_impl", "PGKVStorage")
PGVectorStorage = lazy_external_import(".kg.postgres_impl", "PGVectorStorage")
AGEStorage = lazy_external_import(".kg.age_impl", "AGEStorage") AGEStorage = lazy_external_import(".kg.age_impl", "AGEStorage")
PGGraphStorage = lazy_external_import(".kg.postgres_impl", "PGGraphStorage")
GremlinStorage = lazy_external_import(".kg.gremlin_impl", "GremlinStorage") GremlinStorage = lazy_external_import(".kg.gremlin_impl", "GremlinStorage")
PGDocStatusStorage = lazy_external_import(".kg.postgres_impl", "PGDocStatusStorage")
def always_get_an_event_loop() -> asyncio.AbstractEventLoop: def always_get_an_event_loop() -> asyncio.AbstractEventLoop:
@@ -297,6 +301,10 @@ class LightRAG:
"Neo4JStorage": Neo4JStorage, "Neo4JStorage": Neo4JStorage,
"OracleGraphStorage": OracleGraphStorage, "OracleGraphStorage": OracleGraphStorage,
"AGEStorage": AGEStorage, "AGEStorage": AGEStorage,
"PGGraphStorage": PGGraphStorage,
"PGKVStorage": PGKVStorage,
"PGDocStatusStorage": PGDocStatusStorage,
"PGVectorStorage": PGVectorStorage,
"TiDBGraphStorage": TiDBGraphStorage, "TiDBGraphStorage": TiDBGraphStorage,
"GremlinStorage": GremlinStorage, "GremlinStorage": GremlinStorage,
# "ArangoDBStorage": ArangoDBStorage # "ArangoDBStorage": ArangoDBStorage

View File

@@ -1,29 +1,38 @@
accelerate accelerate
aioboto3 aioboto3~=13.3.0
aiohttp aiofiles~=24.1.0
aiohttp~=3.11.11
asyncpg~=0.30.0
# database packages # database packages
graspologic graspologic
gremlinpython gremlinpython
hnswlib hnswlib
nano-vectordb nano-vectordb
neo4j neo4j~=5.27.0
networkx networkx~=3.2.1
ollama
openai numpy~=2.2.0
ollama~=0.4.4
openai~=1.58.1
oracledb oracledb
psycopg[binary,pool] psycopg-pool~=3.2.4
psycopg[binary,pool]~=3.2.3
pydantic~=2.10.4
pymilvus pymilvus
pymongo pymongo
pymysql pymysql
pyvis python-dotenv~=1.0.1
pyvis~=0.3.2
setuptools~=70.0.0
# lmdeploy[all] # lmdeploy[all]
sqlalchemy sqlalchemy~=2.0.36
tenacity tenacity~=9.0.0
# LLM packages # LLM packages
tiktoken tiktoken~=0.8.0
torch torch~=2.5.1+cu121
transformers tqdm~=4.67.1
transformers~=4.47.1
xxhash xxhash