Merge pull request #889 from YanSte/parelle-3

Multi batches: process document batches concurrently instead of sequentially
Yannick Stephan authored 2025-02-19 23:55:59 +01:00, committed by GitHub


@@ -793,11 +793,17 @@ class LightRAG:
        ]
        logger.info(f"Number of batches to process: {len(docs_batches)}.")
        batches: list[Any] = []
        # 3. iterate over batches
        for batch_idx, docs_batch in enumerate(docs_batches):
            logger.info(
                f"Start processing batch {batch_idx + 1} of {len(docs_batches)}."
            )
        async def batch(
            batch_idx: int,
            docs_batch: list[tuple[str, DocProcessingStatus]],
            size_batch: int,
        ) -> None:
            logger.info(f"Start processing batch {batch_idx + 1} of {size_batch}.")
            # 4. iterate over batch
            for doc_id_processing_status in docs_batch:
                doc_id, status_doc = doc_id_processing_status
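In the hunk above, the body of the old sequential loop becomes an async def batch(...) coroutine, so each batch turns into an awaitable unit of work that can later be scheduled all at once. A minimal sketch of that pattern, with a hypothetical process_doc stand-in for the real per-document pipeline (not LightRAG's API):

import asyncio

async def process_doc(doc_id: str) -> None:
    # stand-in for the real per-document work (chunking, upserts, extraction)
    await asyncio.sleep(0)

async def batch(batch_idx: int, docs_batch: list[str], size_batch: int) -> None:
    # one awaitable per batch; documents inside a batch are still handled in order
    print(f"Start processing batch {batch_idx + 1} of {size_batch}.")
    for doc_id in docs_batch:
        await process_doc(doc_id)
    print(f"Completed batch {batch_idx + 1} of {size_batch}.")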
@@ -835,7 +841,9 @@ class LightRAG:
                tasks = [
                    self.chunks_vdb.upsert(chunks),
                    self._process_entity_relation_graph(chunks),
                    self.full_docs.upsert({doc_id: {"content": status_doc.content}}),
                    self.full_docs.upsert(
                        {doc_id: {"content": status_doc.content}}
                    ),
                    self.text_chunks.upsert(chunks),
                    self.doc_status.upsert(
                        {
@@ -853,7 +861,6 @@ class LightRAG:
                ]
                try:
                    await asyncio.gather(*tasks)
                    await self._insert_done()
                except Exception as e:
                    logger.error(f"Failed to process document {doc_id}: {str(e)}")
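This hunk drops await self._insert_done() from the per-document try block; finalization now happens once after every batch completes (see the end of the diff). The per-document pattern of gathering independent storage writes while isolating failures can be sketched as follows (hypothetical upsert stub, not the real storage API):

import asyncio
from typing import Any

async def upsert(store: str, payload: dict[str, Any]) -> None:
    await asyncio.sleep(0)  # stand-in for a real storage write

async def process_one(doc_id: str, chunks: dict[str, Any]) -> None:
    # independent writes for one document, started together
    tasks = [
        upsert("chunks_vdb", chunks),
        upsert("full_docs", {doc_id: {"content": "..."}}),
        upsert("text_chunks", chunks),
    ]
    try:
        await asyncio.gather(*tasks)
    except Exception as e:
        # a failure is logged per document and does not abort the batch
        print(f"Failed to process document {doc_id}: {e}")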
@@ -873,6 +880,11 @@ class LightRAG:
                    continue
            logger.info(f"Completed batch {batch_idx + 1} of {len(docs_batches)}.")
            batches.append(batch(batch_idx, docs_batch, len(docs_batches)))
        await asyncio.gather(*batches)
        await self._insert_done()
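The net effect: a coroutine object is collected into batches for every batch, all of them are run concurrently with a single asyncio.gather, and _insert_done() is awaited once afterwards. A self-contained sketch of that fan-out/fan-in driver (hypothetical names, not the LightRAG entry point):

import asyncio
from typing import Any

async def batch(batch_idx: int, docs_batch: list[str], size_batch: int) -> None:
    # stand-in for the real per-batch worker shown in the diff above
    print(f"batch {batch_idx + 1}/{size_batch}: {len(docs_batch)} docs")
    await asyncio.sleep(0)

async def main(docs_batches: list[list[str]]) -> None:
    batches: list[Any] = []
    for batch_idx, docs_batch in enumerate(docs_batches):
        # calling a coroutine function only creates an awaitable; nothing runs yet
        batches.append(batch(batch_idx, docs_batch, len(docs_batches)))
    # every batch is scheduled at once and runs concurrently
    await asyncio.gather(*batches)
    # finalization (the _insert_done() call in the diff) happens once, afterwards
    print("all batches done")

asyncio.run(main([["doc-1", "doc-2"], ["doc-3"]]))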
    async def _process_entity_relation_graph(self, chunk: dict[str, Any]) -> None:
        try:
            new_kg = await extract_entities(