diff --git a/lightrag/api/lightrag_server.py b/lightrag/api/lightrag_server.py
index 36403241..699a961f 100644
--- a/lightrag/api/lightrag_server.py
+++ b/lightrag/api/lightrag_server.py
@@ -1381,15 +1381,15 @@ def create_app(args):
 
             # Insert into the RAG queue
             if content:
-                await rag.apipeline_enqueue_documents(content)
-                logging.info(
-                    f"Successfully processed and enqueued file: {file_path.name}"
-                )
+                has_new_docs = await rag.apipeline_enqueue_documents(content)
+                if has_new_docs:
+                    logging.info(f"Successfully processed and enqueued file: {file_path.name}")
+                else:
+                    logging.info(f"File content already exists, skipping: {file_path.name}")
                 return True
             else:
-                logging.error(
-                    f"No content could be extracted from file: {file_path.name}"
-                )
+                logging.error(f"No content could be extracted from file: {file_path.name}")
+                return False
 
         except Exception as e:
             logging.error(
diff --git a/lightrag/lightrag.py b/lightrag/lightrag.py
index 09a8df3f..ea45929e 100644
--- a/lightrag/lightrag.py
+++ b/lightrag/lightrag.py
@@ -653,7 +653,7 @@ class LightRAG:
         if update_storage:
             await self._insert_done()
 
-    async def apipeline_enqueue_documents(self, input: str | list[str]):
+    async def apipeline_enqueue_documents(self, input: str | list[str]) -> bool:
         """
         Pipeline for Processing Documents
 
@@ -691,11 +691,12 @@ class LightRAG:
 
         if not new_docs:
             logger.info("No new unique documents were found.")
-            return
+            return False
 
         # 4. Store status document
         await self.doc_status.upsert(new_docs)
         logger.info(f"Stored {len(new_docs)} new unique documents")
+        return True
 
     async def apipeline_process_enqueue_documents(
         self,