Fix linting

Author: yangdx
Date: 2025-05-22 10:06:09 +08:00
Parent: 1b6ddcaf5b
Commit: a6046bf827


@@ -91,26 +91,23 @@ class InsertTextRequest(BaseModel):
min_length=1,
description="The text to insert",
)
file_source: str = Field(
default=None,
min_length=0,
description="File Source"
)
file_source: str = Field(default=None, min_length=0, description="File Source")
@field_validator("text", mode="after")
@classmethod
def strip_text_after(cls, text: str) -> str:
return text.strip()
@field_validator("file_source", mode="after")
@classmethod
def strip_source_after(cls, file_source: str) -> str:
return file_source.strip()
class Config:
json_schema_extra = {
"example": {
"text": "This is a sample text to be inserted into the RAG system.",
"file_source": "Source of the text (optional)"
"file_source": "Source of the text (optional)",
}
}
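For reference, a minimal sketch of how the reformatted field and the two validators behave, assuming Pydantic v2; the standalone class below mirrors the diff but is illustrative, not the full server model:

from pydantic import BaseModel, Field, field_validator

class InsertTextRequestSketch(BaseModel):
    # Mirrors the diff: text is required, file_source is optional metadata.
    text: str = Field(min_length=1, description="The text to insert")
    file_source: str = Field(default=None, min_length=0, description="File Source")

    @field_validator("text", mode="after")
    @classmethod
    def strip_text_after(cls, text: str) -> str:
        # Runs after standard validation and trims surrounding whitespace.
        return text.strip()

    @field_validator("file_source", mode="after")
    @classmethod
    def strip_source_after(cls, file_source: str) -> str:
        # Only runs when a value is actually supplied; Pydantic does not
        # re-validate the None default, so .strip() is never called on None.
        return file_source.strip()

req = InsertTextRequestSketch(text="  hello  ", file_source=" notes.txt ")
print(req.text, req.file_source)  # -> hello notes.txt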
@@ -128,20 +125,19 @@ class InsertTextsRequest(BaseModel):
description="The texts to insert",
)
file_sources: list[str] = Field(
default=None,
min_length=0,
description="Sources of the texts"
default=None, min_length=0, description="Sources of the texts"
)
@field_validator("texts", mode="after")
@classmethod
def strip_texts_after(cls, texts: list[str]) -> list[str]:
return [text.strip() for text in texts]
@field_validator("file_sources", mode="after")
@classmethod
def strip_sources_after(cls, file_sources: list[str]) -> list[str]:
return [file_source.strip() for file_source in file_sources]
class Config:
json_schema_extra = {
"example": {
@@ -151,7 +147,7 @@ class InsertTextsRequest(BaseModel):
],
"file_sources": [
"First file source (optional)",
]
],
}
}
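Put together, the example schema describes a batch payload in which file_sources may be shorter than texts (min_length=0 even allows an empty list); the gap is reconciled later in pipeline_index_texts rather than by the model. A hedged sketch of such a request body, with placeholder text values (only the file_sources example value comes from the diff):

payload = {
    "texts": [
        "First sample text.",
        "Second sample text.",
    ],
    "file_sources": [
        "First file source (optional)",
    ],
}
# Each entry in texts is stripped by strip_texts_after; the missing second
# source is filled in with "unknown_source" inside pipeline_index_texts.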
@@ -680,7 +676,9 @@ async def pipeline_index_files(rag: LightRAG, file_paths: List[Path]):
logger.error(traceback.format_exc())
async def pipeline_index_texts(rag: LightRAG, texts: List[str],file_sources: List[str]=None):
async def pipeline_index_texts(
rag: LightRAG, texts: List[str], file_sources: List[str] = None
):
"""Index a list of texts
Args:
@@ -692,8 +690,11 @@ async def pipeline_index_texts(rag: LightRAG, texts: List[str],file_sources: Lis
return
if file_sources is not None:
if len(file_sources) != 0 and len(file_sources) != len(texts):
[file_sources.append("unknown_source") for _ in range(len(file_sources),len(texts))]
await rag.apipeline_enqueue_documents(input=texts,file_paths=file_sources)
[
file_sources.append("unknown_source")
for _ in range(len(file_sources), len(texts))
]
await rag.apipeline_enqueue_documents(input=texts, file_paths=file_sources)
await rag.apipeline_process_enqueue_documents()
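Outside the server code, a small sketch of what the reformatted padding block does: when file_sources is non-empty but shorter than texts, it is grown with "unknown_source" entries until the lengths match (the diff uses a list comprehension purely for its side effect; list.extend below is an equivalent spelling):

texts = ["doc one", "doc two", "doc three"]
file_sources = ["report.txt"]

# Same effect as the comprehension in the diff: append "unknown_source"
# until file_sources is as long as texts.
if len(file_sources) != 0 and len(file_sources) != len(texts):
    file_sources.extend("unknown_source" for _ in range(len(file_sources), len(texts)))

print(file_sources)  # ['report.txt', 'unknown_source', 'unknown_source']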
@@ -844,7 +845,12 @@ def create_document_routes(
HTTPException: If an error occurs during text processing (500).
"""
try:
background_tasks.add_task(pipeline_index_texts, rag, [request.text],file_sources=[request.file_source])
background_tasks.add_task(
pipeline_index_texts,
rag,
[request.text],
file_sources=[request.file_source],
)
return InsertResponse(
status="success",
message="Text successfully received. Processing will continue in background.",
@@ -879,7 +885,12 @@ def create_document_routes(
HTTPException: If an error occurs during text processing (500).
"""
try:
background_tasks.add_task(pipeline_index_texts, rag, request.texts,file_sources=request.file_sources)
background_tasks.add_task(
pipeline_index_texts,
rag,
request.texts,
file_sources=request.file_sources,
)
return InsertResponse(
status="success",
message="Text successfully received. Processing will continue in background.",