From 9fd0ab185f741b58bd795807da5e22ecf82c6235 Mon Sep 17 00:00:00 2001
From: yangdx
Date: Mon, 24 Feb 2025 16:28:18 +0800
Subject: [PATCH 1/2] Removed unnecessary comment

---
 lightrag/kg/networkx_impl.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/lightrag/kg/networkx_impl.py b/lightrag/kg/networkx_impl.py
index b4321458..1f5d34d0 100644
--- a/lightrag/kg/networkx_impl.py
+++ b/lightrag/kg/networkx_impl.py
@@ -176,12 +176,6 @@ class NetworkXStorage(BaseGraphStorage):
         """
         labels = set()
         for node in self._graph.nodes():
-            # node_data = dict(self._graph.nodes[node])
-            # if "entity_type" in node_data:
-            #     if isinstance(node_data["entity_type"], list):
-            #         labels.update(node_data["entity_type"])
-            #     else:
-            #         labels.add(node_data["entity_type"])
             labels.add(str(node))  # Add node id as a label
 
         # Return sorted list

From 4f76b1c23e3c7544482f35f21b42a9fb66b8cdc7 Mon Sep 17 00:00:00 2001
From: Konrad Wojciechowski
Date: Mon, 24 Feb 2025 03:29:39 +0100
Subject: [PATCH 2/2] fix AttributeError: 'NoneType' object has no attribute 'dim'

---
 lightrag/llm/hf.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/lightrag/llm/hf.py b/lightrag/llm/hf.py
index d678c611..fb5208b0 100644
--- a/lightrag/llm/hf.py
+++ b/lightrag/llm/hf.py
@@ -139,11 +139,14 @@ async def hf_model_complete(
 
 async def hf_embed(texts: list[str], tokenizer, embed_model) -> np.ndarray:
     device = next(embed_model.parameters()).device
-    input_ids = tokenizer(
+    encoded_texts = tokenizer(
         texts, return_tensors="pt", padding=True, truncation=True
-    ).input_ids.to(device)
+    ).to(device)
     with torch.no_grad():
-        outputs = embed_model(input_ids)
+        outputs = embed_model(
+            input_ids=encoded_texts["input_ids"],
+            attention_mask=encoded_texts["attention_mask"],
+        )
     embeddings = outputs.last_hidden_state.mean(dim=1)
     if embeddings.dtype == torch.bfloat16:
         return embeddings.detach().to(torch.float32).cpu().numpy()
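
Usage note (not part of the patches above): a minimal sketch of how the patched
hf_embed from lightrag/llm/hf.py could be exercised, showing the call pattern the
[PATCH 2/2] fix targets. The checkpoint name and the main() wrapper are
hypothetical choices for illustration, not something taken from the patch; any
encoder-style Hugging Face model loaded with AutoModel/AutoTokenizer is assumed
to work the same way.

# Minimal usage sketch, assuming an encoder-style checkpoint (name is illustrative).
import asyncio

import numpy as np
from transformers import AutoModel, AutoTokenizer

from lightrag.llm.hf import hf_embed


async def main() -> None:
    model_name = "sentence-transformers/all-MiniLM-L6-v2"  # hypothetical checkpoint
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    embed_model = AutoModel.from_pretrained(model_name)

    # The patched hf_embed forwards both input_ids and attention_mask from the
    # tokenizer output, which per the commit message avoids the
    # "'NoneType' object has no attribute 'dim'" AttributeError.
    texts = ["short text", "a somewhat longer text that forces padding in the batch"]
    embeddings = await hf_embed(texts, tokenizer, embed_model)

    assert isinstance(embeddings, np.ndarray)
    print(embeddings.shape)  # (2, hidden_size)


asyncio.run(main())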