Separated LLMs from the main llm.py file and fixed some deprecation bugs

Saifeddine ALOUI
2025-01-25 00:11:00 +01:00
parent 7e1638525c
commit 34018cb1e0
55 changed files with 2144 additions and 1301 deletions


@@ -3,7 +3,7 @@ import os
 import inspect
 import logging
 from lightrag import LightRAG, QueryParam
-from lightrag.llm import ollama_model_complete, ollama_embedding
+from lightrag.llm.ollama import ollama_model_complete, ollama_embed
 from lightrag.utils import EmbeddingFunc

 WORKING_DIR = "./dickens"
@@ -23,7 +23,7 @@ rag = LightRAG(
     embedding_func=EmbeddingFunc(
         embedding_dim=768,
         max_token_size=8192,
-        func=lambda texts: ollama_embedding(
+        func=lambda texts: ollama_embed(
             texts, embed_model="nomic-embed-text", host="http://localhost:11434"
         ),
     ),
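For reference, a minimal sketch of how this example reads after the rename. Only the new import path (lightrag.llm.ollama) and the ollama_embedding -> ollama_embed rename come from this commit's hunks; the llm_model_func / llm_model_name arguments and the query call are assumed from the rest of the demo script and are not part of this diff.

import os
from lightrag import LightRAG, QueryParam
from lightrag.llm.ollama import ollama_model_complete, ollama_embed
from lightrag.utils import EmbeddingFunc

WORKING_DIR = "./dickens"
if not os.path.exists(WORKING_DIR):
    os.mkdir(WORKING_DIR)

rag = LightRAG(
    working_dir=WORKING_DIR,
    # Assumed from the surrounding demo script; not shown in these hunks.
    llm_model_func=ollama_model_complete,
    llm_model_name="qwen2.5-coder:7b",  # placeholder model name, not from the diff
    embedding_func=EmbeddingFunc(
        embedding_dim=768,
        max_token_size=8192,
        func=lambda texts: ollama_embed(  # renamed from ollama_embedding
            texts, embed_model="nomic-embed-text", host="http://localhost:11434"
        ),
    ),
)

# Typical usage, unchanged by this commit:
# rag.insert(open("./book.txt").read())
# print(rag.query("What are the top themes?", param=QueryParam(mode="hybrid")))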