Separated LLMs from the main llm.py file and fixed some deprecation bugs
@@ -8,10 +8,6 @@ import time
 import re
 from typing import List, Dict, Any, Optional, Union
 from lightrag import LightRAG, QueryParam
-from lightrag.llm import lollms_model_complete, lollms_embed
-from lightrag.llm import ollama_model_complete, ollama_embed
-from lightrag.llm import openai_complete_if_cache, openai_embedding
-from lightrag.llm import azure_openai_complete_if_cache, azure_openai_embedding
 from lightrag.api import __api_version__
 
 from lightrag.utils import EmbeddingFunc
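The four monolithic lightrag.llm imports removed above now live in per-backend submodules. A minimal sketch of the updated import style for downstream code, using only the submodule paths that appear in the create_app hunk below:

# Deprecated form (removed in this commit):
#   from lightrag.llm import ollama_model_complete, ollama_embed
# Replacement: per-backend submodules (paths taken from the create_app hunk below).
from lightrag.llm.ollama import ollama_model_complete, ollama_embed
from lightrag.llm.openai import openai_complete_if_cache, openai_embed
from lightrag.llm.azure_openai import azure_openai_complete_if_cache, azure_openai_embed
from lightrag.llm.lollms import lollms_model_complete, lollms_embed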
@@ -720,6 +716,20 @@ def create_app(args):
 
     # Create working directory if it doesn't exist
     Path(args.working_dir).mkdir(parents=True, exist_ok=True)
+    if args.llm_binding_host == "lollms" or args.embedding_binding == "lollms":
+        from lightrag.llm.lollms import lollms_model_complete, lollms_embed
+    if args.llm_binding_host == "ollama" or args.embedding_binding == "ollama":
+        from lightrag.llm.ollama import ollama_model_complete, ollama_embed
+    if args.llm_binding_host == "openai" or args.embedding_binding == "openai":
+        from lightrag.llm.openai import openai_complete_if_cache, openai_embed
+    if (
+        args.llm_binding_host == "azure_openai"
+        or args.embedding_binding == "azure_openai"
+    ):
+        from lightrag.llm.azure_openai import (
+            azure_openai_complete_if_cache,
+            azure_openai_embed,
+        )
 
     async def openai_alike_model_complete(
         prompt,
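The added block imports only the backend selected by the llm/embedding binding arguments, so unused provider dependencies are never loaded. A hedged, self-contained sketch of the same lazy-selection pattern; the EMBED_BACKENDS table and resolve_embed helper are illustrative and not part of LightRAG, and running it assumes the lightrag.llm submodules from this commit are installed:

import argparse
import importlib

# Hypothetical mapping from binding name to (module, function),
# mirroring the conditional imports in create_app above.
EMBED_BACKENDS = {
    "lollms": ("lightrag.llm.lollms", "lollms_embed"),
    "ollama": ("lightrag.llm.ollama", "ollama_embed"),
    "openai": ("lightrag.llm.openai", "openai_embed"),
    "azure_openai": ("lightrag.llm.azure_openai", "azure_openai_embed"),
}

def resolve_embed(binding: str):
    # Import the backend module only when its binding is actually requested.
    module_name, func_name = EMBED_BACKENDS[binding]
    module = importlib.import_module(module_name)
    return getattr(module, func_name)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--embedding-binding", default="ollama",
                        choices=sorted(EMBED_BACKENDS))
    args = parser.parse_args()
    embed = resolve_embed(args.embedding_binding)
    print(f"Selected embedding function: {embed.__name__}")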
@@ -773,13 +783,13 @@ def create_app(args):
             api_key=args.embedding_binding_api_key,
         )
         if args.embedding_binding == "ollama"
-        else azure_openai_embedding(
+        else azure_openai_embed(
             texts,
             model=args.embedding_model,  # no host is used for openai,
             api_key=args.embedding_binding_api_key,
         )
         if args.embedding_binding == "azure_openai"
-        else openai_embedding(
+        else openai_embed(
             texts,
             model=args.embedding_model,  # no host is used for openai,
             api_key=args.embedding_binding_api_key,
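The only changes in this hunk are the deprecation renames azure_openai_embedding -> azure_openai_embed and openai_embedding -> openai_embed inside the chained conditional expression that picks an embedding call per binding. A non-authoritative sketch of what the two visible branches reduce to when written as an explicit dispatch; the select_embed_call name is illustrative, and the enclosing lambda/EmbeddingFunc wiring sits outside this hunk:

from lightrag.llm.azure_openai import azure_openai_embed
from lightrag.llm.openai import openai_embed

def select_embed_call(texts, args):
    # Mirrors the tail of the conditional expression above:
    # "... if args.embedding_binding == 'azure_openai' else openai_embed(...)".
    if args.embedding_binding == "azure_openai":
        return azure_openai_embed(
            texts,
            model=args.embedding_model,  # no host is used for openai
            api_key=args.embedding_binding_api_key,
        )
    return openai_embed(
        texts,
        model=args.embedding_model,  # no host is used for openai
        api_key=args.embedding_binding_api_key,
    )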