fix linting
@@ -725,10 +725,7 @@ def create_app(args):
         from lightrag.llm.ollama import ollama_model_complete, ollama_embed
     if args.llm_binding == "openai" or args.embedding_binding == "openai":
         from lightrag.llm.openai import openai_complete_if_cache, openai_embed
-    if (
-        args.llm_binding == "azure_openai"
-        or args.embedding_binding == "azure_openai"
-    ):
+    if args.llm_binding == "azure_openai" or args.embedding_binding == "azure_openai":
         from lightrag.llm.azure_openai import (
             azure_openai_complete_if_cache,
             azure_openai_embed,
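For context, this hunk sits in the part of create_app that lazily imports the configured LLM/embedding backend. The change is formatting-only: the azure_openai condition is collapsed onto one line (it fits within the formatter's line length), so behaviour is unchanged. Below is a minimal sketch of the binding-dispatch pattern involved; get_complete_fn is a hypothetical helper, and only the module and function names visible in the hunk are taken from the diff.

def get_complete_fn(llm_binding: str):
    # Lazily import the completion function for the configured binding.
    # Hypothetical helper for illustration, not part of this commit;
    # the module/function names mirror the imports shown above.
    if llm_binding == "ollama":
        from lightrag.llm.ollama import ollama_model_complete
        return ollama_model_complete
    if llm_binding == "openai":
        from lightrag.llm.openai import openai_complete_if_cache
        return openai_complete_if_cache
    if llm_binding == "azure_openai":
        from lightrag.llm.azure_openai import azure_openai_complete_if_cache
        return azure_openai_complete_if_cache
    raise ValueError(f"unknown llm_binding: {llm_binding}")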
@@ -1543,7 +1543,9 @@ async def naive_query(

     sys_prompt_temp = PROMPTS["naive_rag_response"]
     sys_prompt = sys_prompt_temp.format(
-        content_data=section, response_type=query_param.response_type, history=history_context
+        content_data=section,
+        response_type=query_param.response_type,
+        history=history_context,
     )

     if query_param.only_need_prompt:
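The second hunk is also formatting-only: the keyword arguments to str.format are re-wrapped one per line to satisfy the line-length limit. For illustration, here is a minimal sketch of how such a template gets filled; the template text below is a hypothetical stand-in, since the real PROMPTS["naive_rag_response"] string is not part of this diff, and only the placeholder names (content_data, response_type, history) come from the hunk above.

# Hypothetical stand-in template for illustration only.
naive_rag_template = (
    "---Conversation history---\n{history}\n\n"
    "---Document chunks---\n{content_data}\n\n"
    "Answer using the following format: {response_type}"
)

sys_prompt = naive_rag_template.format(
    content_data="chunk 1 ...\nchunk 2 ...",
    response_type="Multiple Paragraphs",
    history="user: earlier question\nassistant: earlier answer",
)
print(sys_prompt)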