From 8301f0a523ae1f42dc857cc3043243bd04ec6ea0 Mon Sep 17 00:00:00 2001
From: yangdx
Date: Mon, 24 Mar 2025 03:03:55 +0800
Subject: [PATCH] Move temperature parameter into kwargs for LLM calls

- Applied changes to both OpenAI and Azure OpenAI calls
---
 lightrag/api/lightrag_server.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/lightrag/api/lightrag_server.py b/lightrag/api/lightrag_server.py
index 15e78c40..de9e8714 100644
--- a/lightrag/api/lightrag_server.py
+++ b/lightrag/api/lightrag_server.py
@@ -200,6 +200,7 @@ def create_app(args):
             kwargs["response_format"] = GPTKeywordExtractionFormat
         if history_messages is None:
             history_messages = []
+        kwargs["temperature"] = args.temperature
         return await openai_complete_if_cache(
             args.llm_model,
             prompt,
@@ -207,7 +208,6 @@ def create_app(args):
             history_messages=history_messages,
             base_url=args.llm_binding_host,
             api_key=args.llm_binding_api_key,
-            temperature=args.temperature,
             **kwargs,
         )
 
@@ -223,6 +223,7 @@ def create_app(args):
             kwargs["response_format"] = GPTKeywordExtractionFormat
         if history_messages is None:
             history_messages = []
+        kwargs["temperature"] = args.temperature
         return await azure_openai_complete_if_cache(
             args.llm_model,
             prompt,
@@ -231,7 +232,6 @@ def create_app(args):
             history_messages=history_messages,
             base_url=args.llm_binding_host,
             api_key=os.getenv("AZURE_OPENAI_API_KEY"),
             api_version=os.getenv("AZURE_OPENAI_API_VERSION", "2024-08-01-preview"),
-            temperature=args.temperature,
             **kwargs,
         )
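
For illustration only: a minimal, runnable sketch of the call pattern the patch adopts. The names complete_if_cache, model_complete, and the args namespace below are hypothetical stand-ins, not LightRAG's actual API. It shows the CLI-configured temperature being merged into kwargs before the call, which also sidesteps Python's "got multiple values for keyword argument" TypeError if a caller ever supplies temperature through kwargs as well.

import asyncio
from types import SimpleNamespace


async def complete_if_cache(model, prompt, **kwargs):
    # Stand-in for an LLM completion helper that forwards extra keyword
    # arguments (temperature, response_format, ...) to the client library.
    return f"model={model}, prompt={prompt!r}, options={sorted(kwargs.items())}"


async def model_complete(prompt, args, **kwargs):
    # Passing temperature both explicitly and inside **kwargs would raise
    # "got multiple values for keyword argument 'temperature'" at the call
    # site, so the CLI value is merged into kwargs instead.
    kwargs["temperature"] = args.temperature
    return await complete_if_cache(args.llm_model, prompt, **kwargs)


if __name__ == "__main__":
    cli_args = SimpleNamespace(llm_model="gpt-4o-mini", temperature=0.2)
    print(asyncio.run(model_complete("hello", cli_args, response_format="json")))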