Update llm.py

Saifeddine ALOUI authored on 2025-01-20 08:58:08 +01:00 (committed by GitHub)
parent f18f484a87
commit e4945c9653

@@ -350,7 +350,10 @@ async def ollama_model_if_cache(
     timeout = kwargs.pop("timeout", None)
     kwargs.pop("hashing_kv", None)
     api_key = kwargs.pop("api_key", None)
-    headers = {"Authorization": f"Bearer {api_key}"} if api_key else {}
+    headers = {
+        "Content-Type": "application/json",
+        "Authorization": f"Bearer {api_key}"
+    } if api_key else {"Content-Type": "application/json"}
     ollama_client = ollama.AsyncClient(host=host, timeout=timeout, headers=headers)
     messages = []
     if system_prompt:
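
Both hunks make the same change: the JSON Content-Type header is now sent unconditionally, while the Authorization header is attached only when an api_key is supplied. A minimal standalone sketch of that pattern, assuming a hypothetical build_headers helper (the diff inlines this logic rather than factoring it out):

def build_headers(api_key=None):
    # Always declare the JSON content type; add Authorization only when a key is given.
    headers = {"Content-Type": "application/json"}
    if api_key:
        headers["Authorization"] = f"Bearer {api_key}"
    return headers

# e.g. ollama.AsyncClient(host=host, timeout=timeout, headers=build_headers(api_key))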
@@ -383,7 +386,10 @@ async def lollms_model_if_cache(
     stream = True if kwargs.get("stream") else False
     api_key = kwargs.pop("api_key", None)
-    headers = {"Authorization": f"Bearer {api_key}"} if api_key else {}
+    headers = {
+        "Content-Type": "application/json",
+        "Authorization": f"Bearer {api_key}"
+    } if api_key else {"Content-Type": "application/json"}
     # Extract lollms specific parameters
     request_data = {