Update llm.py
@@ -354,7 +354,7 @@ async def ollama_model_if_cache(
         "Content-Type": "application/json",
         "Authorization": f"Bearer {api_key}"
     } if api_key else {"Content-Type": "application/json"}
-    ollama_client = ollama.AsyncClient(host=host, timeout=timeout, headers=headers)
+    ollama_client = ollama.AsyncClient(host=host, timeout=timeout)  # temporary fix (TODO: rewrite this with better compatibility), headers=headers)
     messages = []
     if system_prompt:
         messages.append({"role": "system", "content": system_prompt})
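Note on the Ollama hunk: the commit drops headers=headers outright. If the compatibility problem is that some installed ollama-python versions reject a headers keyword, a minimal version-tolerant sketch could gate on the TypeError instead (hypothetical helper; make_ollama_client and the TypeError assumption are not part of this commit):

import ollama

def make_ollama_client(host, timeout, headers):
    # Hypothetical shim, assuming older ollama clients raise TypeError
    # for an unsupported headers kwarg at construction time.
    try:
        return ollama.AsyncClient(host=host, timeout=timeout, headers=headers)
    except TypeError:
        # Fall back to the kwargs every client version accepts.
        return ollama.AsyncClient(host=host, timeout=timeout)

This keeps the Authorization header on client versions that support it instead of losing it everywhere.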
@@ -418,7 +418,7 @@ async def lollms_model_if_cache(
     request_data["prompt"] = full_prompt
     timeout = aiohttp.ClientTimeout(total=kwargs.get("timeout", None))
 
-    async with aiohttp.ClientSession(timeout=timeout, headers=headers) as session:
+    async with aiohttp.ClientSession(timeout=timeout) as session:  # temporary fix (TODO: rewrite this with better compatibility), headers=headers) as session:
         if stream:
 
             async def inner():
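Note on the lollms hunk: aiohttp.ClientSession does accept a headers= argument, so a gentler workaround than dropping the headers would be to sanitize them before passing. A minimal sketch, assuming the breakage came from None-valued or non-string header entries (make_lollms_session is an invented name, not part of this commit):

import aiohttp

def make_lollms_session(timeout, headers):
    # Hypothetical alternative to removing headers entirely: keep only
    # string-valued entries, assuming malformed values caused the breakage.
    safe = {k: v for k, v in (headers or {}).items() if isinstance(v, str)}
    return aiohttp.ClientSession(timeout=timeout, headers=safe or None)

Usage would mirror the original code: async with make_lollms_session(timeout, headers) as session: ...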