Add support for Ollama streaming output and integrate Open-WebUI as the chat UI demo

Author: partoneplay
Date: 2024-12-06 08:48:55 +08:00
Parent: 2a2756d9d1
Commit: 335179196a
5 changed files with 203 additions and 23 deletions
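
For orientation, here is a minimal, hypothetical sketch of what consuming the new streaming output could look like from the caller's side. The stream flag on QueryParam is taken from the diff below; the LightRAG constructor arguments, the aquery call, and the mode value are assumptions for illustration and may differ from the actual API surface.

import asyncio

from lightrag import LightRAG, QueryParam  # import paths assumed


async def main():
    rag = LightRAG(working_dir="./rag_storage")  # minimal setup, model binding elided

    # With stream=True the query may come back as an async iterator of text
    # chunks instead of a finished string (see the isinstance() guard in the diff).
    result = await rag.aquery(
        "Summarize the indexed documents.",
        param=QueryParam(mode="hybrid", stream=True),
    )

    if isinstance(result, str):
        print(result)  # non-streaming backends still return a plain string
    else:
        async for chunk in result:
            print(chunk, end="", flush=True)


asyncio.run(main())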


@@ -534,8 +534,9 @@ async def kg_query(
     response = await use_model_func(
         query,
         system_prompt=sys_prompt,
+        stream=query_param.stream,
     )
-    if len(response) > len(sys_prompt):
+    if isinstance(response, str) and len(response) > len(sys_prompt):
         response = (
             response.replace(sys_prompt, "")
             .replace("user", "")