Merge pull request #436 from eltociear/patch-1

chore: update llm.py
Author: zrguo
Committed by: GitHub
Date: 2024-12-09 22:24:13 +08:00


@@ -402,7 +402,7 @@ async def lmdeploy_model_if_cache(
         import lmdeploy
         from lmdeploy import version_info, GenerationConfig
     except Exception:
-        raise ImportError("Please install lmdeploy before intialize lmdeploy backend.")
+        raise ImportError("Please install lmdeploy before initialize lmdeploy backend.")
     kwargs.pop("hashing_kv", None)
     kwargs.pop("response_format", None)
     max_new_tokens = kwargs.pop("max_tokens", 512)
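For context, the touched lines are the optional-dependency guard at the top of lmdeploy_model_if_cache: lmdeploy is imported lazily inside a try/except so the rest of the module loads without it, and a readable ImportError is raised only when the lmdeploy backend is actually requested. A minimal sketch of that pattern follows; the helper name _require_lmdeploy and the pip hint are illustrative, not code from this repository.

def _require_lmdeploy():
    # Import lmdeploy lazily so the module stays usable without the optional dependency.
    try:
        import lmdeploy
        from lmdeploy import version_info, GenerationConfig
    except ImportError as e:
        # Chain the original error so the underlying import failure stays visible.
        raise ImportError(
            "Please install lmdeploy before initializing the lmdeploy backend "
            "(e.g. pip install lmdeploy)."
        ) from e
    return lmdeploy, version_info, GenerationConfig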