chore: update llm.py
intialize -> initialize
committed by GitHub
parent e5cc43ad3c
commit b8cddb6c72
@@ -402,7 +402,7 @@ async def lmdeploy_model_if_cache(
         import lmdeploy
         from lmdeploy import version_info, GenerationConfig
     except Exception:
-        raise ImportError("Please install lmdeploy before intialize lmdeploy backend.")
+        raise ImportError("Please install lmdeploy before initialize lmdeploy backend.")
     kwargs.pop("hashing_kv", None)
     kwargs.pop("response_format", None)
     max_new_tokens = kwargs.pop("max_tokens", 512)
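For context, the changed line sits inside a guarded-import block at the top of lmdeploy_model_if_cache: the optional lmdeploy backend is imported lazily, and a clear install hint is raised if it is missing. A minimal sketch of that pattern, where the try line and the simplified function signature are assumptions not shown in this hunk:

async def lmdeploy_model_if_cache(model, prompt, **kwargs):
    # Import lazily so lmdeploy is only required when this backend is used.
    try:
        import lmdeploy
        from lmdeploy import version_info, GenerationConfig
    except Exception:
        # Replace the bare import failure with an actionable install hint.
        raise ImportError("Please install lmdeploy before initialize lmdeploy backend.")

    # Drop arguments the lmdeploy pipeline does not accept.
    kwargs.pop("hashing_kv", None)
    kwargs.pop("response_format", None)
    max_new_tokens = kwargs.pop("max_tokens", 512)
    ...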
|