fix truncation with global_config tokenizer

Author: drahnreb
Date: 2025-04-17 13:09:52 +02:00
parent 0e6771b503
commit 0f949dd5d7
2 changed files with 36 additions and 8 deletions


@@ -424,7 +424,7 @@ def is_float_regex(value: str) -> bool:
 def truncate_list_by_token_size(
-    list_data: list[Any], key: Callable[[Any], str], max_token_size: int
+    list_data: list[Any], key: Callable[[Any], str], max_token_size: int, tokenizer: Tokenizer
 ) -> list[int]:
     """Truncate a list of data by token size"""
     if max_token_size <= 0:
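
The hunk above only shows the signature change: the function now receives the tokenizer configured in global_config instead of relying on a fixed encoder. A minimal sketch of what the updated body might look like, assuming the Tokenizer object exposes an encode(str) -> list[int] method (the exact interface and the list[Any] return annotation here are assumptions, not taken from this commit):

from typing import Any, Callable

def truncate_list_by_token_size(
    list_data: list[Any],
    key: Callable[[Any], str],
    max_token_size: int,
    tokenizer: "Tokenizer",  # assumed interface: encode(str) -> list[int]
) -> list[Any]:
    """Truncate a list of data by cumulative token size."""
    if max_token_size <= 0:
        return []
    tokens = 0
    for i, data in enumerate(list_data):
        # Count tokens with the tokenizer passed in via global_config
        # rather than a hard-coded default encoder.
        tokens += len(tokenizer.encode(key(data)))
        if tokens > max_token_size:
            # Keep only the prefix that fits within the token budget.
            return list_data[:i]
    return list_data

Call sites would then pass the configured tokenizer explicitly, e.g. tokenizer=global_config["tokenizer"] (illustrative; the actual key name may differ).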