# SPDX-License-Identifier: Apache-2.0

from typing import TYPE_CHECKING, Any, Optional, Union

from vllm.transformers_utils.tokenizer import get_tokenizer
from vllm.transformers_utils.tokenizer_base import (TokenizerBase,
                                                    TokenizerRegistry)

if TYPE_CHECKING:
    from vllm.entrypoints.chat_utils import ChatCompletionMessageParam


class TestTokenizer(TokenizerBase):
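    # Minimal TokenizerBase stub: from_pretrained, bos_token_id and
    # eos_token_id return real values; everything else raises
    # NotImplementedError, which is all the registry test below needs.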
    @classmethod
    def from_pretrained(cls, *args, **kwargs) -> "TestTokenizer":
        return TestTokenizer()

    @property
    def all_special_tokens_extended(self) -> list[str]:
        raise NotImplementedError()

    @property
    def all_special_tokens(self) -> list[str]:
        raise NotImplementedError()

    @property
    def all_special_ids(self) -> list[int]:
        raise NotImplementedError()

    @property
    def bos_token_id(self) -> int:
        return 0

    @property
    def eos_token_id(self) -> int:
        return 1

    @property
    def sep_token(self) -> str:
        raise NotImplementedError()

    @property
    def pad_token(self) -> str:
        raise NotImplementedError()

    @property
    def is_fast(self) -> bool:
        raise NotImplementedError()

    @property
    def vocab_size(self) -> int:
        raise NotImplementedError()

    @property
    def max_token_id(self) -> int:
        raise NotImplementedError()

    def __call__(
        self,
        text: Union[str, list[str], list[int]],
        text_pair: Optional[str] = None,
        add_special_tokens: bool = False,
        truncation: bool = False,
        max_length: Optional[int] = None,
    ):
        raise NotImplementedError()

    def get_vocab(self) -> dict[str, int]:
        raise NotImplementedError()

    def get_added_vocab(self) -> dict[str, int]:
        raise NotImplementedError()

    def encode_one(
        self,
        text: str,
        truncation: bool = False,
        max_length: Optional[int] = None,
    ) -> list[int]:
        raise NotImplementedError()

    def encode(self,
               text: str,
               add_special_tokens: Optional[bool] = None) -> list[int]:
        raise NotImplementedError()

    def apply_chat_template(self,
                            messages: list["ChatCompletionMessageParam"],
                            tools: Optional[list[dict[str, Any]]] = None,
                            **kwargs) -> list[int]:
        raise NotImplementedError()

    def convert_tokens_to_string(self, tokens: list[str]) -> str:
        raise NotImplementedError()

    def decode(self,
               ids: Union[list[int], int],
               skip_special_tokens: bool = True) -> str:
        raise NotImplementedError()

    def convert_ids_to_tokens(
        self,
        ids: list[int],
        skip_special_tokens: bool = True,
    ) -> list[str]:
        raise NotImplementedError()


def test_customized_tokenizer():
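    # register() takes the tokenizer name plus the module path and class
    # name used to resolve TestTokenizer when it is looked up.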
    TokenizerRegistry.register("test_tokenizer",
                               "tests.tokenization.test_tokenizer_registry",
                               "TestTokenizer")

    # Look the tokenizer up directly through the registry.
    tokenizer = TokenizerRegistry.get_tokenizer("test_tokenizer")
    assert isinstance(tokenizer, TestTokenizer)
    assert tokenizer.bos_token_id == 0
    assert tokenizer.eos_token_id == 1

    # The registered class should also be reachable via the generic
    # get_tokenizer() entry point with tokenizer_mode="custom".
    tokenizer = get_tokenizer("test_tokenizer", tokenizer_mode="custom")
    assert isinstance(tokenizer, TestTokenizer)
    assert tokenizer.bos_token_id == 0
    assert tokenizer.eos_token_id == 1