3 files changed: +3 -273 lines changed
 from vllm.envs import VLLM_USE_MODELSCOPE
 from vllm.logger import init_logger
 from vllm.lora.request import LoRARequest
-from vllm.transformers_utils.tokenizers import (BaichuanTokenizer,
-                                                MistralTokenizer)
+from vllm.transformers_utils.tokenizers import MistralTokenizer
 from vllm.transformers_utils.utils import check_gguf_file
 from vllm.utils import make_async

@@ -139,19 +138,6 @@ def get_tokenizer(
             raise RuntimeError(err_msg) from e
         else:
             raise e
-    except AttributeError as e:
-        if "BaichuanTokenizer" in str(e):
-            # This is for the error "'BaichuanTokenizer' object has no
-            # attribute 'sp_model'".
-            tokenizer = BaichuanTokenizer.from_pretrained(
-                tokenizer_name,
-                *args,
-                trust_remote_code=trust_remote_code,
-                revision=revision,
-                **kwargs,
-            )
-        else:
-            raise e

     # NOTE: We can remove this after https://github.com/THUDM/ChatGLM3/issues/1324
     if type(tokenizer).__name__ in ("ChatGLMTokenizer",
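The removed branch existed only as a workaround for the "'BaichuanTokenizer' object has no attribute 'sp_model'" error; with it gone, affected models presumably load their tokenizer through the standard path instead. A minimal usage sketch follows; the module path and model name are assumptions, not taken from this diff:

# Sketch: loading a tokenizer through the regular path once the
# BaichuanTokenizer fallback has been removed. The model name is
# hypothetical; trust_remote_code lets the checkpoint supply its own
# tokenizer implementation.
from vllm.transformers_utils.tokenizer import get_tokenizer

tokenizer = get_tokenizer(
    "baichuan-inc/Baichuan2-7B-Chat",  # hypothetical model name
    trust_remote_code=True,
)
print(type(tokenizer).__name__)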
-from vllm.transformers_utils.tokenizers.baichuan import BaichuanTokenizer
-from vllm.transformers_utils.tokenizers.mistral import MistralTokenizer
+from .mistral import MistralTokenizer

-__all__ = ["BaichuanTokenizer", "MistralTokenizer"]
+__all__ = ["MistralTokenizer"]
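On the package side, only MistralTokenizer is re-exported after this change, so downstream imports of BaichuanTokenizer from this package now fail. A small illustration, using only the import paths shown in the diff:

# MistralTokenizer is still re-exported from the package.
from vllm.transformers_utils.tokenizers import MistralTokenizer

# BaichuanTokenizer is no longer part of the package, so this import
# now raises ImportError.
try:
    from vllm.transformers_utils.tokenizers import BaichuanTokenizer
except ImportError:
    BaichuanTokenizer = None  # callers must handle its absence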
This file was deleted.