Failed to load GLM-Z1-9B due to tokenization issue

#1
by Fleinstein - opened

[screenshot: UnicodeDecodeError traceback]
A UnicodeDecodeError occurs while AutoTokenizer is running. After changing the encoding, AutoTokenizer loads successfully this time, but the model still fails to load.
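A minimal sketch of the tokenizer step that raised the first error (assumptions: MODEL_PATH points at the local GLM-Z1-9B checkpoint directory; the poster's exact call is not shown):

from transformers import AutoTokenizer

# Hypothetical repro. On a Chinese-locale Windows install, text files read
# with the locale default encoding (GBK) instead of UTF-8 are a common
# source of UnicodeDecodeError during tokenizer loading.
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)

The model load then fails: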

model = AutoModelForCausalLM.from_pretrained(MODEL_PATH, device_map="auto")
[2025-04-15 16:31:49,114] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
Traceback (most recent call last):
File "C:\Python311\Lib\site-packages\transformers\utils\import_utils.py", line 1980, in get_module
return importlib.import_module("." + module_name, self.name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\importlib_init
.py", line 126, in import_module
return _bootstrap.gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "", line 1204, in gcd_import
File "", line 1176, in find_and_load
File "", line 1147, in find_and_load_unlocked
File "", line 690, in load_unlocked
File "", line 940, in exec_module
File "", line 241, in call_with_frames_removed
File "C:\Python311\Lib\site-packages\transformers\models\glm4\modeling_glm4.py", line 41, in
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
File "C:\Python311\Lib\site-packages\transformers\modeling_utils.py", line 159, in
import deepspeed
File "C:\Python311\Lib\site-packages\deepspeed_init
.py", line 25, in
from . import ops
File "C:\Python311\Lib\site-packages\deepspeed\ops_init
.py", line 11, in
from . import transformer
File "C:\Python311\Lib\site-packages\deepspeed\ops\transformer_init
.py", line 7, in
from .inference.config import DeepSpeedInferenceConfig
File "C:\Python311\Lib\site-packages\deepspeed\ops\transformer\inference_init
.py", line 7, in
from ....model_implementations.transformers.ds_transformer import DeepSpeedTransformerInference
File "C:\Python311\Lib\site-packages\deepspeed\model_implementations_init
.py", line 6, in
from .transformers.ds_transformer import DeepSpeedTransformerInference
File "C:\Python311\Lib\site-packages\deepspeed\model_implementations\transformers\ds_transformer.py", line 18, in
from deepspeed.ops.transformer.inference.triton.mlp import TritonMLP
File "C:\Python311\Lib\site-packages\deepspeed\ops\transformer\inference\triton_init
.py", line 10, in
from .ops import *
File "C:\Python311\Lib\site-packages\deepspeed\ops\transformer\inference\triton\ops.py", line 6, in
import deepspeed.ops.transformer.inference.triton.matmul_ext as matmul_ext
File "C:\Python311\Lib\site-packages\deepspeed\ops\transformer\inference\triton\matmul_ext.py", line 461, in
fp16_matmul = Fp16Matmul()
^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\deepspeed\ops\transformer\inference\triton\matmul_ext.py", line 210, in init
class._read_autotune_table()
File "C:\Python311\Lib\site-packages\deepspeed\ops\transformer\inference\triton\matmul_ext.py", line 444, in _read_autotune_table
TritonMatmul._read_autotune_table(class.name + "_2d_kernel", class._2d_kernel)
File "C:\Python311\Lib\site-packages\deepspeed\ops\transformer\inference\triton\matmul_ext.py", line 165, in _read_autotune_table
cache_manager = AutotuneCacheManager(cache_key)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\deepspeed\ops\transformer\inference\triton\matmul_ext.py", line 87, in init
TritonCacheDir.warn_if_nfs(self.cache_dir)
File "C:\Python311\Lib\site-packages\deepspeed\ops\transformer\inference\triton\matmul_ext.py", line 44, in warn_if_nfs
if is_nfs_path(cache_dir) and not TritonCacheDir._warning_printed:
^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\deepspeed\ops\transformer\inference\triton\matmul_ext.py", line 27, in is_nfs_path
output = subprocess.check_output(['df', '-T', path], encoding='utf-8')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\subprocess.py", line 466, in check_output
return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\subprocess.py", line 548, in run
with Popen(*popenargs, **kwargs) as process:
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\subprocess.py", line 1026, in init
self._execute_child(args, executable, preexec_fn, close_fds,
File "C:\Python311\Lib\subprocess.py", line 1538, in _execute_child
hp, ht, pid, tid = _winapi.CreateProcess(executable, args,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
File "", line 1, in
File "C:\Python311\Lib\site-packages\transformers\models\auto\auto_factory.py", line 568, in from_pretrained
model_class = _get_model_class(config, cls._model_mapping)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\transformers\models\auto\auto_factory.py", line 388, in _get_model_class
supported_models = model_mapping[type(config)]
~~~~~~~~~~~~~^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\transformers\models\auto\auto_factory.py", line 774, in getitem
return self._load_attr_from_module(model_type, model_name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\transformers\models\auto\auto_factory.py", line 788, in _load_attr_from_module
return getattribute_from_module(self._modules[module_name], attr)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\transformers\models\auto\auto_factory.py", line 700, in getattribute_from_module
if hasattr(module, attr):
^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\transformers\utils\import_utils.py", line 1968, in getattr
module = self._get_module(self._class_to_module[name])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\transformers\utils\import_utils.py", line 1982, in _get_module
raise RuntimeError(
RuntimeError: Failed to import transformers.models.glm4.modeling_glm4 because of the following error (look up to see its
traceback):
[WinError 2] The system cannot find the file specified. (系统找不到指定的文件。)
How can I solve this?
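For context, the last frames of the traceback show DeepSpeed's Triton autotune cache shelling out to df -T, a Unix-only command that does not exist on Windows; that missing executable is exactly what [WinError 2] reports. A hedged workaround, assuming DeepSpeed is not otherwise needed (transformers should only attempt this import when the package is installed), is to remove it:

pip uninstall deepspeed

With DeepSpeed gone, from_pretrained should no longer walk the deepspeed import chain shown above.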

Knowledge Engineering Group (KEG) & Data Mining at Tsinghua University org

Have you updated the transformers version to 4.51.3?
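If not, upgrading in place (assuming a pip-managed environment) is:

pip install -U transformers==4.51.3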

I'm receiving a similar message even after upgrading to Transformers 4.51.3:

[screenshot: the same RuntimeError after upgrading]

Knowledge Engineering Group (KEG) & Data Mining at Tsinghua University org

This seems to be an encoding issue in your code. A better option would be to open an issue on GitHub with the error and your code, and I will try to reproduce it.
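If the root cause is the GBK-versus-UTF-8 mismatch that is common on Chinese-locale Windows, one thing worth trying before filing the issue is Python's UTF-8 mode, which forces UTF-8 as the default text encoding (your_script.py is a placeholder):

set PYTHONUTF8=1
python your_script.py

or, equivalently, for a single run:

python -X utf8 your_script.py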
