Looks like metadata is missing from safetensors file.
#7
by
onkar-chougule
- opened
I tried to write my own quantizer and then load the model using transformers, where it fails with the error below:
/.pyenv/versions/3.10.14/envs/py310/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:564: in from_pretrained
return model_class.from_pretrained(
/.pyenv/versions/3.10.14/envs/py310/lib/python3.10/site-packages/transformers/modeling_utils.py:4014: in from_pretrained
) = cls._load_pretrained_model(
/.pyenv/versions/3.10.14/envs/py310/lib/python3.10/site-packages/transformers/modeling_utils.py:4482: in _load_pretrained_model
state_dict = load_state_dict(shard_file, is_quantized=is_quantized)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
checkpoint_file = '/hub/models--neuralmagic--Meta-Llama-3-8B-Instruct-FP8/snapshots/c5c6b5700a4178ef1fdae2ae37827382b90eb400/model-00001-of-00002.safetensors'
is_quantized = True
def load_state_dict(checkpoint_file: Union[str, os.PathLike], is_quantized: bool = False):
"""
Reads a PyTorch checkpoint file, returning properly formatted errors if they arise.
"""
if checkpoint_file.endswith(".safetensors") and is_safetensors_available():
# Check format of the archive
with safe_open(checkpoint_file, framework="pt") as f:
metadata = f.metadata()
> if metadata.get("format") not in ["pt", "tf", "flax", "mlx"]:
E AttributeError: 'NoneType' object has no attribute 'get'
/.pyenv/versions/3.10.14/envs/py310/lib/python3.10/site-packages/transformers/modeling_utils.py:551: AttributeError
Refer to this similar thread for the fix: https://huggingface.co/SeaLLMs/SeaLLM-7B-Hybrid/discussions/2