runtime error
Exit code: 1. Reason:
ome/user/app/app.py", line 52, in <module>
    tokenizer, model, image_processor, context_len = load_model(args)
  File "/home/user/app/ferretui/eval/model_UI.py", line 135, in load_model
    load_pretrained_model(model_path, args.model_base, model_name, use_safetensors=True)
  File "/home/user/app/ferretui/model/builder.py", line 109, in load_pretrained_model
    tokenizer = AutoTokenizer.from_pretrained(model_path)
  File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 950, in from_pretrained
    tokenizer_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 782, in get_tokenizer_config
    resolved_config_file = cached_file(
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/hub.py", line 312, in cached_file
    file = cached_files(path_or_repo_id=path_or_repo_id, filenames=[filename], **kwargs)
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/hub.py", line 522, in cached_files
    resolved_files = [
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/hub.py", line 523, in <listcomp>
    _get_cache_file_to_return(path_or_repo_id, filename, cache_dir, revision) for filename in full_filenames
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/hub.py", line 140, in _get_cache_file_to_return
    resolved_file = try_to_load_from_cache(path_or_repo_id, full_filename, cache_dir=cache_dir, revision=revision)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 106, in _inner_fn
    validate_repo_id(arg_value)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 160, in validate_repo_id
    raise HFValidationError(
huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: './gemma2b-anyres'.
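The HFValidationError suggests that './gemma2b-anyres' was not found as a directory inside the container, so transformers fell back to treating the string as a Hub repo id, which the leading './' makes invalid. A minimal sketch of a guard, assuming the path from the traceback (the fallback repo id "<user>/gemma2b-anyres" is a placeholder, not a real repository):

import os
from transformers import AutoTokenizer

model_path = "./gemma2b-anyres"

if os.path.isdir(model_path):
    # Local checkpoint present in the Space: pass the resolved directory path
    # so transformers loads it from disk and never touches the Hub.
    tokenizer = AutoTokenizer.from_pretrained(os.path.abspath(model_path))
else:
    # Directory missing in the container: point to a valid Hub repo id instead
    # (placeholder shown; replace with the actual repository, or upload the
    # local model files to the Space so the directory exists).
    tokenizer = AutoTokenizer.from_pretrained("<user>/gemma2b-anyres")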