HPT / projector / config.json
{
  "_name_or_path": "./",
  "architectures": [
    "HformerModel"
  ],
  "auto_map": {
    "AutoConfig": "configuration_hformer.HformerConfig",
    "AutoModel": "modeling_hformer.HformerModel"
  },
  "bert": "bert-base-uncased",
  "bias": true,
  "cross_attention_freq": 2,
  "llm_hidden_size": 4096,
  "model_type": "hformer",
  "num_query_token": 32,
  "qformer_pth": null,
  "torch_dtype": "float16",
  "transformers_version": "4.37.0",
  "visual_hidden_size": 1024
}
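
The auto_map entries register custom HformerConfig / HformerModel classes shipped alongside the checkpoint, so loading this projector through transformers requires trust_remote_code=True. Below is a minimal loading sketch; the path is a placeholder for the folder (or Hub repo) containing this config.json together with configuration_hformer.py and modeling_hformer.py. The dimensions come straight from the fields above: 32 learned query tokens (num_query_token) bridging 1024-dim visual features (visual_hidden_size) to a 4096-dim LLM embedding space (llm_hidden_size), with cross-attention inserted every 2 BERT layers (cross_attention_freq), in the style of a BLIP-2 Q-Former (an assumption based on the field names, not stated in the config itself).

import torch
from transformers import AutoConfig, AutoModel

# Placeholder path: the directory holding this config.json plus the
# custom configuration_hformer.py / modeling_hformer.py files.
path = "./HPT/projector"

# trust_remote_code=True is required because auto_map points at
# custom classes rather than built-in transformers architectures.
config = AutoConfig.from_pretrained(path, trust_remote_code=True)
model = AutoModel.from_pretrained(
    path,
    trust_remote_code=True,
    torch_dtype=torch.float16,  # matches "torch_dtype" in the config
)

# Per the config fields, the projector should map visual features of
# hidden size 1024 to num_query_token (32) output vectors of the LLM
# hidden size 4096; the exact forward signature is defined by the
# custom modeling_hformer.py, so consult that file before use.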