{
  "_name_or_path": "./diff_attention_model",
  "architectures": [
    "DiffAttentionPreTrainedModel"
  ],
  "embed_dim": 768,
  "hidden_dim": 768,
  "lambda_": 0.5,
  "lambda_init": 0.1,
  "max_seq_len": 1024,
  "model_type": "diff_attention_lm",
  "num_heads": 6,
  "num_layers": 12,
  "torch_dtype": "float32",
  "transformers_version": "4.46.2",
  "vocab_size": 50257
}
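
Because "diff_attention_lm" is a custom model_type that stock transformers does not recognize, loading this file requires a matching config class. Below is a minimal sketch of one, mirroring the fields above; the class name DiffAttentionConfig and its constructor defaults are assumptions (only the DiffAttentionPreTrainedModel architecture is named here), while from_json_file and AutoConfig.register are standard transformers APIs.

from transformers import AutoConfig, PretrainedConfig

class DiffAttentionConfig(PretrainedConfig):
    # Hypothetical config class; only DiffAttentionPreTrainedModel is
    # named in this repo, so this class and its defaults are assumed.
    model_type = "diff_attention_lm"

    def __init__(
        self,
        embed_dim=768,
        hidden_dim=768,
        lambda_=0.5,
        lambda_init=0.1,
        max_seq_len=1024,
        num_heads=6,
        num_layers=12,
        vocab_size=50257,
        **kwargs,
    ):
        self.embed_dim = embed_dim
        self.hidden_dim = hidden_dim
        self.lambda_ = lambda_
        self.lambda_init = lambda_init
        self.max_seq_len = max_seq_len
        self.num_heads = num_heads
        self.num_layers = num_layers
        self.vocab_size = vocab_size
        # Remaining keys (_name_or_path, architectures, torch_dtype,
        # transformers_version) are handled by PretrainedConfig itself.
        super().__init__(**kwargs)

# Read the values from this file into a config object.
config = DiffAttentionConfig.from_json_file("config.json")
print(config.num_layers, config.num_heads)  # 12 6

# Optional: register the model_type so AutoConfig.from_pretrained
# can resolve it for this repo.
AutoConfig.register("diff_attention_lm", DiffAttentionConfig)

Registering the config is only needed if the repo does not ship its own modeling code; repos that include custom code can instead be loaded with trust_remote_code=True.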