{ "attn_cfg": {}, "attn_layer_idx": [], "d_intermediate": 0, "d_model": 2048, "fused_add_norm": true, "model_type": "mamba", "n_layer": 48, "pad_vocab_size_multiple": 16, "residual_in_fp32": true, "rms_norm": true, "ssm_cfg": { "layer": "Mamba2" }, "tie_embeddings": true, "transformers_version": "4.50.0.dev0", "use_cache": true, "vocab_size": 50277 }