{ "architectures": ["ArceeForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128003, "head_dim": 128, "hidden_act": "relu2", "hidden_size": 2560, "initializer_range": 0.02, "intermediate_size": 18432, "max_position_embeddings": 65536, "mlp_bias": false, "model_type": "arcee", "num_attention_heads": 20, "num_hidden_layers": 36, "num_key_value_heads": 4, "rms_norm_eps": 1e-5, "rope_scaling": { "beta_fast": 32.0, "beta_slow": 1.0, "factor": 20.0, "mscale": 1.0, "original_max_position_embeddings": 4096, "rope_type": "yarn", "type": "yarn" }, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.54.1", "use_cache": false, "vocab_size": 128004 }