{ "architectures": [ "DeepseekV3ForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "auto_map": { "AutoConfig": "configuration_deepseek.DeepseekV3Config", "AutoModel": "modeling_deepseek.DeepseekV3Model", "AutoModelForCausalLM": "modeling_deepseek.DeepseekV3ForCausalLM" }, "aux_loss_alpha": 0.001, "bos_token_id": 163584, "eos_token_id": 163585, "first_k_dense_replace": 1, "hidden_act": "silu", "hidden_size": 7168, "initializer_range": 0.02, "intermediate_size": 18432, "kv_lora_rank": 512, "max_position_embeddings": 131072, "model_type": "kimi_k2", "moe_intermediate_size": 2048, "moe_layer_freq": 1, "n_group": 1, "n_routed_experts": 384, "n_shared_experts": 1, "norm_topk_prob": true, "num_attention_heads": 64, "num_experts_per_tok": 8, "num_hidden_layers": 61, "num_key_value_heads": 64, "num_nextn_predict_layers": 0, "pretraining_tp": 1, "q_lora_rank": 1536, "qk_nope_head_dim": 128, "qk_rope_head_dim": 64, "quantization_config": { "activation_scheme": "dynamic", "fmt": "e4m3", "quant_method": "fp8", "weight_block_size": [ 128, 128 ] }, "rms_norm_eps": 1e-06, "rope_theta": 50000.0, "routed_scaling_factor": 2.827, "rope_scaling": { "beta_fast": 1.0, "beta_slow": 1.0, "factor": 32.0, "mscale": 1.0, "mscale_all_dim": 1.0, "original_max_position_embeddings": 4096, "type": "yarn" }, "scoring_func": "sigmoid", "seq_aux": true, "tie_word_embeddings": false, "topk_group": 1, "topk_method": "noaux_tc", "torch_dtype": "bfloat16", "transformers_version": "4.48.3", "use_cache": true, "v_head_dim": 128, "vocab_size": 163840 }