{ "activation_dropout": 0.0, "activation_function": "relu", "architectures": [ "IndicTransForConditionalGeneration" ], "attention_dropout": 0.0, "attn_implementation": "eager", "bos_token_id": 0, "decoder_attention_heads": 16, "decoder_embed_dim": 1536, "decoder_ffn_dim": 4096, "decoder_layerdrop": 0, "decoder_layers": 6, "decoder_normalize_before": false, "decoder_start_token_id": 2, "decoder_vocab_size": 32088, "dropout": 0.2, "encoder_attention_heads": 16, "encoder_embed_dim": 1536, "encoder_ffn_dim": 4096, "encoder_layerdrop": 0, "encoder_layers": 6, "encoder_normalize_before": false, "encoder_vocab_size": 35904, "eos_token_id": 2, "init_std": 0.02, "is_encoder_decoder": true, "layernorm_embedding": false, "max_source_positions": 210, "max_target_positions": 210, "model_type": "IndicTrans", "num_hidden_layers": 6, "pad_token_id": 1, "scale_embedding": true, "share_decoder_input_output_embed": false, "torch_dtype": "float32", "transformers_version": "4.49.0", "use_cache": true }