SnowflakeCore-G1-Tiny2 / config.json
{
"architectures": [
"SnowflakeCoreG1"
],
"model_type": "snowflake_core",
"vocab_size": 50257,
"embed_dim": 1024,
"num_heads": 16,
"num_layers": 24,
"max_length": 2048,
"ffn_dim": 4096,
"pad_token_id": 50256,
"eos_token_id": 50256,
"bos_token_id": 50256,
"unk_token_id": 50256,
"auto_map": {
"AutoConfig": "configuration_snowflake_core.SnowflakeCoreConfig",
"AutoModelForCausalLM": "modeling_snowflake_core.SnowflakeCoreG1"
},
"training_config": {
"epochs": 5,
"actual_epochs": 5,
"batch_size": 1,
"learning_rate": 0.0002,
"grad_accum_steps": 32,
"max_length": 2048,
"val_split_ratio": 0.1,
"early_stopping": {
"patience": 3,
"min_delta": 0.001,
"triggered": false
}
},
"training_metrics": {
"final_train_loss": null,
"final_train_perplexity": null,
"final_val_loss": null,
"final_val_perplexity": null,
"best_val_loss": Infinity,
"best_val_perplexity": null
}
}
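
The auto_map block above tells transformers to load the custom SnowflakeCoreConfig and SnowflakeCoreG1 classes from the repository's own Python files, which requires trust_remote_code=True. A minimal loading sketch, assuming the repository id is FlameF0X/SnowflakeCore-G1-Tiny2 (inferred from the page title) and that the custom config exposes the keys above as plain attributes:

# Hedged sketch: loading this repo with Hugging Face transformers.
# The repo id below is an assumption taken from the page title; adjust as needed.
# trust_remote_code=True is required because auto_map points at custom
# configuration_snowflake_core.py / modeling_snowflake_core.py files.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "FlameF0X/SnowflakeCore-G1-Tiny2"  # assumed repository id

config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# If the custom config mirrors config.json, these should print 1024 16 24.
print(config.embed_dim, config.num_heads, config.num_layers)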