```json
{
  "model_type": "Text_to_Image",
  "text_encoder": {
    "model_type": "gpt2",
    "vocab_size": 50257,
    "n_positions": 1024,
    "n_embd": 768,
    "n_layer": 12,
    "n_head": 12,
    "activation_function": "gelu_new",
    "layer_norm_epsilon": 1e-5,
    "initializer_range": 0.02,
    "bos_token_id": 50256,
    "eos_token_id": 50256,
    "pad_token_id": 50256,
    "transformers_version": "4.0.0"
  },
  "image_decoder": {
    "model_type": "vae",
    "image_size": 256,
    "latent_dim": 1024,
    "n_channels": 3,
    "hidden_dims": [64, 128, 256, 512],
    "beta": 4.0,
    "gamma": 1000,
    "max_capacity": 25,
    "Capacity_max_iter": 100000,
    "loss_type": "BCE"
  },
  "training": {
    "learning_rate": 0.0001,
    "batch_size": 32,
    "num_epochs": 100,
    "gradient_accumulation_steps": 1,
    "fp16": true,
    "device": "cuda"
  }
}
```
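The `text_encoder` block maps directly onto a Hugging Face `GPT2Config`. As a minimal sketch, assuming the file is saved as `model_config.json` (the file name and loading code below are illustrative, not part of this repository):

```python
import json

from transformers import GPT2Config, GPT2Model

# Load the full config; "model_config.json" is an assumed file name.
with open("model_config.json") as f:
    cfg = json.load(f)

# Build the text encoder from the "text_encoder" block. Metadata keys
# such as "model_type" and "transformers_version" are tolerated by
# GPT2Config.from_dict and stored on the resulting config object.
text_encoder = GPT2Model(GPT2Config.from_dict(cfg["text_encoder"]))

print(text_encoder.config.n_embd)  # 768
```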
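The `image_decoder` hyperparameters `beta`, `gamma`, `max_capacity`, and `Capacity_max_iter` look like the knobs of a capacity-annealed β-VAE objective in the style of Burgess et al. (2018), with `"loss_type": "BCE"` naming the reconstruction term. The sketch below is an assumption about how they might combine, not code from this config's source; `vae_loss` and all argument names are hypothetical:

```python
import torch
import torch.nn.functional as F

def vae_loss(recon, target, mu, log_var, step,
             gamma=1000.0, max_capacity=25.0, capacity_max_iter=100_000):
    """Hypothetical: BCE reconstruction plus a capacity-annealed KL term."""
    # Reconstruction term; BCE expects recon and target in [0, 1].
    recon_loss = F.binary_cross_entropy(recon, target, reduction="sum")
    # KL divergence between q(z|x) = N(mu, sigma^2) and the unit Gaussian.
    kld = -0.5 * torch.sum(1 + log_var - mu.pow(2) - log_var.exp())
    # Anneal the target capacity C from 0 to max_capacity over
    # Capacity_max_iter training steps, then hold it fixed.
    capacity = min(max_capacity, max_capacity * step / capacity_max_iter)
    return recon_loss + gamma * (kld - capacity).abs()
```

In this capacity-annealed variant, `gamma` weights the penalty and `beta` goes unused; the plain β-VAE objective would instead be `recon_loss + beta * kld`, which is presumably where `"beta": 4.0` would enter.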