|
NUM_GPUS=1 |
|
MASTER_ADDR=ip-10-0-136-5 |
|
MASTER_PORT=17942 |
|
WORLD_SIZE=1 |
|
PID of this process = 1046807 |
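
A minimal sketch (assumed, not taken from the actual script) of how a training script might consume the launcher-provided environment printed above; only the variable names and values come from the log:

    import os
    import torch.distributed as dist

    # Read the launcher-provided environment (values mirror the log above).
    master_addr = os.environ.get("MASTER_ADDR", "localhost")
    master_port = os.environ.get("MASTER_PORT", "17942")
    world_size = int(os.environ.get("WORLD_SIZE", "1"))
    rank = int(os.environ.get("RANK", "0"))
    print(f"PID of this process = {os.getpid()}")

    # With WORLD_SIZE=1 this is effectively single-process training, but
    # initializing the process group keeps one code path for 1..N GPUs.
    dist.init_process_group(
        backend="nccl",
        init_method=f"tcp://{master_addr}:{master_port}",
        world_size=world_size,
        rank=rank,
    )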
|
------ ARGS ------- |
|
Namespace(model_suffix='beta', hcp_flat_path='/weka/proj-medarc/shared/HCP-Flat', batch_size=128, wandb_log=True, num_epochs=20, lr_scheduler_type='cycle', save_ckpt=False, seed=42, max_lr=0.1, target='age', num_workers=15, weight_decay=1e-05) |
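
The flag names and values in the Namespace above imply an argument parser along these lines; this is a hypothetical reconstruction, with defaults copied from the printed values:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--model_suffix", type=str, default="beta")
    parser.add_argument("--hcp_flat_path", type=str, default="/weka/proj-medarc/shared/HCP-Flat")
    parser.add_argument("--batch_size", type=int, default=128)
    parser.add_argument("--wandb_log", action="store_true")
    parser.add_argument("--num_epochs", type=int, default=20)
    parser.add_argument("--lr_scheduler_type", type=str, default="cycle")
    parser.add_argument("--save_ckpt", action="store_true")
    parser.add_argument("--seed", type=int, default=42)
    parser.add_argument("--max_lr", type=float, default=0.1)
    parser.add_argument("--target", type=str, default="age")
    parser.add_argument("--num_workers", type=int, default=15)
    parser.add_argument("--weight_decay", type=float, default=1e-5)
    args = parser.parse_args()
    print(args)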
|
Input dimension: 737280 |
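
(737280 is consistent with a flattened clip of 16 frames over a 144 x 320 HCP-Flat map, since 16 * 144 * 320 = 737280; the frame count and map shape are an assumption about this run, not stated in the log.)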
|
total_steps 17400 |
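
total_steps is steps_per_epoch * num_epochs: 870 * 20 = 17400. Given lr_scheduler_type='cycle' and max_lr=0.1, a plausible scheduler setup is sketched below; the exact scheduler class and the optimizer/model here are assumptions, not from the log:

    import torch

    model = torch.nn.Linear(737280, 1)  # stand-in for the actual regression head
    optimizer = torch.optim.AdamW(model.parameters(), lr=0.1, weight_decay=1e-5)
    scheduler = torch.optim.lr_scheduler.OneCycleLR(
        optimizer,
        max_lr=0.1,         # args.max_lr
        total_steps=17400,  # 870 steps/epoch * 20 epochs
    )
    # scheduler.step() would then be called once per optimization step.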
|
wandb_config: |
|
{'model_name': 'HCPflat_raw_age', 'batch_size': 128, 'weight_decay': 1e-05, 'num_epochs': 20, 'seed': 42, 'lr_scheduler_type': 'cycle', 'save_ckpt': False, 'max_lr': 0.1, 'target': 'age', 'num_workers': 15} |
|
wandb_id: HCPflat_raw_beta_age_83810 |
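
A hypothetical logging setup consistent with the printed wandb_config and wandb_id; the project name is a placeholder, and the rest of the call is an assumption:

    import wandb

    wandb_config = {
        'model_name': 'HCPflat_raw_age', 'batch_size': 128, 'weight_decay': 1e-05,
        'num_epochs': 20, 'seed': 42, 'lr_scheduler_type': 'cycle',
        'save_ckpt': False, 'max_lr': 0.1, 'target': 'age', 'num_workers': 15,
    }
    wandb.init(
        project="hcp_flat_baselines",        # placeholder project name
        name=wandb_config['model_name'],
        id="HCPflat_raw_beta_age_83810",     # wandb_id from the log
        config=wandb_config,
        resume="allow",
    )
    # Per-step metrics such as the training loss/MSE below would then be
    # reported with wandb.log({...}).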
|
Step [100/870] - Training Loss: 924.1876 - Training MSE: 207942.1405 |
|
Step [200/870] - Training Loss: 2762.3567 - Training MSE: 206121.1501 |
|
Step [300/870] - Training Loss: 22823.8477 - Training MSE: 690589.0472 |
|
Step [400/870] - Training Loss: 99405.8828 - Training MSE: 1767819.3204 |
|
Step [500/870] - Training Loss: 214733.7188 - Training MSE: 4693776.8283 |
|
Step [600/870] - Training Loss: 263763.7500 - Training MSE: 8583782.2903 |
|
Step [700/870] - Training Loss: 397232.1875 - Training MSE: 13775790.2974 |
|
Step [800/870] - Training Loss: 665133.5000 - Training MSE: 21325931.7302 |
|
Epoch [1/20] - Training Loss: 229616.0744, Training MSE: 29363571.6190 - Validation Loss: 1573111.9296, Validation MSE: 200990315.8497 |
|
|