Update core/trainer.py
core/trainer.py  +3 -3
CHANGED
```diff
@@ -17,12 +17,12 @@ class GraphMambaTrainer:
         self.config = config
         self.device = device
 
-        #
+        # Optimized learning parameters
         self.lr = config['training']['learning_rate']
         self.epochs = config['training']['epochs']
-        self.patience = config['training'].get('patience',
+        self.patience = config['training'].get('patience', 20)
         self.min_lr = config['training'].get('min_lr', 1e-6)
-        self.max_gap = config['training'].get('max_gap', 0.4)
+        self.max_gap = config['training'].get('max_gap', 0.4)
         
 
         # Heavily regularized optimizer
         self.optimizer = optim.AdamW(
```
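For context, the constructor reads these values from a nested config dict: `learning_rate` and `epochs` are accessed directly and so are required, while `patience`, `min_lr`, and `max_gap` fall back to defaults via `.get()` (the removed lines' original defaults are cut off in the page capture; the visible effect of the commit is setting `patience` to 20 and `max_gap` to 0.4). A minimal config sketch that exercises this hunk follows; the trainer's full constructor signature is not shown in the diff, so anything beyond the `'training'` keys is an assumption.

```python
# Hypothetical minimal config for the GraphMambaTrainer __init__ shown in the hunk.
# 'learning_rate' and 'epochs' are read with [] (required keys);
# 'patience', 'min_lr', and 'max_gap' are optional and fall back to
# the defaults visible after this commit (20, 1e-6, 0.4).
config = {
    'training': {
        'learning_rate': 1e-3,
        'epochs': 100,
        # 'patience': 20,   # optional; .get() default after this commit
        # 'min_lr': 1e-6,   # optional; unchanged by this commit
        # 'max_gap': 0.4,   # optional; .get() default after this commit
    }
}
```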
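The training loop itself is outside this hunk, so how these knobs are consumed is not visible here. As a hedged sketch only: `patience` and `min_lr` are commonly wired into early stopping with a learning-rate floor, and a `max_gap` of 0.4 reads like a cap on the train/validation loss gap used as an overfitting guard. All names and logic below are assumptions for illustration, not the repository's code.

```python
import torch.optim as optim

# Hypothetical loop illustrating one common use of these settings
# (an assumption; not taken from core/trainer.py).
def fit(trainer, train_one_epoch, evaluate):
    best_val, epochs_since_best = float('inf'), 0
    # ReduceLROnPlateau's min_lr keeps the LR from decaying below the floor.
    scheduler = optim.lr_scheduler.ReduceLROnPlateau(
        trainer.optimizer, min_lr=trainer.min_lr)
    for epoch in range(trainer.epochs):
        train_loss = train_one_epoch(epoch)
        val_loss = evaluate(epoch)
        scheduler.step(val_loss)
        if val_loss < best_val:
            best_val, epochs_since_best = val_loss, 0
        else:
            epochs_since_best += 1
        # Early stopping: no validation improvement for `patience` epochs.
        if epochs_since_best >= trainer.patience:
            break
        # Overfitting guard: stop if the train/val gap exceeds max_gap.
        if val_loss - train_loss > trainer.max_gap:
            break
```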