Commit f23b6eb
Parent(s): 2a5d9cc

Refactor data loading and hyperparameter tuning for improved model performance

train.py CHANGED
@@ -17,7 +17,7 @@ from cfg import DROP_LIST
 
 # Rest of the imports and class definitions remain unchanged...
 
-def load_and_balance_data(filename, ratio=1/…):
+def load_and_balance_data(filename, ratio=1/60):
     """
     Loads data from a CSV file and balances classes to address potential imbalance.
 
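Note: the pre-change default ratio is truncated in this view, and the function body sits outside the hunk. A minimal sketch of what a load-and-balance helper with this signature might do, assuming pandas, a binary target, and a placeholder column name 'label':

    import pandas as pd

    def load_and_balance_data(filename, ratio=1/60):
        # Sketch only: read the CSV and downsample the majority class so the
        # minority:majority proportion is roughly `ratio`. The real target
        # column name and implementation are not shown in this diff.
        df = pd.read_csv(filename)
        minority = df[df['label'] == 1]   # 'label' is a hypothetical column name
        majority = df[df['label'] == 0]
        n_keep = min(len(majority), int(len(minority) / ratio))
        balanced = pd.concat([minority, majority.sample(n=n_keep, random_state=42)])
        return balanced.sample(frac=1, random_state=42)  # shuffle rows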
@@ -55,10 +55,10 @@ def create_stock_prediction_pipeline(params=None):
 
     # Use default parameters if none provided
     if params is None:
-        classifier = XGBClassifier(…)
+        classifier = XGBClassifier(n_jobs=-1)
     else:
         classifier = XGBClassifier(
-            booster="dart",
+            # booster="dart",
             n_jobs=-1,
             **params
         )
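This change disables the "dart" booster in both branches: the default classifier is now a plain XGBClassifier(n_jobs=-1), and the tuned branch keeps the booster line only as a comment. DART trains with dropout and is typically slower to fit and predict than the default gbtree, which makes dropping it a plausible speed/accuracy trade-off. In context, the function presumably reads something like this (a sketch; any surrounding pipeline steps are not shown in the diff):

    from xgboost import XGBClassifier

    def create_stock_prediction_pipeline(params=None):
        # Use default parameters if none provided
        if params is None:
            classifier = XGBClassifier(n_jobs=-1)
        else:
            classifier = XGBClassifier(
                # booster="dart",  # disabled in this commit
                n_jobs=-1,
                **params,
            )
        return classifier  # assumption: the classifier is returned directly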
@@ -74,9 +74,10 @@ def objective(trial, X, y):
 
     # Define the hyperparameters to tune
     params = {
-        '…
-        '…
-        '…
+        # 'booster': trial.suggest_categorical('booster', ['gbtree', 'dart']),
+        'learning_rate': trial.suggest_float('learning_rate', 0.001, 0.3, log=True),
+        'max_depth': trial.suggest_int('max_depth', 3, 140),
+        'n_estimators': trial.suggest_int('n_estimators', 50, 3000),
         'subsample': trial.suggest_float('subsample', 0.5, 1.0),
         'colsample_bytree': trial.suggest_float('colsample_bytree', 0.5, 1.0),
         'gamma': trial.suggest_float('gamma', 0, 5),
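With the booster choice commented out, the search space now covers learning_rate (log-uniform), max_depth, and n_estimators alongside the sampling and regularization terms already present. A self-contained sketch of an objective assembled this way, assuming cross-validated F1 as the score (the create_study comment below names F1 as the metric to maximize; the CV setup here is an assumption):

    from sklearn.model_selection import cross_val_score
    from xgboost import XGBClassifier

    def objective(trial, X, y):
        # Search space as in this commit
        params = {
            'learning_rate': trial.suggest_float('learning_rate', 0.001, 0.3, log=True),
            'max_depth': trial.suggest_int('max_depth', 3, 140),
            'n_estimators': trial.suggest_int('n_estimators', 50, 3000),
            'subsample': trial.suggest_float('subsample', 0.5, 1.0),
            'colsample_bytree': trial.suggest_float('colsample_bytree', 0.5, 1.0),
            'gamma': trial.suggest_float('gamma', 0, 5),
        }
        model = XGBClassifier(n_jobs=-1, **params)
        # Mean F1 across folds; 3-fold CV is an assumed placeholder
        return cross_val_score(model, X, y, cv=3, scoring='f1').mean()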
@@ -97,7 +98,7 @@ def objective(trial, X, y):
 def main():
     # Load and preprocess training data
     print("Loading and preprocessing training data...")
-    combined = load_and_balance_data('./cleaned_training.csv', ratio=1/…)
+    combined = load_and_balance_data('./cleaned_training.csv', ratio=1/90)
 
     # Define features and target
     X = combined.drop(DROP_LIST)
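Note that the call site now passes ratio=1/90 while the function's new default is 1/60, so main() deliberately overrides the default. A quick sanity check of the resulting class mix (assuming a pandas DataFrame and the placeholder 'label' column from the sketch above):

    combined = load_and_balance_data('./cleaned_training.csv', ratio=1/90)
    print(combined['label'].value_counts(normalize=True))  # 'label' is a placeholder name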
@@ -109,7 +110,7 @@ def main():
     # Hyperparameter tuning with Optuna
     print("Starting hyperparameter optimization with Optuna...")
     study = optuna.create_study(direction='maximize')  # We want to maximize the F1 score
-    study.optimize(lambda trial: objective(trial, X_train, y_train), n_trials=…)
+    study.optimize(lambda trial: objective(trial, X_train, y_train), n_trials=100, show_progress_bar=True)
 
     # Print the best parameters
     best_params = study.best_params
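The old trial count is truncated in this view; the new call runs 100 trials with Optuna's progress bar enabled. A typical way to consume the result after optimization finishes, refitting through the same factory (a sketch; the remainder of main() is not shown in this diff):

    best_params = study.best_params
    print(f"Best F1 across trials: {study.best_value:.4f}")
    print(f"Best parameters: {best_params}")

    model = create_stock_prediction_pipeline(params=best_params)
    model.fit(X_train, y_train)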