
Commit

fix: black formatting
kbiniek committed May 5, 2024
1 parent 824c7e6 commit 1ad5626
Showing 5 changed files with 952 additions and 560 deletions.
nmrcraft/models/model_configs.py (16 changes: 8 additions & 8 deletions)
@@ -4,30 +4,30 @@
     "random_forest": {
         "model_params": {"random_state": 42},
         "hyperparameters": {
-            "n_estimators": hp.choice("n_estimators", range(10, 1000, 10)),
+            "n_estimators": hp.choice("n_estimators", range(10, 1000, 10)),
             "criterion": hp.choice("criterion", ["gini", "entropy"]),
             # "max_depth": hp.choice("max_depth", range(10, 1200, 10)),
             "min_samples_split": hp.uniform("min_samples_split", 0.01, 1.0),
             "min_samples_leaf": hp.uniform("min_samples_leaf", 0.01, 0.5),
             "max_features": hp.choice("max_features", ["sqrt", "log2", None]),
         },
     },
-
     "gradient_boosting": {
         "model_params": {"random_state": 42},
         "hyperparameters": {
             "loss": hp.choice("loss", ["log_loss", "exponential"]),
             "learning_rate": hp.uniform("learning_rate", 0.01, 0.5),
             "n_estimators": hp.choice("n_estimators", range(10, 1000, 10)),
-            # "subsample": hp.uniform("subsample", 0.01, 1.0),
-            "criterion": hp.choice("criterion", ["friedman_mse", "squared_error"]),
+            # "subsample": hp.uniform("subsample", 0.01, 1.0),
+            "criterion": hp.choice(
+                "criterion", ["friedman_mse", "squared_error"]
+            ),
             # "max_depth": hp.choice("max_depth", range(10, 1200, 10)),
             "min_samples_split": hp.uniform("min_samples_split", 0.01, 1.0),
             "min_samples_leaf": hp.uniform("min_samples_leaf", 0.01, 0.5),
             "max_features": hp.choice("max_features", ["sqrt", "log2", None]),
         },
     },
-
     "logistic_regression": {
         "model_params": {"random_state": 42},
         "hyperparameters": {
@@ -44,19 +44,19 @@
             "l1_ratio": hp.uniform("l1_ratio", 0.01, 1.0),
         },
     },
-
     "svc": {
         "model_params": {"random_state": 42},
         "hyperparameters": {
             "C": hp.uniform("C", 0.01, 10.0),
-            "kernel": hp.choice("kernel", ["linear", "poly", "rbf", "sigmoid"]),
+            "kernel": hp.choice(
+                "kernel", ["linear", "poly", "rbf", "sigmoid"]
+            ),
             "degree": hp.choice("degree", range(1, 10)),
             "gamma": hp.choice("gamma", ["scale", "auto"]),
             "coef0": hp.uniform("coef0", 0.0, 1.0),
             "shrinking": hp.choice("shrinking", [True, False]),
             "probability": hp.choice("probability", [True, False]),
             # "max_iter": hp.choice("max_iter", range(100, 1000, 100)),
-
         },
     },
 }
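
For readers skimming the diff: each entry in this file defines a hyperopt search space, where hp.choice selects from a discrete set and hp.uniform draws a float from a range; the commented-out keys are parameters currently excluded from tuning. Below is a minimal sketch of drawing one concrete candidate from such a space, using a trimmed copy of the "svc" entry above (the standalone svc_space dict is illustrative, not part of the repository):

from hyperopt import hp
from hyperopt.pyll.stochastic import sample

# Illustrative, trimmed copy of the "svc" search space defined above.
svc_space = {
    "C": hp.uniform("C", 0.01, 10.0),
    "kernel": hp.choice("kernel", ["linear", "poly", "rbf", "sigmoid"]),
    "gamma": hp.choice("gamma", ["scale", "auto"]),
}

# Draw one concrete candidate, e.g. {'C': 3.7, 'gamma': 'scale', 'kernel': 'rbf'}.
print(sample(svc_space))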
nmrcraft/training/hyperparameter_tune.py (3 changes: 2 additions & 1 deletion)
@@ -1,5 +1,6 @@
 import numpy as np
 from hyperopt import STATUS_OK, Trials, fmin, space_eval, tpe
+
 # from sklearn.metrics import accuracy_score
 from sklearn.model_selection import cross_val_score

@@ -51,7 +52,7 @@ def _objective(
         model.fit(X_train, y_train)
         # y_pred = model.predict(X_test)
         # score = accuracy_score(y_test, y_pred)
-        score = cross_val_score(model, X_train, y_train, cv = 5).mean()
+        score = cross_val_score(model, X_train, y_train, cv=5).mean()
         return {"loss": -score, "status": STATUS_OK}

     def tune(self, X_train, y_train, X_test, y_test) -> tuple:
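
As the hunk shows, _objective scores each candidate with 5-fold cross-validation and returns the negated mean score, since hyperopt minimizes its loss. Below is a minimal sketch of the fmin/Trials loop an objective like this typically plugs into, assuming a search space dict like those in model_configs.py (the standalone tune_space function and the RandomForestClassifier wiring are illustrative, not the repository's actual tuner class):

from hyperopt import STATUS_OK, Trials, fmin, space_eval, tpe
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score


def tune_space(space, X_train, y_train, max_evals=50):
    def objective(params):
        # Illustrative model construction; the repository builds its model elsewhere.
        model = RandomForestClassifier(random_state=42, **params)
        score = cross_val_score(model, X_train, y_train, cv=5).mean()
        return {"loss": -score, "status": STATUS_OK}

    trials = Trials()
    best = fmin(
        fn=objective,
        space=space,
        algo=tpe.suggest,
        max_evals=max_evals,
        trials=trials,
    )
    # fmin returns indices for hp.choice parameters; space_eval maps them
    # back to the actual values before a final model is refit with them.
    return space_eval(space, best), trials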

0 comments on commit 1ad5626
