patch metrics
parent 4b4e211569
commit 274ef679bd
trainer.py (55 lines changed)
@@ -292,22 +292,30 @@ class BinaryTuner:
         # make a numpy array from y_pred where all the values > 0.5 become 1 and all remaining values are 0
         y_pred = np.where(y_pred > 0.5, 1, 0)

+        brier = None
+        if hasattr(optimized_model, "predict_proba"):
+            try:
+                y_prob = optimized_model.predict_proba(X_test)[:, 1]
+                brier = brier_score_loss(y_test, y_prob)
+            except Exception as e:
+                pass
+
         accuracy = accuracy_score(y_test, y_pred)
         recall = recall_score(y_test, y_pred)
         f1 = f1_score(y_test, y_pred)
         roc_auc = roc_auc_score(y_test, y_pred)
         tn, fp, fn, tp = confusion_matrix(y_test, y_pred).ravel()
-        self.logger.info(confusion_matrix(y_test, y_pred))

+        sensitivity = tp / (tp + fn)
         specificity = tn / (tn + fp)
-        self.logger.info(f"True Negativ : {tn}")
-        self.logger.info(f"True Positive : {tp}")
-        self.logger.info(f"False Negative : {fn}")
-        self.logger.info(f"False Positive : {fp}")
-        self.logger.info(f"Returned model val_auc : {roc_auc}")
+        npv = tn / (tn + fn)
+        ppv = tp / (tp + fp)

         self.trained += 1
         self.bar.update()
-        return accuracy, specificity, recall, f1, roc_auc, optimized_model, model_params
+        return roc_auc, f1, accuracy, recall, sensitivity, specificity, npv, ppv, brier, optimized_model, model_params

     def get_model_train(self):
         return [
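Both metric blocks this commit adds follow the same pattern, so a standalone sketch may help review. Everything below is illustrative: model, X_test, and y_test are placeholders, not objects from trainer.py.

# Hedged sketch of the derived metrics; the four ratios read straight
# off the binary confusion matrix.
from sklearn.metrics import brier_score_loss, confusion_matrix

def derived_metrics(y_test, y_pred):
    tn, fp, fn, tp = confusion_matrix(y_test, y_pred).ravel()
    sensitivity = tp / (tp + fn)   # true positive rate (equals recall)
    specificity = tn / (tn + fp)   # true negative rate
    npv = tn / (tn + fn)           # negative predictive value
    ppv = tp / (tp + fp)           # positive predictive value (precision)
    return sensitivity, specificity, npv, ppv

def brier_or_none(model, X_test, y_test):
    # Mirrors the guarded block in the diff: estimators without
    # predict_proba, or ones that fail at predict time, yield None.
    if hasattr(model, "predict_proba"):
        try:
            y_prob = model.predict_proba(X_test)[:, 1]
            return brier_score_loss(y_test, y_prob)
        except Exception:
            return None
    return None

Two small caveats in the committed version: the bound exception name e is never used, and when a class is absent from the predictions a denominator such as tp + fp becomes zero, so the numpy division emits only a RuntimeWarning and the ledger can silently record inf or nan.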
@@ -402,16 +410,31 @@ class BinaryTuner:
         if type_of_target(y_pred) == "continuous":
             y_pred = np.where(y_pred > 0.5, 1, 0)

+        brier = None
+        if hasattr(optimized_model, "predict_proba"):
+            try:
+                y_prob = optimized_model.predict_proba(X_test)[:, 1]
+                brier = brier_score_loss(y_test, y_prob)
+            except Exception as e:
+                pass
+
         accuracy = accuracy_score(y_test, y_pred)
         recall = recall_score(y_test, y_pred)
         f1 = f1_score(y_test, y_pred)
         roc_auc = roc_auc_score(y_test, y_pred)
         tn, fp, fn, tp = confusion_matrix(y_test, y_pred).ravel()

+        sensitivity = tp / (tp + fn)
         specificity = tn / (tn + fp)

+        npv = tn / (tn + fn)
+        ppv = tp / (tp + fp)

         self.trained += 1
         self.bar.update()
-        return accuracy, specificity, recall, f1, roc_auc, optimized_model, json.dumps(rsh.best_params_)
+        return roc_auc, f1, accuracy, recall, sensitivity, specificity, npv, ppv, brier, optimized_model, json.dumps(rsh.best_params_)

     def run_dataset(self, label, X_train, X_test, y_train, y_test, seed, sublabel=None):
         node = os.uname()[1]
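The Keras path first hardens the network's continuous scores into 0/1 labels before the label-based metrics run; a minimal illustration of that step, with a made-up y_pred:

import numpy as np
from sklearn.utils.multiclass import type_of_target

y_pred = np.array([0.12, 0.58, 0.97, 0.49])  # raw sigmoid outputs
if type_of_target(y_pred) == "continuous":   # float scores, not labels
    y_pred = np.where(y_pred > 0.5, 1, 0)    # hard labels at the 0.5 cut
print(y_pred)                                # [0 1 1 0]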
@@ -433,7 +456,7 @@ class BinaryTuner:
             self.trained -= 1
             self.ledger.drop(((self.ledger['Dataset']==model_label) & (self.ledger['Model']==type(model).__name__) & (self.ledger['Seed'] == seed)).index)

-        accuracy, specificity, recall, f1, roc_auc, optimized_model, parms = self.train_and_score_model(model, X_train, X_test, y_train, y_test, seed)
+        roc_auc, f1, accuracy, recall, sensitivity, specificity, npv, ppv, brier, optimized_model, parms = self.train_and_score_model(model, X_train, X_test, y_train, y_test, seed)
         ts = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
         joblib.dump(optimized_model, model_file)
         #[, , "Parms", "Seed", "Ratio", , , , "F1", , "ts", "node"]
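The call sites now unpack an 11-item tuple whose order also differs from the old 7-item one, so every caller has to change in lockstep. A hypothetical alternative, not part of this commit, would be a NamedTuple that makes the contract explicit:

from typing import Any, NamedTuple, Optional

class Scores(NamedTuple):
    roc_auc: float
    f1: float
    accuracy: float
    recall: float
    sensitivity: float
    specificity: float
    npv: float
    ppv: float
    brier: Optional[float]   # None when predict_proba is unavailable
    model: Any
    params: Any

# the scoring methods could then end with, e.g.:
# return Scores(roc_auc, f1, accuracy, recall, sensitivity,
#               specificity, npv, ppv, brier, optimized_model, model_params)

Callers would then read scores.brier by name instead of counting tuple positions, and a future reordering of fields could not silently misassign metrics.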
@@ -445,8 +468,12 @@ class BinaryTuner:
             "Seed": seed,
             "Ratio": self.ratio,
             "Accuracy": accuracy,
-            "Specificity": specificity,
             "Recall": recall,
+            "Sensitivity": sensitivity,
+            "Specificity": specificity,
+            "NPV": npv,
+            "PPV": ppv,
+            "Brier": brier,
             "F1": f1,
             "ROC_AUC": roc_auc,
         }] )
@@ -470,7 +497,7 @@ class BinaryTuner:
             self.trained -= 1
             self.ledger.drop(((self.ledger['Dataset']==model_label) & (self.ledger['Model']=='DNN') & (self.ledger['Seed'] == seed)).index)

-        accuracy, specificity, recall, f1, roc_auc, optimized_model, parms = self.train_and_score_model_keras(X_train, X_test, y_train, y_test, seed, model_label)
+        roc_auc, f1, accuracy, recall, sensitivity, specificity, npv, ppv, brier, optimized_model, parms = self.train_and_score_model_keras(X_train, X_test, y_train, y_test, seed, model_label)
         ts = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
         # joblib.dump(optimized_model, model_file)
         #[, , "Parms", "Seed", "Ratio", , , , "F1", , "ts", "node"]
@@ -482,8 +509,12 @@ class BinaryTuner:
             "Seed": seed,
             "Ratio": self.ratio,
             "Accuracy": accuracy,
-            "Specificity": specificity,
             "Recall": recall,
+            "Sensitivity": sensitivity,
+            "Specificity": specificity,
+            "NPV": npv,
+            "PPV": ppv,
+            "Brier": brier,
             "F1": f1,
             "ROC_AUC": roc_auc
         }] )
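For reference, a minimal sketch of the widened ledger row, assuming the ledger is a pandas DataFrame appended to via pd.concat as the surrounding "}] )" closer suggests; all metric values are placeholders:

import pandas as pd

ledger = pd.DataFrame()
row = {
    "Seed": 42, "Ratio": 1.0, "Accuracy": 0.91, "Recall": 0.88,
    "Sensitivity": 0.88, "Specificity": 0.93, "NPV": 0.90, "PPV": 0.92,
    "Brier": 0.07, "F1": 0.90, "ROC_AUC": 0.94,
}
ledger = pd.concat([ledger, pd.DataFrame([row])], ignore_index=True)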