Skip to content

Commit

Permalink
Fix make_metrics test
Browse files Browse the repository at this point in the history
  • Loading branch information
maurever committed Aug 31, 2023
1 parent 102cb1b commit 8fdd896
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 4 deletions.
2 changes: 1 addition & 1 deletion h2o-core/src/main/java/hex/ModelMetricsBinomialUplift.java
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ static public ModelMetricsBinomialUplift make(Vec predictedProbs, Vec actualLabe
fr.add("labels", labels);
fr.add("treatment", treatment);
MetricBuilderBinomialUplift mb;
if(customAuucThresholds == null) {
if (customAuucThresholds == null) {
mb = new UpliftBinomialMetrics(labels.domain(), AUUC.calculateQuantileThresholds(auucNbins, predictedProbs)).doAll(fr)._mb;
} else {
mb = new UpliftBinomialMetrics(labels.domain(), customAuucThresholds).doAll(fr)._mb;
Expand Down
19 changes: 16 additions & 3 deletions h2o-py/tests/testdir_misc/pyunit_make_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,6 +223,9 @@ def pyunit_make_metrics_uplift():
custom_auuc_thresholds=m1.thresholds())
m4 = h2o.make_metrics(predicted, actual, treatment=treatment, auuc_type="AUTO", auuc_nbins=nbins,
custom_auuc_thresholds=model.default_auuc_thresholds())
new_nbins = nbins - 10
m5 = h2o.make_metrics(predicted, actual, treatment=treatment, auuc_type="AUTO", auuc_nbins=new_nbins)
m6 = model.model_performance(test_data=test, auuc_type="AUTO", auuc_nbins=new_nbins)

print("Model AUUC: {}".format(model.auuc()))
print("thresholds: {}".format(model.default_auuc_thresholds()))
Expand All @@ -236,15 +239,25 @@ def pyunit_make_metrics_uplift():
print("thresholds: {}".format(m3.thresholds()))
print("Make AUUC with custom thresholds from model defaults: {}".format(m4.auuc()))
print("thresholds: {}".format(m4.thresholds()))
print("Make AUUC with no custom thresholds but change nbins parameter: {}".format(m5.auuc()))
print("thresholds: {}".format(m5.thresholds()))
print("Performance AUUC with no custom thresholds but change nbins parameter: {}".format(m6.auuc()))
print("thresholds: {}".format(m6.thresholds()))

# default model auuc is calculated from train data, default thresholds are from validation data
assert abs(model.auuc() - m0.auuc()) > 1e-5
# model performance uses default auuc thresholds
# model performance calculates new thresholds, but from the same data with the same number of bins, so the AUUCs are the same
assert abs(m0.auuc() - m1.auuc()) < 1e-5
# the make method without custom thresholds calculates its own thresholds, which can differ from the default ones
assert abs(m1.auuc() - m2.auuc()) > 1e-5
# the make method calculates new thresholds, but from the same data with the same number of bins, so the AUUCs are the same
assert abs(m1.auuc() - m2.auuc()) < 1e-5
# if we use thresholds from performance metric and use it as custom, it makes the same metrics
assert abs(m1.auuc() - m3.auuc()) < 1e-5
# the make method with a different nbins parameter changes the thresholds and the AUUC
assert abs(m3.auuc() - m5.auuc()) > 1e-5
# the performance method with a different nbins parameter changes the thresholds and the AUUC
assert abs(m3.auuc() - m6.auuc()) > 1e-5
# the make and performance methods with the same nbins parameter and the same data calculate the same thresholds
assert abs(m3.auuc() - m5.auuc()) < 1e-5

print("===========================")

Expand Down

0 comments on commit 8fdd896

Please sign in to comment.