Remove predict option
pobonomo committed Nov 12, 2024
1 parent 2270910 commit 363b38d
Showing 5 changed files with 33 additions and 38 deletions.
4 changes: 2 additions & 2 deletions docs/examples/example2_student_admission.py
@@ -189,7 +189,7 @@
 # model.
 #
 pred_constr = add_predictor_constr(
-    m, pipe, students_opt_data, predict_function="predict_proba"
+    m, pipe, students_opt_data, predict_function="decision_function"
 )
 pred_constr.print_stats()
 m.update()
@@ -204,7 +204,7 @@
 # model we are only interested in the probability of class 1, i.e.
 # the column of index 1. We store this as a pandas series

-y = pd.Series(data=pred_constr.output[:, 1].tolist(), index=x.index)
+y = pd.Series(data=pred_constr.output[:, 0].tolist(), index=x.index)

 y

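Note on the example change above: for a two-class scikit-learn LogisticRegression, decision_function returns the linear score whose sigmoid is exactly the class-1 probability, so optimizing the score is equivalent to optimizing predict_proba[:, 1] while letting the formulation skip the logistic approximation. A small standalone check of that relationship (illustrative data only, not part of the commit):

import numpy as np
from sklearn.linear_model import LogisticRegression

# Illustrative data; the example trains a pipeline on admission data instead.
rng = np.random.default_rng(0)
X = rng.normal(size=(100, 3))
y = (X @ np.array([1.0, -2.0, 0.5]) > 0).astype(int)

clf = LogisticRegression().fit(X, y)

scores = clf.decision_function(X)   # linear scores w.x + b
proba = clf.predict_proba(X)[:, 1]  # sigmoid of the same scores

# Maximizing the score therefore maximizes the class-1 probability.
assert np.allclose(proba, 1.0 / (1.0 + np.exp(-scores)))
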
8 changes: 7 additions & 1 deletion src/gurobi_ml/modeling/softmax.py
@@ -100,14 +100,20 @@ def _addGenConstrIndicatorMvarV10(binvar, binval, lhs, sense, rhs, name):
 def logistic(predictor_model: AbstractPredictorConstr, linear_predictor: gp.MVar):
     log_result = predictor_model.output[:, 1]

+    print(linear_predictor.shape)
+    print(log_result.shape)
     if _HAS_NL_EXPR:
         predictor_model.gp_model.addConstr(
             log_result == nlfunc.logistic(linear_predictor[:, 0])
         )
     else:
+        linear_predictor_vars = predictor_model.gp_model.addMVar(
+            log_result.shape[0], lb=-gp.GRB.INFINITY, name="linear_predictor"
+        )
+        predictor_model.gp_model.addConstr(linear_predictor_vars == linear_predictor)
         for index in np.ndindex(log_result.shape):
             predictor_model.gp_model.addGenConstrLogistic(
-                linear_predictor[index],
+                linear_predictor_vars[index],
                 log_result[index],
                 name=predictor_model._indexed_name(index, "logistic"),
             )
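For context on the fallback branch above: when gurobipy's nonlinear expressions (nlfunc) are unavailable, the logistic relation is enforced with addGenConstrLogistic, which takes plain variables rather than expressions, hence the auxiliary linear_predictor_vars pinned to the linear-predictor expression. A minimal standalone sketch of that mechanism (variable names and parameter settings are illustrative, not gurobi-ml code):

import gurobipy as gp

with gp.Model() as model:
    # x plays the role of one entry of linear_predictor_vars, y of log_result.
    x = model.addVar(lb=-gp.GRB.INFINITY, name="linear_predictor")
    y = model.addVar(lb=0.0, ub=1.0, name="probability")
    # Enforces y == 1 / (1 + exp(-x)), approximated piecewise-linearly by Gurobi.
    model.addGenConstrLogistic(x, y, name="logistic")
    # Optional: bound the absolute error of the piecewise-linear approximation.
    model.params.FuncPieces = -1
    model.params.FuncPieceError = 1e-5
    model.optimize()
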
51 changes: 20 additions & 31 deletions src/gurobi_ml/sklearn/logistic_regression.py
@@ -22,7 +22,8 @@

 import gurobipy as gp

-from ..modeling.softmax import hardmax, logistic, max2, softmax
+from ..modeling.base_predictor_constr import AbstractPredictorConstr
+from ..modeling.softmax import logistic, softmax

 try:
     pass
@@ -37,7 +38,6 @@
     _HAS_NL_EXPR = False

 from ..exceptions import ParameterError
-from .base_regressions import BaseSKlearnRegressionConstr
 from .skgetter import SKClassifier


@@ -46,7 +46,7 @@ def add_logistic_regression_constr(
     logistic_regression,
     input_vars,
     output_vars=None,
-    predict_function="predict",
+    predict_function="predict_proba",
     epsilon=0.0,
     pwl_attributes=None,
     **kwargs,
@@ -146,7 +146,7 @@ def add_logistic_regression_constr(
 )


-class LogisticRegressionConstr(SKClassifier, BaseSKlearnRegressionConstr):
+class LogisticRegressionConstr(SKClassifier, AbstractPredictorConstr):
     """Class to formulate a trained
     :external+sklearn:py:class:`sklearn.linear_model.LogisticRegression` in a gurobipy model.
@@ -159,21 +159,21 @@ def __init__(
         predictor,
         input_vars,
         output_vars=None,
-        predict_function="predict",
+        predict_function="predict_proba",
         epsilon=0.0,
         pwl_attributes=None,
         **kwargs,
     ):
-        if predict_function not in ("predict", "predict_proba", "decision_function"):
+        if predict_function not in ("predict_proba", "decision_function"):
             raise ParameterError(
                 "predict_function should be either 'predict' or 'predict_proba'"
             )
-        if predict_function == "predict" and pwl_attributes is not None:
+        if predict_function != "predict_proba" and pwl_attributes is not None:
             message = """
 pwl_attributes are not required for classification. The problem is
 formulated without requiring the non-linear logistic function."""
             warnings.warn(message)
-        elif predict_function != "predict":
+        elif predict_function == "predict_proba":
             self.attributes = (
                 self.default_pwl_attributes()
                 if pwl_attributes is None
@@ -184,10 +184,11 @@ def __init__(
         self._default_name = "log_reg"
         self.linear_predictor = None
         SKClassifier.__init__(self, predictor, input_vars, predict_function)
-        BaseSKlearnRegressionConstr.__init__(
+        if self._output_shape == 2 and predict_function == "decision_function":
+            self._output_shape = 1
+        AbstractPredictorConstr.__init__(
             self,
             gp_model,
-            predictor,
             input_vars,
             output_vars,
             **kwargs,
@@ -223,41 +224,29 @@ def default_pwl_attributes() -> dict:

     def _two_classes_model(self, **kwargs):
         """Add the prediction constraints to Gurobi."""
-        m, _ = self.output.shape
-
-        linear_predictor = self.gp_model.addMVar(
-            (m, 1), lb=-gp.GRB.INFINITY, name="linear_predictor"
-        )
-        self._add_regression_constr(output=linear_predictor)
+        coefs = self.predictor.coef_
+        intercept = self.predictor.intercept_

-        self.gp_model.addConstr(self.output.sum(axis=1) == 1)
+        linreg = self.input @ coefs.T + intercept

-        self.linear_predictor = linear_predictor
+        self.linear_predictor = linreg

-        if self.predict_function == "predict":
-            max2(self, linear_predictor, self.epsilon)
+        if self.predict_function == "predict_proba":
+            self.gp_model.addConstr(self.output.sum(axis=1) == 1)
+            logistic(self, linreg)
         else:
-            logistic(self, linear_predictor)
+            self.gp_model.addConstr(self.output[:, 0] == linreg[:, 0])

         self.gp_model.update()

-    @property
-    def linear_predictor_variables(self) -> gp.MVar:
-        """Variables that store the result of the linear_predictor
-        (i.e. before applying the logistic function).
-        """
-        return self.linear_predictor
-
     def _multi_class_model(self, **kwargs):
         """Add the prediction constraints to Gurobi."""
         coefs = self.predictor.coef_
         intercept = self.predictor.intercept_

         linreg = self.input @ coefs.T + intercept

-        if self.predict_function == "predict":
-            hardmax(self, linreg, **kwargs)
-        elif self.predict_function == "predict_proba":
+        if self.predict_function == "predict_proba":
             softmax(self, linreg, **kwargs)
         else:
             self.gp_model.addConstr(self.output == linreg)
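Net effect of the changes above for the two-class case: the linear predictor is now built directly as an affine expression of the input MVar, and the output is either a pair of probabilities (predict_proba, columns summing to one with column 1 tied to the logistic of the score) or the score itself (decision_function, single column). A rough sketch of those two output relationships with hypothetical fitted coefficients and Gurobi variables (not the class's actual code):

import gurobipy as gp
import numpy as np

# Hypothetical fitted two-class model: coef_ has shape (1, n_features).
coefs = np.array([[0.8, -1.2]])
intercept = np.array([0.3])

model = gp.Model()
x = model.addMVar((3, 2), lb=-10.0, ub=10.0, name="x")
linreg = x @ coefs.T + intercept  # shape (3, 1): the decision_function values

# predict_function="decision_function": single output column equal to the score.
out_dec = model.addMVar((3, 1), lb=-gp.GRB.INFINITY, name="decision")
model.addConstr(out_dec == linreg)

# predict_function="predict_proba": two columns summing to one; column 1 is then
# linked to logistic(linreg) via nlfunc or the piecewise-linear fallback.
out_proba = model.addMVar((3, 2), lb=0.0, ub=1.0, name="proba")
model.addConstr(out_proba.sum(axis=1) == 1)
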
2 changes: 0 additions & 2 deletions src/gurobi_ml/sklearn/pipeline.py
@@ -95,8 +95,6 @@ def _build_submodel(self, gp_model, *args, **kwargs):
         self._mip_model(**kwargs)
         assert self.output is not None
         assert self.input is not None
-        # We can call validate only after the model is created
-        self._validate()
         return self

     def _mip_model(self, **kwargs):
6 changes: 4 additions & 2 deletions tests/test_sklearn/test_sklearn_formulations.py
@@ -115,8 +115,10 @@ def test_iris_clf(self):

         for regressor in cases:
             onecase = cases.get_case(regressor)
-            self.do_one_case(onecase, X, 5, "all", predict_function="predict")
-            self.do_one_case(onecase, X, 6, "pairs", predict_function="predict")
+            self.do_one_case(onecase, X, 5, "all", predict_function="decision_function")
+            self.do_one_case(
+                onecase, X, 6, "pairs", predict_function="decision_function"
+            )

     def test_iris_pwl_args(self):
         data = datasets.load_iris()
