From ec0482954b0fcd987157e0c74f7bce868914f28e Mon Sep 17 00:00:00 2001
From: ST John
Date: Mon, 9 Dec 2019 12:16:10 +0000
Subject: [PATCH 1/2] turn template.RegressionModel and
 template.ClassificationModel into abstract base classes

---
 bayesian_benchmarks/models/template.py | 63 +++++++++++++++-----------
 1 file changed, 36 insertions(+), 27 deletions(-)

diff --git a/bayesian_benchmarks/models/template.py b/bayesian_benchmarks/models/template.py
index 1d43f004..4db0426d 100644
--- a/bayesian_benchmarks/models/template.py
+++ b/bayesian_benchmarks/models/template.py
@@ -19,68 +19,77 @@
 """
 
 import numpy as np
+from abc import ABC, abstractmethod
+from typing import Tuple
 
-class RegressionModel:
-    def __init__(self, is_test=False, seed=0):
+class RegressionModel(ABC):
+    @abstractmethod
+    def __init__(self, is_test: bool = False, seed: int = 0) -> None:
         """
-        If is_test is True your model should train and predict in a few seconds (i.e. suitable for travis)
+        :param is_test: whether to run quickly for testing purposes
         """
         pass
 
-    def fit(self, X : np.ndarray, Y : np.ndarray):
+    @abstractmethod
+    def fit(self, X: np.ndarray, Y: np.ndarray) -> None:
         """
-        Train the model (and probably create the model, too, since there is no shape information on the __init__)
+        Train the model (and probably create the model, too, since there is no
+        shape information on the __init__)
 
-        :param X: numpy array, of shape N, Dx
-        :param Y: numpy array, of shape N, Dy
-        :return:
+        :param X: numpy array, of shape [N, Dx]
+        :param Y: numpy array, of shape [N, Dy]
         """
         pass
 
-    def predict(self, Xs : np.ndarray):
+    @abstractmethod
+    def predict(self, Xs: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
         """
         The predictive mean and variance
 
-        :param Xs: numpy array, of shape N, Dx
-        :return: mean, var, both of shape N, Dy
+        :param Xs: numpy array, of shape [N, Dx]
+        :return: mean, var, both of shape [N, Dy]
         """
-        raise NotImplementedError
+        pass
 
-    def sample(self, Xs : np.ndarray, S : int):
+    @abstractmethod
+    def sample(self, Xs: np.ndarray, S: int) -> np.ndarray:
         """
         Samples from the posterior
 
-        :param Xs: numpy array, of shape N, Dx
+        :param Xs: numpy array, of shape [N, Dx]
         :param S: number of samples
-        :return: numpy array, of shape (S, N, Dy)
+        :return: numpy array, of shape [S, N, Dy]
         """
-        raise NotImplementedError
+        pass
 
 
-class ClassificationModel:
-    def __init__(self, K, is_test=False, seed=0):
+class ClassificationModel(ABC):
+    @abstractmethod
+    def __init__(self, K: int, is_test: bool = False, seed: int = 0) -> None:
         """
         :param K: number of classes
         :param is_test: whether to run quickly for testing purposes
         """
 
-    def fit(self, X : np.ndarray, Y : np.ndarray):
+    @abstractmethod
+    def fit(self, X: np.ndarray, Y: np.ndarray) -> None:
         """
-        Train the model (and probably create the model, too, since there is no shape information on the __init__)
+        Train the model (and probably create the model, too, since there is no
+        shape information on the __init__)
 
         Note Y is not onehot, but is an int array of labels in {0, 1, ..., K-1}
 
-        :param X: numpy array, of shape N, Dx
-        :param Y: numpy array, of shape N, 1
-        :return:
+        :param X: numpy array, of shape [N, Dx]
+        :param Y: numpy array, of shape [N, 1]
         """
         pass
 
-    def predict(self, Xs : np.ndarray):
+    @abstractmethod
+    def predict(self, Xs: np.ndarray) -> np.ndarray:
         """
         The predictive probabilities
 
-        :param Xs: numpy array, of shape N, Dx
-        :return: p, of shape (N, K)
+        :param Xs: numpy array, of shape [N, Dx]
+        :return: p, of shape [N, K]
         """
-        raise NotImplementedError
+        pass
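Not part of the patch, but for orientation: with the templates now abstract base classes, a concrete model must subclass them and override every @abstractmethod before it can be instantiated. Below is a minimal sketch of what that might look like; the LinearRegressionBaseline name, the closed-form least-squares fit, and the constant-noise variance estimate are illustrative assumptions, not code from this repository.

import numpy as np
from typing import Tuple

from bayesian_benchmarks.models.template import RegressionModel


class LinearRegressionBaseline(RegressionModel):
    """Ordinary least squares with a constant Gaussian noise estimate (illustrative only)."""

    def __init__(self, is_test: bool = False, seed: int = 0) -> None:
        self._rng = np.random.RandomState(seed)
        self._weights = None    # set in fit(), shape [Dx + 1, Dy]
        self._noise_var = None  # residual variance, shape [1, Dy]

    def fit(self, X: np.ndarray, Y: np.ndarray) -> None:
        # Append a bias column and solve the least-squares problem in closed form.
        X_aug = np.concatenate([X, np.ones((X.shape[0], 1))], axis=1)
        self._weights, *_ = np.linalg.lstsq(X_aug, Y, rcond=None)
        residuals = Y - X_aug @ self._weights
        # Constant (homoscedastic) noise estimate; a real model would do better.
        self._noise_var = residuals.var(axis=0, keepdims=True) + 1e-6

    def predict(self, Xs: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        Xs_aug = np.concatenate([Xs, np.ones((Xs.shape[0], 1))], axis=1)
        mean = Xs_aug @ self._weights                     # [N, Dy]
        var = np.tile(self._noise_var, (Xs.shape[0], 1))  # [N, Dy]
        return mean, var

    def sample(self, Xs: np.ndarray, S: int) -> np.ndarray:
        # Draw S independent Gaussian samples around the predictive mean.
        mean, var = self.predict(Xs)
        eps = self._rng.randn(S, *mean.shape)
        return mean[None, :, :] + np.sqrt(var)[None, :, :] * eps  # [S, N, Dy]

Instantiating RegressionModel or ClassificationModel directly now raises TypeError, which is the point of the change: the interface is enforced rather than merely documented.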
From 874484741f0b78b08cdfce748178b01e00fe0df5 Mon Sep 17 00:00:00 2001
From: ST John
Date: Mon, 9 Dec 2019 12:17:01 +0000
Subject: [PATCH 2/2] linting

---
 bayesian_benchmarks/models/non_bayesian_models.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/bayesian_benchmarks/models/non_bayesian_models.py b/bayesian_benchmarks/models/non_bayesian_models.py
index ce102856..21e212ed 100644
--- a/bayesian_benchmarks/models/non_bayesian_models.py
+++ b/bayesian_benchmarks/models/non_bayesian_models.py
@@ -8,6 +8,7 @@
 from sklearn import ensemble
 from sklearn import neural_network
 
+
 def regression_model(model):
     class SKLWrapperRegression(object):
         def __init__(self, is_test=False, seed=0):
@@ -92,13 +93,13 @@ def non_bayesian_model(name, task):
         return regression_model(ensemble.GradientBoostingRegressor())
 
     elif name == 'gradient_boosting_machine' and task == 'classification':
-        return classification_model(ensemble.GradientBoostingClassifier()) # default is 100 estimators
+        return classification_model(ensemble.GradientBoostingClassifier())  # default is 100 estimators
 
     if name == 'adaboost' and task == 'regression':
         return regression_model(ensemble.AdaBoostRegressor())
 
     elif name == 'adaboost' and task == 'classification':
-        return classification_model(ensemble.AdaBoostClassifier()) # default is 100 estimators
+        return classification_model(ensemble.AdaBoostClassifier())  # default is 100 estimators
 
     if name == 'mlp' and task == 'regression':
         return regression_model(neural_network.MLPRegressor())
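Again not part of the patch: a quick usage sketch of the non_bayesian_model factory touched above, on toy data. It assumes the returned wrapper class follows the RegressionModel interface from template.py (constructor taking is_test/seed, predict returning mean and variance); the data and shapes here are illustrative only.

import numpy as np

from bayesian_benchmarks.models.non_bayesian_models import non_bayesian_model

rng = np.random.RandomState(0)
X = rng.randn(100, 2)                                        # [N, Dx]
Y = X @ np.array([[1.5], [-0.7]]) + 0.1 * rng.randn(100, 1)  # [N, Dy]

# The factory returns a wrapper class around the chosen sklearn estimator.
AdaBoostWrapper = non_bayesian_model('adaboost', 'regression')
model = AdaBoostWrapper(is_test=True, seed=0)
model.fit(X, Y)

# Per the RegressionModel interface, predict is expected to return mean and variance.
mean, var = model.predict(rng.randn(10, 2))
print(mean.shape, var.shape)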