From f8a0f898daf44484d1bb4377247a0a976d5237cd Mon Sep 17 00:00:00 2001
From: Martin Roelfs
Date: Sat, 8 Sep 2018 21:17:19 +0200
Subject: [PATCH] BasinHopping -> wraps scipy.optimize.basinhopping (#177)

* `parameters` can now set `value` etc. per parameter
* Added BasinHopping object
* Overwrite minimize method for the weird ones
* Added BasinHopping to the smart Fit-object
* Added tests for BasinHopping
* Added tests for bounds and constraints
* Kick up bounds to the local minimizer of BasinHopping
* Made the new `parameters` interface iterable-friendly.
* Updated docstrings and comments, in accordance with review
* cached_property object added, cache decorator removed. Tests have also been
  provided for this new decorator.
* Replaced @cache by @cached_property
* Fixed typo
* Use `method_name` upon .execute
* Updated docstrings.
* Improved cached_property tests. We check an internal counter to make sure
  the cached function is called only when no cache is present.
* Updated docs to include BasinHopping as a global minimizer
* Added BasinHopping section to the docs
* Fixed indentation error
* Processed review, should now be finished!
---
 docs/fitting_types.rst    |  96 ++++++++++++++++++++++++-----
 symfit/core/fit.py        | 104 ++++++++++++++++++------------
 symfit/core/minimizers.py | 125 +++++++++++++++++++++++++++++++++----
 symfit/core/objectives.py |  11 ++--
 symfit/core/support.py    |  92 +++++++++++++++++++++++-----
 tests/test_minimize.py    | 126 +++++++++++++++++++++++++++++++++++++-
 tests/test_support.py     |  73 +++++++++++++++++++++-
 7 files changed, 532 insertions(+), 95 deletions(-)

diff --git a/docs/fitting_types.rst b/docs/fitting_types.rst
index 4485cfe6..9c9d2809 100644
--- a/docs/fitting_types.rst
+++ b/docs/fitting_types.rst
@@ -143,6 +143,8 @@ Example::
 ``fit_result`` is a normal :class:`~symfit.core.fit_results.FitResults` object.
 As always, bounds on parameters and even constraints are supported.
 
+.. _minimize_maximize:
+
 Minimize/Maximize
 -----------------
 Minimize or Maximize a model subject to bounds and/or constraints. As an example
@@ -363,11 +365,12 @@ More common examples, such as dampened harmonic oscillators also work as expected.
 
 .. _global-fitting:
 
-Global Fitting
---------------
-In a global fitting problem, we fit to multiple datasets where one or more
-parameters might be shared. The same syntax used for ODE fitting makes this
-problem very easy to solve in :mod:`symfit`.
+Fitting multiple datasets
+-------------------------
+A common fitting problem is to fit to multiple datasets. This is sometimes
+referred to as global fitting. In such fits, parameters might be shared
+between the fits to the different datasets. The same syntax used for ODE
+fitting makes this problem very easy to solve in :mod:`symfit`.
 
 As a simple example, suppose we have two datasets measuring exponential decay,
 with the same background, but different amplitude and decay rate.
@@ -397,6 +400,10 @@ normal way::
    :width: 500px
    :alt: ODE integration
 
+Any ``Model`` that comes to mind is fair game. Behind the scenes :mod:`symfit`
+will build a least-squares function in which the squared residuals of all the
+components are summed, ready to be minimized. Unlike in the above example, the
+`x`-axis does not even have to be shared between the components.
 
 .. warning:: The regression coefficient is not properly defined for vector-valued models,
@@ -406,8 +413,8 @@
    Do not cite the overall :math:`R^2` given by :mod:`symfit`.
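+
+As a minimal sketch of such a shared-parameter fit (the variable names and
+data arrays below are purely illustrative)::
+
+    from symfit import variables, parameters, Model, Fit, exp
+
+    x_1, x_2, y_1, y_2 = variables('x_1, x_2, y_1, y_2')
+    y0, a_1, a_2, b_1, b_2 = parameters('y0, a_1, a_2, b_1, b_2')
+
+    # Two exponential decays which share the same background y0
+    model = Model({
+        y_1: y0 + a_1 * exp(- b_1 * x_1),
+        y_2: y0 + a_2 * exp(- b_2 * x_2),
+    })
+    # xdata1, ydata1, xdata2, ydata2 are the measured arrays
+    fit = Fit(model, x_1=xdata1, x_2=xdata2, y_1=ydata1, y_2=ydata2)
+    fit_result = fit.execute()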
 
-Finding the optimal solution
-----------------------------
+Global Minimization
+-------------------
 Very often, there are multiple solutions to a fitting (or minimisation) problem.
 These are local minima of the objective function. The best solution of course
 is the global minimum, but most minimization algorithms will only find a
@@ -416,11 +423,17 @@
 your parameters. This can be incredibly annoying if you have no further
 knowledge about your system.
 
 Luckily, global minimizers exist which are not influenced by the initial
-guesses for your parameters. In symfit the
+guesses for your parameters. In symfit, two such algorithms from :mod:`scipy`
+have been wrapped for this purpose. Firstly, the
 :func:`~scipy.optimize.differential_evolution` algorithm from :mod:`scipy` is
-wrapped as :class:`~symfit.core.minimizers.DifferentialEvolution`. To use it,
+wrapped as :class:`~symfit.core.minimizers.DifferentialEvolution`. Secondly,
+the :func:`~scipy.optimize.basinhopping` algorithm is available as
+:class:`~symfit.core.minimizers.BasinHopping`. To use these minimizers,
 just tell :class:`~symfit.core.fit.Fit`::
 
+    from symfit import Parameter, Variable, Model, Fit
+    from symfit.core.minimizers import DifferentialEvolution, BFGS
+
     x = Parameter('x')
     x.min, x.max = -100, 100
     x.value = -2.5
@@ -428,8 +441,9 @@ just tell :class:`~symfit.core.fit.Fit`::
     model = Model({y: x**4 - 10 * x**2 - x})  # Skewed Mexican hat
     fit = Fit(model, minimizer=DifferentialEvolution)
+    fit_result = fit.execute()
 
-However, due to how the algorithm works, it's not great at finding the exact
+However, due to how this algorithm works, it's not great at finding the exact
 minimum (but it will find it if given enough time). You can work around this
 by "chaining" minimizers: first run a global minimization to (hopefully) get close
@@ -437,14 +451,68 @@
 to your answer, and then polish it off using a local minimizer::
 
     fit = Fit(model, minimizer=[DifferentialEvolution, BFGS])
 
 .. note::
-   Differential evolution is rather sensitive to it's hyperparameters. You might
-   need to play with those to get appropriate results::
+   Global minimizers such as differential evolution and basin-hopping are
+   rather sensitive to their hyperparameters. You might need to play with
+   those to get appropriate results, e.g.::
 
       fit.execute(DifferentialEvolution={'popsize': 20, 'recombination': 0.9})
 
 .. note::
-   There is no way to garuantee that the minimum found is actually the global
-   minimum. Unfortunately there is no way around this.
+   There is no way to guarantee that the minimum found is actually the global
+   minimum. Unfortunately there is no way around this. Therefore, you should
+   always critically inspect the results.
+
+Constrained Basin-Hopping
+-------------------------
+Worthy of special mention is the ease with which constraints or bounds can be
+added to :class:`symfit.core.minimizers.BasinHopping` when used through the
+:class:`symfit.core.fit.Fit` interface. As a very simple example, we shall
+compare to an example from the :mod:`scipy` docs::
+
+    import numpy as np
+    from scipy.optimize import basinhopping
+
+    def func2d(x):
+        f = np.cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
+        df = np.zeros(2)
+        df[0] = -14.5 * np.sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
+        df[1] = 2. * x[1] + 0.2
+        return f, df
+
+    minimizer_kwargs = {"method":"L-BFGS-B", "jac":True}
+    x0 = [1.0, 1.0]
+    ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs, niter=200)
+
+Let's compare to the same functionality in :mod:`symfit`::
+
+    import numpy as np
+    from symfit.core.minimizers import BasinHopping
+    from symfit import parameters, Fit, Eq, cos
+
+    x0 = [1.0, 1.0]
+    x1, x2 = parameters('x1, x2', value=x0)
+
+    model = cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
+
+    fit = Fit(model, minimizer=BasinHopping)
+    fit_result = fit.execute(niter=200)
+
+No `minimizer_kwargs` have to be provided, as :mod:`symfit` will automatically
+compute and provide the jacobian and select a minimizer. In this case, `symfit`
+will choose `BFGS`. When bounds are provided, `symfit` will switch to
+using `L-BFGS-B` instead. Setting bounds is as simple as::
+
+    x1.min = 0.0
+    x1.max = 100.0
+
+However, the real strength of the `symfit` syntax lies in providing constraints::
+
+    constraints = [Eq(x1, x2)]
+    fit = Fit(model, minimizer=BasinHopping, constraints=constraints)
+
+This artificial example will make sure `x1 == x2` after fitting. If you have
+read the :ref:`minimize_maximize` section, you will know how much work this
+would be in pure :mod:`scipy`.
 
 Advanced usage
 --------------
diff --git a/symfit/core/fit.py b/symfit/core/fit.py
index b6658e0f..4862bcb0 100644
--- a/symfit/core/fit.py
+++ b/symfit/core/fit.py
@@ -11,12 +11,12 @@
 from scipy.integrate import odeint
 
 from symfit.core.argument import Parameter, Variable
-from .support import \
-    seperate_symbols, keywordonly, sympy_to_py, cache, key2str, partial
-
+from .support import (
+    seperate_symbols, keywordonly, sympy_to_py, key2str, partial, cached_property
+)
 from .minimizers import (
     BFGS, SLSQP, LBFGSB, BaseMinimizer, GradientMinimizer, ConstrainedMinimizer,
-    ScipyMinimize, MINPACK, ChainedMinimizer
+    ScipyMinimize, MINPACK, ChainedMinimizer, BasinHopping
 )
 from .objectives import (
     LeastSquares, BaseObjective, MinimizeModel, VectorLeastSquares, LogLikelihood
@@ -149,8 +149,7 @@ def _init_from_dict(self, model_dict):
         # Make Variable object corresponding to each var.
         self.sigmas = {var: Variable(name='sigma_{}'.format(var.name))
                        for var in self.dependent_vars}
 
-    @property
-    @cache
+    @cached_property
     def vars(self):
         """
         :return: Returns a list of dependent, independent and sigma variables, in that order.
@@ -244,8 +243,7 @@ def __call__(self, *args, **kwargs):
         # return Ans(*[component(**bound_arguments.arguments) for component in self.numerical_components])
         return Ans(*self.eval_components(**bound_arguments.arguments))
 
-    @property
-    # @cache
+    @cached_property
     def numerical_components(self):
         """
         :return: lambda functions of each of the components in model_dict, to be used in numerical calculation.
@@ -357,7 +355,6 @@ def __str__(self):
         return "\n".join(parts)
 
     @property
-    # @cache
     def jacobian(self):
         """
         :return: Jacobian 'Matrix' filled with the symbolic expressions for all the partial derivatives.
@@ -367,7 +364,6 @@
         return [[sympy.diff(expr, param) for param in self.params] for expr in self.values()]
 
     @property
-    # @cache
     def chi_squared(self):
         """
         :return: Symbolic :math:`\\chi^2`
         """
         return sum(((f - y)/self.sigmas[y])**2 for y, f in self.items())
 
     @property
-    # @cache
     def chi(self):
         """
         :return: Symbolic Square root of :math:`\\chi^2`. Required for MINPACK optimization only.
             Denoted as :math:`\\sqrt(\\chi^2)`
@@ -383,7 +378,6 @@
         return sympy.sqrt(self.chi_squared)
 
     @property
-    # @cache
     def chi_jacobian(self):
         """
         Return a symbolic jacobian of the :math:`\\sqrt(\\chi^2)` function.
@@ -397,7 +391,6 @@
         return jac
 
     @property
-    # @cache
     def chi_squared_jacobian(self):
         """
         Return a symbolic jacobian of the :math:`\\chi^2` function.
@@ -411,48 +404,41 @@
         return jac
 
     # @property
-    # # @cache
     # def ss_res(self):
     #     """
     #     :return: Residual sum of squares. Similar to chi_squared, but without considering weights.
     #     """
     #     return sum((y - f)**2 for y, f in self.items())
-
-    @property
-    # @cache
+    @cached_property
     def numerical_jacobian(self):
         """
         :return: lambda functions of the jacobian matrix of the function, which can be used in numerical optimization.
         """
         return [[sympy_to_py(partial_dv, self.independent_vars, self.params) for partial_dv in row] for row in self.jacobian]
 
-    @property
-    # @cache
+    @cached_property
     def numerical_chi_squared(self):
         """
         :return: lambda function of the ``.chi_squared`` method, to be used in numerical optimisation.
         """
         return sympy_to_py(self.chi_squared, self.vars, self.params)
 
-    @property
-    # @cache
+    @cached_property
     def numerical_chi(self):
         """
         :return: lambda function of the ``.chi`` method, to be used in MINPACK optimisation.
         """
         return sympy_to_py(self.chi, self.vars, self.params)
 
-    @property
-    # @cache
+    @cached_property
     def numerical_chi_jacobian(self):
        """
         :return: lambda functions of the jacobian of the ``.chi`` method, which can be used in numerical optimization.
         """
         return [sympy_to_py(component, self.vars, self.params) for component in self.chi_jacobian]
 
-    @property
-    # @cache
+    @cached_property
     def numerical_chi_squared_jacobian(self):
         """
         :return: lambda functions of the jacobian of the ``.chi_squared`` method.
@@ -624,7 +610,6 @@ def __neg__(self):
         return self.__class__(new_constraint, self.model)
 
     @property
-    # @cache
     def jacobian(self):
         """
         :return: Jacobian 'Matrix' filled with the symbolic expressions for all the partial derivatives.
         """
         return [[sympy.diff(expr, param) for param in self.model.params] for expr in self.values()]
 
-    @property
-    # @cache
+    @cached_property
     def numerical_components(self):
         """
         :return: lambda functions of each of the components in model_dict, to be used in numerical calculation.
         """
         return [sympy_to_py(expr, self.model.vars, self.model.params) for expr in self.values()]
 
-    @property
-    # @cache
+    @cached_property
     def numerical_jacobian(self):
         """
         :return: lambda functions of the jacobian matrix of the function, which can be used in numerical optimization.
@@ -1333,13 +1316,7 @@ def __init__(self, model, *ordered_data, **named_data):
 
         # Select the minimizer on the basis of the provided information.
         if minimizer is None:
-            if self.constraints:
-                minimizer = SLSQP
-            elif any([bound is not None for pair in self.model.bounds for bound in pair]):
-                # If any bound is set
-                minimizer = LBFGSB
-            else:
-                minimizer = BFGS
+            minimizer = self._determine_minimizer()
 
         # Initialise the minimizer
         if isinstance(minimizer, Sequence):
@@ -1348,6 +1325,20 @@
         else:
             self.minimizer = self._init_minimizer(minimizer)
 
+    def _determine_minimizer(self):
+        """
+        Determine the most suitable minimizer based on the presence of bounds
+        or constraints.
+
+        :return: a subclass of `BaseMinimizer`.
+        """
+        if self.constraints:
+            return SLSQP
+        elif any([bound is not None for pair in self.model.bounds for bound in pair]):
+            # If any bound is set
+            return LBFGSB
+        else:
+            return BFGS
+
     def _init_minimizer(self, minimizer, **minimizer_options):
         """
         Takes a :class:`~symfit.core.minimizers.BaseMinimizer` and instantiates
@@ -1360,8 +1351,13 @@
             minimizer on instantiation.
         :returns: instance of :class:`~symfit.core.minimizers.BaseMinimizer`.
         """
+
         if isinstance(minimizer, BaseMinimizer):
             return minimizer
+        if issubclass(minimizer, BasinHopping):
+            minimizer_options['local_minimizer'] = self._init_minimizer(
+                self._determine_minimizer()
+            )
         if issubclass(minimizer, GradientMinimizer):
             # If an analytical version of the Jacobian exists we should use
             # that, otherwise we let the minimizer estimate it itself.
@@ -1776,17 +1772,37 @@ def __neg__(self):
             new_model_dict[key] *= -1
         return self.__class__(new_model_dict, initial=self.initial)
 
-    @property
-    @cache
+    @cached_property
     def _ncomponents(self):
-        return [sympy_to_py(expr, self.independent_vars + self.dependent_vars, self.params) for expr in self.values()]
+        """
+        :return: The `numerical_components` for an ODEModel. This differs from
+            the traditional `numerical_components`, in that these components can
+            also contain dependent variables, not just the independent ones.
 
-    @property
-    @cache
+            Each of these components does not correspond to e.g. `y(t) = ...`,
+            but to `D(y, t) = ...`. The system spanned by these components
+            therefore still needs to be integrated.
+        """
+        return [sympy_to_py(expr, self.independent_vars + self.dependent_vars, self.params)
+                for expr in self.values()]
+
+    @cached_property
     def _njacobian(self):
+        """
+        :return: The `numerical_jacobian` of the components of the ODEModel with
+            respect to the dependent variables. This is not to be confused with
+            the jacobian of the model as a whole, which is 2D and computed with
+            respect to the dependent vars and the fit parameters, and the
+            ODEModel still needs to be integrated to compute that.
+
+            Instead, this function is used by the ODE integrator, and is not
+            meant for human consumption.
+        """
         return [
-            [sympy_to_py(sympy.diff(expr, var), self.independent_vars + self.dependent_vars, self.params) for var in self.dependent_vars]
-            for _, expr in self.items()
+            [sympy_to_py(
+                sympy.diff(expr, var), self.independent_vars + self.dependent_vars, self.params
+            ) for var in self.dependent_vars
+            ] for _, expr in self.items()
         ]
 
     def eval_components(self, *args, **kwargs):
diff --git a/symfit/core/minimizers.py b/symfit/core/minimizers.py
index 6476cbde..36f073bb 100644
--- a/symfit/core/minimizers.py
+++ b/symfit/core/minimizers.py
@@ -2,7 +2,7 @@
 import sys
 from collections import namedtuple, Counter
 
-from scipy.optimize import minimize, differential_evolution
+from scipy.optimize import minimize, differential_evolution, basinhopping
 import sympy
 import numpy as np
 
@@ -270,6 +270,7 @@ def execute(self, bounds=None, jacobian=None, constraints=None, **minimize_options):
         ans = minimize(
             self.wrapped_objective,
             self.initial_guesses,
+            method=self.method_name(),
             bounds=bounds,
             constraints=constraints,
             jac=jacobian,
@@ -316,6 +317,16 @@ def _pack_output(self, ans):
             fit_results['hessian_inv'] = ans.hess_inv
         return FitResults(**fit_results)
 
+    @classmethod
+    def method_name(cls):
+        """
+        Returns the name of the minimize method this object represents. This is
+        needed because the name of the object is not always exactly what needs
+        to be passed on to scipy as a string.
+        :return: the name of the method as a string.
+        """
+        return cls.__name__
 
 class ScipyGradientMinimize(ScipyMinimize, GradientMinimizer):
     """
     Base class for :func:`scipy.optimize.minimize`'s gradient-minimizers.
@@ -367,8 +378,6 @@ class BFGS(ScipyGradientMinimize):
     """
     Wrapper around :func:`scipy.optimize.minimize`'s BFGS algorithm.
     """
-    def execute(self, **minimize_options):
-        return super(BFGS, self).execute(method='BFGS', **minimize_options)
 
 class DifferentialEvolution(ScipyMinimize, GlobalMinimizer, BoundedMinimizer):
     """
@@ -396,7 +405,6 @@ def __init__(self, *args, **kwargs):
 
     def execute(self, **minimize_options):
         return super(SLSQP, self).execute(
-            method='SLSQP',
             bounds=self.bounds,
             jacobian=self.wrapped_jacobian,
             **minimize_options
@@ -429,11 +437,6 @@ class COBYLA(ScipyConstrainedMinimize):
     """
     Wrapper around :func:`scipy.optimize.minimize`'s COBYLA algorithm.
     """
-    def execute(self, **minimize_options):
-        return super(COBYLA, self).execute(
-            method='COBYLA', **minimize_options
-        )
-
 
 class LBFGSB(ScipyGradientMinimize, BoundedMinimizer):
     """
@@ -441,18 +444,118 @@ class LBFGSB(ScipyGradientMinimize, BoundedMinimizer):
     """
     def execute(self, **minimize_options):
         return super(LBFGSB, self).execute(
-            method='L-BFGS-B',
             bounds=self.bounds,
             **minimize_options
         )
+
+    @classmethod
+    def method_name(cls):
+        return "L-BFGS-B"
 
 class NelderMead(ScipyMinimize, BaseMinimizer):
     """
     Wrapper around :func:`scipy.optimize.minimize`'s NelderMead algorithm.
     """
+    @classmethod
+    def method_name(cls):
+        return 'Nelder-Mead'
+
+
+class BasinHopping(ScipyMinimize, BaseMinimizer):
+    """
+    Wrapper around the :func:`scipy.optimize.basinhopping` algorithm.
+
+    As always, the best way to use this algorithm is through
+    :class:`~symfit.core.fit.Fit`, as this will automatically select a local
+    minimizer for you depending on whether you provided bounds, constraints, etc.
+
+    However, BasinHopping can also be used directly. Example (with jacobian)::
+
+        import numpy as np
+        from symfit.core.minimizers import BFGS, BasinHopping
+        from symfit import parameters
+
+        def func2d(x1, x2):
+            f = np.cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
+            return f
+
+        def jac2d(x1, x2):
+            df = np.zeros(2)
+            df[0] = -14.5 * np.sin(14.5 * x1 - 0.3) + 2. * x1 + 0.2
+            df[1] = 2. * x2 + 0.2
+            return df
+
+        x0 = [1.0, 1.0]
+        np.random.seed(555)
+        x1, x2 = parameters('x1, x2', value=x0)
+        fit = BasinHopping(func2d, [x1, x2], local_minimizer=BFGS)
+        minimizer_kwargs = {'jac': fit.list2kwargs(jac2d)}
+        fit_result = fit.execute(niter=200, minimizer_kwargs=minimizer_kwargs)
+
+    See :func:`scipy.optimize.basinhopping` for more options.
+    """
+    @keywordonly(local_minimizer=BFGS)
+    def __init__(self, *args, **kwargs):
+        """
+        :param local_minimizer: minimizer to be used for local minimization
+            steps. Can be any subclass of
+            :class:`symfit.core.minimizers.ScipyMinimize`.
+        :param args: positional arguments to be passed on to `super`.
+        :param kwargs: keyword arguments to be passed on to `super`.
+        """
+        self.local_minimizer = kwargs.pop('local_minimizer')
+        super(BasinHopping, self).__init__(*args, **kwargs)
+
+        type_error_msg = 'Currently only subclasses of ScipyMinimize are ' \
+                         'supported, since `scipy.optimize.basinhopping` uses ' \
+                         '`scipy.optimize.minimize`.'
+        # self.local_minimizer has to be a subclass or instance of ScipyMinimize.
+        # Since no single function exists to test this, we try/except instead.
+        try:
+            # Test if subclass. If this line doesn't fail, we are dealing with
+            # some class. If it fails, we assume that it is an instance.
+            issubclass(self.local_minimizer, ScipyMinimize)
+        except TypeError:
+            # It is not a class at all, so test if it's an instance instead
+            if not isinstance(self.local_minimizer, ScipyMinimize):
+                # Only ScipyMinimize subclasses supported
+                raise TypeError(type_error_msg)
+        else:
+            if not issubclass(self.local_minimizer, ScipyMinimize):
+                # Only ScipyMinimize subclasses supported
+                raise TypeError(type_error_msg)
+            self.local_minimizer = self.local_minimizer(self.objective, self.parameters)
 
     def execute(self, **minimize_options):
-        return super(NelderMead, self).execute(method='Nelder-Mead', **minimize_options)
+        """
+        Execute the basin-hopping minimization.
+
+        :param minimize_options: options to be passed on to
+            :func:`scipy.optimize.basinhopping`.
+        :return: :class:`symfit.core.fit_results.FitResults`
+        """
+        if 'minimizer_kwargs' not in minimize_options:
+            minimize_options['minimizer_kwargs'] = {}
+
+        if 'method' not in minimize_options['minimizer_kwargs']:
+            # If no minimizer was set by the user upon execute, use local_minimizer
+            minimize_options['minimizer_kwargs']['method'] = self.local_minimizer.method_name()
+        if 'jac' not in minimize_options['minimizer_kwargs'] and isinstance(self.local_minimizer, GradientMinimizer):
+            # Assign the jacobian
+            minimize_options['minimizer_kwargs']['jac'] = self.local_minimizer.wrapped_jacobian
+        if 'constraints' not in minimize_options['minimizer_kwargs'] and isinstance(self.local_minimizer, ConstrainedMinimizer):
+            # Assign constraints
+            minimize_options['minimizer_kwargs']['constraints'] = self.local_minimizer.wrapped_constraints
+        if 'bounds' not in minimize_options['minimizer_kwargs'] and isinstance(self.local_minimizer, BoundedMinimizer):
+            # Assign bounds
+            minimize_options['minimizer_kwargs']['bounds'] = self.local_minimizer.bounds
+
+        ans = basinhopping(
+            self.wrapped_objective,
+            self.initial_guesses,
+            **minimize_options
+        )
+        return self._pack_output(ans)
 
 class MINPACK(ScipyMinimize, GradientMinimizer, BoundedMinimizer):
diff --git a/symfit/core/objectives.py b/symfit/core/objectives.py
index 6acae54e..1cf0600f 100644
--- a/symfit/core/objectives.py
+++ b/symfit/core/objectives.py
@@ -4,7 +4,7 @@
 
 import numpy as np
 
-from .support import cache, keywordonly, key2str
+from .support import cached_property, keywordonly, key2str
 
 @add_metaclass(abc.ABCMeta)
 class BaseObjective(object):
@@ -19,8 +19,7 @@ def __init__(self, model, data):
         self.model = model
         self.data = data
 
-    @property
-    @cache
+    @cached_property
     def dependent_data(self):
         """
         Read-only Property
@@ -31,8 +30,7 @@
         """
         return OrderedDict((var, self.data[var]) for var in self.model)
 
-    @property
-    @cache
+    @cached_property
     def independent_data(self):
         """
         Read-only Property
@@ -44,8 +42,7 @@
         return OrderedDict((var, self.data[var]) for var in
                            self.model.independent_vars)
 
-    @property
-    @cache
+    @cached_property
     def sigma_data(self):
         """
         Read-only Property
diff --git a/symfit/core/support.py b/symfit/core/support.py
index 2aeae12d..4544e9aa 100644
--- a/symfit/core/support.py
+++ b/symfit/core/support.py
@@ -135,31 +135,95 @@ def parameters(names, **kwargs):
     Convenience function for the creation of multiple parameters. For more
     control, consider using ``symbols(names, cls=Parameter, **kwargs)`` directly.
 
+    The `Parameter` attributes `value`, `min`, `max` and `fixed` can also be
+    provided directly. If given as a single value, the same value will be set
+    for all `Parameter` objects. When a sequence, it must be of the same length
+    as the number of parameters created.
+
+    Example::
+
+        x1, x2 = parameters('x1, x2', value=[2.0, 1.3], min=0.0)
+
     :param names: string of parameter names.
         Example: a, b = parameters('a, b')
-    :param kwargs: kwargs to be passed onto :func:`sympy.core.symbol.symbols`
+    :param kwargs: kwargs to be passed onto :func:`sympy.core.symbol.symbols`.
+        `value`, `min` and `max` will be handled separately if they are sequences.
     :return: iterable of :class:`symfit.core.argument.Parameter` objects
     """
-    return symbols(names, cls=Parameter, seq=True, **kwargs)
+    sequence_fields = ['value', 'min', 'max', 'fixed']
+    sequences = {}
+    for attr in sequence_fields:
+        try:
+            iter(kwargs[attr])
+        except (TypeError, KeyError):
+            # Not iterable or not provided
+            pass
+        else:
+            sequences[attr] = kwargs.pop(attr)
+
+    if 'min' in sequences and 'max' in sequences:
+        for min, max in zip(sequences['min'], sequences['max']):
+            if min > max:
+                raise ValueError('The value of `min` should be less than or'
+                                 ' equal to the value of `max`.')
+
+    params = symbols(names, cls=Parameter, seq=True, **kwargs)
+    for key, values in sequences.items():
+        try:
+            assert len(values) == len(params)
+        except AssertionError:
+            raise ValueError(
+                '`len` of keyword-argument `{}` does not match the number of '
+                '`Parameter`s created.'.format(key)
+            )
+        except TypeError:
+            # Iterators do not have a `len`, but are allowed.
+            pass
+        finally:
+            for param, value in zip(params, values):
+                setattr(param, key, value)
+    return params
+
 
-def cache(func):
+class cached_property(property):
     """
-    Decorator function that gets a method as its input and either buffers the input,
-    or returns the buffered output. Used in conjunction with properties to take away
-    the standard buffering logic.
+    A property which caches the output of the first ever call and always returns
+    that value from then on, unless delete is called on the attribute.
+
+    This is typically used in converting `sympy` code into `scipy` compatible
+    code, which is a computationally very expensive step that we would like to
+    perform only once.
 
-    :param func:
-    :return:
+    Does not allow setting of the attribute.
     """
-    @wraps(func)
-    def new_func(self):
+    def __get__(self, obj, objtype=None):
+        """
+        In case of a first call, this will call the decorated function and
+        return its output. On every subsequent call, the same output will be
+        returned.
+
+        :param obj: the parent object this property is attached to.
+        :param objtype:
+        :return: Output of the first call to the decorated function.
+        """
+        cache_attr = '_{}'.format(self.fget.__name__)
         try:
-            return getattr(self, '_{}'.format(func.__name__))
+            return getattr(obj, cache_attr)
         except AttributeError:
-            setattr(self, '_{}'.format(func.__name__), func(self))
-            return getattr(self, '_{}'.format(func.__name__))
+            # Call the wrapped function with the obj instance as argument
+            setattr(obj, cache_attr, self.fget(obj))
+            return getattr(obj, cache_attr)
+
+    def __delete__(self, obj):
+        """
+        Calling delete on the attribute will delete the cache.
+
+        :param obj: parent object.
+        """
+        cache_attr = '_{}'.format(self.fget.__name__)
+        try:
+            delattr(obj, cache_attr)
+        except AttributeError:
+            pass
 
-    return new_func
 
 def jacobian(expr, symbols):
     """
diff --git a/tests/test_minimize.py b/tests/test_minimize.py
index a41aba1e..128f3b03 100644
--- a/tests/test_minimize.py
+++ b/tests/test_minimize.py
@@ -4,13 +4,15 @@
 import warnings
 
 import numpy as np
-from scipy.optimize import minimize
+from scipy.optimize import minimize, basinhopping
 
 from symfit import (
-    Variable, Parameter, Eq, Ge, Le, Lt, Gt, Ne, parameters, ModelError, Fit, Model
+    Variable, Parameter, Eq, Ge, Le, Lt, Gt, Ne, parameters, ModelError, Fit,
+    Model, cos
 )
 from symfit.core.objectives import MinimizeModel
-from symfit.core.minimizers import BFGS
+from symfit.core.minimizers import BFGS, BasinHopping, LBFGSB, SLSQP
+from symfit.core.support import partial
 
 class TestMinimize(unittest.TestCase):
@@ -116,6 +118,124 @@ def test_constraint_types(self):
         self.assertAlmostEqual(fit_result.value(x), std_result.value(x))
         self.assertAlmostEqual(fit_result.value(y), std_result.value(y))
 
+    def test_basinhopping_large(self):
+        """
+        Test the basinhopping method of scipy.optimize. This is based on scipy's docs
+        as found here: https://docs.scipy.org/doc/scipy-0.13.0/reference/generated/scipy.optimize.anneal.html
+        """
+        def f1(z, *params):
+            x, y = z
+            a, b, c, d, e, f, g, h, i, j, k, l, scale = params
+            return (a * x ** 2 + b * x * y + c * y ** 2 + d * x + e * y + f)
+
+        def f2(z, *params):
+            x, y = z
+            a, b, c, d, e, f, g, h, i, j, k, l, scale = params
+            return (-g * np.exp(-((x - h) ** 2 + (y - i) ** 2) / scale))
+
+        def f3(z, *params):
+            x, y = z
+            a, b, c, d, e, f, g, h, i, j, k, l, scale = params
+            return (-j * np.exp(-((x - k) ** 2 + (y - l) ** 2) / scale))
+
+        def func(z, *params):
+            x, y = z
+            a, b, c, d, e, f, g, h, i, j, k, l, scale = params
+            return f1(z, *params) + f2(z, *params) + f3(z, *params)
+
+        def f_symfit(x1, x2, params):
+            z = [x1, x2]
+            return func(z, *params)
+
+        params = (2, 3, 7, 8, 9, 10, 44, -1, 2, 26, 1, -2, 0.5)
+
+        x0 = np.array([2., 2.])
+        np.random.seed(555)
+        res = basinhopping(func, x0, minimizer_kwargs={'args': params})
+
+        np.random.seed(555)
+        x1, x2 = parameters('x1, x2', value=x0)
+        fit = BasinHopping(partial(f_symfit, params=params), [x1, x2])
+        fit_result = fit.execute()
+
+        self.assertEqual(res.x[0], fit_result.value(x1))
+        self.assertEqual(res.x[1], fit_result.value(x2))
+        self.assertEqual(res.fun, fit_result.objective_value)
+
+    def test_basinhopping(self):
+        func = lambda x: np.cos(14.5 * x - 0.3) + (x + 0.2) * x
+        x0 = [1.]
+        np.random.seed(555)
+        res = basinhopping(func, x0, minimizer_kwargs={"method": "BFGS"}, niter=200)
+        print(res)
+        np.random.seed(555)
+        x, = parameters('x')
+        fit = BasinHopping(func, [x])
+        fit_result = fit.execute(minimizer_kwargs={"method": "BFGS", 'jac': False}, niter=200)
+        print(fit_result)
+
+        self.assertEqual(res.x, fit_result.value(x))
+        self.assertEqual(res.fun, fit_result.objective_value)
+
+    def test_basinhopping_2d(self):
+        def func2d(x):
+            f = np.cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
+            df = np.zeros(2)
+            df[0] = -14.5 * np.sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
+            df[1] = 2. * x[1] + 0.2
+            return f, df
+
+        def func2d_symfit(x1, x2):
+            f = np.cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
+            return f
+
+        def jac2d_symfit(x1, x2):
+            df = np.zeros(2)
+            df[0] = -14.5 * np.sin(14.5 * x1 - 0.3) + 2. * x1 + 0.2
+            df[1] = 2. * x2 + 0.2
+            return df
+
+        np.random.seed(555)
+        minimizer_kwargs = {'method': 'BFGS', 'jac': True}
+        x0 = [1.0, 1.0]
+        res = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs, niter=200)
+
+        np.random.seed(555)
+        x1, x2 = parameters('x1, x2', value=x0)
+        fit = BasinHopping(func2d_symfit, [x1, x2], local_minimizer=BFGS)
+        minimizer_kwargs = {'jac': fit.list2kwargs(jac2d_symfit)}
+        fit_result = fit.execute(niter=200, minimizer_kwargs=minimizer_kwargs)
+
+        self.assertEqual(res.x[0], fit_result.value(x1))
+        self.assertEqual(res.x[1], fit_result.value(x2))
+        self.assertEqual(res.fun, fit_result.objective_value)
+
+        # Now compare with the symbolic equivalent
+        np.random.seed(555)
+        model = cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
+        fit = Fit(model, minimizer=BasinHopping)
+        fit_result = fit.execute()
+        self.assertEqual(res.x[0], fit_result.value(x1))
+        self.assertEqual(res.x[1], fit_result.value(x2))
+        self.assertEqual(res.fun, fit_result.objective_value)
+        self.assertIsInstance(fit.minimizer.local_minimizer, BFGS)
+
+        # Impose constraints
+        np.random.seed(555)
+        model = cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
+        fit = Fit(model, minimizer=BasinHopping, constraints=[Eq(x1, x2)])
+        fit_result = fit.execute()
+        self.assertEqual(fit_result.value(x1), fit_result.value(x2))
+        self.assertIsInstance(fit.minimizer.local_minimizer, SLSQP)
+
+        # Impose bounds
+        np.random.seed(555)
+        x1.min = 0.0
+        model = cos(14.5 * x1 - 0.3) + (x2 + 0.2) * x2 + (x1 + 0.2) * x1
+        fit = Fit(model, minimizer=BasinHopping)
+        fit_result = fit.execute()
+        self.assertGreaterEqual(fit_result.value(x1), x1.min)
+        self.assertIsInstance(fit.minimizer.local_minimizer, LBFGSB)
 
 
 if __name__ == '__main__':
diff --git a/tests/test_support.py b/tests/test_support.py
index f1f4cf56..ebe23fbb 100644
--- a/tests/test_support.py
+++ b/tests/test_support.py
@@ -6,9 +6,12 @@
 import unittest
 import sys
 import warnings
+from itertools import repeat
 
-from symfit.core.support import \
-    keywordonly, RequiredKeyword, RequiredKeywordError, partial
+from symfit.core.support import (
+    keywordonly, RequiredKeyword, RequiredKeywordError, partial, parameters,
+    cached_property
+)
 
 if sys.version_info >= (3, 0):
     import inspect as inspect_sig
@@ -155,6 +158,72 @@ def partial_me(a, b, c=None):
         self.assertFalse(partialed_two.args)
         self.assertEqual(partialed_two.keywords, {'a': 2, 'b': 'string'})
 
+    def test_parameters(self):
+        """
+        Test the `parameters` convenience function.
+        """
+        x1, x2 = parameters('x1, x2', value=[2.0, 1.3], min=0.0)
+        self.assertEqual(x1.value, 2.0)
+        self.assertEqual(x2.value, 1.3)
+        self.assertEqual(x1.min, 0.0)
+        self.assertEqual(x2.min, 0.0)
+        self.assertEqual(x1.fixed, False)
+        self.assertEqual(x2.fixed, False)
+        with self.assertRaises(ValueError):
+            x1, x2 = parameters('x1, x2', value=[2.0, 1.3, 3.0], min=0.0)
+
+        x1, x2 = parameters('x1, x2', value=[2.0, 1.3], min=[-30, -10], max=[300, 100], fixed=[True, False])
+        self.assertEqual(x1.min, -30)
+        self.assertEqual(x2.min, -10)
+        self.assertEqual(x1.max, 300)
+        self.assertEqual(x2.max, 100)
+        self.assertEqual(x1.value, 2.0)
+        self.assertEqual(x2.value, 1.3)
+        self.assertEqual(x1.fixed, True)
+        self.assertEqual(x2.fixed, False)
+
+        # Illegal bounds
+        with self.assertRaises(ValueError):
+            x1, x2 = parameters('x1, x2', value=[2.0, 1.3], min=[400, -10], max=[300, 100])
+        # Should not raise any error, as repeat is an endless source of values
+        x1, x2 = parameters('x1, x2', value=[2.0, 1.3], min=repeat(0.0))
+
+    def test_cached_property(self):
+        class A(object):
+            def __init__(self):
+                self.counter = 0
+
+            @cached_property
+            def f(self):
+                self.counter += 1
+                return 2
+
+        a = A()
+        # Deleting before a cache has been set will fail silently.
+        del a.f
+        with self.assertRaises(AttributeError):
+            # Cache does not exist before f is called
+            a._f
+        self.assertEqual(a.f, 2)
+        self.assertTrue(hasattr(a, '_f'))
+        del a.f
+        # Check that deletion was successful
+        with self.assertRaises(AttributeError):
+            # Does not exist before f is called
+            a._f
+        # However, the function should still be there
+        self.assertEqual(a.f, 2)
+        with self.assertRaises(AttributeError):
+            # Setting is not allowed.
+            a.f = 3
+
+        # Counter should read 2 at this point, the number of calls since
+        # object creation.
+        self.assertEqual(a.counter, 2)
+        for _ in range(10):
+            a.f
+        # Should be returning from cache, so a.f is not actually called
+        self.assertEqual(a.counter, 2)
 
 
 if __name__ == '__main__':
     try: