Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[DEP] deprecate compose #1679

Merged
merged 2 commits into from
Jun 25, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file.
6 changes: 6 additions & 0 deletions aeon/transformations/adapt.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,12 @@
from aeon.transformations.base import BaseTransformer


# TODO: remove in v0.11.0
@deprecated(
version="0.10.0",
reason="TabularToSeriesAdaptor will be removed in version 0.11.0.",
category=FutureWarning,
)
class TabularToSeriesAdaptor(BaseTransformer):
"""
Adapt scikit-learn transformation interface to time series setting.
Expand Down
211 changes: 62 additions & 149 deletions aeon/transformations/compose.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,12 @@

import numpy as np
import pandas as pd
from deprecated.sphinx import deprecated
from sklearn import clone

from aeon.base import _HeterogenousMetaEstimator
from aeon.testing.mock_estimators import MockTransformer
from aeon.transformations._delegate import _DelegatedTransformer
from aeon.transformations._legacy._delegate import _DelegatedTransformer
from aeon.transformations.base import BaseTransformer
from aeon.utils.multiindex import flatten_multiindex
from aeon.utils.sklearn import is_sklearn_transformer
Expand Down Expand Up @@ -41,6 +42,12 @@ def _coerce_to_aeon(other):
return other


# TODO: remove in v0.11.0
@deprecated(
version="0.10.0",
reason="TransformerPipeline will be removed in version 0.11.0.",
category=FutureWarning,
)
class TransformerPipeline(_HeterogenousMetaEstimator, BaseTransformer):
"""
Pipeline of transformers compositor.
Expand Down Expand Up @@ -100,42 +107,6 @@ class TransformerPipeline(_HeterogenousMetaEstimator, BaseTransformer):
is always in (str, transformer) format, even if `steps` is just a list
strings not passed in `steps` are replaced by unique generated strings
i-th transformer in `steps_` is clone of i-th in `steps`.

Examples
--------
>>> from aeon.testing.mock_estimators import MockTransformer
>>> t1 = MockTransformer(power=2)
>>> t2 = MockTransformer(power=0.5)

Example 1, option A: construct without strings (unique names are generated for
the two components t1 and t2)
>>> pipe = TransformerPipeline(steps = [t1, t2])

Example 1, option B: construct with strings to give custom names to steps
>>> pipe = TransformerPipeline(
... steps = [
... ("trafo1", t1),
... ("trafo2", t2),
... ]
... )

Example 1, option C: for quick construction, the * dunder method can be used
>>> pipe = t1 * t2

Example 2: sklearn transformers can be used in the pipeline.
If applied to Series, sklearn transformers are applied by series instance.
If applied to Table, sklearn transformers are applied to the table as a whole.
>>> from sklearn.preprocessing import StandardScaler
>>> from aeon.transformations.summarize import SummaryTransformer

This applies the scaler per series, then summarizes:
>>> pipe = StandardScaler() * SummaryTransformer()

This applies the summarization, then scales the full summary table:
>>> pipe = SummaryTransformer() * StandardScaler()

This scales the series, then summarizes, then scales the full summary table:
>>> pipe = StandardScaler() * SummaryTransformer() * StandardScaler()
"""

_tags = {
Expand Down Expand Up @@ -400,6 +371,12 @@ def get_test_params(cls, parameter_set="default"):
return [params1, params2, params3]


# TODO: remove in v0.11.0
@deprecated(
version="0.10.0",
reason="FeatureUnion will be removed in version 0.11.0.",
category=FutureWarning,
)
class FeatureUnion(_HeterogenousMetaEstimator, BaseTransformer):
"""Concatenates results of multiple transformer objects.

Expand Down Expand Up @@ -618,6 +595,12 @@ def get_test_params(cls, parameter_set="default"):
return {"transformer_list": TRANSFORMERS}


# TODO: remove in v0.11.0
@deprecated(
version="0.10.0",
reason="FitInTransform will be removed in version 0.11.0.",
category=FutureWarning,
)
class FitInTransform(BaseTransformer):
"""
Transformer wrapper to delay fit to the transform phase.
Expand All @@ -642,30 +625,6 @@ class FitInTransform(BaseTransformer):
transformer is the inner transformer. So the inner transformer is
fitted on the inverse_transform data. This is required to have a non-
state changing transform() method of FitInTransform.

Examples
--------
>>> from aeon.datasets import load_longley
>>> from aeon.forecasting.naive import NaiveForecaster
>>> from aeon.forecasting.base import ForecastingHorizon
>>> from aeon.forecasting.compose import ForecastingPipeline
>>> from aeon.forecasting.model_selection import temporal_train_test_split
>>> from aeon.transformations.compose import FitInTransform
>>> from aeon.transformations.impute import Imputer
>>> y, X = load_longley()
>>> y_train, y_test, X_train, X_test = temporal_train_test_split(y, X)
>>> fh = ForecastingHorizon(y_test.index, is_relative=False)
>>> # we want to fit the Imputer only on the predict (=transform) data.
>>> # note that NaiveForecaster can't use X data, this is just a show case.
>>> pipe = ForecastingPipeline(
... steps=[
... ("imputer", FitInTransform(Imputer(method="mean"))),
... ("forecaster", NaiveForecaster()),
... ]
... )
>>> pipe.fit(y_train, X_train)
ForecastingPipeline(...)
>>> y_pred = pipe.predict(fh=fh, X=X_test)
"""

def __init__(self, transformer, skip_inverse_transform=True):
Expand Down Expand Up @@ -755,6 +714,12 @@ def get_test_params(cls, parameter_set="default"):
return params


# TODO: remove in v0.11.0
@deprecated(
version="0.10.0",
reason="MultiplexTransformer will be removed in version 0.11.0.",
category=FutureWarning,
)
class MultiplexTransformer(_HeterogenousMetaEstimator, _DelegatedTransformer):
"""
Facilitate an AutoML based selection of the best transformer.
Expand Down Expand Up @@ -798,40 +763,6 @@ class MultiplexTransformer(_HeterogenousMetaEstimator, _DelegatedTransformer):
_transformers : list of (name, est) tuples, where est are direct references to
the estimators passed in transformers passed. If transformers was passed
without names, those be auto-generated and put here.

Examples
--------
>>> from aeon.datasets import load_shampoo_sales
>>> from aeon.forecasting.naive import NaiveForecaster
>>> from aeon.transformations.compose import MultiplexTransformer
>>> from aeon.transformations.impute import Imputer
>>> from aeon.forecasting.compose import TransformedTargetForecaster
>>> from aeon.forecasting.model_selection import (
... ForecastingGridSearchCV,
... ExpandingWindowSplitter)
>>> # create MultiplexTransformer:
>>> multiplexer = MultiplexTransformer(transformers=[
... ("impute_mean", Imputer(method="mean", missing_values = -1)),
... ("impute_near", Imputer(method="nearest", missing_values = -1)),
... ("impute_rand", Imputer(method="random", missing_values = -1))])
>>> cv = ExpandingWindowSplitter(
... initial_window=24,
... step_length=12,
... fh=[1,2,3])
>>> pipe = TransformedTargetForecaster(steps = [
... ("multiplex", multiplexer),
... ("forecaster", NaiveForecaster())
... ])
>>> gscv = ForecastingGridSearchCV(
... cv=cv,
... param_grid={"multiplex__selected_transformer":
... ["impute_mean", "impute_near", "impute_rand"]},
... forecaster=pipe,
... )
>>> y = load_shampoo_sales()
>>> # randomly make some of the values nans:
>>> y.loc[y.sample(frac=0.1).index] = -1
>>> gscv = gscv.fit(y)
"""

# tags will largely be copied from selected_transformer
Expand Down Expand Up @@ -1013,6 +944,12 @@ def __ror__(self, other):
)


# TODO: remove in v0.11.0
@deprecated(
version="0.10.0",
reason="InvertTransform will be removed in version 0.11.0.",
category=FutureWarning,
)
class InvertTransform(_DelegatedTransformer):
"""Invert a series-to-series transformation.

Expand All @@ -1027,16 +964,6 @@ class InvertTransform(_DelegatedTransformer):
----------
transformer_: transformer,
this clone is fitted when `fit` is called and provides `transform` and inverse

Examples
--------
>>> from aeon.datasets import load_airline
>>> from aeon.transformations.compose import InvertTransform
>>> from aeon.testing.mock_estimators import MockTransformer
>>>
>>> inverse_exponent = InvertTransform(MockTransformer(power=3))
>>> X = load_airline()
>>> Xt = inverse_exponent.fit_transform(X) # computes 3rd square root
"""

_tags = {
Expand Down Expand Up @@ -1165,6 +1092,12 @@ def get_test_params(cls, parameter_set="default"):
return [params1, params2]


# TODO: remove in v0.11.0
@deprecated(
version="0.10.0",
reason="Id will be removed in version 0.11.0.",
category=FutureWarning,
)
class Id(BaseTransformer):
"""Identity transformer, returns data unchanged in transform/inverse_transform."""

Expand Down Expand Up @@ -1228,6 +1161,12 @@ def _get_fitted_params(self):
return {}


# TODO: remove in v0.11.0
@deprecated(
version="0.10.0",
reason="OptionalPassthrough will be removed in version 0.11.0.",
category=FutureWarning,
)
class OptionalPassthrough(_DelegatedTransformer):
"""
Wrap an existing transformer to tune whether to include it in a pipeline.
Expand All @@ -1254,41 +1193,6 @@ class OptionalPassthrough(_DelegatedTransformer):
this clone is fitted when `fit` is called and provides `transform` and inverse
if passthrough = False, a clone of `transformer`passed
if passthrough = True, the identity transformer `Id`

Examples
--------
>>> from aeon.datasets import load_airline
>>> from aeon.forecasting.naive import NaiveForecaster
>>> from aeon.transformations.compose import OptionalPassthrough
>>> from aeon.transformations.detrend import Deseasonalizer
>>> from aeon.transformations.adapt import TabularToSeriesAdaptor
>>> from aeon.forecasting.compose import TransformedTargetForecaster
>>> from aeon.forecasting.model_selection import (
... ForecastingGridSearchCV,
... SlidingWindowSplitter)
>>> from sklearn.preprocessing import StandardScaler
>>> # create pipeline
>>> pipe = TransformedTargetForecaster(steps=[
... ("deseasonalizer", OptionalPassthrough(Deseasonalizer())),
... ("scaler", OptionalPassthrough(TabularToSeriesAdaptor(StandardScaler()))),
... ("forecaster", NaiveForecaster())]) # doctest: +SKIP
>>> # putting it all together in a grid search
>>> cv = SlidingWindowSplitter(
... initial_window=60,
... window_length=24,
... start_with_window=True,
... step_length=48) # doctest: +SKIP
>>> param_grid = {
... "deseasonalizer__passthrough" : [True, False],
... "scaler__transformer__transformer__with_mean": [True, False],
... "scaler__passthrough" : [True, False],
... "forecaster__strategy": ["drift", "mean", "last"]} # doctest: +SKIP
>>> gscv = ForecastingGridSearchCV(
... forecaster=pipe,
... param_grid=param_grid,
... cv=cv,
... n_jobs=-1) # doctest: +SKIP
>>> gscv_fitted = gscv.fit(load_airline()) # doctest: +SKIP
"""

_tags = {
Expand Down Expand Up @@ -1359,6 +1263,12 @@ def get_test_params(cls, parameter_set="default"):
return {"transformer": BoxCoxTransformer(), "passthrough": False}


# TODO: remove in v0.11.0
@deprecated(
version="0.10.0",
reason="ColumnwiseTransformer will be removed in version 0.11.0.",
category=FutureWarning,
)
class ColumnwiseTransformer(BaseTransformer):
"""Apply a transformer columnwise to multivariate series.

Expand All @@ -1385,15 +1295,6 @@ class ColumnwiseTransformer(BaseTransformer):
See Also
--------
OptionalPassthrough

Examples
--------
>>> from aeon.datasets import load_longley
>>> from aeon.transformations.detrend import Detrender
>>> from aeon.transformations.compose import ColumnwiseTransformer
>>> _, X = load_longley()
>>> transformer = ColumnwiseTransformer(Detrender())
>>> Xt = transformer.fit_transform(X)
"""

_tags = {
Expand Down Expand Up @@ -1594,6 +1495,12 @@ def _check_is_pdseries(z):
return z, is_series


# TODO: remove in v0.11.0
@deprecated(
version="0.10.0",
reason="ColumnConcatenator will be removed in version 0.11.0.",
category=FutureWarning,
)
class ColumnConcatenator(BaseTransformer):
"""Concatenate multivariate series to a long univariate series.

Expand Down Expand Up @@ -1645,6 +1552,12 @@ def _transform(self, X, y=None):
return Xt


# TODO: remove in v0.11.0
@deprecated(
version="0.10.0",
    reason="YtoX will be removed in version 0.11.0.",
category=FutureWarning,
)
class YtoX(BaseTransformer):
"""
Create exogeneous features which are a copy of the endogenous data.
Expand Down
26 changes: 0 additions & 26 deletions docs/api_reference/transformations.rst
Original file line number Diff line number Diff line change
Expand Up @@ -46,23 +46,6 @@ Composition
Pipeline building
~~~~~~~~~~~~~~~~~

.. currentmodule:: aeon.transformations.compose

.. autosummary::
:toctree: auto_generated/
:template: class.rst

TransformerPipeline
FeatureUnion
ColumnwiseTransformer
ColumnTransformer
FitInTransform
MultiplexTransformer
OptionalPassthrough
InvertTransform
Id
YtoX

.. currentmodule:: aeon.transformations.func_transform

.. autosummary::
Expand All @@ -82,15 +65,6 @@ Sklearn and pandas adapters

Tabularizer

.. currentmodule:: aeon.transformations.adapt

.. autosummary::
:toctree: auto_generated/
:template: class.rst

TabularToSeriesAdaptor
PandasTransformAdaptor

Series-to-tabular transformers
-------------------------------

Expand Down