Skip to content

Commit

Permalink
Auto-update pre-commit hooks
Browse files Browse the repository at this point in the history
  • Loading branch information
xhochy authored Jun 4, 2023
1 parent d4c4522 commit 221c6f9
Show file tree
Hide file tree
Showing 8 changed files with 29 additions and 32 deletions.
8 changes: 4 additions & 4 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/Quantco/pre-commit-mirrors-black
rev: 23.1.0
rev: 23.3.0
hooks:
- id: black-conda
args:
Expand All @@ -16,7 +16,7 @@ repos:
- id: isort-conda
additional_dependencies: [-c, conda-forge, toml=0.10.2]
- repo: https://github.com/Quantco/pre-commit-mirrors-mypy
rev: "1.0.1"
rev: "1.3.0"
hooks:
- id: mypy-conda
files: plateau
Expand All @@ -30,7 +30,7 @@ repos:
- numpy=1.23
- minimalkv=1.4.2
- repo: https://github.com/Quantco/pre-commit-mirrors-pyupgrade
rev: 3.3.1
rev: 3.4.0
hooks:
- id: pyupgrade-conda
args:
Expand All @@ -41,7 +41,7 @@ repos:
- id: prettier-conda
files: "\\.md$"
- repo: https://github.com/Quantco/pre-commit-mirrors-docformatter
rev: 1.5.0.1
rev: 1.7.1
hooks:
- id: docformatter-conda
- repo: https://github.com/Quantco/pre-commit-mirrors-pre-commit-hooks
Expand Down
4 changes: 2 additions & 2 deletions plateau/core/_deprecation.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,8 +40,8 @@


def deprecate_kwarg(old_arg_name, new_arg_name, mapping=None, stacklevel=2):
"""
Decorator to deprecate a keyword argument of a function.
"""Decorator to deprecate a keyword argument of a function.
Parameters
----------
old_arg_name : str
Expand Down
7 changes: 4 additions & 3 deletions plateau/core/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -856,9 +856,10 @@ def create_partition_key(
class DatasetMetadataBuilder(CopyMixin):
"""Incrementally build up a dataset.
In contrast to a :class:`plateau.core.dataset.DatasetMetadata`
instance, this object is mutable and may not be a full dataset (e.g.
partitions don't need to be fully materialised).
In contrast to a
:class: `plateau.core.dataset.DatasetMetadata` instance, this
object is mutable and may not be a full dataset (e.g.
partitions don't need to be fully materialised).
"""

def __init__(
Expand Down
14 changes: 7 additions & 7 deletions plateau/io/dask/delayed.py
Original file line number Diff line number Diff line change
Expand Up @@ -270,13 +270,13 @@ def update_dataset_from_delayed(
factory=None,
table_name=SINGLE_TABLE,
):
"""
A dask.delayed graph to add and store a list of dictionaries containing
dataframes to a plateau dataset in store. The input should be a list
(or splitter pipeline) containing
:class:`~plateau.io_components.metapartition.MetaPartition`. If you want to use this
pipeline step for just deleting partitions without adding new ones you
have to give an empty meta partition as input (``[Metapartition(None)]``).
"""A dask.delayed graph to add and store a list of dictionaries containing
dataframes to a plateau dataset in store. The input should be a list (or
splitter pipeline) containing
:class:`~plateau.io_components.metapartition.MetaPartition`. If you want to
use this pipeline step for just deleting partitions without adding new ones
you have to give an empty meta partition as input
(``[Metapartition(None)]``).
Parameters
----------
Expand Down
10 changes: 5 additions & 5 deletions plateau/io_components/metapartition.py
Original file line number Diff line number Diff line change
Expand Up @@ -391,9 +391,8 @@ def from_partition(
logical_conjunction: Optional[List[Tuple[Any, str, Any]]] = None,
table_name: str = SINGLE_TABLE,
):
"""
Transform a plateau :class:`~plateau.core.partition.Partition` into a
:class:`~plateau.io_components.metapartition.MetaPartition`.
"""Transform a plateau :class:`~plateau.core.partition.Partition` into
a :class:`~plateau.io_components.metapartition.MetaPartition`.
Parameters
----------
Expand Down Expand Up @@ -993,8 +992,9 @@ def build_indices(self, columns: Iterable[str]):
columns. The indices for the passed columns are rebuilt, so existing
index entries in the metapartition are overwritten.
:param columns: A list of columns from which the indices over all dataframes in the metapartition
are overwritten
:param columns: A list of columns from which the indices over
all dataframes in the metapartition are
overwritten
:return: self
"""
if self.label is None:
Expand Down
5 changes: 1 addition & 4 deletions plateau/io_components/read.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,10 +37,7 @@ def dispatch_metapartitions_from_factory(
predicates: PredicatesType = None,
dispatch_by: Optional[List[str]] = None,
) -> Union[Iterator[MetaPartition], Iterator[List[MetaPartition]]]:
"""
:meta private:
"""
""":meta private:"""

if dispatch_by is not None and not set(dispatch_by).issubset(
set(dataset_factory.index_columns)
Expand Down
8 changes: 4 additions & 4 deletions plateau/serialization/_generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,13 @@
Available constants
**PredicatesType** - A type describing the format of predicates which is a list of ConjunctionType
**ConjunctionType** - A type describing a single Conjunction which is a list of literals
**LiteralType** - A type for a single literal
**PredicatesType** - A type describing the format of predicates which is
a list of ConjunctionType **ConjunctionType** - A type describing a
single Conjunction which is a list of literals **LiteralType** - A type
for a single literal
**LiteralValue** - A type indicating the value of a predicate literal
:meta public:
"""

Expand Down
5 changes: 2 additions & 3 deletions tests/io/dask/dataframe/test_stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -257,9 +257,8 @@ def test_collect_dataset_metadata_fraction_precision(store_factory):


def test_collect_dataset_metadata_at_least_one_partition(store_factory):
"""
Make sure we return at least one partition, even if none would be returned by rounding frac * n_partitions
"""
"""Make sure we return at leat one partition, even if none would be
returned by rounding frac * n_partitions."""
df = pd.DataFrame(data={"A": range(100), "B": range(100)})

store_dataframes_as_dataset(
Expand Down

0 comments on commit 221c6f9

Please sign in to comment.