diff --git a/pixi.toml b/pixi.toml index abf700aef..cf694701b 100644 --- a/pixi.toml +++ b/pixi.toml @@ -111,7 +111,7 @@ pyogrio = "*" pytest = "*" pytest-cov = "*" pytest-xdist = "*" -python = ">=3.9" +python = ">=3.10" quartodoc = "*" ruff = "*" shapely = ">=2.0" diff --git a/python/ribasim/pyproject.toml b/python/ribasim/pyproject.toml index b46d35c05..39413f09b 100644 --- a/python/ribasim/pyproject.toml +++ b/python/ribasim/pyproject.toml @@ -12,7 +12,7 @@ classifiers = [ "Intended Audience :: Science/Research", "Topic :: Scientific/Engineering :: Hydrology", ] -requires-python = ">=3.9" +requires-python = ">=3.10" dependencies = [ "geopandas", "matplotlib", diff --git a/python/ribasim/ribasim/config.py b/python/ribasim/ribasim/config.py index 71446aa5d..6a4330699 100644 --- a/python/ribasim/ribasim/config.py +++ b/python/ribasim/ribasim/config.py @@ -1,6 +1,5 @@ from enum import Enum from pathlib import Path -from typing import Dict, List from pydantic import Field @@ -54,7 +53,7 @@ class Results(BaseModel): class Solver(BaseModel): algorithm: str = "QNDF" - saveat: float | List[float] = [] + saveat: float | list[float] = [] adaptive: bool = True dt: float | None = None dtmin: float | None = None @@ -93,7 +92,7 @@ class PidControl(NodeModel): default_factory=TableModel[PidControlTimeSchema] ) - _sort_keys: Dict[str, List[str]] = {"time": ["time", "node_id"]} + _sort_keys: dict[str, list[str]] = {"time": ["time", "node_id"]} class LevelBoundary(NodeModel): @@ -104,7 +103,7 @@ class LevelBoundary(NodeModel): default_factory=TableModel[LevelBoundaryTimeSchema] ) - _sort_keys: Dict[str, List[str]] = {"time": ["time", "node_id"]} + _sort_keys: dict[str, list[str]] = {"time": ["time", "node_id"]} class Pump(NodeModel): @@ -120,7 +119,7 @@ class TabulatedRatingCurve(NodeModel): time: TableModel[TabulatedRatingCurveTimeSchema] = Field( default_factory=TableModel[TabulatedRatingCurveTimeSchema] ) - _sort_keys: Dict[str, List[str]] = { + _sort_keys: dict[str, list[str]] 
= { "static": ["node_id", "level"], "time": ["time", "node_id", "level"], } @@ -132,7 +131,7 @@ class User(NodeModel): ) time: TableModel[UserTimeSchema] = Field(default_factory=TableModel[UserTimeSchema]) - _sort_keys: Dict[str, List[str]] = { + _sort_keys: dict[str, list[str]] = { "static": ["node_id", "priority"], "time": ["node_id", "priority", "time"], } @@ -146,7 +145,7 @@ class FlowBoundary(NodeModel): default_factory=TableModel[FlowBoundaryTimeSchema] ) - _sort_keys: Dict[str, List[str]] = {"time": ["time", "node_id"]} + _sort_keys: dict[str, list[str]] = {"time": ["time", "node_id"]} class Basin(NodeModel): @@ -163,7 +162,7 @@ class Basin(NodeModel): default_factory=TableModel[BasinTimeSchema] ) - _sort_keys: Dict[str, List[str]] = { + _sort_keys: dict[str, list[str]] = { "profile": ["node_id", "level"], "time": ["time", "node_id"], } diff --git a/python/ribasim/ribasim/input_base.py b/python/ribasim/ribasim/input_base.py index 107c82c12..ddb80a5e5 100644 --- a/python/ribasim/ribasim/input_base.py +++ b/python/ribasim/ribasim/input_base.py @@ -1,19 +1,13 @@ import re from abc import ABC, abstractmethod +from collections.abc import Callable, Generator from contextlib import closing from contextvars import ContextVar from pathlib import Path from sqlite3 import Connection, connect from typing import ( Any, - Callable, - Dict, - Generator, Generic, - List, - Set, - Tuple, - Type, TypeVar, ) @@ -38,7 +32,7 @@ gpd.options.io_engine = "pyogrio" -context_file_loading: ContextVar[Dict[str, Any]] = ContextVar( +context_file_loading: ContextVar[dict[str, Any]] = ContextVar( "file_loading", default={} ) @@ -75,7 +69,7 @@ class BaseModel(PydanticBaseModel): ) @classmethod - def fields(cls) -> List[str]: + def fields(cls) -> list[str]: """Return the names of the fields contained in the Model.""" return list(cls.model_fields.keys()) @@ -102,7 +96,7 @@ class FileModel(BaseModel, ABC): @classmethod def check_filepath(cls, value: Any) -> Any: # Enable initialization with a 
Path. - if isinstance(value, (Dict,)): + if isinstance(value, dict): # Pydantic Model init requires a dict filepath = value.get("filepath", None) if filepath is not None: @@ -110,7 +104,7 @@ def check_filepath(cls, value: Any) -> Any: data = cls._load(filepath) value.update(data) return value - elif isinstance(value, (Path, str)): + elif isinstance(value, Path | str): # Pydantic Model init requires a dict data = cls._load(Path(value)) data["filepath"] = value @@ -132,7 +126,7 @@ def _save(self, directory: DirectoryPath) -> None: @classmethod @abstractmethod - def _load(cls, filepath: Path | None) -> Dict[str, Any]: + def _load(cls, filepath: Path | None) -> dict[str, Any]: """Load the data at filepath and returns it as a dictionary. If a derived FileModel does not load data from disk, this should @@ -144,7 +138,7 @@ def _load(cls, filepath: Path | None) -> Dict[str, Any]: Returns ------- - Dict: The data stored at filepath + dict: The data stored at filepath """ raise NotImplementedError() @@ -163,7 +157,7 @@ def tablename(cls) -> str: NodeSchema -> Schema TabularRatingCurveStaticSchema -> TabularRatingCurve / Static """ - names: List[str] = re.sub("([A-Z]+)", r" \1", str(cls.tableschema())).split() + names: list[str] = re.sub("([A-Z]+)", r" \1", str(cls.tableschema())).split() if len(names) > 2: return f"{''.join(names[:-2])}{delimiter}{names[-2].lower()}" else: @@ -173,20 +167,20 @@ def tablename(cls) -> str: @classmethod def check_dataframe(cls, value: Any) -> Any: # Enable initialization with a DataFrame. 
- if isinstance(value, (pd.DataFrame, gpd.GeoDataFrame)): + if isinstance(value, pd.DataFrame | gpd.GeoDataFrame): value = {"df": value} return value - def node_ids(self) -> Set[int]: - node_ids: Set[int] = set() + def node_ids(self) -> set[int]: + node_ids: set[int] = set() if self.df is not None and "node_id" in self.df.columns: node_ids.update(self.df["node_id"]) return node_ids @classmethod - def _load(cls, filepath: Path | None) -> Dict[str, Any]: + def _load(cls, filepath: Path | None) -> dict[str, Any]: db = context_file_loading.get().get("database") if filepath is not None: adf = cls._from_arrow(filepath) @@ -199,7 +193,7 @@ def _load(cls, filepath: Path | None) -> Dict[str, Any]: return {} def _save( - self, directory: DirectoryPath, sort_keys: List[str] = ["node_id"] + self, directory: DirectoryPath, sort_keys: list[str] = ["node_id"] ) -> None: # TODO directory could be used to save an arrow file db_path = context_file_loading.get().get("database") @@ -242,7 +236,7 @@ def _from_db(cls, path: FilePath, table: str) -> pd.DataFrame | None: def _from_arrow(cls, path: FilePath) -> pd.DataFrame: return pd.read_feather(path) - def sort(self, sort_keys: List[str] = ["node_id"]): + def sort(self, sort_keys: list[str] = ["node_id"]): """Sort all input tables as required. Tables are sorted by "node_id", unless otherwise specified. 
@@ -298,19 +292,19 @@ def _write_table(self, path: FilePath) -> None: gdf.to_file(path, layer=self.tablename(), driver="GPKG") - def sort(self, sort_keys: List[str] = ["node_id"]): + def sort(self, sort_keys: list[str] = ["node_id"]): self.df.sort_index(inplace=True) class NodeModel(BaseModel): """Base class to handle combining the tables for a single node type.""" - _sort_keys: Dict[str, List[str]] = {} + _sort_keys: dict[str, list[str]] = {} @model_serializer(mode="wrap") def set_modeld( - self, serializer: Callable[[Type["NodeModel"]], Dict[str, Any]] - ) -> Dict[str, Any]: + self, serializer: Callable[[type["NodeModel"]], dict[str, Any]] + ) -> dict[str, Any]: content = serializer(self) return dict(filter(lambda x: x[1], content.items())) @@ -333,12 +327,12 @@ def tables(self) -> Generator[TableModel[Any], Any, None]: yield attr def node_ids(self): - node_ids: Set[int] = set() + node_ids: set[int] = set() for table in self.tables(): node_ids.update(table.node_ids()) return node_ids - def node_ids_and_types(self) -> Tuple[List[int], List[str]]: + def node_ids_and_types(self) -> tuple[list[int], list[str]]: ids = self.node_ids() return list(ids), len(ids) * [self.get_input_type()] diff --git a/python/ribasim/ribasim/model.py b/python/ribasim/ribasim/model.py index c3f78c34b..923078a3b 100644 --- a/python/ribasim/ribasim/model.py +++ b/python/ribasim/ribasim/model.py @@ -1,7 +1,7 @@ import datetime import shutil from pathlib import Path -from typing import Any, Dict +from typing import Any import matplotlib.pyplot as plt import numpy as np @@ -48,7 +48,7 @@ def n_nodes(self): return n @classmethod - def _load(cls, filepath: Path | None) -> Dict[str, Any]: + def _load(cls, filepath: Path | None) -> dict[str, Any]: if filepath is not None: context_file_loading.get()["database"] = filepath return {} @@ -210,7 +210,7 @@ def nodes(self): return { k: getattr(self, k) for k in self.model_fields.keys() - if isinstance(getattr(self, k), (NodeModel,)) + if 
isinstance(getattr(self, k), NodeModel) } def validate_model_node_field_ids(self): @@ -304,7 +304,7 @@ def write(self, directory: FilePath) -> Path: return fn @classmethod - def _load(cls, filepath: Path | None) -> Dict[str, Any]: + def _load(cls, filepath: Path | None) -> dict[str, Any]: context_file_loading.set({}) if filepath is not None: diff --git a/python/ribasim/ribasim/types.py b/python/ribasim/ribasim/types.py index 5c7433399..407e07f53 100644 --- a/python/ribasim/ribasim/types.py +++ b/python/ribasim/ribasim/types.py @@ -1,6 +1,5 @@ from os import PathLike -from typing import Union -FilePath = Union[str, PathLike[str]] +FilePath = str | PathLike[str] __all__ = () diff --git a/python/ribasim/ribasim/utils.py b/python/ribasim/ribasim/utils.py index be5c684da..0028d8b29 100644 --- a/python/ribasim/ribasim/utils.py +++ b/python/ribasim/ribasim/utils.py @@ -1,4 +1,5 @@ -from typing import Any, Sequence, Tuple +from collections.abc import Sequence +from typing import Any import numpy as np import shapely @@ -39,7 +40,7 @@ def geometry_from_connectivity( def connectivity_from_geometry( node: Node, lines: NDArray[Any] -) -> Tuple[NDArray[Any], NDArray[Any]]: +) -> tuple[NDArray[Any], NDArray[Any]]: """ Derive from_node_id and to_node_id for every edge in lines. 
LineStrings may be used to connect multiple nodes in a sequence, but every linestring diff --git a/python/ribasim_testmodels/pyproject.toml b/python/ribasim_testmodels/pyproject.toml index 6757e4258..e5cff4975 100644 --- a/python/ribasim_testmodels/pyproject.toml +++ b/python/ribasim_testmodels/pyproject.toml @@ -12,7 +12,7 @@ classifiers = [ "Intended Audience :: Science/Research", "Topic :: Scientific/Engineering :: Hydrology", ] -requires-python = ">=3.9" +requires-python = ">=3.10" dependencies = ["ribasim", "geopandas", "numpy", "pandas"] dynamic = ["version"] diff --git a/python/ribasim_testmodels/ribasim_testmodels/__init__.py b/python/ribasim_testmodels/ribasim_testmodels/__init__.py index 2a971871f..cf011a35d 100644 --- a/python/ribasim_testmodels/ribasim_testmodels/__init__.py +++ b/python/ribasim_testmodels/ribasim_testmodels/__init__.py @@ -1,6 +1,6 @@ __version__ = "0.3.0" -from typing import Callable, Dict +from collections.abc import Callable import ribasim @@ -82,7 +82,7 @@ ] # provide a mapping from model name to its constructor, so we can iterate over all models -constructors: Dict[str, Callable[[], ribasim.Model]] = {} +constructors: dict[str, Callable[[], ribasim.Model]] = {} for model_name_model in __all__: model_name = model_name_model.removesuffix("_model") model_constructor = getattr(ribasim_testmodels, model_name_model) diff --git a/ribasim_qgis/core/geopackage.py b/ribasim_qgis/core/geopackage.py index 1af432c7b..4b549cec4 100644 --- a/ribasim_qgis/core/geopackage.py +++ b/ribasim_qgis/core/geopackage.py @@ -10,7 +10,6 @@ """ import sqlite3 from contextlib import contextmanager -from typing import List from qgis import processing from qgis.core import QgsVectorFileWriter, QgsVectorLayer @@ -27,7 +26,7 @@ def sqlite3_cursor(path): connection.close() -def layers(path: str) -> List[str]: +def layers(path: str) -> list[str]: """ Return all layers that are present in the geopackage. 
@@ -38,7 +37,7 @@ def layers(path: str) -> List[str]: Returns ------- - layernames: List[str] + layernames: list[str] """ with sqlite3_cursor(path) as cursor: cursor.execute("Select table_name from gpkg_contents") diff --git a/ribasim_qgis/core/nodes.py b/ribasim_qgis/core/nodes.py index 28cd7c976..f79228f03 100644 --- a/ribasim_qgis/core/nodes.py +++ b/ribasim_qgis/core/nodes.py @@ -21,7 +21,7 @@ """ import abc -from typing import Any, Dict, List, Tuple +from typing import Any from PyQt5.QtCore import Qt, QVariant from PyQt5.QtGui import QColor @@ -63,7 +63,7 @@ def nodetype(cls): return cls.input_type.split("/")[0].strip() @classmethod - def create(cls, path: str, crs: Any, names: List[str]) -> "Input": + def create(cls, path: str, crs: Any, names: list[str]) -> "Input": instance = cls(path) if instance.name in names: raise ValueError(f"Name already exists in geopackage: {instance.name}") @@ -108,7 +108,7 @@ def layer_from_geopackage(self) -> QgsVectorLayer: self.layer = QgsVectorLayer(f"{self.path}|layername={self.name}", self.name) return - def from_geopackage(self) -> Tuple[Any, Any]: + def from_geopackage(self) -> tuple[Any, Any]: self.layer_from_geopackage() return (self.layer, self.renderer, self.labels) @@ -558,7 +558,7 @@ class UserTime(Input): EDGETYPES = {"flow", "control"} -def load_nodes_from_geopackage(path: str) -> Dict[str, Input]: +def load_nodes_from_geopackage(path: str) -> dict[str, Input]: # List the names in the geopackage gpkg_names = geopackage.layers(path) nodes = {} diff --git a/ribasim_qgis/core/topology.py b/ribasim_qgis/core/topology.py index 017658af1..70d739021 100644 --- a/ribasim_qgis/core/topology.py +++ b/ribasim_qgis/core/topology.py @@ -1,5 +1,3 @@ -from typing import Tuple - import numpy as np from qgis import processing from qgis.core import QgsVectorLayer @@ -31,7 +29,7 @@ def explode_lines(edge: QgsVectorLayer) -> None: return -def derive_connectivity(node_index, node_xy, edge_xy) -> Tuple[np.ndarray, np.ndarray]: +def 
derive_connectivity(node_index, node_xy, edge_xy) -> tuple[np.ndarray, np.ndarray]: """ Derive connectivity on the basis of xy locations. diff --git a/ribasim_qgis/widgets/dataset_widget.py b/ribasim_qgis/widgets/dataset_widget.py index 0d1a32b67..b82e20df6 100644 --- a/ribasim_qgis/widgets/dataset_widget.py +++ b/ribasim_qgis/widgets/dataset_widget.py @@ -6,7 +6,7 @@ """ from datetime import datetime from pathlib import Path -from typing import Any, List, Set +from typing import Any import numpy as np from PyQt5.QtCore import Qt @@ -49,7 +49,7 @@ def __init__(self, parent=None): self.setColumnWidth(0, 1) self.setColumnWidth(2, 1) - def items(self) -> List[QTreeWidgetItem]: + def items(self) -> list[QTreeWidgetItem]: root = self.invisibleRootItem() return [root.child(i) for i in range(root.childCount())] @@ -320,7 +320,7 @@ def active_nodes(self): active_nodes[item.text(1)] = not (item.checkbox.isChecked() == 0) return active_nodes - def selection_names(self) -> Set[str]: + def selection_names(self) -> set[str]: selection = self.dataset_tree.items() # Append associated items return {item.element.name for item in selection} diff --git a/ruff.toml b/ruff.toml index bd72c7dd8..c00b7d04e 100644 --- a/ruff.toml +++ b/ruff.toml @@ -15,6 +15,7 @@ ignore = [ fixable = ["I"] extend-include = ["*.ipynb"] exclude = ["ribasim_qgis/tomllib/*"] +target-version = "py310" [pydocstyle] convention = "numpy"