Skip to content

Commit

Permalink
Drop Python 3.9 support (#795)
Browse files Browse the repository at this point in the history
Fixes #653. The exception here is QGIS: since it still comes bundled
with Python 3.9, we cannot really drop 3.9 support there.

Most of the changes here are from ruff:
https://docs.astral.sh/ruff/settings/#target-version
If you run it on a single package it picks up `requires-python` from
that package's `pyproject.toml`, but that doesn't seem to happen when
running it from the repo root, so it always defaulted to ruff's default
target of Python 3.8. I currently set the target to 3.9 for QGIS,
although setting it to 3.10 doesn't produce extra ruff errors or update hints.
  • Loading branch information
visr authored Nov 16, 2023
1 parent 33691a4 commit a688b3d
Show file tree
Hide file tree
Showing 14 changed files with 51 additions and 60 deletions.
2 changes: 1 addition & 1 deletion pixi.toml
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ pyogrio = "*"
pytest = "*"
pytest-cov = "*"
pytest-xdist = "*"
python = ">=3.9"
python = ">=3.10"
quartodoc = "*"
ruff = "*"
shapely = ">=2.0"
Expand Down
2 changes: 1 addition & 1 deletion python/ribasim/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ classifiers = [
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Hydrology",
]
requires-python = ">=3.9"
requires-python = ">=3.10"
dependencies = [
"geopandas",
"matplotlib",
Expand Down
15 changes: 7 additions & 8 deletions python/ribasim/ribasim/config.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from enum import Enum
from pathlib import Path
from typing import Dict, List

from pydantic import Field

Expand Down Expand Up @@ -54,7 +53,7 @@ class Results(BaseModel):

class Solver(BaseModel):
algorithm: str = "QNDF"
saveat: float | List[float] = []
saveat: float | list[float] = []
adaptive: bool = True
dt: float | None = None
dtmin: float | None = None
Expand Down Expand Up @@ -93,7 +92,7 @@ class PidControl(NodeModel):
default_factory=TableModel[PidControlTimeSchema]
)

_sort_keys: Dict[str, List[str]] = {"time": ["time", "node_id"]}
_sort_keys: dict[str, list[str]] = {"time": ["time", "node_id"]}


class LevelBoundary(NodeModel):
Expand All @@ -104,7 +103,7 @@ class LevelBoundary(NodeModel):
default_factory=TableModel[LevelBoundaryTimeSchema]
)

_sort_keys: Dict[str, List[str]] = {"time": ["time", "node_id"]}
_sort_keys: dict[str, list[str]] = {"time": ["time", "node_id"]}


class Pump(NodeModel):
Expand All @@ -120,7 +119,7 @@ class TabulatedRatingCurve(NodeModel):
time: TableModel[TabulatedRatingCurveTimeSchema] = Field(
default_factory=TableModel[TabulatedRatingCurveTimeSchema]
)
_sort_keys: Dict[str, List[str]] = {
_sort_keys: dict[str, list[str]] = {
"static": ["node_id", "level"],
"time": ["time", "node_id", "level"],
}
Expand All @@ -132,7 +131,7 @@ class User(NodeModel):
)
time: TableModel[UserTimeSchema] = Field(default_factory=TableModel[UserTimeSchema])

_sort_keys: Dict[str, List[str]] = {
_sort_keys: dict[str, list[str]] = {
"static": ["node_id", "priority"],
"time": ["node_id", "priority", "time"],
}
Expand All @@ -146,7 +145,7 @@ class FlowBoundary(NodeModel):
default_factory=TableModel[FlowBoundaryTimeSchema]
)

_sort_keys: Dict[str, List[str]] = {"time": ["time", "node_id"]}
_sort_keys: dict[str, list[str]] = {"time": ["time", "node_id"]}


class Basin(NodeModel):
Expand All @@ -163,7 +162,7 @@ class Basin(NodeModel):
default_factory=TableModel[BasinTimeSchema]
)

_sort_keys: Dict[str, List[str]] = {
_sort_keys: dict[str, list[str]] = {
"profile": ["node_id", "level"],
"time": ["time", "node_id"],
}
Expand Down
46 changes: 20 additions & 26 deletions python/ribasim/ribasim/input_base.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,13 @@
import re
from abc import ABC, abstractmethod
from collections.abc import Callable, Generator
from contextlib import closing
from contextvars import ContextVar
from pathlib import Path
from sqlite3 import Connection, connect
from typing import (
Any,
Callable,
Dict,
Generator,
Generic,
List,
Set,
Tuple,
Type,
TypeVar,
)

Expand All @@ -38,7 +32,7 @@

gpd.options.io_engine = "pyogrio"

context_file_loading: ContextVar[Dict[str, Any]] = ContextVar(
context_file_loading: ContextVar[dict[str, Any]] = ContextVar(
"file_loading", default={}
)

Expand Down Expand Up @@ -75,7 +69,7 @@ class BaseModel(PydanticBaseModel):
)

@classmethod
def fields(cls) -> List[str]:
def fields(cls) -> list[str]:
"""Return the names of the fields contained in the Model."""
return list(cls.model_fields.keys())

Expand All @@ -102,15 +96,15 @@ class FileModel(BaseModel, ABC):
@classmethod
def check_filepath(cls, value: Any) -> Any:
# Enable initialization with a Path.
if isinstance(value, (Dict,)):
if isinstance(value, dict):
# Pydantic Model init requires a dict
filepath = value.get("filepath", None)
if filepath is not None:
filepath = Path(filepath)
data = cls._load(filepath)
value.update(data)
return value
elif isinstance(value, (Path, str)):
elif isinstance(value, Path | str):
# Pydantic Model init requires a dict
data = cls._load(Path(value))
data["filepath"] = value
Expand All @@ -132,7 +126,7 @@ def _save(self, directory: DirectoryPath) -> None:

@classmethod
@abstractmethod
def _load(cls, filepath: Path | None) -> Dict[str, Any]:
def _load(cls, filepath: Path | None) -> dict[str, Any]:
"""Load the data at filepath and returns it as a dictionary.
If a derived FileModel does not load data from disk, this should
Expand All @@ -144,7 +138,7 @@ def _load(cls, filepath: Path | None) -> Dict[str, Any]:
Returns
-------
Dict: The data stored at filepath
dict: The data stored at filepath
"""
raise NotImplementedError()

Expand All @@ -163,7 +157,7 @@ def tablename(cls) -> str:
NodeSchema -> Schema
TabularRatingCurveStaticSchema -> TabularRatingCurve / Static
"""
names: List[str] = re.sub("([A-Z]+)", r" \1", str(cls.tableschema())).split()
names: list[str] = re.sub("([A-Z]+)", r" \1", str(cls.tableschema())).split()
if len(names) > 2:
return f"{''.join(names[:-2])}{delimiter}{names[-2].lower()}"
else:
Expand All @@ -173,20 +167,20 @@ def tablename(cls) -> str:
@classmethod
def check_dataframe(cls, value: Any) -> Any:
# Enable initialization with a DataFrame.
if isinstance(value, (pd.DataFrame, gpd.GeoDataFrame)):
if isinstance(value, pd.DataFrame | gpd.GeoDataFrame):
value = {"df": value}

return value

def node_ids(self) -> Set[int]:
node_ids: Set[int] = set()
def node_ids(self) -> set[int]:
node_ids: set[int] = set()
if self.df is not None and "node_id" in self.df.columns:
node_ids.update(self.df["node_id"])

return node_ids

@classmethod
def _load(cls, filepath: Path | None) -> Dict[str, Any]:
def _load(cls, filepath: Path | None) -> dict[str, Any]:
db = context_file_loading.get().get("database")
if filepath is not None:
adf = cls._from_arrow(filepath)
Expand All @@ -199,7 +193,7 @@ def _load(cls, filepath: Path | None) -> Dict[str, Any]:
return {}

def _save(
self, directory: DirectoryPath, sort_keys: List[str] = ["node_id"]
self, directory: DirectoryPath, sort_keys: list[str] = ["node_id"]
) -> None:
# TODO directory could be used to save an arrow file
db_path = context_file_loading.get().get("database")
Expand Down Expand Up @@ -242,7 +236,7 @@ def _from_db(cls, path: FilePath, table: str) -> pd.DataFrame | None:
def _from_arrow(cls, path: FilePath) -> pd.DataFrame:
return pd.read_feather(path)

def sort(self, sort_keys: List[str] = ["node_id"]):
def sort(self, sort_keys: list[str] = ["node_id"]):
"""Sort all input tables as required.
Tables are sorted by "node_id", unless otherwise specified.
Expand Down Expand Up @@ -298,19 +292,19 @@ def _write_table(self, path: FilePath) -> None:

gdf.to_file(path, layer=self.tablename(), driver="GPKG")

def sort(self, sort_keys: List[str] = ["node_id"]):
def sort(self, sort_keys: list[str] = ["node_id"]):
self.df.sort_index(inplace=True)


class NodeModel(BaseModel):
"""Base class to handle combining the tables for a single node type."""

_sort_keys: Dict[str, List[str]] = {}
_sort_keys: dict[str, list[str]] = {}

@model_serializer(mode="wrap")
def set_modeld(
self, serializer: Callable[[Type["NodeModel"]], Dict[str, Any]]
) -> Dict[str, Any]:
self, serializer: Callable[[type["NodeModel"]], dict[str, Any]]
) -> dict[str, Any]:
content = serializer(self)
return dict(filter(lambda x: x[1], content.items()))

Expand All @@ -333,12 +327,12 @@ def tables(self) -> Generator[TableModel[Any], Any, None]:
yield attr

def node_ids(self):
node_ids: Set[int] = set()
node_ids: set[int] = set()
for table in self.tables():
node_ids.update(table.node_ids())
return node_ids

def node_ids_and_types(self) -> Tuple[List[int], List[str]]:
def node_ids_and_types(self) -> tuple[list[int], list[str]]:
ids = self.node_ids()
return list(ids), len(ids) * [self.get_input_type()]

Expand Down
8 changes: 4 additions & 4 deletions python/ribasim/ribasim/model.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import datetime
import shutil
from pathlib import Path
from typing import Any, Dict
from typing import Any

import matplotlib.pyplot as plt
import numpy as np
Expand Down Expand Up @@ -48,7 +48,7 @@ def n_nodes(self):
return n

@classmethod
def _load(cls, filepath: Path | None) -> Dict[str, Any]:
def _load(cls, filepath: Path | None) -> dict[str, Any]:
if filepath is not None:
context_file_loading.get()["database"] = filepath
return {}
Expand Down Expand Up @@ -210,7 +210,7 @@ def nodes(self):
return {
k: getattr(self, k)
for k in self.model_fields.keys()
if isinstance(getattr(self, k), (NodeModel,))
if isinstance(getattr(self, k), NodeModel)
}

def validate_model_node_field_ids(self):
Expand Down Expand Up @@ -304,7 +304,7 @@ def write(self, directory: FilePath) -> Path:
return fn

@classmethod
def _load(cls, filepath: Path | None) -> Dict[str, Any]:
def _load(cls, filepath: Path | None) -> dict[str, Any]:
context_file_loading.set({})

if filepath is not None:
Expand Down
3 changes: 1 addition & 2 deletions python/ribasim/ribasim/types.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from os import PathLike
from typing import Union

FilePath = Union[str, PathLike[str]]
FilePath = str | PathLike[str]

__all__ = ()
5 changes: 3 additions & 2 deletions python/ribasim/ribasim/utils.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from typing import Any, Sequence, Tuple
from collections.abc import Sequence
from typing import Any

import numpy as np
import shapely
Expand Down Expand Up @@ -39,7 +40,7 @@ def geometry_from_connectivity(

def connectivity_from_geometry(
node: Node, lines: NDArray[Any]
) -> Tuple[NDArray[Any], NDArray[Any]]:
) -> tuple[NDArray[Any], NDArray[Any]]:
"""
Derive from_node_id and to_node_id for every edge in lines. LineStrings
may be used to connect multiple nodes in a sequence, but every linestring
Expand Down
2 changes: 1 addition & 1 deletion python/ribasim_testmodels/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ classifiers = [
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Hydrology",
]
requires-python = ">=3.9"
requires-python = ">=3.10"
dependencies = ["ribasim", "geopandas", "numpy", "pandas"]
dynamic = ["version"]

Expand Down
4 changes: 2 additions & 2 deletions python/ribasim_testmodels/ribasim_testmodels/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
__version__ = "0.3.0"

from typing import Callable, Dict
from collections.abc import Callable

import ribasim

Expand Down Expand Up @@ -82,7 +82,7 @@
]

# provide a mapping from model name to its constructor, so we can iterate over all models
constructors: Dict[str, Callable[[], ribasim.Model]] = {}
constructors: dict[str, Callable[[], ribasim.Model]] = {}
for model_name_model in __all__:
model_name = model_name_model.removesuffix("_model")
model_constructor = getattr(ribasim_testmodels, model_name_model)
Expand Down
5 changes: 2 additions & 3 deletions ribasim_qgis/core/geopackage.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
"""
import sqlite3
from contextlib import contextmanager
from typing import List

from qgis import processing
from qgis.core import QgsVectorFileWriter, QgsVectorLayer
Expand All @@ -27,7 +26,7 @@ def sqlite3_cursor(path):
connection.close()


def layers(path: str) -> List[str]:
def layers(path: str) -> list[str]:
"""
Return all layers that are present in the geopackage.
Expand All @@ -38,7 +37,7 @@ def layers(path: str) -> List[str]:
Returns
-------
layernames: List[str]
layernames: list[str]
"""
with sqlite3_cursor(path) as cursor:
cursor.execute("Select table_name from gpkg_contents")
Expand Down
Loading

0 comments on commit a688b3d

Please sign in to comment.