Skip to content

Commit

Permalink
build: update dependencies, remove deprecated types-all (#1605)
Browse files Browse the repository at this point in the history
Co-authored-by: Till Ehrengruber <[email protected]>
  • Loading branch information
havogt and tehrengruber authored Aug 8, 2024
1 parent bd4c48e commit 7215db7
Show file tree
Hide file tree
Showing 15 changed files with 106 additions and 285 deletions.
18 changes: 9 additions & 9 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ repos:
## version = re.search('ruff==([0-9\.]*)', open("constraints.txt").read())[1]
## print(f"rev: v{version}")
##]]]
rev: v0.5.1
rev: v0.5.6
##[[[end]]]
hooks:
# Run the linter.
Expand All @@ -66,9 +66,9 @@ repos:
## version = re.search('mypy==([0-9\.]*)', open("constraints.txt").read())[1]
## print(f"#========= FROM constraints.txt: v{version} =========")
##]]]
#========= FROM constraints.txt: v1.10.1 =========
#========= FROM constraints.txt: v1.11.1 =========
##[[[end]]]
rev: v1.10.1 # MUST match version ^^^^ in constraints.txt (if the mirror is up-to-date)
rev: v1.11.1 # MUST match version ^^^^ in constraints.txt (if the mirror is up-to-date)
hooks:
- id: mypy
additional_dependencies: # versions from constraints.txt
Expand All @@ -85,12 +85,12 @@ repos:
## print(f"- {pkg}==" + str(re.search(f'\n{pkg}==([0-9\.]*)', constraints)[1]))
##]]]
- astunparse==1.6.3
- attrs==23.2.0
- black==24.4.2
- attrs==24.2.0
- black==24.8.0
- boltons==24.0.0
- cached-property==1.5.2
- click==8.1.7
- cmake==3.30.0
- cmake==3.30.2
- cytoolz==0.12.3
- deepdiff==7.0.1
- devtools==0.12.2
Expand All @@ -106,13 +106,13 @@ repos:
- numpy==1.24.4
- packaging==24.1
- pybind11==2.13.1
- setuptools==70.2.0
- setuptools==72.1.0
- tabulate==0.9.0
- typing-extensions==4.12.2
- xxhash==3.0.0
##[[[end]]]
# Add all type stubs from typeshed
- types-all
- types-tabulate
- types-typed-ast
args: [--no-install-types]
exclude: |
(?x)^(
Expand Down
160 changes: 34 additions & 126 deletions constraints.txt

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion min-extra-requirements-test.txt
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ sympy==1.9
tabulate==0.8.10
tomli==2.0.1; python_version < "3.11"
tox==3.2.0
types-all==1.0.0
types-tabulate==0.8.10
typing-extensions==4.10.0
xxhash==1.4.4
##[[[end]]]
2 changes: 1 addition & 1 deletion min-requirements-test.txt
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ sphinx_rtd_theme==1.0
tabulate==0.8.10
tomli==2.0.1; python_version < "3.11"
tox==3.2.0
types-all==1.0.0
types-tabulate==0.8.10
typing-extensions==4.10.0
xxhash==1.4.4
##[[[end]]]
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ all-cuda12 = ['gt4py[cuda12,dace,formatting,jax-cuda12,performance,testing]']
# Other extras
cuda11 = ['cupy-cuda11x>=12.0']
cuda12 = ['cupy-cuda12x>=12.0']
dace = ['dace>=0.16.1', 'sympy>=1.9']
dace = ['dace>=0.16.1', 'sympy>=1.9,<1.13'] # see https://github.com/spcl/dace/pull/1620
formatting = ['clang-format>=9.0']
gpu = ['cupy>=12.0']
jax-cpu = ['jax[cpu]>=0.4.18; python_version>="3.10"']
Expand Down
2 changes: 1 addition & 1 deletion requirements-dev.in
Original file line number Diff line number Diff line change
Expand Up @@ -41,4 +41,4 @@ sphinx>=4.4
sphinx_rtd_theme>=1.0
tomli>=2.0.1;python_version<'3.11'
tox>=3.2.0
types-all>=1.0.0
types-tabulate>=0.8.10
160 changes: 34 additions & 126 deletions requirements-dev.txt

Large diffs are not rendered by default.

8 changes: 3 additions & 5 deletions src/gt4py/_core/definitions.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,11 +167,9 @@ class DTypeKind(eve.StrEnum):


@overload
def dtype_kind(sc_type: Type[BoolT]) -> Literal[DTypeKind.BOOL]: ...


@overload
def dtype_kind(sc_type: Type[IntT]) -> Literal[DTypeKind.INT]: ...
def dtype_kind(
sc_type: Type[IntT] | Type[BoolT], # mypy doesn't distinguish IntT and BoolT
) -> Literal[DTypeKind.INT, DTypeKind.BOOL]: ...


@overload
Expand Down
2 changes: 1 addition & 1 deletion src/gt4py/cartesian/gtc/passes/oir_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,5 +106,5 @@ def run(self, oir: oir.Stencil) -> oir.Stencil:
if isinstance(step, type) and issubclass(step, NodeVisitor):
oir = step().visit(oir)
else:
oir = step(oir)
oir = step(oir) # type: ignore[call-arg, assignment]
return oir
6 changes: 3 additions & 3 deletions src/gt4py/eve/datamodels/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,7 +261,7 @@ def datamodel(
cls: Literal[None] = None,
/,
*,
repr: bool = _REPR_DEFAULT, # noqa: A002 [builtin-argument-shadowing]
repr: bool = _REPR_DEFAULT,
eq: bool = _EQ_DEFAULT,
order: bool = _ORDER_DEFAULT,
unsafe_hash: bool = _UNSAFE_HASH_DEFAULT,
Expand All @@ -280,7 +280,7 @@ def datamodel( # redefinition of unused symbol
cls: Type[_T],
/,
*,
repr: bool = _REPR_DEFAULT, # noqa: A002 [builtin-argument-shadowing]
repr: bool = _REPR_DEFAULT,
eq: bool = _EQ_DEFAULT,
order: bool = _ORDER_DEFAULT,
unsafe_hash: bool = _UNSAFE_HASH_DEFAULT,
Expand Down Expand Up @@ -373,7 +373,7 @@ def datamodel( # redefinition of unused symbol
}

if cls is None: # called as: @datamodel()
return functools.partial(_make_datamodel, **datamodel_options)
return functools.partial(_make_datamodel, **datamodel_options) # type: ignore[arg-type, return-value]
else: # called as: @datamodel
return _make_datamodel(
cls,
Expand Down
10 changes: 6 additions & 4 deletions src/gt4py/eve/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -433,12 +433,14 @@ def content_hash(*args: Any, hash_algorithm: str | xtyping.HashlibAlgorithm | No
"""
if hash_algorithm is None:
hash_algorithm = xxhash.xxh64()
hasher = xxhash.xxh64()
elif isinstance(hash_algorithm, str):
hash_algorithm = hashlib.new(hash_algorithm)
hasher = hashlib.new(hash_algorithm)
else:
hasher = hash_algorithm

hash_algorithm.update(pickle.dumps(args))
result = hash_algorithm.hexdigest()
hasher.update(pickle.dumps(args))
result = hasher.hexdigest()
assert isinstance(result, str)

return result
Expand Down
2 changes: 1 addition & 1 deletion src/gt4py/next/embedded/operators.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ class ScanOperator(EmbeddedOperator[core_defs.ScalarT | tuple[core_defs.ScalarT
def __call__( # type: ignore[override]
self,
*args: common.Field | core_defs.Scalar,
**kwargs: common.Field | core_defs.Scalar, # type: ignore[override]
**kwargs: common.Field | core_defs.Scalar,
) -> (
common.Field[Any, core_defs.ScalarT]
| tuple[common.Field[Any, core_defs.ScalarT] | tuple, ...]
Expand Down
8 changes: 4 additions & 4 deletions src/gt4py/next/ffront/ast_passes/fix_missing_locations.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,15 +42,15 @@ def generic_visit(self, node: ast.AST) -> ast.AST:
node = copy.copy(node)
parent_node = self._parent_nodes[-1]

node.lineno = parent_node.lineno
node.col_offset = parent_node.col_offset
node.lineno = parent_node.lineno # type: ignore[attr-defined] # we are adding the attribute which breaks type checking
node.col_offset = parent_node.col_offset # type: ignore[attr-defined]

# the end positions are optional according to
# https://docs.python.org/3/library/ast.html#ast.AST.end_col_offset
if hasattr(parent_node, "end_lineno"):
node.end_lineno = parent_node.end_lineno
node.end_lineno = parent_node.end_lineno # type: ignore[attr-defined]
if hasattr(parent_node, "end_col_offset"):
node.end_col_offset = parent_node.end_col_offset
node.end_col_offset = parent_node.end_col_offset # type: ignore[attr-defined]

self._parent_nodes.append(node)
result = super().generic_visit(node)
Expand Down
5 changes: 5 additions & 0 deletions src/gt4py/next/ffront/dialect_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,9 +110,14 @@ def get_location(self, node: ast.AST) -> SourceLocation:
line_offset = self.source_definition.line_offset
col_offset = self.source_definition.column_offset

# `FixMissingLocations` ensures that all nodes have the location attributes
assert hasattr(node, "lineno")
line = node.lineno + line_offset if node.lineno is not None else None
assert hasattr(node, "end_lineno")
end_line = node.end_lineno + line_offset if node.end_lineno is not None else None
assert hasattr(node, "col_offset")
column = 1 + node.col_offset + col_offset if node.col_offset is not None else None
assert hasattr(node, "end_col_offset")
end_column = (
1 + node.end_col_offset + col_offset if node.end_col_offset is not None else None
)
Expand Down
4 changes: 2 additions & 2 deletions src/gt4py/next/iterator/embedded.py
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@ class Column(np.lib.mixins.NDArrayOperatorsMixin):

def __init__(self, kstart: int, data: np.ndarray | Scalar) -> None:
self.kstart = kstart
assert isinstance(data, (np.ndarray, Scalar)) # type: ignore # mypy bug #11673
assert isinstance(data, (np.ndarray, Scalar))
column_range: common.NamedRange = embedded_context.closure_column_range.get()
self.data = (
data if isinstance(data, np.ndarray) else np.full(len(column_range.unit_range), data)
Expand Down Expand Up @@ -560,7 +560,7 @@ def promote_scalars(val: CompositeOfScalarOrField):
elif isinstance(val, common.Field):
return val
val_type = infer_dtype_like_type(val)
if isinstance(val, Scalar): # type: ignore # mypy bug
if isinstance(val, Scalar):
return constant_field(val)
else:
raise ValueError(
Expand Down

0 comments on commit 7215db7

Please sign in to comment.