Devops: Update pyproject.toml configuration (#6085)
Added stricter rules for `mypy` and `pytest`. The suggestions were taken from an
automated analysis by the following tool:
https://learn.scientific-python.org/development/guides/repo-review/
sphuber authored Sep 1, 2023
1 parent 836419f commit 4ef293a
Showing 53 changed files with 194 additions and 137 deletions.
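The pyproject.toml hunk itself is not part of this excerpt, so the exact values added are not shown here. Judging from the error codes that now appear in the `# type: ignore[...]` comments throughout the diff (`redundant-expr`, `truthy-bool`, `unreachable`, and explicit codes on previously bare ignores) and from the recommendations of the linked repo-review guide, the stricter settings were presumably along these lines; this is a hedged sketch under those assumptions, not the committed configuration:

# Sketch of the kind of stricter settings repo-review suggests; the values
# actually added by this commit live in the pyproject.toml diff, not shown here.
[tool.mypy]
warn_unreachable = true               # would motivate the new `# type: ignore[unreachable]` comments
enable_error_code = [
    "ignore-without-code",            # a bare `# type: ignore` must name an error code
    "redundant-expr",                 # flag expressions that are always true or always false
    "truthy-bool",                    # flag non-boolean objects used in a boolean context
]

[tool.pytest.ini_options]
addopts = ["-ra", "--strict-config", "--strict-markers"]
filterwarnings = ["error"]            # escalate warnings to test failures
log_cli_level = "INFO"
xfail_strict = true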
10 changes: 6 additions & 4 deletions aiida/cmdline/commands/cmd_config.py
@@ -8,6 +8,8 @@
# For further information please visit http://www.aiida.net #
###########################################################################
"""`verdi config` command."""
+from __future__ import annotations
+
import json
from pathlib import Path
import textwrap
@@ -40,7 +42,7 @@ def verdi_config_list(ctx, prefix, description: bool):
from aiida.manage.configuration import Config, Profile

config: Config = ctx.obj.config
-profile: Profile = ctx.obj.get('profile', None)
+profile: Profile | None = ctx.obj.get('profile', None)

if not profile:
echo.echo_warning('no profiles configured: run `verdi setup` to create one')
@@ -78,7 +80,7 @@ def verdi_config_show(ctx, option):
from aiida.manage.configuration.options import NO_DEFAULT

config: Config = ctx.obj.config
-profile: Profile = ctx.obj.profile
+profile: Profile | None = ctx.obj.profile

dct = {
'schema': option.schema,
@@ -124,7 +126,7 @@ def verdi_config_set(ctx, option, value, globally, append, remove):
echo.echo_critical('Cannot flag both append and remove')

config: Config = ctx.obj.config
-profile: Profile = ctx.obj.profile
+profile: Profile | None = ctx.obj.profile

if option.global_only:
globally = True
@@ -164,7 +166,7 @@ def verdi_config_unset(ctx, option, globally):
from aiida.manage.configuration import Config, Profile

config: Config = ctx.obj.config
-profile: Profile = ctx.obj.profile
+profile: Profile | None = ctx.obj.profile

if option.global_only:
globally = True
2 changes: 1 addition & 1 deletion aiida/cmdline/groups/verdi.py
@@ -62,7 +62,7 @@ class VerdiCommandGroup(click.Group):
def add_verbosity_option(cmd: click.Command):
"""Apply the ``verbosity`` option to the command, which is common to all ``verdi`` commands."""
# Only apply the option if it hasn't been already added in a previous call.
-if cmd is not None and 'verbosity' not in [param.name for param in cmd.params]:
+if 'verbosity' not in [param.name for param in cmd.params]:
cmd = options.VERBOSITY()(cmd)

return cmd
2 changes: 1 addition & 1 deletion aiida/common/lang.py
@@ -72,7 +72,7 @@ def wrapped_fn(self, *args, **kwargs): # pylint: disable=missing-docstring
else:
wrapped_fn = func

-return wrapped_fn # type: ignore
+return wrapped_fn # type: ignore[return-value]

return wrap

2 changes: 1 addition & 1 deletion aiida/common/progress_reporter.py
@@ -156,7 +156,7 @@ def set_progress_reporter(reporter: Optional[Type[ProgressReporterAbstract]] = N
if reporter is None:
PROGRESS_REPORTER = ProgressReporterNull
elif kwargs:
-PROGRESS_REPORTER = partial(reporter, **kwargs) # type: ignore
+PROGRESS_REPORTER = partial(reporter, **kwargs) # type: ignore[assignment]
else:
PROGRESS_REPORTER = reporter

3 changes: 1 addition & 2 deletions aiida/engine/daemon/client.py
@@ -713,8 +713,7 @@ def _start_daemon(self, number_workers: int = 1, foreground: bool = False) -> No
pidfile.create(os.getpid())

# Configure the logger
-loggerconfig = None
-loggerconfig = loggerconfig or arbiter.loggerconfig or None
+loggerconfig = arbiter.loggerconfig or None
configure_logger(circus_logger, loglevel, logoutput, loggerconfig)

# Main loop
3 changes: 2 additions & 1 deletion aiida/engine/launch.py
@@ -17,6 +17,7 @@
from .processes.builder import ProcessBuilder
from .processes.functions import FunctionProcess
from .processes.process import Process
+from .runners import ResultAndPk
from .utils import instantiate_process, is_process_scoped # pylint: disable=no-name-in-module

__all__ = ('run', 'run_get_pk', 'run_get_node', 'submit')
@@ -60,7 +61,7 @@ def run_get_node(process: TYPE_RUN_PROCESS, *args: Any, **inputs: Any) -> Tuple[
return runner.run_get_node(process, *args, **inputs)


-def run_get_pk(process: TYPE_RUN_PROCESS, *args: Any, **inputs: Any) -> Tuple[Dict[str, Any], int]:
+def run_get_pk(process: TYPE_RUN_PROCESS, *args: Any, **inputs: Any) -> ResultAndPk:
"""Run the process with the supplied inputs in a local runner that will block until the process is completed.
:param process: the process class, instance, builder or function to run
2 changes: 1 addition & 1 deletion aiida/engine/processes/builder.py
@@ -79,7 +79,7 @@ def fgetter(self, name=name):
return self._data.get(name)
elif port.has_default():

-def fgetter(self, name=name, default=port.default): # type: ignore # pylint: disable=cell-var-from-loop
+def fgetter(self, name=name, default=port.default): # type: ignore[misc] # pylint: disable=cell-var-from-loop
return self._data.get(name, default)
else:

10 changes: 5 additions & 5 deletions aiida/engine/processes/calcjobs/calcjob.py
@@ -517,7 +517,7 @@ def get_state_classes(cls) -> Dict[Hashable, Type[plumpy.process_states.State]]:

@property
def node(self) -> orm.CalcJobNode:
-return super().node # type: ignore
+return super().node # type: ignore[return-value]

@override
def on_terminated(self) -> None:
@@ -616,7 +616,7 @@ def _perform_dry_run(self):
calc_info = self.presubmit(folder)
transport.chdir(folder.abspath)
upload_calculation(self.node, transport, calc_info, folder, inputs=self.inputs, dry_run=True)
-self.node.dry_run_info = { # type: ignore
+self.node.dry_run_info = { # type: ignore[attr-defined]
'folder': folder.abspath,
'script_filename': self.node.get_option('submit_script_filename')
}
@@ -768,7 +768,7 @@ def parse_scheduler_output(self, retrieved: orm.Node) -> Optional[ExitCode]:
return None

if exit_code is not None and not isinstance(exit_code, ExitCode):
-args = (scheduler.__class__.__name__, type(exit_code))
+args = (scheduler.__class__.__name__, type(exit_code)) # type: ignore[unreachable]
raise ValueError('`{}.parse_output` returned neither an `ExitCode` nor None, but: {}'.format(*args))

return exit_code
@@ -797,7 +797,7 @@ def parse_retrieved_output(self, retrieved_temporary_folder: Optional[str] = Non
break

if exit_code is not None and not isinstance(exit_code, ExitCode):
-args = (parser_class.__name__, type(exit_code))
+args = (parser_class.__name__, type(exit_code)) # type: ignore[unreachable]
raise ValueError('`{}.parse` returned neither an `ExitCode` nor None, but: {}'.format(*args))

return exit_code
@@ -894,7 +894,7 @@ def presubmit(self, folder: Folder) -> CalcInfo:
# Set resources, also with get_default_mpiprocs_per_machine
resources = self.node.get_option('resources')
scheduler.preprocess_resources(resources or {}, computer.get_default_mpiprocs_per_machine())
-job_tmpl.job_resource = scheduler.create_job_resource(**resources) # type: ignore
+job_tmpl.job_resource = scheduler.create_job_resource(**resources) # type: ignore[arg-type]

subst_dict = {'tot_num_mpiprocs': job_tmpl.job_resource.get_tot_num_mpiprocs()}

20 changes: 12 additions & 8 deletions aiida/engine/processes/functions.py
@@ -81,7 +81,7 @@ def get_stack_size(size: int = 2) -> int: # type: ignore[return]
for size in itertools.count(size, 8): # pylint: disable=redefined-argument-from-local
frame = frame.f_back.f_back.f_back.f_back.f_back.f_back.f_back.f_back # type: ignore[assignment,union-attr]
except AttributeError:
-while frame:
+while frame: # type: ignore[truthy-bool]
frame = frame.f_back # type: ignore[assignment]
size += 1
return size - 1
@@ -234,6 +234,7 @@ def run_get_pk(*args, **kwargs) -> tuple[dict[str, t.Any] | None, int]:
"""
result, node = run_get_node(*args, **kwargs)
+assert node.pk is not None
return result, node.pk

@functools.wraps(function)
@@ -323,10 +324,13 @@ def build(func: FunctionType, node_class: t.Type['ProcessNode']) -> t.Type['Func
"""
# pylint: disable=too-many-statements
-if not issubclass(node_class, ProcessNode) or not issubclass(node_class, FunctionCalculationMixin):
+if (
+not issubclass(node_class, ProcessNode) or # type: ignore[redundant-expr]
+not issubclass(node_class, FunctionCalculationMixin) # type: ignore[unreachable]
+):
raise TypeError('the node_class should be a sub class of `ProcessNode` and `FunctionCalculationMixin`')

-signature = inspect.signature(func)
+signature = inspect.signature(func) # type: ignore[unreachable]

args: list[str] = []
varargs: str | None = None
@@ -519,7 +523,7 @@ def get_or_create_db_record(cls) -> 'ProcessNode':
def __init__(self, *args, **kwargs) -> None:
if kwargs.get('enable_persistence', False):
raise RuntimeError('Cannot persist a function process')
-super().__init__(enable_persistence=False, *args, **kwargs) # type: ignore
+super().__init__(enable_persistence=False, *args, **kwargs) # type: ignore[misc]

@property
def process_class(self) -> t.Callable[..., t.Any]:
@@ -586,11 +590,11 @@ def run(self) -> 'ExitCode' | None:

result = self._func(*args, **kwargs)

-if result is None or isinstance(result, ExitCode):
-return result
+if result is None or isinstance(result, ExitCode): # type: ignore[redundant-expr]
+return result # type: ignore[unreachable]

-if isinstance(result, Data):
-self.out(self.SINGLE_OUTPUT_LINKNAME, result)
+if isinstance(result, Data): # type: ignore[unreachable]
+self.out(self.SINGLE_OUTPUT_LINKNAME, result) # type: ignore[unreachable]
elif isinstance(result, collections.abc.Mapping):
for name, value in result.items():
self.out(name, value)
19 changes: 7 additions & 12 deletions aiida/engine/processes/process.py
@@ -251,7 +251,6 @@ def metadata(self) -> AttributeDict:
"""
try:
-assert self.inputs is not None
return self.inputs.metadata
except (AssertionError, AttributeError):
return AttributeDict()
@@ -297,7 +296,6 @@ def get_provenance_inputs_iterator(self) -> Iterator[Tuple[str, Union[InputPort,
:rtype: filter
"""
-assert self.inputs is not None
return filter(lambda kv: not kv[0].startswith('_'), self.inputs.items())

@override
@@ -321,7 +319,7 @@ def load_instance_state(
super().load_instance_state(saved_state, load_context)

if self.SaveKeys.CALC_ID.value in saved_state:
-self._node = orm.load_node(saved_state[self.SaveKeys.CALC_ID.value]) # type: ignore
+self._node = orm.load_node(saved_state[self.SaveKeys.CALC_ID.value]) # type: ignore[assignment]
self._pid = self.node.pk # pylint: disable=attribute-defined-outside-init
else:
self._pid = self._create_and_setup_db_record() # pylint: disable=attribute-defined-outside-init
@@ -429,7 +427,7 @@ def on_entered(self, from_state: Optional[plumpy.process_states.State]) -> None:
except ValueError: # pylint: disable=try-except-raise
raise
finally:
-self.node.set_process_state(self._state.LABEL) # type: ignore
+self.node.set_process_state(self._state.LABEL) # type: ignore[arg-type]

self._save_checkpoint()
set_process_state_change_timestamp(self)
@@ -464,7 +462,7 @@ def on_except(self, exc_info: Tuple[Any, Exception, TracebackType]) -> None:
self.report(''.join(traceback.format_exception(*exc_info)))

@override
-def on_finish(self, result: Union[int, ExitCode], successful: bool) -> None:
+def on_finish(self, result: Union[int, ExitCode, None], successful: bool) -> None:
""" Set the finish status on the process node.
:param result: result of the process
@@ -559,7 +557,7 @@ def get_parent_calc(self) -> Optional[orm.ProcessNode]:
if self._parent_pid is None:
return None

-return orm.load_node(pk=self._parent_pid) # type: ignore
+return orm.load_node(pk=self._parent_pid) # type: ignore[return-value]

@classmethod
def build_process_type(cls) -> str:
@@ -702,7 +700,6 @@ def _setup_db_record(self) -> None:
In addition, the parent calculation will be setup with a CALL link if applicable and all inputs will be
linked up as well.
"""
-assert self.inputs is not None
assert not self.node.is_sealed, 'process node cannot be sealed when setting up the database record'

# Store important process attributes in the node proxy
@@ -731,9 +728,6 @@ def _setup_version_info(self) -> None:
"""Store relevant plugin version information."""
from aiida.plugins.entry_point import format_entry_point_string

-if self.inputs is None:
-return
-
version_info = self.runner.plugin_version_provider.get_version_info(self.__class__)

for key, monitor in self.inputs.get('monitors', {}).items():
@@ -836,7 +830,6 @@ def _flat_inputs(self) -> Dict[str, Any]:
:return: flat dictionary of parsed inputs
"""
-assert self.inputs is not None
inputs = {key: value for key, value in self.inputs.items() if key != self.spec().metadata_key}
return dict(self._flatten_inputs(self.spec().inputs, inputs))

@@ -890,7 +883,9 @@ def _flatten_inputs(
items.extend(sub_items)
return items

-assert (port is None) or (isinstance(port, InputPort) and (port.is_metadata or port.non_db))
+assert (port is None) or (
+isinstance(port, InputPort) and (port.is_metadata or port.non_db) # type: ignore[redundant-expr]
+)
return []

def _flatten_outputs(
6 changes: 4 additions & 2 deletions aiida/engine/processes/utils.py
@@ -14,12 +14,14 @@ def prune_mapping(value):
:param value: A nested mapping of port values.
:return: The same mapping but without any nested namespace that is completely empty.
"""
-if isinstance(value, Mapping) and not isinstance(value, Node):
+if isinstance(value, Mapping) and not isinstance(value, Node): # type: ignore[unreachable]
result = {}
for key, sub_value in value.items():
pruned = prune_mapping(sub_value)
# If `pruned` is an "empty'ish" mapping and not an instance of `Node`, skip it, otherwise keep it.
-if not (isinstance(pruned, Mapping) and not pruned and not isinstance(pruned, Node)):
+if not (
+isinstance(pruned, Mapping) and not pruned and not isinstance(pruned, Node) # type: ignore[unreachable]
+):
result[key] = pruned
return result

3 changes: 2 additions & 1 deletion aiida/engine/processes/workchains/restart.py
@@ -427,7 +427,8 @@ def _wrap_bare_dict_inputs(self, port_namespace: 'PortNamespace', inputs: Dict[s
continue

port = port_namespace[key]
-valid_types = port.valid_type if isinstance(port.valid_type, (list, tuple)) else (port.valid_type,)
+valid_types = port.valid_type \
+if isinstance(port.valid_type, (list, tuple)) else (port.valid_type,) # type: ignore[redundant-expr]

if isinstance(port, PortNamespace):
wrapped[key] = self._wrap_bare_dict_inputs(port, value)
4 changes: 2 additions & 2 deletions aiida/engine/processes/workchains/workchain.py
@@ -139,7 +139,7 @@ def spec(cls) -> WorkChainSpec:

@property
def node(self) -> WorkChainNode:
-return super().node # type: ignore
+return super().node # type: ignore[return-value]

@property
def ctx(self) -> AttributeDict:
@@ -408,7 +408,7 @@ def _on_awaitable_finished(self, awaitable: Awaitable) -> None:
if awaitable.outputs:
value = {entry.link_label: entry.node for entry in node.base.links.get_outgoing()}
else:
-value = node # type: ignore
+value = node # type: ignore[assignment]

self._resolve_awaitable(awaitable, value)

4 changes: 3 additions & 1 deletion aiida/engine/runners.py
@@ -9,6 +9,8 @@
###########################################################################
# pylint: disable=global-statement
"""Runners that can run and submit processes."""
+from __future__ import annotations
+
import asyncio
import functools
import logging
@@ -43,7 +45,7 @@ class ResultAndNode(NamedTuple):

class ResultAndPk(NamedTuple):
result: Dict[str, Any]
-pk: int
+pk: int | None


TYPE_RUN_PROCESS = Union[Process, Type[Process], ProcessBuilder] # pylint: disable=invalid-name
2 changes: 1 addition & 1 deletion aiida/manage/caching.py
@@ -40,7 +40,7 @@ def __init__(self):

def clear(self):
"""Clear caching overrides."""
-self.__init__() # type: ignore
+self.__init__() # type: ignore[misc]

def enable_all(self):
self._default_all = 'enable'