From 99384ba601aab9e2c14c81bfbaa337b1c8314418 Mon Sep 17 00:00:00 2001 From: wpbonelli Date: Mon, 11 Mar 2024 15:49:32 -0400 Subject: [PATCH] chore: switch to ruff, misc linting fixes --- .github/workflows/ci.yml | 15 ++-------- autotest/test_build.py | 4 +-- autotest/test_download.py | 8 ++--- autotest/test_fixtures.py | 15 +++------- autotest/test_markers.py | 26 +++++++++++++--- autotest/test_misc.py | 24 +++++---------- modflow_devtools/download.py | 58 ++++++++++++++---------------------- modflow_devtools/fixtures.py | 22 ++++---------- modflow_devtools/imports.py | 4 +-- modflow_devtools/latex.py | 6 +--- modflow_devtools/markers.py | 12 +++----- modflow_devtools/misc.py | 56 +++++++++++++--------------------- modflow_devtools/ostags.py | 4 +-- pyproject.toml | 31 +++++++++---------- scripts/update_version.py | 27 +++++------------ 15 files changed, 115 insertions(+), 197 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1aa17be..7537aa2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,21 +30,10 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Python packages - run: | - pip install . - pip install ".[lint]" + run: pip install ".[lint]" - name: Run isort - run: isort --verbose --check --diff modflow_devtools - - - name: Run black - run: black --check --diff modflow_devtools - - - name: Run flake8 - run: flake8 --count --show-source --exit-zero modflow_devtools - - - name: Run pylint - run: pylint --jobs=0 --errors-only --exit-zero modflow_devtools + run: ruff check modflow_devtools build: name: Build diff --git a/autotest/test_build.py b/autotest/test_build.py index 36697f7..4effb9c 100644 --- a/autotest/test_build.py +++ b/autotest/test_build.py @@ -16,9 +16,7 @@ _system = platform.system() _exe_ext = ".exe" if _system == "Windows" else "" _lib_ext = ( - ".so" - if _system == "Linux" - else (".dylib" if _system == "Darwin" else ".dll") + ".so" if _system == "Linux" else (".dylib" if _system == "Darwin" else ".dll") ) diff --git a/autotest/test_download.py b/autotest/test_download.py index 9d5173d..dc3b43b 100644 --- a/autotest/test_download.py +++ b/autotest/test_download.py @@ -21,9 +21,7 @@ @pytest.mark.parametrize("retries", [-1, 0, 1.5]) def test_get_releases_bad_params(per_page, retries): with pytest.raises(ValueError): - get_releases( - "executables", per_page=per_page, retries=retries, verbose=True - ) + get_releases("executables", per_page=per_page, retries=retries, verbose=True) @flaky @@ -109,9 +107,7 @@ def test_download_and_unzip(function_tmpdir, delete_zip): zip_name = "mf6.3.0_linux.zip" dir_name = zip_name.replace(".zip", "") url = f"https://github.com/MODFLOW-USGS/modflow6/releases/download/6.3.0/{zip_name}" - download_and_unzip( - url, function_tmpdir, delete_zip=delete_zip, verbose=True - ) + download_and_unzip(url, function_tmpdir, delete_zip=delete_zip, verbose=True) assert (function_tmpdir / zip_name).is_file() != delete_zip diff --git a/autotest/test_fixtures.py b/autotest/test_fixtures.py index dcd7122..efddf54 100644 --- a/autotest/test_fixtures.py +++ b/autotest/test_fixtures.py @@ -39,8 +39,7 @@ def test_function_scoped_tmpdir_slash_in_name(function_tmpdir, name): replaced1 = name.replace("/", "_").replace("\\", "_").replace(":", "_") replaced2 = name.replace("/", "_").replace("\\", "__").replace(":", "_") assert ( - f"{inspect.currentframe().f_code.co_name}[{replaced1}]" - in function_tmpdir.stem + f"{inspect.currentframe().f_code.co_name}[{replaced1}]" in function_tmpdir.stem or 
f"{inspect.currentframe().f_code.co_name}[{replaced2}]" in function_tmpdir.stem ) @@ -144,9 +143,7 @@ def test_keep_class_scoped_tmpdir(tmp_path, arg): ] assert pytest.main(args) == ExitCode.OK assert Path( - tmp_path - / f"{TestKeepClassScopedTmpdirInner.__name__}0" - / test_keep_fname + tmp_path / f"{TestKeepClassScopedTmpdirInner.__name__}0" / test_keep_fname ).is_file() @@ -165,9 +162,7 @@ def test_keep_module_scoped_tmpdir(tmp_path, arg): ] assert pytest.main(args) == ExitCode.OK this_path = Path(__file__) - keep_path = ( - tmp_path / f"{str(this_path.parent.name)}.{str(this_path.stem)}0" - ) + keep_path = tmp_path / f"{str(this_path.parent.name)}.{str(this_path.stem)}0" assert test_keep_fname in [f.name for f in keep_path.glob("*")] @@ -206,9 +201,7 @@ def test_keep_failed_function_scoped_tmpdir(function_tmpdir, keep): args += ["--keep-failed", function_tmpdir] assert pytest.main(args) == ExitCode.TESTS_FAILED - kept_file = Path( - function_tmpdir / f"{inner_fn}0" / test_keep_fname - ).is_file() + kept_file = Path(function_tmpdir / f"{inner_fn}0" / test_keep_fname).is_file() assert kept_file if keep else not kept_file diff --git a/autotest/test_markers.py b/autotest/test_markers.py index cd105c4..e93020b 100644 --- a/autotest/test_markers.py +++ b/autotest/test_markers.py @@ -3,10 +3,24 @@ from shutil import which from packaging.version import Version - -from modflow_devtools.markers import * +import pytest + +from modflow_devtools.markers import ( + requires_exe, + require_exe, + requires_program, + require_program, + requires_pkg, + require_package, + requires_platform, + require_platform, + excludes_platform, + requires_python, + require_python, +) exe = "pytest" +pkg = exe @requires_exe(exe) @@ -14,6 +28,7 @@ def test_require_exe(): assert which(exe) require_exe(exe) require_program(exe) + requires_program(exe) exes = [exe, "python"] @@ -24,14 +39,15 @@ def test_require_exe_multiple(): assert all(which(exe) for exe in exes) -@requires_pkg("pytest") +@requires_pkg(pkg) def test_requires_pkg(): import numpy assert numpy is not None + require_package(pkg) -@requires_pkg("pytest", "pluggy") +@requires_pkg(pkg, "pluggy") def test_requires_pkg_multiple(): import pluggy import pytest @@ -42,6 +58,7 @@ def test_requires_pkg_multiple(): @requires_platform("Windows") def test_requires_platform(): assert system() == "Windows" + require_platform("Windows") @excludes_platform("Darwin", ci_only=True) @@ -57,3 +74,4 @@ def test_requires_platform_ci_only(): def test_requires_python(version): if Version(py_ver) >= Version(version): assert requires_python(version) + assert require_python(version) diff --git a/autotest/test_misc.py b/autotest/test_misc.py index 283e5ff..e6c5b00 100644 --- a/autotest/test_misc.py +++ b/autotest/test_misc.py @@ -49,9 +49,7 @@ def test_set_env(): _repos_path = Path(__file__).parent.parent.parent.parent _repos_path = Path(_repos_path).expanduser().absolute() _testmodels_repo_path = _repos_path / "modflow6-testmodels" -_testmodels_repo_paths_mf6 = sorted( - list((_testmodels_repo_path / "mf6").glob("test*")) -) +_testmodels_repo_paths_mf6 = sorted(list((_testmodels_repo_path / "mf6").glob("test*"))) _testmodels_repo_paths_mf5to6 = sorted( list((_testmodels_repo_path / "mf5to6").glob("test*")) ) @@ -60,9 +58,7 @@ def test_set_env(): _examples_repo_path = _repos_path / "modflow6-examples" _examples_path = _examples_repo_path / "examples" _example_paths = ( - sorted(list(_examples_path.glob("ex-*"))) - if _examples_path.is_dir() - else [] + 
sorted(list(_examples_path.glob("ex-*"))) if _examples_path.is_dir() else [] ) @@ -150,9 +146,7 @@ def get_expected_namefiles(path, pattern="mfsim.nam") -> List[Path]: return sorted(list(set(folders))) -@pytest.mark.skipif( - not any(_example_paths), reason="modflow6-examples repo not found" -) +@pytest.mark.skipif(not any(_example_paths), reason="modflow6-examples repo not found") def test_get_model_paths_examples(): expected_paths = get_expected_model_dirs(_examples_path) paths = get_model_paths(_examples_path) @@ -193,9 +187,7 @@ def test_get_model_paths_exclude_patterns(models): assert len(paths) >= expected_count -@pytest.mark.skipif( - not any(_example_paths), reason="modflow6-examples repo not found" -) +@pytest.mark.skipif(not any(_example_paths), reason="modflow6-examples repo not found") def test_get_namefile_paths_examples(): expected_paths = get_expected_namefiles(_examples_path) paths = get_namefile_paths(_examples_path) @@ -287,9 +279,9 @@ def test_get_env(): assert get_env("NO_VALUE") is None with set_env(TEST_VALUE=str(True)): - assert get_env("NO_VALUE", True) == True - assert get_env("TEST_VALUE") == True - assert get_env("TEST_VALUE", default=False) == True + assert get_env("NO_VALUE", True) + assert get_env("TEST_VALUE") + assert get_env("TEST_VALUE", default=False) assert get_env("TEST_VALUE", default=1) == 1 with set_env(TEST_VALUE=str(1)): @@ -302,4 +294,4 @@ def test_get_env(): assert get_env("NO_VALUE", 1.1) == 1.1 assert get_env("TEST_VALUE") == 1.1 assert get_env("TEST_VALUE", default=2.1) == 1.1 - assert get_env("TEST_VALUE", default=False) == False + assert not get_env("TEST_VALUE", default=False) diff --git a/modflow_devtools/download.py b/modflow_devtools/download.py index fcf045b..719220d 100644 --- a/modflow_devtools/download.py +++ b/modflow_devtools/download.py @@ -58,10 +58,10 @@ def get_releases( """ if "/" not in repo: - raise ValueError(f"repo format must be owner/name") + raise ValueError("repo format must be owner/name") if not isinstance(retries, int) or retries < 1: - raise ValueError(f"retries must be a positive int") + raise ValueError("retries must be a positive int") params = {} if per_page is not None: @@ -96,9 +96,7 @@ def get_response_json(): # GitHub sometimes returns this error for valid URLs, so retry warn(f"URL request try {tries} failed ({err})") continue - raise RuntimeError( - f"cannot retrieve data from {req_url}" - ) from err + raise RuntimeError(f"cannot retrieve data from {req_url}") from err releases = [] max_pages = max_pages if max_pages else sys.maxsize @@ -132,13 +130,13 @@ def get_release(repo, tag="latest", retries=3, verbose=False) -> dict: """ if "/" not in repo: - raise ValueError(f"repo format must be owner/name") + raise ValueError("repo format must be owner/name") if not isinstance(tag, str) or not any(tag): - raise ValueError(f"tag must be a non-empty string") + raise ValueError("tag must be a non-empty string") if not isinstance(retries, int) or retries < 1: - raise ValueError(f"retries must be a positive int") + raise ValueError("retries must be a positive int") req_url = f"https://api.github.com/repos/{repo}" req_url = ( @@ -209,10 +207,10 @@ def get_latest_version(repo, retries=3, verbose=False) -> str: """ if "/" not in repo: - raise ValueError(f"repo format must be owner/name") + raise ValueError("repo format must be owner/name") if not isinstance(retries, int) or retries < 1: - raise ValueError(f"retries must be a positive int") + raise ValueError("retries must be a positive int") release = get_release(repo, 
retries=retries, verbose=verbose) return release["tag_name"] @@ -245,13 +243,13 @@ def get_release_assets( """ if "/" not in repo: - raise ValueError(f"repo format must be owner/name") + raise ValueError("repo format must be owner/name") if not isinstance(tag, str) or not any(tag): - raise ValueError(f"tag must be a non-empty string") + raise ValueError("tag must be a non-empty string") if not isinstance(retries, int) or retries < 1: - raise ValueError(f"retries must be a positive int") + raise ValueError("retries must be a positive int") release = get_release(repo, tag=tag, retries=retries, verbose=verbose) return ( @@ -292,21 +290,19 @@ def list_artifacts( """ if "/" not in repo: - raise ValueError(f"repo format must be owner/name") + raise ValueError("repo format must be owner/name") if not isinstance(retries, int) or retries < 1: - raise ValueError(f"retries must be a positive int") + raise ValueError("retries must be a positive int") - msg = f"artifact(s) for {repo}" + ( - f" matching name {name}" if name else "" - ) + msg = f"artifact(s) for {repo}" + (f" matching name {name}" if name else "") req_url = f"https://api.github.com/repos/{repo}/actions/artifacts" page = 1 params = {} if name is not None: if not isinstance(name, str) or len(name) == 0: - raise ValueError(f"name must be a non-empty string") + raise ValueError("name must be a non-empty string") params["name"] = name if per_page is not None: @@ -336,9 +332,7 @@ def get_response_json(): # GitHub sometimes returns this error for valid URLs, so retry warn(f"URL request try {tries} failed ({err})") continue - raise RuntimeError( - f"cannot retrieve data from {req_url}" - ) from err + raise RuntimeError(f"cannot retrieve data from {req_url}") from err artifacts = [] diff = 1 @@ -387,10 +381,10 @@ def download_artifact( """ if "/" not in repo: - raise ValueError(f"repo format must be owner/name") + raise ValueError("repo format must be owner/name") if not isinstance(retries, int) or retries < 1: - raise ValueError(f"retries must be a positive int") + raise ValueError("retries must be a positive int") req_url = f"https://api.github.com/repos/{repo}/actions/artifacts/{id}/zip" request = urllib.request.Request(req_url) @@ -415,9 +409,7 @@ def download_artifact( warn(f"URL request try {tries} failed ({err})") continue else: - raise RuntimeError( - f"cannot retrieve data from {req_url}" - ) from err + raise RuntimeError(f"cannot retrieve data from {req_url}") from err if verbose: print(f"Uncompressing: {zip_path}") @@ -496,14 +488,8 @@ def download_and_unzip( file_size = int(file_size) bfmt = "{:" + f"{len_file_size}" + ",d}" - sbfmt = ( - "{:>" - + f"{len(bfmt.format(int(file_size)))}" - + "s} bytes" - ) - print( - f" file size: {sbfmt.format(bfmt.format(int(file_size)))}" - ) + sbfmt = "{:>" + f"{len(bfmt.format(int(file_size)))}" + "s} bytes" + print(f" file size: {sbfmt.format(bfmt.format(int(file_size)))}") break except urllib.error.HTTPError as err: @@ -526,7 +512,7 @@ def download_and_unzip( # extract the files z.extractall(str(path)) - except: + except: # noqa: E722 p = "Could not unzip the file. Stopping." 
raise Exception(p) z.close() diff --git a/modflow_devtools/fixtures.py b/modflow_devtools/fixtures.py index 4504f72..ec4d412 100644 --- a/modflow_devtools/fixtures.py +++ b/modflow_devtools/fixtures.py @@ -17,11 +17,7 @@ @pytest.fixture(scope="function") def function_tmpdir(tmpdir_factory, request) -> Path: - node = ( - request.node.name.replace("/", "_") - .replace("\\", "_") - .replace(":", "_") - ) + node = request.node.name.replace("/", "_").replace("\\", "_").replace(":", "_") temp = Path(tmpdir_factory.mktemp(node)) yield Path(temp) @@ -269,9 +265,7 @@ def get_repo_path(repo_name: str) -> Optional[Path]: if repo_path else [] ) - metafunc.parametrize( - key, namefile_paths, ids=[str(m) for m in namefile_paths] - ) + metafunc.parametrize(key, namefile_paths, ids=[str(m) for m in namefile_paths]) key = "test_model_mf5to6" if key in metafunc.fixturenames: @@ -288,9 +282,7 @@ def get_repo_path(repo_name: str) -> Optional[Path]: if repo_path else [] ) - metafunc.parametrize( - key, namefile_paths, ids=[str(m) for m in namefile_paths] - ) + metafunc.parametrize(key, namefile_paths, ids=[str(m) for m in namefile_paths]) key = "large_test_model" if key in metafunc.fixturenames: @@ -307,9 +299,7 @@ def get_repo_path(repo_name: str) -> Optional[Path]: if repo_path else [] ) - metafunc.parametrize( - key, namefile_paths, ids=[str(m) for m in namefile_paths] - ) + metafunc.parametrize(key, namefile_paths, ids=[str(m) for m in namefile_paths]) key = "example_scenario" if key in metafunc.fixturenames: @@ -384,9 +374,7 @@ def get_examples(): filtered.append(name) break examples = { - name: nfps - for name, nfps in examples.items() - if name in filtered + name: nfps for name, nfps in examples.items() if name in filtered } # exclude mf6gwf and mf6gwt subdirs diff --git a/modflow_devtools/imports.py b/modflow_devtools/imports.py index 2521fcc..625c59d 100644 --- a/modflow_devtools/imports.py +++ b/modflow_devtools/imports.py @@ -130,9 +130,7 @@ def import_optional_dependency( module_to_get = sys.modules[install_name] else: module_to_get = module - minimum_version = ( - min_version if min_version is not None else VERSIONS.get(parent) - ) + minimum_version = min_version if min_version is not None else VERSIONS.get(parent) if minimum_version: version = get_version(module_to_get) if Version(version) < Version(minimum_version): diff --git a/modflow_devtools/latex.py b/modflow_devtools/latex.py index bbe766c..4670c2d 100644 --- a/modflow_devtools/latex.py +++ b/modflow_devtools/latex.py @@ -92,11 +92,7 @@ def get_header( header = "\\small\n" header += "\\begin{longtable}[!htbp]{\n" for col_width in col_widths: - header += ( - 38 * " " - + f"{align}" - + f"{{{col_width}\\linewidth-2\\arraycolsep}}\n" - ) + header += 38 * " " + f"{align}" + f"{{{col_width}\\linewidth-2\\arraycolsep}}\n" header += 38 * " " + "}\n" header += f"\t\\caption{{{caption}}} \\label{{{label}}} \\\\\n\n" diff --git a/modflow_devtools/markers.py b/modflow_devtools/markers.py index 1c21e43..7bfeaf3 100644 --- a/modflow_devtools/markers.py +++ b/modflow_devtools/markers.py @@ -31,7 +31,7 @@ def requires_exe(*exes): def requires_python(version, bound="lower"): if not isinstance(version, str): - raise ValueError(f"Version must a string") + raise ValueError("Version must be a string") py_tgt = Version(version) if bound == "lower": @@ -57,25 +57,21 @@ def requires_pkg(*pkgs): def requires_platform(platform, ci_only=False): return pytest.mark.skipif( - system().lower() != platform.lower() - and (is_in_ci() if ci_only else True), + system().lower()
!= platform.lower() and (is_in_ci() if ci_only else True), reason=f"only compatible with platform: {platform.lower()}", ) def excludes_platform(platform, ci_only=False): return pytest.mark.skipif( - system().lower() == platform.lower() - and (is_in_ci() if ci_only else True), + system().lower() == platform.lower() and (is_in_ci() if ci_only else True), reason=f"not compatible with platform: {platform.lower()}", ) def requires_branch(branch): current = get_current_branch() - return pytest.mark.skipif( - current != branch, reason=f"must run on branch: {branch}" - ) + return pytest.mark.skipif(current != branch, reason=f"must run on branch: {branch}") def excludes_branch(branch): diff --git a/modflow_devtools/misc.py b/modflow_devtools/misc.py index c7fb0de..94983e0 100644 --- a/modflow_devtools/misc.py +++ b/modflow_devtools/misc.py @@ -16,6 +16,7 @@ from urllib import request from _warnings import warn +from urllib.error import URLError @contextmanager @@ -167,8 +168,8 @@ def get_packages(namefile_path: PathLike) -> List[str]: packages = [] path = Path(namefile_path).expanduser().absolute() lines = open(path, "r").readlines() - gwf_lines = [l for l in lines if l.strip().lower().startswith("gwf6 ")] - gwt_lines = [l for l in lines if l.strip().lower().startswith("gwt6 ")] + gwf_lines = [ln for ln in lines if ln.strip().lower().startswith("gwf6 ")] + gwt_lines = [ln for ln in lines if ln.strip().lower().startswith("gwt6 ")] def parse_model_namefile(line): nf_path = [path.parent / s for s in line.split(" ") if s != ""][1] @@ -181,24 +182,20 @@ def parse_model_namefile(line): # load model namefiles try: for line in gwf_lines: - packages = ( - packages + get_packages(parse_model_namefile(line)) + ["gwf"] - ) + packages = packages + get_packages(parse_model_namefile(line)) + ["gwf"] for line in gwt_lines: - packages = ( - packages + get_packages(parse_model_namefile(line)) + ["gwt"] - ) - except: + packages = packages + get_packages(parse_model_namefile(line)) + ["gwt"] + except: # noqa: E722 warn(f"Invalid namefile format: {traceback.format_exc()}") for line in lines: # Skip over blank and commented lines - ll = line.strip().split() - if len(ll) < 2: + line = line.strip().split() + if len(line) < 2: continue - l = ll[0].lower() - if any(l.startswith(c) for c in ["#", "!", "data", "list"]) or l in [ + line = line[0].lower() + if any(line.startswith(c) for c in ["#", "!", "data", "list"]) or line in [ "begin", "end", "memory_print_option", @@ -206,9 +203,9 @@ def parse_model_namefile(line): continue # strip "6" from package name - l = l.replace("6", "") + line = line.replace("6", "") - packages.append(l.lower()) + packages.append(line.lower()) return list(set(packages)) @@ -242,17 +239,12 @@ def get_namefile_paths( # find simulation namefiles paths = [ - p - for p in Path(path).rglob( - f"{prefix}*/**/{namefile}" if prefix else namefile - ) + p for p in Path(path).rglob(f"{prefix}*/**/{namefile}" if prefix else namefile) ] # remove excluded paths = [ - p - for p in paths - if (not excluded or not any(e in str(p) for e in excluded)) + p for p in paths if (not excluded or not any(e in str(p) for e in excluded)) ] # filter by package @@ -260,9 +252,7 @@ def get_namefile_paths( filtered = [] for nfp in paths: nf_pkgs = get_packages(nfp) - shared = set(nf_pkgs).intersection( - set([p.lower() for p in packages]) - ) + shared = set(nf_pkgs).intersection(set([p.lower() for p in packages])) if any(shared): filtered.append(nfp) paths = filtered @@ -271,9 +261,7 @@ def get_namefile_paths( if selected: paths 
= [ namfile_path - for (namfile_path, model_path) in zip( - paths, [p.parent for p in paths] - ) + for (namfile_path, model_path) in zip(paths, [p.parent for p in paths]) if any(s in model_path.name for s in selected) ] @@ -298,9 +286,7 @@ def get_model_paths( namefile_paths = get_namefile_paths( path, prefix, namefile, excluded, selected, packages ) - model_paths = sorted( - list(set([p.parent for p in namefile_paths if p.parent.name])) - ) + model_paths = sorted(list(set([p.parent for p in namefile_paths if p.parent.name]))) return model_paths @@ -341,16 +327,14 @@ def is_github_rate_limited() -> Optional[bool]: True if rate-limiting is applied, otherwise False (or None if the connection fails). """ try: - with request.urlopen( - "https://api.github.com/users/octocat" - ) as response: + with request.urlopen("https://api.github.com/users/octocat") as response: remaining = int(response.headers["x-ratelimit-remaining"]) if remaining < 10: warn( f"Only {remaining} GitHub API requests remaining before rate-limiting" ) return remaining > 0 - except: + except (ValueError, URLError): return None @@ -481,7 +465,7 @@ def get_env(name: str, default: object = None) -> Optional[object]: if isinstance(default, bool): v = v.lower().title() v = literal_eval(v) - except: + except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError): return default if default is None: return v diff --git a/modflow_devtools/ostags.py b/modflow_devtools/ostags.py index 653f533..14186fc 100644 --- a/modflow_devtools/ostags.py +++ b/modflow_devtools/ostags.py @@ -73,10 +73,10 @@ def _suffixes(tag): try: return _suffixes(ostag.lower()) - except: + except KeyError: try: return _suffixes(python_to_modflow_ostag(ostag)) - except: + except KeyError: return _suffixes(github_to_modflow_ostag(ostag)) diff --git a/pyproject.toml b/pyproject.toml index 16c2018..cbbd53f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,11 +44,7 @@ dynamic = ["version"] [project.optional-dependencies] lint = [ - "black", - "cffconvert", - "flake8", - "isort", - "pylint" + "ruff" ] test = [ "modflow-devtools[lint]", @@ -75,19 +71,20 @@ docs = [ "Bug Tracker" = "https://github.com/MODFLOW-USGS/modflow-devtools/issues" "Source Code" = "https://github.com/MODFLOW-USGS/modflow-devtools" +[tool.ruff] +target-version = "py38" +include = [ + "pyproject.toml", + "modflow_devtools/**/*.py", + "autotest/**/*.py", + "docs/**/*.py", + "scripts/**/*.py", + ".github/**/*.py", +] -[tool.black] -line-length = 79 -target_version = ["py37"] - -[tool.flynt] -line-length = 79 -verbose = true - -[tool.isort] -profile = "black" -src_paths = ["modflow_devtools"] -line_length = 79 +[tool.ruff.lint] +ignore = [ +] [tool.setuptools] packages = ["modflow_devtools"] diff --git a/scripts/update_version.py b/scripts/update_version.py index c79e5bf..92a7724 100644 --- a/scripts/update_version.py +++ b/scripts/update_version.py @@ -1,10 +1,7 @@ import argparse import textwrap from datetime import datetime -from enum import Enum -from os import PathLike from pathlib import Path -from typing import NamedTuple from filelock import FileLock from packaging.version import Version @@ -13,9 +10,7 @@ _project_root_path = Path(__file__).parent.parent _version_txt_path = _project_root_path / "version.txt" _package_init_path = _project_root_path / "modflow_devtools" / "__init__.py" -_readme_path = _project_root_path / "README.md" _docs_config_path = _project_root_path / "docs" / "conf.py" -_initial_version = Version("0.0.1") _current_version =
Version(_version_txt_path.read_text().strip()) @@ -37,7 +32,7 @@ def update_init_py(timestamp: datetime, version: Version): print(f"Updated {_package_init_path} to version {version}") -def update_docs_config(timestamp: datetime, version: Version): +def update_docs_config(version: Version): lines = _docs_config_path.read_text().rstrip().split("\n") with open(_docs_config_path, "w") as f: for line in lines: @@ -52,8 +47,8 @@ def update_version( version: Version = None, ): lock_path = Path(_version_txt_path.name + ".lock") - try: - lock = FileLock(lock_path) + lock = FileLock(lock_path) + with lock: previous = Version(_version_txt_path.read_text().strip()) version = ( version @@ -61,15 +56,9 @@ def update_version( else Version(previous.major, previous.minor, previous.micro) ) - with lock: - update_version_txt(version) - update_init_py(timestamp, version) - update_docs_config(timestamp, version) - finally: - try: - lock_path.unlink() - except: - pass + update_version_txt(version) + update_init_py(timestamp, version) + update_docs_config(version) if __name__ == "__main__": @@ -106,7 +95,5 @@ def update_version( else: update_version( timestamp=datetime.now(), - version=( - Version(args.version) if args.version else _current_version - ), + version=(Version(args.version) if args.version else _current_version), )