diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 48a2914f3..ce1c37d3c 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -1,18 +1,20 @@
{
- "name": "Ubuntu",
- "build": {
- "dockerfile": "Dockerfile",
- "args": { "VARIANT": "ubuntu-22.04" }
- },
- "remoteUser": "vscode",
- "customizations": {
- "vscode": {
- "extensions": [
- "ms-python.python"
- ]
- }
- },
- "features": {
- "ghcr.io/devcontainers/features/docker-in-docker:2": {}
- }
+ "build": {
+ "args": {
+ "VARIANT": "ubuntu-22.04"
+ },
+ "dockerfile": "Dockerfile"
+ },
+ "customizations": {
+ "vscode": {
+ "extensions": [
+ "ms-python.python"
+ ]
+ }
+ },
+ "features": {
+ "ghcr.io/devcontainers/features/docker-in-docker:2": {}
+ },
+ "name": "Ubuntu",
+ "remoteUser": "vscode"
}
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 152a39858..c6ac0148b 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -2,6 +2,6 @@
To view our [Getting Started] guide for developers and [Contribution Requirements], please refer to the official [documentation].
-[Contribution Requirements]: https://docs.onica.com/projects/runway/page/developers/contributing.html
-[Getting Started]: https://docs.onica.com/projects/runway/page/developers/getting_started.html
+[contribution requirements]: https://docs.onica.com/projects/runway/page/developers/contributing.html
[documentation]: https://docs.onica.com/projects/runway
+[getting started]: https://docs.onica.com/projects/runway/page/developers/getting_started.html
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index 433e735e6..8ed73f25d 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -1,13 +1,12 @@
---
name: Feature request
about: Suggest an idea for this project
-title: "[REQUEST] feature"
+title: '[REQUEST] feature'
labels: feature, priority:low, status:review_needed
-
---
**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+A clear and concise description of what the problem is. Ex. I'm always frustrated when ...
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
diff --git a/.github/ISSUE_TEMPLATE/general_question.md b/.github/ISSUE_TEMPLATE/general_question.md
index 10e2e5fb8..677763637 100644
--- a/.github/ISSUE_TEMPLATE/general_question.md
+++ b/.github/ISSUE_TEMPLATE/general_question.md
@@ -1,9 +1,8 @@
---
name: General Question
about: General question about the project, usage, design, etc.
-title: "[QUESTION]"
+title: '[QUESTION]'
labels: priority:low, status:review_needed, question
-
---
**Question**
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 0b3065c6c..286daf76c 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -37,6 +37,7 @@
# Checklist
+
- [ ] Have you followed the guidelines in our [Contribution Requirements](https://docs.onica.com/projects/runway/page/developers/contributing.html)?
- [ ] Have you checked to ensure there aren't other open [Pull Requests](../../../pulls) for the same update/change?
- [ ] Does your submission pass tests?
diff --git a/.github/scripts/urlshortener/Makefile b/.github/scripts/urlshortener/Makefile
index d6b804bb6..4234def39 100644
--- a/.github/scripts/urlshortener/Makefile
+++ b/.github/scripts/urlshortener/Makefile
@@ -1,14 +1,8 @@
install:
@poetry install
-lint: lint-flake8 lint-pylint
-
-lint-flake8:
- @poetry run flake8 update_urls.py
-
-lint-pylint:
- @poetry run pylint update_urls.py \
- --rcfile=./../../../pyproject.toml
+lint:
+ @echo "no linters configured currently"
test:
@poetry run pytest ./test_update_urls.py \
diff --git a/.github/scripts/urlshortener/test_update_urls.py b/.github/scripts/urlshortener/test_update_urls.py
index d61ba85cf..67dc10381 100644
--- a/.github/scripts/urlshortener/test_update_urls.py
+++ b/.github/scripts/urlshortener/test_update_urls.py
@@ -1,18 +1,21 @@
-"""Tests for update_urls."""
+"""Tests for update_urls.""" # noqa: INP001
-# pylint: disable=no-member
+# ruff: noqa: S101
+from typing import TYPE_CHECKING
from unittest.mock import ANY, Mock, call, patch
import boto3
import pytest
from botocore.stub import Stubber
from click.testing import CliRunner
-from mypy_boto3_dynamodb.service_resource import Table
from update_urls import command, handler, put_item, sanitize_version
+if TYPE_CHECKING:
+ from mypy_boto3_dynamodb.service_resource import Table
-def test_sanitize_version():
+
+def test_sanitize_version() -> None:
"""Test sanitize_version."""
assert sanitize_version(None, None, "1.0.0") == "1.0.0"
assert sanitize_version(None, None, "v1.0.0") == "1.0.0"
@@ -20,11 +23,11 @@ def test_sanitize_version():
assert sanitize_version(None, None, "refs/tags/v1.0.0") == "1.0.0"
assert sanitize_version(None, None, "refs/tags/v1.0.0-dev1") == "1.0.0-dev1"
- with pytest.raises(ValueError):
+ with pytest.raises(ValueError): # noqa: PT011
assert not sanitize_version(None, None, "refs/tags/stable")
-def test_put_item():
+def test_put_item() -> None:
"""Test put_item."""
table_name = "test-table"
id_val = "my_id"
@@ -32,16 +35,14 @@ def test_put_item():
table: Table = boto3.resource("dynamodb").Table(table_name)
stubber = Stubber(table.meta.client)
- stubber.add_response(
- "put_item", {"Attributes": {"id": {"S": id_val}, "target": {"S": target}}}
- )
+ stubber.add_response("put_item", {"Attributes": {"id": {"S": id_val}, "target": {"S": target}}})
with stubber:
assert not put_item(table, id_val, target)
@patch("update_urls.put_item")
-def test_handler(mock_put_item: Mock):
+def test_handler(mock_put_item: Mock) -> None:
"""Test handler."""
table = Mock()
assert not handler(table, "test-bucket", "us-west-2", "1.0.0", True)
@@ -49,26 +50,22 @@ def test_handler(mock_put_item: Mock):
call(
table=table,
id_val="runway/latest/linux",
- target="https://test-bucket.s3-us-west-2.amazonaws.com/"
- "runway/1.0.0/linux/runway",
+ target="https://test-bucket.s3-us-west-2.amazonaws.com/runway/1.0.0/linux/runway",
),
call(
table=table,
id_val="runway/1.0.0/linux",
- target="https://test-bucket.s3-us-west-2.amazonaws.com/"
- "runway/1.0.0/linux/runway",
+ target="https://test-bucket.s3-us-west-2.amazonaws.com/runway/1.0.0/linux/runway",
),
call(
table=table,
id_val="runway/latest/osx",
- target="https://test-bucket.s3-us-west-2.amazonaws.com/"
- "runway/1.0.0/osx/runway",
+ target="https://test-bucket.s3-us-west-2.amazonaws.com/runway/1.0.0/osx/runway",
),
call(
table=table,
id_val="runway/1.0.0/osx",
- target="https://test-bucket.s3-us-west-2.amazonaws.com/"
- "runway/1.0.0/osx/runway",
+ target="https://test-bucket.s3-us-west-2.amazonaws.com/runway/1.0.0/osx/runway",
),
call(
table=table,
@@ -89,16 +86,14 @@ def test_handler(mock_put_item: Mock):
call(
table=table,
id_val="runway/1.1.0/linux",
- target="https://test-bucket.s3-us-east-1.amazonaws.com/"
- "runway/1.1.0/linux/runway",
+ target="https://test-bucket.s3-us-east-1.amazonaws.com/runway/1.1.0/linux/runway",
)
)
calls.append(
call(
table=table,
id_val="runway/1.1.0/osx",
- target="https://test-bucket.s3-us-east-1.amazonaws.com/"
- "runway/1.1.0/osx/runway",
+ target="https://test-bucket.s3-us-east-1.amazonaws.com/runway/1.1.0/osx/runway",
)
)
calls.append(
@@ -114,7 +109,7 @@ def test_handler(mock_put_item: Mock):
@patch("update_urls.handler")
-def test_command(mock_handler: Mock):
+def test_command(mock_handler: Mock) -> None:
"""Test command."""
runner = CliRunner()
result = runner.invoke(
diff --git a/.github/scripts/urlshortener/update_urls.py b/.github/scripts/urlshortener/update_urls.py
index 6db7e8b60..bdc78460d 100755
--- a/.github/scripts/urlshortener/update_urls.py
+++ b/.github/scripts/urlshortener/update_urls.py
@@ -1,10 +1,9 @@
-"""Update Runway release URLs."""
+"""Update Runway release URLs.""" # noqa: INP001
-# pylint: disable=no-member
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Optional, Union
+from typing import TYPE_CHECKING
import boto3
import click
@@ -18,16 +17,14 @@
HDLR.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
ID_TEMPLATE = "runway/{release}/{os}"
-TARGET_TEMPLATE = (
- "https://{bucket_name}.s3-{region}.amazonaws.com/runway/{version}/{os}/runway"
-)
+TARGET_TEMPLATE = "https://{bucket_name}.s3-{region}.amazonaws.com/runway/{version}/{os}/runway"
OS_NAMES = ["linux", "osx", "windows"]
def sanitize_version(
- _ctx: Optional[click.Context],
- _param: Optional[Union[click.Option, click.Parameter]],
+ _ctx: click.Context | None,
+ _param: click.Option | click.Parameter | None,
value: str,
) -> str:
"""Sanitize a version number by stripping git tag ref and leading "v".
@@ -67,7 +64,7 @@ def handler(
"""Handle the command.
Core logic executed by the command aside from boto3 session/resource
- initializeion and logging setup.
+ initialization and logging setup.
Args:
table: DynamoDB table resource.
@@ -122,7 +119,7 @@ def handler(
"table_name",
metavar="
",
required=True,
- help="Name of the DynamoDB table containing entries for the URL " "shortener.",
+ help="Name of the DynamoDB table containing entries for the URL shortener.",
)
@click.option(
"--version",
@@ -156,4 +153,4 @@ def command(
if __name__ == "__main__":
- command() # pylint: disable=E
+ command()
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b43487cbb..9194daa8f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,11 +1,19 @@
-minimum_pre_commit_version: 2.6.0
+default_language_version:
+ node: system
+
+exclude: |
+ (?x)^(
+ (.*/)?package-lock\.json|
+ (.*/)?poetry\.lock
+ )$
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.3.0
+ rev: v4.6.0
hooks:
- id: check-json
- id: check-merge-conflict
+ - id: check-toml
- id: check-yaml
args:
- --unsafe # needed for parsing CFN
@@ -14,56 +22,47 @@ repos:
- id: file-contents-sorter
files: |
(?x)^(
+ \.dockerignore|
\.gitignore|
\.vscode/dictionaries/.*\.txt|
- MANIFEST.in
)$
- id: pretty-format-json
args: [--autofix, --indent, '4']
- files: |
+ exclude: |
(?x)^(
- \.vscode/.*\.json
+ (.*)?(angular|cdk|package|tsconfig(\.spec)?|tslint)\.json
)$
- id: pretty-format-json
args: [--autofix, --indent, '2']
files: |
(?x)^(
- (.*)?(cdk|tsconfig|tslint).json
+ (.*)?(angular|cdk|package|tsconfig(\.spec)?|tslint)\.json
)$
- id: trailing-whitespace
+ - repo: https://github.com/pappasam/toml-sort
+ rev: v0.23.1
+ hooks:
+ - id: toml-sort-fix
- repo: https://github.com/ITProKyle/pre-commit-hook-yamlfmt
- rev: v0.2.0
+ rev: v0.3.0
hooks:
- id: yamlfmt
args: [--mapping, '2', --offset, '2', --sequence, '4']
- files: |
+ exclude: |
(?x)^(
- \.github/(?!dependabot).*\.(yaml|yml)|
- \.markdownlint.yml|
- \.pre-commit-config.yaml|
- \.readthedocs.yml|
- buildspec.yml
+ tests/unit/module/staticsite/fixtures/expected_yaml/.*\.(yaml|yml)|
+ docs/runway-example\.yml
)$
- - repo: https://github.com/timothycrosley/isort
- rev: 5.12.0
- hooks:
- - id: isort
- - repo: https://github.com/psf/black
- rev: 24.1.1
- hooks:
- - id: black
- args:
- - --color
- - repo: https://github.com/pycqa/flake8
- rev: 4.0.1
+ - repo: https://github.com/executablebooks/mdformat
+ rev: 0.7.17
hooks:
- - id: flake8
+ - id: mdformat
additional_dependencies:
- - flake8-bugbear
- - flake8-docstrings
- - flake8-print==5.0.0
- - flake8-use-fstring
+ - mdformat-frontmatter
+ - mdformat-gfm
+ - mdformat-gfm-alerts
+ - mdformat-tables
- repo: https://github.com/igorshubovych/markdownlint-cli
- rev: v0.31.1
+ rev: v0.41.0
hooks:
- id: markdownlint
diff --git a/.vscode/cspell.json b/.vscode/cspell.json
index d06031501..6a7acece2 100644
--- a/.vscode/cspell.json
+++ b/.vscode/cspell.json
@@ -59,164 +59,170 @@
"maxNumberOfProblems": 100,
"version": "0.2",
"words": [
+ "absolutepath",
"abstractmethod",
+ "accesspoint",
+ "addoption",
"ALGS",
+ "appendleft",
+ "arcname",
+ "argparsing",
+ "assumerole",
+ "authmap",
"autoattribute",
+ "autobuild",
+ "autodetected",
+ "autofind",
+ "autoloaded",
+ "autoscale",
+ "autoscaler",
+ "autouse",
+ "awslogbucket",
+ "backported",
+ "barfoo",
+ "blogpost",
+ "caplog",
"certifi",
+ "certificatemanager",
+ "chunksize",
+ "classdir",
+ "classmethods",
+ "clienterror",
"cmds",
"codecov",
+ "configvars",
+ "copydir",
"devel",
+ "dockerized",
+ "domparator",
+ "downstreams",
+ "dryrun",
+ "dunder",
+ "edgelambda",
+ "ekscluster",
+ "eksservicerole",
"EOCD",
+ "excinfo",
+ "execglobals",
+ "Fakhreddine",
+ "filedes",
+ "filedir",
+ "filehandle",
+ "fileinfo",
+ "fileinfos",
+ "fileout",
+ "foobarfoo",
"FQDNs",
+ "frontmatter",
"fstring",
+ "getgid",
+ "getpreferredencoding",
"getuid",
+ "graphviz",
"hashextra",
+ "hashfile",
"hashicorp",
+ "htmlhelp",
+ "humanreadable",
+ "identless",
"igittigitt",
+ "indentless",
+ "instancerole",
+ "intersphinx",
+ "invalidtestkey",
+ "keylist",
"kwoa",
"libltdl",
"libmysqlclient",
"libxmlsec",
+ "lintfix",
+ "locallocal",
"ltdl",
"lxml",
+ "managementpolicy",
"markexpr",
+ "maxsplit",
+ "mdformat",
+ "mynamespace",
+ "mystack",
+ "nameextra",
+ "nameserver",
+ "nestedkey",
+ "nestedval",
+ "newdir",
+ "newfile",
"Ngin",
+ "nitpicky",
+ "nodegroup",
+ "nodeinstanceprofile",
+ "nodeinstancerole",
+ "nodelaunchtemplate",
+ "nodesecuritygroup",
+ "nonbool",
+ "noninteractive",
+ "nonseekable",
+ "nosetests",
+ "onezone",
"openid",
+ "outputquery",
+ "paravirtual",
+ "partitionkey",
"Pipefile",
+ "prehook",
+ "prepad",
+ "prevdir",
"PYXMLSEC",
+ "readacl",
+ "refreshable",
"rglob",
+ "rootdir",
"runtimes",
- "tomap",
- "tomli",
- "typeshed",
- "unsubscriptable",
- "xmlsec",
- "intersphinx",
- "viewcode",
- "nitpicky",
- "htmlhelp",
+ "runwayconfig",
"runwaydoc",
- "typehints",
- "templatedir",
- "getpreferredencoding",
- "execglobals",
- "refreshable",
- "nodeinstancerole",
- "nodeinstanceprofile",
- "autoscaler",
- "thisfile",
- "eksservicerole",
- "ekscluster",
- "nodegroup",
- "nodesecuritygroup",
- "blogpost",
- "awslogbucket",
- "edgelambda",
- "terraformlocktable",
- "terraformstatebucket",
- "managementpolicy",
- "graphviz",
- "classdir",
- "configvars",
- "nosetests",
- "noninteractive",
- "downstreams",
- "appendleft",
- "dockerized",
- "certificatemanager",
- "copydir",
- "maxsplit",
- "getpreferredencoding",
- "absolutepath",
- "getgid",
- "assumerole",
- "excinfo",
- "caplog",
- "classmethods",
- "autoloaded",
- "autouse",
- "accesspoint",
- "readacl",
- "writeacl",
- "nonseekable",
- "chunksize",
- "fileinfo",
- "fileinfos",
- "dryrun",
+ "safehaven",
+ "savingsplans",
+ "searchpath",
+ "shasums",
+ "shelloutexc",
+ "shouldraise",
"sourcebucket",
"sourcekey",
- "locallocal",
- "rootdir",
- "onezone",
- "backported",
- "usefixtures",
+ "SPHINXAUTOBUILD",
+ "SPHINXAUTOBUILDPORT",
+ "ssmstore",
+ "ssword",
+ "subclasscheck",
"tagset",
- "testtemplate",
- "safehaven",
- "barfoo",
- "dunder",
- "testval",
+ "tempdirectory",
+ "templatedir",
+ "temppath",
+ "terraformlocktable",
+ "terraformstatebucket",
"testkey",
- "invalidtestkey",
- "subclasscheck",
- "paravirtual",
- "autouse",
- "nonbool",
- "ssword",
"teststack",
- "mynamespace",
- "foobarfoo",
- "unittests",
- "outputquery",
- "clienterror",
- "autofind",
- "mystack",
- "shouldraise",
- "tempdirectory",
- "nameextra",
- "argparsing",
- "hashfile",
- "newdir",
- "prevdir",
- "identless",
- "humanreadable",
- "runwayconfig",
- "instancerole",
- "authmap",
- "tmpdirname",
- "shelloutexc",
- "nestedkey",
- "nestedval",
- "fileout",
- "lintfix",
- "autoscale",
- "shasums",
+ "testtemplate",
+ "testval",
+ "thisfile",
"threadsafe",
- "nameserver",
- "keylist",
- "filehandle",
- "prepad",
- "newfile",
- "filedir",
- "temppath",
- "prehook",
- "nodelaunchtemplate",
- "indentless",
- "arcname",
- "searchpath",
- "savingsplans",
- "topdown",
- "partitionkey",
- "timestnonce",
"timestampamp",
- "filedes",
- "autodetected",
- "addoption",
- "domparator",
- "Fakhreddine",
- "SPHINXAUTOBUILD",
- "autobuild",
- "SPHINXAUTOBUILDPORT",
- "ssmstore"
+ "timestnonce",
+ "tmpdirname",
+ "tomap",
+ "tomli",
+ "topdown",
+ "typehints",
+ "typeshed",
+ "unittests",
+ "unsubscriptable",
+ "usefixtures",
+ "viewcode",
+ "writeacl",
+ "xmlsec",
+ "troyready",
+ "tomlsort",
+ "pyupgrade",
+ "tryceratops",
+ "errmsg",
+ "datetimez"
]
}
diff --git a/.vscode/dictionaries/pypi.txt b/.vscode/dictionaries/pypi.txt
index 128d093b8..b8df9ffc3 100644
--- a/.vscode/dictionaries/pypi.txt
+++ b/.vscode/dictionaries/pypi.txt
@@ -10,7 +10,6 @@ ctypes
distutils
dunamai
gitpython
-isort
moto
numpy
pefile
@@ -22,7 +21,6 @@ pydantic
pydocstyle
pyhcl
pyinstaller
-pylint
pywin
pyyaml
runpy
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 0fb3e52a8..149f8b2ed 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,5 +1,10 @@
{
"[python]": {
+ "editor.codeActionsOnSave": {
+ "source.fixAll.ruff": "explicit",
+ "source.organizeImports": "always"
+ },
+ "editor.defaultFormatter": "ms-python.black-formatter",
"editor.detectIndentation": false,
"editor.formatOnSave": true,
"editor.insertSpaces": true,
@@ -35,17 +40,7 @@
"**/__pycache__": true
},
"files.insertFinalNewline": true,
- "python.analysis.typeCheckingMode": "strict",
- "python.formatting.provider": "black",
- "python.linting.flake8Args": [
- "--docstring-convention=all"
- ],
- "python.linting.flake8Enabled": true,
- "python.linting.mypyEnabled": false,
- "python.linting.pylintArgs": [
- "--rcfile=pyproject.toml"
- ],
- "python.linting.pylintEnabled": true,
+ "python.analysis.typeCheckingMode": "off",
"python.pythonPath": "${workspaceFolder}/.venv/",
"python.testing.pytestArgs": [
"tests",
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 1f34d15f2..0cc59f1d4 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -17,23 +17,23 @@ diverse, inclusive, and healthy community.
Examples of behavior that contributes to a positive environment for our
community include:
-* Demonstrating empathy and kindness toward other people
-* Being respectful of differing opinions, viewpoints, and experiences
-* Giving and gracefully accepting constructive feedback
-* Accepting responsibility and apologizing to those affected by our mistakes,
+- Demonstrating empathy and kindness toward other people
+- Being respectful of differing opinions, viewpoints, and experiences
+- Giving and gracefully accepting constructive feedback
+- Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
-* Focusing on what is best not just for us as individuals, but for the
+- Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
-* The use of sexualized language or imagery, and sexual attention or
+- The use of sexualized language or imagery, and sexual attention or
advances of any kind
-* Trolling, insulting or derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or email
+- Trolling, insulting or derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or email
address, without their explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
+- Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
@@ -59,8 +59,7 @@ representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported to the community leaders responsible for enforcement at
-opensource@onica.com.
+reported to the community leaders responsible for enforcement at <opensource@onica.com>.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
@@ -121,8 +120,8 @@ version 2.0, available at
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
-[homepage]: https://www.contributor-covenant.org
-
For answers to common questions about this code of conduct, see the FAQ at
<https://www.contributor-covenant.org/faq>. Translations are available at
<https://www.contributor-covenant.org/translations>.
+
+[homepage]: https://www.contributor-covenant.org
diff --git a/Makefile b/Makefile
index 0b977b583..e00bbb94d 100644
--- a/Makefile
+++ b/Makefile
@@ -30,14 +30,13 @@ build-pyinstaller-folder: clean create-tfenv-ver-file version ## build Pyinstall
bash ./.github/scripts/cicd/build_pyinstaller.sh folder
clean: ## remove generated file from the project directory
- rm -rf build/
- rm -rf dist/
- rm -rf runway.egg-info/
- rm -rf tmp/
- rm -rf src/
- rm -rf postinstall.js preuninstall.js .coverage .npmignore
- find . -name ".runway" -type d -prune -exec rm -rf '{}' +
- @make -C docs clean
+ rm -rf ./build/ ./dist/ ./src/ ./tmp/ ./runway.egg-info/;
+ rm -rf ./.pytest_cache
+ find . -type d -name "node_modules" -prune -exec rm -rf '{}' +;
+ find . -type d -name ".runway" -prune -exec rm -rf '{}' +;
+ find . -type f -name "*.py[co]" -delete;
+ find . -type d -name "__pycache__" -prune -exec rm -rf '{}' +;
+ @$(MAKE) --no-print-directory -C docs clean;
cov-report: ## display a report in the terminal of files missing coverage
@poetry run coverage report \
@@ -59,42 +58,40 @@ create-tfenv-ver-file: ## create a tfenv version file using the latest version
curl --silent https://releases.hashicorp.com/index.json | jq -r '.terraform.versions | to_entries | map(select(.key | contains ("-") | not)) | sort_by(.key | split(".") | map(tonumber))[-1].key' | egrep -o '^[0-9]*\.[0-9]*\.[0-9]*' > runway/templates/terraform/.terraform-version
docs: ## delete current HTML docs & build fresh HTML docs
- @make -C docs docs
+ @$(MAKE) --no-print-directory -C docs docs
docs-changes: ## build HTML docs; only builds changes detected by Sphinx
- @make -C docs html
+ @$(MAKE) --no-print-directory -C docs html
+
+fix: fix-ruff fix-black run-pre-commit ## run all automatic fixes
fix-black: ## automatically fix all black errors
@poetry run black .
-fix-isort: ## automatically fix all isort errors
- @poetry run isort .
+fix-imports: ## automatically fix all import sorting errors
+ @poetry run ruff check . --fix-only --fixable I001
+
+fix-ruff: ## automatically fix everything ruff can fix (implies fix-imports)
+ @poetry run ruff check . --fix-only
+
+fix-ruff-tests:
+ @poetry run ruff check ./tests --fix-only --unsafe-fixes
-lint: lint-isort lint-black lint-pyright lint-flake8 lint-pylint ## run all linters
+lint: lint-black lint-ruff lint-pyright ## run all linters
lint-black: ## run black
@echo "Running black... If this fails, run 'make fix-black' to resolve."
@poetry run black . --check --color --diff
@echo ""
-lint-flake8: ## run flake8
- @echo "Running flake8..."
- @poetry run flake8 --config=setup.cfg
- @echo ""
-
-lint-isort: ## run isort
- @echo "Running isort... If this fails, run 'make fix-isort' to resolve."
- @poetry run isort . --check-only
- @echo ""
-
-lint-pylint: ## run pylint
- @echo "Running pylint..."
- @poetry run pylint runway tests --rcfile=pyproject.toml
- @echo ""
-
lint-pyright: ## run pyright
@echo "Running pyright..."
- @npm run-script py-type-check
+ @npm exec --no -- pyright --venv-path ./
+ @echo ""
+
+lint-ruff: ## run ruff
+ @echo "Running ruff... If this fails, run 'make fix-ruff' to resolve some error automatically, other require manual action."
+ @poetry run ruff check .
@echo ""
npm-ci: ## run "npm ci" with the option to ignore scripts - required to succeed for this project
diff --git a/README.md b/README.md
index bc0223f7e..cfbaff764 100644
--- a/README.md
+++ b/README.md
@@ -12,25 +12,23 @@ Runway is a lightweight integration app designed to ease management of infrastru
Its main goals are to encourage GitOps best-practices, avoid convoluted Makefiles/scripts (enabling identical deployments from a workstation or CI job), and enable developers/admins to use the best tool for any given job.
-
## Features
-* Centralized environment-specific configuration
-* Automatic environment identification from git branches
-* Automatic linting/verification
-* Support of IAM roles to assume for each deployment
-* Terraform backend/workspace config management w/per-environment tfvars
-* Automatic kubectl/terraform version management per-environment
+- Centralized environment-specific configuration
+- Automatic environment identification from git branches
+- Automatic linting/verification
+- Support of IAM roles to assume for each deployment
+- Terraform backend/workspace config management w/per-environment tfvars
+- Automatic kubectl/terraform version management per-environment
### Supported Deployment Tools
-* AWS CDK
-* Kubectl
-* Serverless Framework
-* CFNgin (CloudFormation)
-* Static websites (build & deploy to S3+CloudFront)
-* Terraform
-
+- AWS CDK
+- Kubectl
+- Serverless Framework
+- CFNgin (CloudFormation)
+- Static websites (build & deploy to S3+CloudFront)
+- Terraform
## Example
@@ -51,7 +49,6 @@ deployments:
The example above contains enough information for Runway to deploy all resources, Lambda functions, and a static website backed by S3 and CloudFront in either dev or prod environments.
-
## Install
Runway is available via any of the following installation methods. Use whatever works best for your project/team (it's the same application no matter how you obtain it).
@@ -61,7 +58,7 @@ Runway is available via any of the following installation methods. Use whatever
Use one of the endpoints below to download a single-binary executable version of Runway based on your operating system.
| Operating System | Endpoint |
-|------------------|----------------------------------------|
+| ---------------- | -------------------------------------- |
| Linux            | <https://oni.ca/runway/latest/linux>   |
| macOS            | <https://oni.ca/runway/latest/osx>     |
| Windows          | <https://oni.ca/runway/latest/windows> |
@@ -74,7 +71,6 @@ $ ./runway new
**Suggested use:** CloudFormation or Terraform projects
-
### npm
```shell
@@ -84,7 +80,6 @@ $ npx runway new
**Suggested use:** Serverless or AWS CDK projects
-
### pip (or poetry, etc)
```shell
@@ -97,7 +92,6 @@ $ poetry run runway new
**Suggested use:** Python projects
-
## Documentation
See the [doc site](https://docs.onica.com/projects/runway) for full documentation.
diff --git a/docs/README.md b/docs/README.md
index 91be77776..c5edb8201 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -14,7 +14,7 @@ To record or render a new gif, terminalizer must be installed (globally is fine)
### Caveats
-- node <= 10 is required due to dependency requirements (`nvm install 10` or `nvm use 10`)
+- node \<= 10 is required due to dependency requirements (`nvm install 10` or `nvm use 10`)
- `terminalizer@0.6.1` must be used (`npm i -g terminalizer@0.6.1`)
- 0.7 changed the resolution of the GIF which increases the size 3x
@@ -41,4 +41,4 @@ To render a new copy of the gif, just run `terminalizer render runway-example.ym
This will take some time to complete.
We need to reduce the size of the rendered GIF so it can be served from GitHub to be viewable on PyPI.
-To do this, the GIF must be compressed ([GIF Compressor](https://gifcompressor.com/) was used) to achieve the <5MB size required (GitHub restriction).
+To do this, the GIF must be compressed ([GIF Compressor](https://gifcompressor.com/) was used) to achieve the \<5MB size required (GitHub restriction).
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 2c28c6fcc..290fc2d3b 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -2,9 +2,8 @@
https://www.sphinx-doc.org/en/master/usage/configuration.html
-"""
+""" # noqa: INP001
-# pylint: skip-file
import os
from pathlib import Path
@@ -18,7 +17,7 @@
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
project = "Runway"
-copyright = "2021, Onica Group"
+copyright = "2021, Onica Group" # noqa: A001
author = "Onica Group"
release = Version.from_git().serialize(metadata=False, style=Style.SemVer)
version = ".".join(release.split(".")[:2]) # short X.Y version
@@ -52,7 +51,7 @@
master_doc = "index"
needs_extensions = {}
needs_sphinx = "3.5"
-nitpicky = False # TODO enable nitpicky
+nitpicky = False # TODO (kyle): enable nitpicky
primary_domain = "py"
pygments_style = "material" # syntax highlighting style
# Appended to the end of each rendered file
diff --git a/infrastructure/blueprints/admin_role.py b/infrastructure/blueprints/admin_role.py
index d98cbf31b..caf91209d 100644
--- a/infrastructure/blueprints/admin_role.py
+++ b/infrastructure/blueprints/admin_role.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, ClassVar, Dict, Optional
+from typing import TYPE_CHECKING, ClassVar, Optional
import awacs.sts
from awacs.aws import Allow, AWSPrincipal, PolicyDocument, Statement
@@ -19,7 +19,7 @@
class AdminRole(Blueprint):
"""Blueprint for an admin role."""
- VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {
+ VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {
"CrossAccountAccessAccountIds": {"type": list, "default": []},
"PermissionsBoundary": {"type": str},
"RoleName": {"type": str, "default": ""},
@@ -34,9 +34,7 @@ def assume_role_policy(self) -> PolicyDocument:
Statement(
Action=[awacs.sts.AssumeRole],
Effect=Allow,
- Principal=AWSPrincipal(
- self.variables["CrossAccountAccessAccountIds"]
- ),
+ Principal=AWSPrincipal(self.variables["CrossAccountAccessAccountIds"]),
)
)
return policy_doc
diff --git a/infrastructure/blueprints/admin_user.py b/infrastructure/blueprints/admin_user.py
index cecd3bb9a..c3c8315b9 100644
--- a/infrastructure/blueprints/admin_user.py
+++ b/infrastructure/blueprints/admin_user.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, ClassVar, Dict, Optional
+from typing import TYPE_CHECKING, ClassVar
from troposphere import NoValue
from troposphere.iam import User
@@ -17,7 +17,7 @@
class AdminUser(Blueprint):
"""Blueprint for an admin user."""
- VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {
+ VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {
"PermissionsBoundary": {"type": str},
"UserName": {"type": str, "default": ""},
}
@@ -42,7 +42,7 @@ def user(self) -> User:
return user
@cached_property
- def username(self) -> Optional[str]:
+ def username(self) -> str | None:
"""Name of the user being created."""
val = self.variables["UserName"]
if val == "":
@@ -53,4 +53,4 @@ def create_template(self) -> None:
"""Create a template from the Blueprint."""
self.template.set_description("Admin user")
self.template.set_version("2010-09-09")
- self.user # pylint: disable=pointless-statement
+ self.user # noqa: B018
diff --git a/infrastructure/blueprints/cfngin_bucket.py b/infrastructure/blueprints/cfngin_bucket.py
index 20718958c..ce41af817 100644
--- a/infrastructure/blueprints/cfngin_bucket.py
+++ b/infrastructure/blueprints/cfngin_bucket.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, ClassVar, Dict
+from typing import TYPE_CHECKING, ClassVar
from troposphere import And, Equals, If, Not, NoValue, s3
@@ -17,7 +17,7 @@
class CfnginBucket(Blueprint):
"""Blueprint for a CFNgin Bucket."""
- VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {
+ VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {
"BucketName": {
"type": CFNString,
"description": "Name for the S3 bucket",
@@ -63,11 +63,7 @@ def create_template(self) -> None:
BucketName=self.bucket_name,
DeletionPolicy=self.variables["DeletionPolicy"],
LifecycleConfiguration=s3.LifecycleConfiguration(
- Rules=[
- s3.LifecycleRule(
- NoncurrentVersionExpirationInDays=30, Status="Enabled"
- )
- ]
+ Rules=[s3.LifecycleRule(NoncurrentVersionExpirationInDays=30, Status="Enabled")]
),
VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
)
diff --git a/infrastructure/blueprints/prevent_privilege_escalation.py b/infrastructure/blueprints/prevent_privilege_escalation.py
index 5cd3420e6..b806a2158 100644
--- a/infrastructure/blueprints/prevent_privilege_escalation.py
+++ b/infrastructure/blueprints/prevent_privilege_escalation.py
@@ -6,7 +6,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, ClassVar, Dict, List, Union
+from typing import TYPE_CHECKING, ClassVar, Union
import awacs.iam
import awacs.sts
@@ -35,7 +35,7 @@ class AdminPreventPrivilegeEscalation(Blueprint):
DESCRIPTION: ClassVar[str] = "Permission boundary for admin users."
POLICY_NAME: ClassVar[str] = "AdminPreventPrivilegeEscalation"
- VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {
+ VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {
"ApprovedPermissionBoundaries": {
"default": [],
"description": "List of policy names (not ARNs) that are approved to "
@@ -55,42 +55,34 @@ def namespace(self) -> str:
return self.context.namespace
@cached_property
- def approved_boundary_policies(self) -> List[Sub]:
+ def approved_boundary_policies(self) -> list[Sub]:
"""List of approved permission boundary policies."""
- tmp = [self.policy_arn]
- for policy_name in self.variables["ApprovedPermissionBoundaries"]:
- tmp.append(
- Sub(
- f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:policy/{policy_name}"
- )
- )
- return tmp
+ return [
+ self.policy_arn,
+ *[
+ Sub(f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:policy/{policy_name}")
+ for policy_name in self.variables["ApprovedPermissionBoundaries"]
+ ],
+ ]
@cached_property
- def deny_assume_role_not_resources(self) -> List[Union[str, Sub]]:
+ def deny_assume_role_not_resources(self) -> list[Union[str, Sub]]:
"""List of IAM Role ARNs that can be assumed."""
- tmp: List[Union[str, Sub]] = [
- Sub(
- f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:role/{self.namespace}-*"
- )
+ tmp: list[Union[str, Sub]] = [
+ Sub(f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:role/{self.namespace}-*")
]
- for arn in self.variables["DenyAssumeRoleNotResources"]:
- tmp.append(arn)
+ tmp.extend(self.variables["DenyAssumeRoleNotResources"])
return tmp
@property
def policy_arn(self) -> Sub:
"""ARN of the IAM policy that will be created."""
- return Sub(
- f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:policy/{self.POLICY_NAME}"
- )
+ return Sub(f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:policy/{self.POLICY_NAME}")
@cached_property
def statement_allow_admin_access(self) -> Statement:
"""Statement to allow admin access."""
- return Statement(
- Action=[Action("*")], Effect=Allow, Resource=["*"], Sid="AllowAdminAccess"
- )
+ return Statement(Action=[Action("*")], Effect=Allow, Resource=["*"], Sid="AllowAdminAccess")
@cached_property
def statement_deny_alter_boundary_policy(self) -> Statement:
@@ -143,9 +135,7 @@ def statement_deny_create_without_boundary(self) -> Statement:
return Statement(
Action=[awacs.iam.CreateRole, awacs.iam.CreateUser],
Condition=Condition(
- StringNotEquals(
- {"iam:PermissionsBoundary": self.approved_boundary_policies}
- )
+ StringNotEquals({"iam:PermissionsBoundary": self.approved_boundary_policies})
),
Effect=Deny,
Resource=[
@@ -162,14 +152,8 @@ def statement_deny_onica_sso(self) -> Statement:
Action=[Action("*")],
Effect=Deny,
Resource=[
- Sub(
- "arn:${AWS::Partition}:cloudformation:*:${AWS::AccountId}:stack/"
- "onica-sso"
- ),
- Sub(
- "arn:${AWS::Partition}:cloudformation:*:${AWS::AccountId}:stack/"
- "onica-sso-*"
- ),
+ Sub("arn:${AWS::Partition}:cloudformation:*:${AWS::AccountId}:stack/onica-sso"),
+ Sub("arn:${AWS::Partition}:cloudformation:*:${AWS::AccountId}:stack/onica-sso-*"),
Sub("arn:${AWS::Partition}:iam::${AWS::AccountId}:policy/onica-sso"),
Sub("arn:${AWS::Partition}:iam::${AWS::AccountId}:policy/onica-sso-*"),
Sub("arn:${AWS::Partition}:iam::${AWS::AccountId}:role/onica-sso"),
@@ -186,9 +170,7 @@ def statement_deny_put_boundary(self) -> Statement:
awacs.iam.PutUserPermissionsBoundary,
],
Condition=Condition(
- StringNotEquals(
- {"iam:PermissionsBoundary": self.approved_boundary_policies}
- )
+ StringNotEquals({"iam:PermissionsBoundary": self.approved_boundary_policies})
),
Effect=Deny,
Resource=[
@@ -206,16 +188,14 @@ def statement_deny_remove_boundary_policy(self) -> Statement:
awacs.iam.DeleteRolePermissionsBoundary,
awacs.iam.DeleteUserPermissionsBoundary,
],
- Condition=Condition(
- StringEquals({"iam:PermissionsBoundary": self.policy_arn})
- ),
+ Condition=Condition(StringEquals({"iam:PermissionsBoundary": self.policy_arn})),
Effect=Deny,
Resource=["*"],
Sid="DenyRemovalOfBoundaryFromUserOrRole",
)
@cached_property
- def statements(self) -> List[Statement]:
+ def statements(self) -> list[Statement]:
"""List of statements to add to the policy."""
return [
self.statement_allow_admin_access,
diff --git a/infrastructure/blueprints/test_runner_boundary.py b/infrastructure/blueprints/test_runner_boundary.py
index 4c774e461..d1c811931 100644
--- a/infrastructure/blueprints/test_runner_boundary.py
+++ b/infrastructure/blueprints/test_runner_boundary.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import ClassVar, List
+from typing import ClassVar
import awacs.iam
import awacs.s3
@@ -123,10 +123,7 @@ def statement_deny_namespace(self) -> Statement:
Action("cloudformation", "List*"),
],
Resource=[
- Sub(
- "arn:aws:cloudformation:*:${AWS::AccountId}:stack/"
- f"{self.namespace}-*"
- ),
+ Sub(f"arn:aws:cloudformation:*:${{AWS::AccountId}}:stack/{self.namespace}-*"),
f"arn:aws:s3:::{self.namespace}",
f"arn:aws:s3:::{self.namespace}/*",
f"arn:aws:s3:::{self.namespace}-*",
@@ -135,9 +132,10 @@ def statement_deny_namespace(self) -> Statement:
)
@cached_property
- def statements(self) -> List[Statement]:
+ def statements(self) -> list[Statement]:
"""List of statements to add to the policy."""
- return super().statements + [
+ return [
+ *super().statements,
self.statement_deny_change_cfngin_bucket,
self.statement_deny_cloudtrail,
self.statement_deny_iam,
diff --git a/infrastructure/blueprints/test_runner_user.py b/infrastructure/blueprints/test_runner_user.py
index abf6ee80b..0d8e81779 100644
--- a/infrastructure/blueprints/test_runner_user.py
+++ b/infrastructure/blueprints/test_runner_user.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, ClassVar, Dict
+from typing import TYPE_CHECKING, ClassVar
import awacs.sts
from awacs.aws import Deny, PolicyDocument, Statement
@@ -17,7 +17,7 @@
class TestRunnerUser(AdminUser):
"""Blueprint for a test runner user."""
- VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {
+ VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {
"DenyAssumeRoleNotResources": {"type": list, "default": []},
"PermissionsBoundary": {"type": str},
"UserName": {"type": str, "default": ""},
@@ -34,8 +34,7 @@ def create_template(self) -> None:
Statement(
Action=[awacs.sts.AssumeRole],
Effect=Deny,
- NotResource=self.variables["DenyAssumeRoleNotResources"]
- or ["*"],
+ NotResource=self.variables["DenyAssumeRoleNotResources"] or ["*"],
)
],
Version="2012-10-17",
diff --git a/infrastructure/public/common/bucket-and-user.cdk/package.json b/infrastructure/public/common/bucket-and-user.cdk/package.json
index b883231ef..7e24b9189 100644
--- a/infrastructure/public/common/bucket-and-user.cdk/package.json
+++ b/infrastructure/public/common/bucket-and-user.cdk/package.json
@@ -1,26 +1,26 @@
{
- "name": "myapp",
- "version": "1.0.0",
- "scripts": {
- "build": "tsc",
- "lint": "tslint -c tslint.json 'bin/**/*.ts' 'lib/**/*.ts'",
- "watch": "tsc -w",
- "cdk": "cdk"
- },
- "devDependencies": {
- "@types/node": "8.10.40",
- "@types/source-map-support": "^0.5.0",
- "aws-cdk": "^2.101.1",
- "aws-sdk": "^2.1511.0",
- "prompt": "^1.0.0",
- "ts-node": "^8.1.0",
- "tslint": "^5.20.0",
- "typescript": "^3.3.3333"
- },
- "dependencies": {
- "@aws-cdk/aws-iam": "^1.204.0",
- "@aws-cdk/aws-s3": "^1.204.0",
- "@aws-cdk/core": "^1.15.0",
- "source-map-support": "^0.5.9"
- }
+ "dependencies": {
+ "@aws-cdk/aws-iam": "^1.204.0",
+ "@aws-cdk/aws-s3": "^1.204.0",
+ "@aws-cdk/core": "^1.15.0",
+ "source-map-support": "^0.5.9"
+ },
+ "devDependencies": {
+ "@types/node": "8.10.40",
+ "@types/source-map-support": "^0.5.0",
+ "aws-cdk": "^2.101.1",
+ "aws-sdk": "^2.1511.0",
+ "prompt": "^1.0.0",
+ "ts-node": "^8.1.0",
+ "tslint": "^5.20.0",
+ "typescript": "^3.3.3333"
+ },
+ "name": "myapp",
+ "scripts": {
+ "build": "tsc",
+ "cdk": "cdk",
+ "lint": "tslint -c tslint.json 'bin/**/*.ts' 'lib/**/*.ts'",
+ "watch": "tsc -w"
+ },
+ "version": "1.0.0"
}
diff --git a/package.json b/package.json
index 02ba66bf1..f833534a6 100644
--- a/package.json
+++ b/package.json
@@ -1,11 +1,19 @@
{
- "name": "runway",
+ "author": {
+ "email": "opensource@onica.com",
+ "name": "Onica Group LLC",
+ "url": "https://onica.com"
+ },
+ "bugs": {
+ "url": "https://github.com/onicagroup/runway/issues"
+ },
+ "dependencies": {
+ "tar": "^7.2.0"
+ },
"description": "Simplify infrastructure/app testing/deployment",
- "main": "NA",
- "scripts": {
- "postinstall": "node ./postinstall.js",
- "preuninstall": "node ./preuninstall.js",
- "py-type-check": "pyright --venv-path ./"
+ "devDependencies": {
+ "cspell": "^8.11.0",
+ "pyright": "^1.1.223"
},
"files": [
"src/osx/*",
@@ -14,10 +22,7 @@
"postinstall.js",
"preuninstall.js"
],
- "repository": {
- "type": "git",
- "url": "https://github.com/onicagroup/runway"
- },
+ "homepage": "https://github.com/onicagroup/runway",
"keywords": [
"aws",
"ci",
@@ -30,11 +35,6 @@
"cloudformation",
"cdk"
],
- "author": {
- "name": "Onica Group LLC",
- "email": "opensource@onica.com",
- "url": "https://onica.com"
- },
"license": "Apache-2.0",
"licenses": [
{
@@ -42,24 +42,18 @@
"url": "http://www.apache.org/licenses/LICENSE-2.0"
}
],
- "bugs": {
- "url": "https://github.com/onicagroup/runway/issues"
- },
- "homepage": "https://github.com/onicagroup/runway",
+ "main": "NA",
+ "name": "runway",
"os": [
"darwin",
"linux",
"win32"
],
- "version": "2.0.0-dev",
- "dependencies": {
- "tar": "^7.2.0"
- },
"publishConfig": {
"access": "public"
},
- "devDependencies": {
- "cspell": "^8.11.0",
- "pyright": "^1.1.223"
+ "scripts": {
+ "postinstall": "node ./postinstall.js",
+ "preuninstall": "node ./preuninstall.js"
}
}
diff --git a/poetry.lock b/poetry.lock
index a70c80f7b..681a69d05 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -22,25 +22,6 @@ files = [
{file = "altgraph-0.17.4.tar.gz", hash = "sha256:1b5afbb98f6c4dcadb2e2ae6ab9fa994bbb8c1d75f4fa96d340f9437ae454406"},
]
-[[package]]
-name = "astroid"
-version = "2.15.5"
-description = "An abstract syntax tree for Python with inference support."
-optional = false
-python-versions = ">=3.7.2"
-files = [
- {file = "astroid-2.15.5-py3-none-any.whl", hash = "sha256:078e5212f9885fa85fbb0cf0101978a336190aadea6e13305409d099f71b2324"},
- {file = "astroid-2.15.5.tar.gz", hash = "sha256:1039262575027b441137ab4a62a793a9b43defb42c32d5670f38686207cd780f"},
-]
-
-[package.dependencies]
-lazy-object-proxy = ">=1.4.0"
-typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""}
-wrapt = [
- {version = ">=1.11,<2", markers = "python_version < \"3.11\""},
- {version = ">=1.14,<2", markers = "python_version >= \"3.11\""},
-]
-
[[package]]
name = "attrs"
version = "22.2.0"
@@ -1034,20 +1015,6 @@ ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
-[[package]]
-name = "dill"
-version = "0.3.6"
-description = "serialize all of python"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"},
- {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"},
-]
-
-[package.extras]
-graph = ["objgraph (>=1.7.2)"]
-
[[package]]
name = "distlib"
version = "0.3.6"
@@ -1184,102 +1151,6 @@ files = [
docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.2.1)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
-[[package]]
-name = "flake8"
-version = "7.1.0"
-description = "the modular source code checker: pep8 pyflakes and co"
-optional = false
-python-versions = ">=3.8.1"
-files = [
- {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"},
- {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"},
-]
-
-[package.dependencies]
-mccabe = ">=0.7.0,<0.8.0"
-pycodestyle = ">=2.12.0,<2.13.0"
-pyflakes = ">=3.2.0,<3.3.0"
-
-[[package]]
-name = "flake8-bugbear"
-version = "24.4.26"
-description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
-optional = false
-python-versions = ">=3.8.1"
-files = [
- {file = "flake8_bugbear-24.4.26-py3-none-any.whl", hash = "sha256:cb430dd86bc821d79ccc0b030789a9c87a47a369667f12ba06e80f11305e8258"},
- {file = "flake8_bugbear-24.4.26.tar.gz", hash = "sha256:ff8d4ba5719019ebf98e754624c30c05cef0dadcf18a65d91c7567300e52a130"},
-]
-
-[package.dependencies]
-attrs = ">=19.2.0"
-flake8 = ">=6.0.0"
-
-[package.extras]
-dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"]
-
-[[package]]
-name = "flake8-comprehensions"
-version = "3.14.0"
-description = "A flake8 plugin to help you write better list/set/dict comprehensions."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "flake8_comprehensions-3.14.0-py3-none-any.whl", hash = "sha256:7b9d07d94aa88e62099a6d1931ddf16c344d4157deedf90fe0d8ee2846f30e97"},
- {file = "flake8_comprehensions-3.14.0.tar.gz", hash = "sha256:81768c61bfc064e1a06222df08a2580d97de10cb388694becaf987c331c6c0cf"},
-]
-
-[package.dependencies]
-flake8 = ">=3.0,<3.2.0 || >3.2.0"
-
-[[package]]
-name = "flake8-docstrings"
-version = "1.7.0"
-description = "Extension for flake8 which uses pydocstyle to check docstrings"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"},
- {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"},
-]
-
-[package.dependencies]
-flake8 = ">=3"
-pydocstyle = ">=2.1"
-
-[[package]]
-name = "flake8-print"
-version = "5.0.0"
-description = "print statement checker plugin for flake8"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "flake8-print-5.0.0.tar.gz", hash = "sha256:76915a2a389cc1c0879636c219eb909c38501d3a43cc8dae542081c9ba48bdf9"},
- {file = "flake8_print-5.0.0-py3-none-any.whl", hash = "sha256:84a1a6ea10d7056b804221ac5e62b1cee1aefc897ce16f2e5c42d3046068f5d8"},
-]
-
-[package.dependencies]
-flake8 = ">=3.0"
-pycodestyle = "*"
-
-[[package]]
-name = "flake8-use-fstring"
-version = "1.4"
-description = "Flake8 plugin for string formatting style."
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "flake8-use-fstring-1.4.tar.gz", hash = "sha256:6550bf722585eb97dffa8343b0f1c372101f5c4ab5b07ebf0edd1c79880cdd39"},
-]
-
-[package.dependencies]
-flake8 = ">=3"
-
-[package.extras]
-ci = ["coverage (==4.*)", "coveralls", "flake8-builtins", "flake8-commas", "flake8-fixme", "flake8-print", "flake8-quotes", "flake8-todo", "pytest (>=4)", "pytest-cov (>=2)"]
-dev = ["coverage (==4.*)", "flake8-builtins", "flake8-commas", "flake8-fixme", "flake8-print", "flake8-quotes", "flake8-todo", "pytest (>=4)", "pytest-cov (>=2)"]
-test = ["coverage (==4.*)", "flake8-builtins", "flake8-commas", "flake8-fixme", "flake8-print", "flake8-quotes", "flake8-todo", "pytest (>=4)", "pytest-cov (>=2)"]
-
[[package]]
name = "formic2"
version = "1.0.3"
@@ -1425,20 +1296,6 @@ files = [
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
-[[package]]
-name = "isort"
-version = "5.13.2"
-description = "A Python utility / library to sort Python imports."
-optional = false
-python-versions = ">=3.8.0"
-files = [
- {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
- {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
-]
-
-[package.extras]
-colors = ["colorama (>=0.4.6)"]
-
[[package]]
name = "jinja2"
version = "3.1.4"
@@ -1587,51 +1444,6 @@ atomic-cache = ["atomicwrites"]
nearley = ["js2py"]
regex = ["regex"]
-[[package]]
-name = "lazy-object-proxy"
-version = "1.9.0"
-description = "A fast and thorough lazy object proxy."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"},
- {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"},
- {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"},
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"},
- {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"},
- {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"},
-]
-
[[package]]
name = "lib-detect-testenv"
version = "2.0.3"
@@ -1720,17 +1532,6 @@ files = [
{file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
]
-[[package]]
-name = "mccabe"
-version = "0.7.0"
-description = "McCabe checker, plugin for flake8"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
- {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
-]
-
[[package]]
name = "mock"
version = "5.1.0"
@@ -2123,20 +1924,6 @@ files = [
{file = "pefile-2023.2.7.tar.gz", hash = "sha256:82e6114004b3d6911c77c3953e3838654b04511b8b66e8583db70c65998017dc"},
]
-[[package]]
-name = "pep8-naming"
-version = "0.14.1"
-description = "Check PEP-8 naming conventions, plugin for flake8"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pep8-naming-0.14.1.tar.gz", hash = "sha256:1ef228ae80875557eb6c1549deafed4dabbf3261cfcafa12f773fe0db9be8a36"},
- {file = "pep8_naming-0.14.1-py3-none-any.whl", hash = "sha256:63f514fc777d715f935faf185dedd679ab99526a7f2f503abb61587877f7b1c5"},
-]
-
-[package.dependencies]
-flake8 = ">=5.0.0"
-
[[package]]
name = "pip"
version = "23.3.1"
@@ -2218,17 +2005,6 @@ nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"
-[[package]]
-name = "pycodestyle"
-version = "2.12.0"
-description = "Python style guide checker"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"},
- {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"},
-]
-
[[package]]
name = "pycparser"
version = "2.21"
@@ -2292,34 +2068,6 @@ typing-extensions = ">=4.2.0"
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
-[[package]]
-name = "pydocstyle"
-version = "6.3.0"
-description = "Python docstring style checker"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"},
- {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"},
-]
-
-[package.dependencies]
-snowballstemmer = ">=2.2.0"
-
-[package.extras]
-toml = ["tomli (>=1.2.3)"]
-
-[[package]]
-name = "pyflakes"
-version = "3.2.0"
-description = "passive checker of Python programs"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"},
- {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
-]
-
[[package]]
name = "pygments"
version = "2.17.2"
@@ -2397,35 +2145,6 @@ importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""}
packaging = ">=22.0"
setuptools = ">=42.0.0"
-[[package]]
-name = "pylint"
-version = "2.17.4"
-description = "python code static checker"
-optional = false
-python-versions = ">=3.7.2"
-files = [
- {file = "pylint-2.17.4-py3-none-any.whl", hash = "sha256:7a1145fb08c251bdb5cca11739722ce64a63db479283d10ce718b2460e54123c"},
- {file = "pylint-2.17.4.tar.gz", hash = "sha256:5dcf1d9e19f41f38e4e85d10f511e5b9c35e1aa74251bf95cdd8cb23584e2db1"},
-]
-
-[package.dependencies]
-astroid = ">=2.15.4,<=2.17.0-dev0"
-colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
-dill = [
- {version = ">=0.2", markers = "python_version < \"3.11\""},
- {version = ">=0.3.6", markers = "python_version >= \"3.11\""},
-]
-isort = ">=4.2.5,<6"
-mccabe = ">=0.6,<0.8"
-platformdirs = ">=2.2.0"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-tomlkit = ">=0.10.1"
-typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
-
-[package.extras]
-spelling = ["pyenchant (>=3.2,<4.0)"]
-testutils = ["gitpython (>3)"]
-
[[package]]
name = "pyopenssl"
version = "24.1.0"
@@ -2907,6 +2626,33 @@ files = [
[package.dependencies]
docutils = ">=0.11,<1.0"
+[[package]]
+name = "ruff"
+version = "0.5.4"
+description = "An extremely fast Python linter and code formatter, written in Rust."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "ruff-0.5.4-py3-none-linux_armv6l.whl", hash = "sha256:82acef724fc639699b4d3177ed5cc14c2a5aacd92edd578a9e846d5b5ec18ddf"},
+ {file = "ruff-0.5.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:da62e87637c8838b325e65beee485f71eb36202ce8e3cdbc24b9fcb8b99a37be"},
+ {file = "ruff-0.5.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e98ad088edfe2f3b85a925ee96da652028f093d6b9b56b76fc242d8abb8e2059"},
+ {file = "ruff-0.5.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c55efbecc3152d614cfe6c2247a3054cfe358cefbf794f8c79c8575456efe19"},
+ {file = "ruff-0.5.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9b85eaa1f653abd0a70603b8b7008d9e00c9fa1bbd0bf40dad3f0c0bdd06793"},
+ {file = "ruff-0.5.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0cf497a47751be8c883059c4613ba2f50dd06ec672692de2811f039432875278"},
+ {file = "ruff-0.5.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:09c14ed6a72af9ccc8d2e313d7acf7037f0faff43cde4b507e66f14e812e37f7"},
+ {file = "ruff-0.5.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:628f6b8f97b8bad2490240aa84f3e68f390e13fabc9af5c0d3b96b485921cd60"},
+ {file = "ruff-0.5.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3520a00c0563d7a7a7c324ad7e2cde2355733dafa9592c671fb2e9e3cd8194c1"},
+ {file = "ruff-0.5.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93789f14ca2244fb91ed481456f6d0bb8af1f75a330e133b67d08f06ad85b516"},
+ {file = "ruff-0.5.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:029454e2824eafa25b9df46882f7f7844d36fd8ce51c1b7f6d97e2615a57bbcc"},
+ {file = "ruff-0.5.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9492320eed573a13a0bc09a2957f17aa733fff9ce5bf00e66e6d4a88ec33813f"},
+ {file = "ruff-0.5.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a6e1f62a92c645e2919b65c02e79d1f61e78a58eddaebca6c23659e7c7cb4ac7"},
+ {file = "ruff-0.5.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:768fa9208df2bec4b2ce61dbc7c2ddd6b1be9fb48f1f8d3b78b3332c7d71c1ff"},
+ {file = "ruff-0.5.4-py3-none-win32.whl", hash = "sha256:e1e7393e9c56128e870b233c82ceb42164966f25b30f68acbb24ed69ce9c3a4e"},
+ {file = "ruff-0.5.4-py3-none-win_amd64.whl", hash = "sha256:58b54459221fd3f661a7329f177f091eb35cf7a603f01d9eb3eb11cc348d38c4"},
+ {file = "ruff-0.5.4-py3-none-win_arm64.whl", hash = "sha256:bd53da65f1085fb5b307c38fd3c0829e76acf7b2a912d8d79cadcdb4875c1eb7"},
+ {file = "ruff-0.5.4.tar.gz", hash = "sha256:2795726d5f71c4f4e70653273d1c23a8182f07dd8e48c12de5d867bfb7557eed"},
+]
+
[[package]]
name = "s3transfer"
version = "0.10.0"
@@ -3335,17 +3081,6 @@ files = [
{file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"},
]
-[[package]]
-name = "tomlkit"
-version = "0.11.6"
-description = "Style preserving TOML library"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"},
- {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"},
-]
-
[[package]]
name = "troposphere"
version = "4.8.0"
@@ -3502,90 +3237,6 @@ files = [
[package.extras]
test = ["pytest (>=6.0.0)", "setuptools (>=65)"]
-[[package]]
-name = "wrapt"
-version = "1.15.0"
-description = "Module for decorators, wrappers and monkey patching."
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
-files = [
- {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"},
- {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"},
- {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"},
- {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"},
- {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"},
- {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"},
- {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"},
- {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"},
- {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"},
- {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"},
- {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"},
- {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"},
- {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"},
- {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"},
- {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"},
- {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"},
- {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"},
- {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"},
- {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"},
- {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"},
- {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"},
- {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"},
- {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"},
- {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"},
- {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"},
- {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"},
- {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"},
-]
-
[[package]]
name = "xmltodict"
version = "0.13.0"
@@ -3633,4 +3284,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
[metadata]
lock-version = "2.0"
python-versions = ">=3.9, <3.13"
-content-hash = "0c57981b4583118568443d4438c10a1bac20399d10faac78989c209b16fc904f"
+content-hash = "c2bf0dfcf1f2093cf1c4ecad97d68c84738bf2c447fb7b94f67bdea7726c4a47"
diff --git a/pyproject.toml b/pyproject.toml
index a241026b0..bcc0541af 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,38 +1,37 @@
[tool.poetry]
name = "runway"
version = "2.0.0-dev" # do not change
-description = "Simplify infrastructure/app testing/deployment"
-license = "Apache-2.0"
authors = [
"Onica Group LLC ",
]
-maintainers = [
- "Kyle Finley ", "Sam Fakhreddine "
-]
-readme = "README.md"
-homepage = "https://github.com/onicagroup/runway"
-repository = "https://github.com/onicagroup/runway"
-documentation = "https://docs.onica.com/projects/runway"
-keywords = ["cli"]
classifiers = [
"Intended Audience :: Developers",
- "Topic :: Utilities",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
- "Programming Language :: Python :: 3.12"
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.9",
+ "Topic :: Utilities",
+]
+description = "Simplify infrastructure/app testing/deployment"
+documentation = "https://docs.onica.com/projects/runway"
+homepage = "https://github.com/onicagroup/runway"
+keywords = ["cli"]
+license = "Apache-2.0"
+maintainers = [
+ "Kyle Finley ",
+ "Sam Fakhreddine ",
]
packages = [
- { include = "runway" },
+ {include = "runway"},
]
+readme = "README.md"
+repository = "https://github.com/onicagroup/runway"
[tool.poetry.dependencies]
python = ">=3.9, <3.13"
-
-"backports.cached_property" = { version = "*", python = "<3.8" }
awacs = "*"
boto3 = "^1.16"
cfn-lint = "*"
@@ -43,46 +42,34 @@ docker = ">=3.0.0" # used in runway.cfngin.hooks
formic2 = "*" # only used in runway.cfngin.hooks.aws_lambda
gitpython = "*"
igittigitt = ">=2.0.5"
-importlib-metadata = { version = "*", python = "<3.8" }
jinja2 = ">=2.7" # used in runway.cfngin.blueprints.raw
+moto = "3.0.5"
packaging = "*" # component of setuptools needed for version compare
+pipenv = "2022.1.8"
pyOpenSSL = "*" # For embedded hook & associated script usage
pydantic = "^1.4"
pyhcl = "^0.4" # does not support HCL2, possibly move to extras_require in the future
+pyinstaller = "^6.2.0"
python-hcl2 = ">=3.0.0"
pyyaml = ">5.4"
requests = "*"
send2trash = "*"
+testfixtures = "^7.0.3"
tomli = ">=1.2.2"
troposphere = ">=2.4, <5"
typing_extensions = "*" # only really needed for < 3.8 but can still be used in >= 3.8
urllib3 = "*" # allow us to follow botocore's hard pinning without needing to update our own
-yamllint = "*"
-pipenv = "2022.1.8"
-moto = "3.0.5"
-testfixtures = "^7.0.3"
wheel = "^0.42.0"
-pyinstaller = "^6.2.0"
+yamllint = "*"
[tool.poetry.group.dev.dependencies]
-black = ">=22.1"
-coverage = { version = ">=6.3", extras = ["toml"] }
+coverage = {extras = ["toml"], version = ">=6.3"}
doc8 = ">=0.10" # for linting with vscode rst extension
dunamai = "^1.5"
-flake8 = ">=4.0.1"
-flake8-bugbear = ">=21.9.2" # flake8 plugin
-flake8-comprehensions = ">=3.7.0" # flake8 plugin
-flake8-docstrings = ">=1.6" # flake8 plugin
-flake8-print = ">=4.0.0" # flake8 plugin
-flake8-use-fstring = ">=1.3" # flake8 plugin
-isort = ">=5.12"
mock = ">=4.0"
-moto = { version = ">=3.0", extras = ["ec2", "ecs", "iam", "s3", "ssm"] }
-pep8-naming = ">=0.12.1" # flake8 plugin
+moto = {extras = ["ec2", "ecs", "iam", "s3", "ssm"], version = ">=3.0"}
pipenv = "^2022.1.8" # only used in tests
-pre-commit = ">=2.14"
-pydocstyle = ">=6.1.1" # flake8 plugin
-pylint = ">=2.12"
+pre-commit = "^3.7.1"
pytest = ">=7.0"
pytest-cov = ">=3.0" # pytest plugin
pytest-mock = ">=3.7" # pytest plugin
@@ -104,6 +91,10 @@ sphinx-tabs = "^3.2"
sphinxcontrib-apidoc = "^0.3"
sphinxcontrib-programoutput = "^0.17"
+[tool.poetry.group.lint.dependencies]
+black = "^24.4.2"
+ruff = "^0.5.4"
+
[tool.poetry.group.types.dependencies]
mypy-boto3 = "^1.16" # importable boto3 type annotations
@@ -134,11 +125,6 @@ runway = "runway._cli.main:cli"
[tool.poetry.urls]
"Bug Tracker" = "https://github.com/onicagroup/runway/issues"
-[build-system]
-requires = ["poetry_core>=1.0.7"]
-build-backend = "poetry.core.masonry.api"
-
-
[tool.black]
force-exclude = '''
/(
@@ -158,187 +144,52 @@ force-exclude = '''
)/
'''
include = '\.pyi?$'
-line-length = 88
-target-version = ["py38", "py39"]
-
+line-length = 100
+target-version = ["py310", "py311", "py312", "py39"]
[tool.coverage.report]
exclude_lines = [
+ "@overload",
"cov: ignore", # standard exclude comment
+ "from pathlib import Path",
"if TYPE_CHECKING:", # excluded blocks
"if __name__ == .__main__.:",
"raise AssertionError", # defensive exceptions
"raise NotImplementedError",
- "from pathlib import Path",
- "@overload",
]
fail_under = 85
precision = 2
show_missing = true
-
[tool.coverage.run]
concurrency = [
"multiprocessing",
"thread",
]
omit = [
+ "*/compat.py",
"*/runway/aws_sso_botocore/*", # TODO remove native support is added to botocore
"*/runway/cfngin/hooks/staticsite/auth_at_edge/templates/*",
- "*/compat.py",
"*/type_defs.py",
]
-[tool.isort]
-profile = "black"
-known_local_folder = [
- "jwks_rsa",
- "shared",
- "update_urls",
-]
-skip = [
- ".demo",
- ".eggs",
- ".git",
- ".mypy_cache",
- ".runway",
- ".runway_cache",
- ".venv",
- "_build",
- "build",
- "dist",
- "integration_tests",
- "node_modules",
- "venv",
-]
-
-
-[tool.pylint.basic]
-# http://pylint.pycqa.org/en/latest/technical_reference/features.html#basic-checker
-attr-rgx = "([a-z_][a-z0-9_]{2,50}|VARIABLES)$"
-# attr-name-hint = "([a-z_][a-z0-9_]{2,50}|VARIABLES)$"
-good-names = [
- "_",
- "a",
- "b",
- "ci",
- "db",
- "f",
- "fn",
- "fp",
- "gb",
- "i",
- "id",
- "j",
- "k",
- "kb",
- "mb",
- "ok",
- "os",
- "ui",
- "v",
-]
-
-[tool.pylint.classes]
-# http://pylint.pycqa.org/en/latest/technical_reference/features.html#classes-checker
-defining-attr-methods = [
- "__init__",
- "__new__",
- "setUp",
-]
-exclude-protected=[
- "_asdict",
- "_fields",
- "_replace",
- "_source",
- "_make",
- "_session", # for boto3.session.Session
- "_prompter",
- "_client_config", # boto3.client.Client._client_config contains info like region
- "_endpoint", # boto3.client.Client._endpoint contains s3 endpoint info
- "_validate_props" # called on troposphere resources
-]
-
-[tool.pylint.design]
-# http://pylint.pycqa.org/en/latest/technical_reference/features.html#design-checker-options
-max-args = 10
-max-attributes = 20
-max-bool-expr = 5
-max-branches = 20
-max-locals = 25
-max-parents = 10
-max-public-methods = 30
-max-returns = 10
-max-statements = 50
-min-public-methods = 0
-
-[tool.pylint.format]
-# http://pylint.pycqa.org/en/latest/technical_reference/features.html#format-checker
-max-line-length = 120
-max-module-lines = 1000
-
-[tool.pylint.imports]
-# http://pylint.pycqa.org/en/latest/technical_reference/features.html#imports-checker
-allow-wildcard-with-all = "no"
-
-[tool.pylint.logging]
-# http://pylint.pycqa.org/en/latest/technical_reference/features.html#logging-checker
-logging-format-style = "old" # TODO update to new
-
-[tool.pylint.master]
-# http://pylint.pycqa.org/en/latest/technical_reference/features.html#general-options
-extension-pkg-whitelist = [
- "pydantic", # https://github.com/samuelcolvin/pydantic/issues/992#issuecomment-553545180
-]
-ignore-patterns = [
- ".+py[ci]$",
-]
-jobs = 0
-
-[tool.pylint.miscellaneous]
-# http://pylint.pycqa.org/en/latest/technical_reference/features.html#miscellaneous-checker
-notes = ["FIXME"]
-
-[tool.pylint.message_control]
-# http://pylint.pycqa.org/en/latest/technical_reference/features.html#messages-control-options
-disable = [
- "line-too-long", # flake8 overlap
- "missing-class-docstring", # flake8 (pydocstyle) overlap
- "missing-function-docstring", # flake8 (pydocstyle) overlap
- "missing-module-docstring", # flake8 (pydocstyle) overlap
- "similarities", # black overcomplicated this
- "ungrouped-imports", # false positive when using TYPE_CHECKING; isort should cover this
- "unused-import", # flake8 overlap (F401)
- "broad-exception-raised",
- "missing-timeout"
-]
-
-[tool.pylint.typecheck]
-# http://pylint.pycqa.org/en/latest/technical_reference/features.html#typecheck-checker
-ignored-classes = [
- "runway.config.ConfigComponent",
- "runway.utils.MutableMap",
-]
-ignored-modules = ["_typeshed", "distutils"]
-
-
[tool.pyright]
exclude = [
- "**/__pycache__",
"**/.demo",
"**/.eggs",
"**/.git",
"**/.runway",
"**/.venv",
+ "**/__pycache__",
"**/docs",
"**/node_modules",
"**/quickstarts",
- "**/typings",
"**/runway/aws_sso_botocore",
"**/runway/cfngin/hooks/staticsite/auth_at_edge/templates",
"**/runway/templates/cdk-py",
"**/tests/functional/cfngin/test_aws_lambda_hook/lambda_src",
- "**/tests/unit"
+ "**/tests/unit",
+ "**/typings",
]
extraPaths = [
"./.github/scripts/urlshortener",
@@ -360,7 +211,6 @@ typeCheckingMode = "strict"
useLibraryCodeForTypes = true
venv = ".venv"
-
[tool.pytest.ini_options]
addopts = [
"--cov-config=pyproject.toml",
@@ -378,3 +228,131 @@ python_classes = ["Test*"]
python_files = ["test_*.py"]
python_functions = ["test_*"]
testpaths = ["tests"]
+
+[tool.ruff] # https://docs.astral.sh/ruff/settings/#top-level
+extend-exclude = [
+ "runway/aws_sso_botocore", # NOTE (kyle): ignoring vendored code
+ "runway/cfngin/hooks/staticsite/auth_at_edge/templates", # TODO (kyle): resolve lint error
+ "typings",
+]
+force-exclude = true
+line-length = 120
+show-fixes = true
+target-version = "py39" # important to set before applying fixes
+
+[tool.ruff.lint] # https://docs.astral.sh/ruff/settings/#lint
+extend-safe-fixes = [
+ "UP007",
+ "UP038",
+ "UP040",
+]
+ignore = [
+ "ANN101", # Missing type annotation for `self` in method
+ "ANN102", # Missing type annotation for `cls` in classmethod
+ "ANN401", # Dynamically typed expressions (typing.Any) are disallowed # TODO (kyle): improve type annotations
+ "COM812", # Trailing comma missing
+ "D203", # 1 blank line required before class docstring
+ "D213", # Multi-line docstring summary should start at the second line
+ "D215", # Section underline is over-indented
+ "D403", # First word of the first line should be capitalized
+ "D406", # Section name should end with a newline
+ "D407", # Missing dashed underline after section
+ "D408", # Section underline should be in the line following the section's name
+ "D409", # Section underline should match the length of its name
+ "DTZ", # flake8-datetimez # NOTE (kyle): this is fine here
+ "EM", # flake8-errmsg
+ "ERA001", # Found commented-out code # NOTE (kyle): incorrectly detects cspell
+ "FA100", # Missing `from __future__ import annotations`, but uses `typing.Optional`
+ "FBT001", # Boolean positional arg in function definition
+ "FBT002", # Boolean default value in function definition
+ "FBT003", # Boolean positional value in function call
+ "FIX002", # Line contains TODO
+ "N818", # Exception name should be named with an Error suffix # TODO (kyle): resolve in next major release
+ "PERF203", # `try`-`except` within a loop incurs performance overhead
+ "PGH003", # Use specific rule codes when ignoring type issues # TODO (kyle): resolve this eventually
+ "RUF012", # TODO (kyle): remove when resolved - https://github.com/astral-sh/ruff/issues/5243
+ "S105", # (hardcoded-password-string) Possible hardcoded password
+ "S106", # (hardcoded-password-func-arg) Possible hardcoded password
+ "S107", # (hardcoded-password-default) Possible hardcoded password
+ "S108", # Probable insecure usage of temporary file or directory
+ "S301", # `pickle` and modules that wrap it can be unsafe when used to deserialize untrusted data
+ "S60", # flake8-bandit # NOTE (kyle): most of these are for subprocess which we don't care about right now
+ "S604", # Function call with `shell=True` parameter identified # NOTE (kyle): required for runway
+ "TD003", # Missing issue link on the line following this TODO
+ "TID252", # Relative imports from parent modules are banned
+ "TRY", # tryceratops
+]
+select = ["ALL"]
+
+[tool.ruff.lint.extend-per-file-ignores] # https://docs.astral.sh/ruff/settings/#lintextend-per-file-ignores
+"*.py" = [
+ "PYI024", # Use `typing.NamedTuple` instead of `collections.namedtuple` # NOTE (kyle): should only apply to pyi
+]
+".github/scripts/*" = [
+ "EXE002", # The file is executable but no shebang is present # NOTE (kyle): fails linting on windows
+]
+"runway/templates/*" = [
+ "N999", # Invalid module name # NOTE (kyle): these are fine here
+]
+"tests/*" = [
+ "PT004", # Fixture does not return anything, add leading underscore
+ "S101", # Use of `assert` detected # NOTE (kyle): this is fine here
+ "SLF001", # Private member accessed # NOTE (kyle): fine in tests
+]
+
+[tool.ruff.lint.flake8-annotations] # https://docs.astral.sh/ruff/settings/#lintflake8-annotations
+allow-star-arg-any = true
+
+[tool.ruff.lint.flake8-pytest-style] # https://docs.astral.sh/ruff/settings/#lintflake8-pytest-style
+parametrize-names-type = "csv" # TODO (kyle): update tests to remove the need for this
+
+[tool.ruff.lint.flake8-self]
+ignore-names = [
+ "_Environ",
+ "_Hash",
+ "_session",
+]
+
+[tool.ruff.lint.flake8-type-checking] # https://docs.astral.sh/ruff/settings/#lint_flake8-type-checking_runtime-evaluated-base-classes
+runtime-evaluated-base-classes = [
+ "pydantic.BaseModel",
+ "pydantic.BeforeValidator",
+ "runway.cfngin.hooks.base.HookArgsBaseModel",
+ "runway.config.models.base.ConfigProperty",
+ "runway.utils.BaseModel",
+]
+
+[tool.ruff.lint.isort] # https://docs.astral.sh/ruff/settings/#lintisort
+known-local-folder = [
+ "jwks_rsa",
+ "shared",
+ "update_urls",
+]
+known-third-party = [
+ "docker", # NOTE (kyle): the `docker/` directory confuses isort
+]
+
+[tool.ruff.lint.pydocstyle] # https://docs.astral.sh/ruff/settings/#lintpydocstyle
+convention = "google"
+
+[tool.ruff.lint.pylint] # https://docs.astral.sh/ruff/settings/#lintpylint
+allow-magic-value-types = ["bytes", "int", "str"]
+max-args = 15
+max-returns = 10
+max-statements = 50
+
+[tool.ruff.lint.pyupgrade] # https://docs.astral.sh/ruff/settings/#pyupgrade-keep-runtime-typing
+keep-runtime-typing = true # TODO (kyle): remove when dropping support for python 3.9
+
+[tool.tomlsort]
+all = true
+in_place = true
+sort_first = ["tool", "tool.poetry"]
+spaces_before_inline_comment = 2
+trailing_comma_inline_array = true
+overrides."tool.poetry".first = ["name", "version"]
+overrides."tool.poetry.dependencies".first = ["python"]
+
+[build-system]
+build-backend = "poetry.core.masonry.api"
+requires = ["poetry_core>=1.0.7"]
diff --git a/quickstarts/conduit/pyproject.toml b/quickstarts/conduit/pyproject.toml
index 8062507ee..3c647bd2e 100644
--- a/quickstarts/conduit/pyproject.toml
+++ b/quickstarts/conduit/pyproject.toml
@@ -1,10 +1,10 @@
[tool.poetry]
name = "runway-quickstart-conduit"
version = "0.0.0"
-description = "Runway Quickstart"
authors = [
"Onica Group LLC ",
]
+description = "Runway Quickstart"
license = "Apache-2.0"
[tool.poetry.dependencies]
@@ -14,5 +14,5 @@ python = "^3.9"
runway = "^2.0"
[build-system]
-requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
+requires = ["poetry-core>=1.0.0"]
diff --git a/quickstarts/conduit/update_env_endpoint.py b/quickstarts/conduit/update_env_endpoint.py
index d4e527d30..43fba72fe 100755
--- a/quickstarts/conduit/update_env_endpoint.py
+++ b/quickstarts/conduit/update_env_endpoint.py
@@ -9,31 +9,23 @@
STACK_PREFIX = "realworld-"
-def update_api_endpoint():
+def update_api_endpoint() -> None:
"""Update app environment file with backend endpoint."""
- environment = (
- subprocess.check_output(["poetry", "run", "runway", "whichenv"])
- .decode()
- .strip()
- )
- environment_file = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
+ environment = subprocess.check_output(["poetry", "run", "runway", "whichenv"]).decode().strip()
+ environment_file = os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.realpath(__file__)), # noqa: PTH120
"src",
"environments",
"environment.prod.ts" if environment == "prod" else "environment.ts",
)
cloudformation = boto3.resource("cloudformation")
stack = cloudformation.Stack(STACK_PREFIX + environment)
- endpoint = [
- i["OutputValue"] for i in stack.outputs if i["OutputKey"] == "ServiceEndpoint"
- ][0]
+ endpoint = next(i["OutputValue"] for i in stack.outputs if i["OutputKey"] == "ServiceEndpoint")
- with open(environment_file, "r") as stream:
+ with open(environment_file) as stream: # noqa: PTH123
content = stream.read()
- content = re.sub(
- r"api_url: \'.*\'$", f"api_url: '{endpoint}/api'", content, flags=re.M
- )
- with open(environment_file, "w") as stream:
+ content = re.sub(r"api_url: \'.*\'$", f"api_url: '{endpoint}/api'", content, flags=re.MULTILINE)
+ with open(environment_file, "w") as stream: # noqa: PTH123
stream.write(content)
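The refactor above swaps indexing into a list comprehension for `next()` over a generator expression, which stops scanning at the first match. A minimal standalone sketch of the idiom, using hypothetical sample data shaped like boto3's stack outputs:

# Hypothetical stack outputs, mirroring the shape boto3 returns.
outputs = [
    {"OutputKey": "BucketName", "OutputValue": "my-bucket"},
    {"OutputKey": "ServiceEndpoint", "OutputValue": "https://api.example.com"},
]

# Old form: materialize every match, then take the first (IndexError if none).
endpoint = [o["OutputValue"] for o in outputs if o["OutputKey"] == "ServiceEndpoint"][0]

# New form: stop at the first match; passing a default avoids StopIteration.
endpoint = next(
    (o["OutputValue"] for o in outputs if o["OutputKey"] == "ServiceEndpoint"),
    None,
)
assert endpoint == "https://api.example.com"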
diff --git a/quickstarts/runway/runway-quickstart.yml b/quickstarts/runway/runway-quickstart.yml
index 2fff4f7ac..1276ba9fb 100644
--- a/quickstarts/runway/runway-quickstart.yml
+++ b/quickstarts/runway/runway-quickstart.yml
@@ -10,31 +10,22 @@ Parameters:
- Linux
KeyPair:
- Description: "The existing EC2 KeyPair to be used to access the Runway \
- instance"
+ Description: "The existing EC2 KeyPair to be used to access the Runway instance"
Type: AWS::EC2::KeyPair::KeyName
SourceIP:
- Description: "The egress (public) IPv4 address from which you plan to \
- access your Runway instance. Hint- https://whatismyip.com . \
- Specify address only, do not include /CIDR designator, \
- example 157.123.231.123"
+ Description: "The egress (public) IPv4 address from which you plan to access your Runway instance. Hint- https://whatismyip.com . Specify address only, do not include /CIDR designator, example 157.123.231.123"
Type: String
IamRole:
Type: String
- Description: "Choose 'auto/admin' to have this CloudFormation template \
- deploy an ADMIN IAM Role for Runway to use to call AWS \
- services. Choose 'manual' to specify an existing IAM Role \
- with more restrictive permissions."
+ Description: "Choose 'auto/admin' to have this CloudFormation template deploy an ADMIN IAM Role for Runway to use to call AWS services. Choose 'manual' to specify an existing IAM Role with more restrictive permissions."
AllowedValues:
- auto/admin
- manual
IamRoleName:
- Description: "If you chose 'manual' for IamRole, specify the name of an \
- existing IAM Role here, otherwise leave as the default value \
- of 'none'"
+ Description: "If you chose 'manual' for IamRole, specify the name of an existing IAM Role here, otherwise leave as the default value of 'none'"
Type: String
Default: none
@@ -209,7 +200,7 @@ Resources:
Properties:
VpcId: !Ref VPC
CidrBlock: 10.0.0.0/24
- AvailabilityZone: !Select ['0', !GetAZs {Ref: 'AWS::Region'}]
+ AvailabilityZone: !Select ['0', !GetAZs '']
PublicRouteTable:
Type: AWS::EC2::RouteTable
@@ -266,20 +257,17 @@ Resources:
AssumeRolePolicyDocument:
Version: 2012-10-17
Statement:
- -
- Effect: Allow
+ - Effect: Allow
Principal:
Service:
- ec2.amazonaws.com
Action:
- sts:AssumeRole
Policies:
- -
- PolicyName: RunwayIamRolePolicy
+ - PolicyName: RunwayIamRolePolicy
PolicyDocument:
Version: 2012-10-17
Statement:
- -
- Effect: Allow
+ - Effect: Allow
Action: '*'
Resource: '*'
diff --git a/runway.file.spec b/runway.file.spec
index 3b8b79db0..a02017b88 100644
--- a/runway.file.spec
+++ b/runway.file.spec
@@ -3,8 +3,6 @@
This file should be considered a python file and linted as such.
"""
-# pylint: disable=undefined-variable,wrong-import-order,invalid-name
-# pylint: disable=wrong-import-position,import-self
import os
import pkgutil
from pkg_resources import get_distribution, get_entry_info
@@ -18,7 +16,7 @@ import distutils
if getattr(distutils, "distutils_path", "").endswith("__init__.py"):
distutils.distutils_path = os.path.dirname(distutils.distutils_path)
-CLI_PATH = os.path.join(os.path.dirname(os.path.dirname(workpath)), "runway") # noqa
+CLI_PATH = os.path.join(os.path.dirname(os.path.dirname(workpath)), "runway")
def get_submodules(package):
@@ -45,17 +43,17 @@ def get_submodules(package):
]
-def Entrypoint(dist, group, name, **kwargs): # noqa
+def Entrypoint(dist, group, name, **kwargs):
"""Get entrypoint info for packages using setuptools."""
ep = get_entry_info(dist, group, name)
# script name must not be a valid module name to avoid name clashes on import
- script_path = os.path.join(workpath, name + "-script.py") # noqa: F821
+ script_path = os.path.join(workpath, name + "-script.py")
print("creating script for entry point", dist, group, name)
with open(script_path, "w") as fh:
print("import", ep.module_name, file=fh)
print("%s.%s()" % (ep.module_name, ".".join(ep.attrs)), file=fh)
- return Analysis([script_path] + kwargs.get("scripts", []), **kwargs) # noqa: F821
+ return Analysis([script_path] + kwargs.get("scripts", []), **kwargs)
# files that are not explicitly imported but consumed at runtime
@@ -87,14 +85,14 @@ data_files.append(copy_metadata("runway")[0]) # support scm version
hiddenimports = []
# these packages do not have pyinstaller hooks so we need to import
# them to collect a list of submodules to include as hidden imports.
-import runway # noqa
-import troposphere # noqa
-import awacs # noqa
-import botocore # noqa
-import pip # noqa
-import wheel # noqa
-import yamllint # noqa
-import cfnlint # noqa
+import runway
+import troposphere
+import awacs
+import botocore
+import pip
+import wheel
+import yamllint
+import cfnlint
hiddenimports.extend(get_submodules(runway))
hiddenimports.extend(get_submodules(troposphere))
@@ -123,9 +121,9 @@ a = Entrypoint(
noarchive=False,
binaries=[],
)
-pyz = PYZ(a.pure, a.zipped_data, cipher=None) # noqa: F821
+pyz = PYZ(a.pure, a.zipped_data, cipher=None)
exe = EXE(
- pyz, # noqa: F821
+ pyz,
a.scripts,
a.binaries,
a.zipfiles,
diff --git a/runway.folder.spec b/runway.folder.spec
index 16de8ac5c..1b8ee4c52 100644
--- a/runway.folder.spec
+++ b/runway.folder.spec
@@ -3,8 +3,6 @@
This file should be considered a python file and linted as such.
"""
-# pylint: disable=undefined-variable,wrong-import-order,invalid-name
-# pylint: disable=wrong-import-position,import-self
import os
import pkgutil
from pkg_resources import get_distribution, get_entry_info
@@ -18,7 +16,7 @@ import distutils
if getattr(distutils, "distutils_path", "").endswith("__init__.py"):
distutils.distutils_path = os.path.dirname(distutils.distutils_path)
-CLI_PATH = os.path.join(os.path.dirname(os.path.dirname(workpath)), "runway") # noqa
+CLI_PATH = os.path.join(os.path.dirname(os.path.dirname(workpath)), "runway")
def get_submodules(package):
@@ -45,17 +43,17 @@ def get_submodules(package):
]
-def Entrypoint(dist, group, name, **kwargs): # noqa
+def Entrypoint(dist, group, name, **kwargs):
"""Get entrypoint info for packages using setuptools."""
ep = get_entry_info(dist, group, name)
# script name must not be a valid module name to avoid name clashes on import
- script_path = os.path.join(workpath, name + "-script.py") # noqa: F821
+ script_path = os.path.join(workpath, name + "-script.py")
print("creating script for entry point", dist, group, name)
with open(script_path, "w") as fh:
print("import", ep.module_name, file=fh)
print("%s.%s()" % (ep.module_name, ".".join(ep.attrs)), file=fh)
- return Analysis([script_path] + kwargs.get("scripts", []), **kwargs) # noqa: F821
+ return Analysis([script_path] + kwargs.get("scripts", []), **kwargs)
# files that are not explicitly imported but consumed at runtime
@@ -87,14 +85,14 @@ data_files.append(copy_metadata("runway")[0]) # support scm version
hiddenimports = []
# these packages do not have pyinstaller hooks so we need to import
# them to collect a list of submodules to include as hidden imports.
-import runway # noqa
-import troposphere # noqa
-import awacs # noqa
-import botocore # noqa
-import pip # noqa
-import wheel # noqa
-import yamllint # noqa
-import cfnlint # noqa
+import runway
+import troposphere
+import awacs
+import botocore
+import pip
+import wheel
+import yamllint
+import cfnlint
hiddenimports.extend(get_submodules(runway))
hiddenimports.extend(get_submodules(troposphere))
@@ -123,9 +121,9 @@ a = Entrypoint(
noarchive=False,
binaries=[],
)
-pyz = PYZ(a.pure, a.zipped_data, cipher=None) # noqa: F821
+pyz = PYZ(a.pure, a.zipped_data, cipher=None)
exe = EXE(
- pyz, # noqa: F821
+ pyz,
a.scripts,
[],
exclude_binaries=True,
@@ -137,7 +135,7 @@ exe = EXE(
console=True,
)
coll = COLLECT(
- exe, # noqa: F821
+ exe,
a.binaries,
a.zipfiles,
a.datas,
diff --git a/runway/__init__.py b/runway/__init__.py
index aeda2c1bb..1a381e765 100644
--- a/runway/__init__.py
+++ b/runway/__init__.py
@@ -1,18 +1,12 @@
"""Set package version."""
import logging
-import sys
+from importlib.metadata import PackageNotFoundError, version # type: ignore
from ._logging import LogLevels, RunwayLogger # noqa: F401
logging.setLoggerClass(RunwayLogger)
-if sys.version_info < (3, 8):
- # importlib.metadata is standard lib for python>=3.8, use backport
- from importlib_metadata import PackageNotFoundError, version # type: ignore
-else:
- from importlib.metadata import PackageNotFoundError, version # type: ignore
-
try:
__version__ = version(__name__)
except PackageNotFoundError:
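With Python 3.9 as the floor, the `importlib_metadata` backport branch removed above becomes dead code and the stdlib module can be imported unconditionally. A minimal sketch of the version-lookup pattern this file keeps (the fallback value is an assumption, not taken from this diff):

from importlib.metadata import PackageNotFoundError, version

try:
    # Read the version recorded in the installed distribution's metadata.
    __version__ = version("runway")
except PackageNotFoundError:
    # Running from a source checkout that was never pip-installed.
    __version__ = "0.0.0"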
diff --git a/runway/_cli/commands/_deploy.py b/runway/_cli/commands/_deploy.py
index 7c20ecde7..3498ab03b 100644
--- a/runway/_cli/commands/_deploy.py
+++ b/runway/_cli/commands/_deploy.py
@@ -2,7 +2,7 @@
# docs: file://./../../../docs/source/commands.rst
import logging
-from typing import Any, Tuple
+from typing import Any
import click
from pydantic import ValidationError
@@ -23,7 +23,7 @@
@options.tags
@options.verbose
@click.pass_context
-def deploy(ctx: click.Context, debug: bool, tags: Tuple[str, ...], **_: Any) -> None:
+def deploy(ctx: click.Context, debug: bool, tags: tuple[str, ...], **_: Any) -> None:
"""Deploy infrastructure as code.
\b
diff --git a/runway/_cli/commands/_destroy.py b/runway/_cli/commands/_destroy.py
index 8a2a14a8c..ddc8802e1 100644
--- a/runway/_cli/commands/_destroy.py
+++ b/runway/_cli/commands/_destroy.py
@@ -2,7 +2,7 @@
# docs: file://./../../../docs/source/commands.rst
import logging
-from typing import Any, Tuple
+from typing import Any
import click
from pydantic import ValidationError
@@ -23,7 +23,7 @@
@options.tags
@options.verbose
@click.pass_context
-def destroy(ctx: click.Context, debug: bool, tags: Tuple[str, ...], **_: Any) -> None:
+def destroy(ctx: click.Context, debug: bool, tags: tuple[str, ...], **_: Any) -> None:
"""Destroy infrastructure as code.
\b
diff --git a/runway/_cli/commands/_envvars.py b/runway/_cli/commands/_envvars.py
index ebf939d2f..1b5cda697 100644
--- a/runway/_cli/commands/_envvars.py
+++ b/runway/_cli/commands/_envvars.py
@@ -4,7 +4,7 @@
import logging
import os
import platform
-from typing import TYPE_CHECKING, Any, Dict, cast
+from typing import TYPE_CHECKING, Any, cast
import click
from pydantic import ValidationError
@@ -41,9 +41,7 @@ def envvars(ctx: click.Context, debug: bool, **_: Any) -> None:
ctx.obj.env.ci = True
LOGGER.verbose("forced Runway to non-interactive mode to suppress prompts")
try:
- env_vars = Runway(
- ctx.obj.runway_config, ctx.obj.get_runway_context()
- ).get_env_vars()
+ env_vars = Runway(ctx.obj.runway_config, ctx.obj.get_runway_context()).get_env_vars()
except ValidationError as err:
LOGGER.error(err, exc_info=debug)
ctx.exit(1)
@@ -58,7 +56,7 @@ def envvars(ctx: click.Context, debug: bool, **_: Any) -> None:
print_env_vars(env_vars)
-def print_env_vars(env_vars: Dict[str, Any]) -> None:
+def print_env_vars(env_vars: dict[str, Any]) -> None:
"""Print environment variables."""
if platform.system() == "Windows":
if os.getenv("MSYSTEM", "").startswith("MINGW"):
@@ -67,14 +65,14 @@ def print_env_vars(env_vars: Dict[str, Any]) -> None:
return __print_env_vars_posix(env_vars)
-def __print_env_vars_posix(env_vars: Dict[str, Any]) -> None:
+def __print_env_vars_posix(env_vars: dict[str, Any]) -> None:
"""Print environment variables for bash."""
LOGGER.debug("using posix formatting for environment variable export")
for key, val in env_vars.items():
click.echo(f'export {key}="{val}"')
-def __print_env_vars_psh(env_vars: Dict[str, Any]) -> None:
+def __print_env_vars_psh(env_vars: dict[str, Any]) -> None:
"""Print environment variables for Powershell."""
LOGGER.debug("using powershell formatting for environment variable export")
for key, val in env_vars.items():
diff --git a/runway/_cli/commands/_gen_sample/_k8s_flux_repo.py b/runway/_cli/commands/_gen_sample/_k8s_flux_repo.py
index 75d6959fd..3c7f15981 100644
--- a/runway/_cli/commands/_gen_sample/_k8s_flux_repo.py
+++ b/runway/_cli/commands/_gen_sample/_k8s_flux_repo.py
@@ -42,9 +42,7 @@ def k8s_flux_repo(ctx: click.Context, **_: Any) -> None:
copy_sample(ctx, tfstate_src_dir, dest / tfstate_src_dir.parts[-1])
tfstate_templates_dir = dest / "tfstate.cfn/templates"
tfstate_templates_dir.mkdir()
- write_tfstate_template(
- tfstate_templates_dir / "tf_state.yml", bucket_deletion_policy="Delete"
- )
+ write_tfstate_template(tfstate_templates_dir / "tf_state.yml", bucket_deletion_policy="Delete")
LOGGER.success("Sample k8s infrastructure repo created at %s", dest)
LOGGER.notice("See the README for setup and deployment instructions.")
diff --git a/runway/_cli/commands/_gen_sample/_k8s_tf_repo.py b/runway/_cli/commands/_gen_sample/_k8s_tf_repo.py
index 3e9907ee9..c406ed618 100644
--- a/runway/_cli/commands/_gen_sample/_k8s_tf_repo.py
+++ b/runway/_cli/commands/_gen_sample/_k8s_tf_repo.py
@@ -35,9 +35,7 @@ def k8s_tf_repo(ctx: click.Context, **_: Any) -> None:
tfstate_dir = dest / "tfstate.cfn/templates"
tfstate_dir.mkdir()
- write_tfstate_template(
- tfstate_dir / "tf_state.yml", bucket_deletion_policy="Delete"
- )
+ write_tfstate_template(tfstate_dir / "tf_state.yml", bucket_deletion_policy="Delete")
LOGGER.success("Sample k8s infrastructure repo created at %s", dest)
LOGGER.notice("See the README for setup and deployment instructions.")
diff --git a/runway/_cli/commands/_gen_sample/_tf.py b/runway/_cli/commands/_gen_sample/_tf.py
index a6577edac..35129e483 100644
--- a/runway/_cli/commands/_gen_sample/_tf.py
+++ b/runway/_cli/commands/_gen_sample/_tf.py
@@ -22,7 +22,7 @@
@options.no_color
@options.verbose
@click.pass_context
-def tf(ctx: click.Context, **_: Any) -> None: # pylint: disable=invalid-name
+def tf(ctx: click.Context, **_: Any) -> None:
"""Generate a sample Terraform project."""
src = TEMPLATES / "terraform"
dest = Path.cwd() / "sampleapp.tf"
diff --git a/runway/_cli/commands/_gen_sample/utils.py b/runway/_cli/commands/_gen_sample/utils.py
index fd80fc191..4d5ff87cf 100644
--- a/runway/_cli/commands/_gen_sample/utils.py
+++ b/runway/_cli/commands/_gen_sample/utils.py
@@ -20,7 +20,7 @@ def convert_gitignore(src: Path) -> Path:
"""Rename a gitignore template.
Keyword Args:
- Path object for source file.
+ src: Path object for source file.
Returns:
The renamed file if it was created.
diff --git a/runway/_cli/commands/_init.py b/runway/_cli/commands/_init.py
index a4e08af90..a983cd122 100644
--- a/runway/_cli/commands/_init.py
+++ b/runway/_cli/commands/_init.py
@@ -2,7 +2,7 @@
# docs: file://./../../../docs/source/commands.rst
import logging
-from typing import Any, Tuple
+from typing import Any
import click
from pydantic import ValidationError
@@ -23,7 +23,7 @@
@options.tags
@options.verbose
@click.pass_context
-def init(ctx: click.Context, debug: bool, tags: Tuple[str, ...], **_: Any) -> None:
+def init(ctx: click.Context, debug: bool, tags: tuple[str, ...], **_: Any) -> None:
"""Run initialization/bootstrap steps.
\b
diff --git a/runway/_cli/commands/_kbenv/__init__.py b/runway/_cli/commands/_kbenv/__init__.py
index 656d10b1f..db30f094f 100644
--- a/runway/_cli/commands/_kbenv/__init__.py
+++ b/runway/_cli/commands/_kbenv/__init__.py
@@ -1,7 +1,7 @@
"""``runway kbenv`` command group."""
# docs: file://./../../../../docs/source/commands.rst
-from typing import Any, List
+from typing import Any
import click
@@ -13,7 +13,7 @@
__all__ = ["install", "list_installed", "run", "uninstall"]
-COMMANDS: List[click.Command] = [install, list_installed, run, uninstall]
+COMMANDS: list[click.Command] = [install, list_installed, run, uninstall]
@click.group("kbenv", short_help="kubectl (install|run)")
diff --git a/runway/_cli/commands/_kbenv/_list.py b/runway/_cli/commands/_kbenv/_list.py
index 78abc9d8e..0d2478e0b 100644
--- a/runway/_cli/commands/_kbenv/_list.py
+++ b/runway/_cli/commands/_kbenv/_list.py
@@ -26,6 +26,4 @@ def list_installed(**_: Any) -> None:
LOGGER.info("kubectl versions installed:")
click.echo("\n".join(v.name for v in versions))
else:
- LOGGER.warning(
- "no versions of kubectl installed at path %s", kbenv.versions_dir
- )
+ LOGGER.warning("no versions of kubectl installed at path %s", kbenv.versions_dir)
diff --git a/runway/_cli/commands/_kbenv/_run.py b/runway/_cli/commands/_kbenv/_run.py
index c5ea3e4dc..8b01fc9fa 100644
--- a/runway/_cli/commands/_kbenv/_run.py
+++ b/runway/_cli/commands/_kbenv/_run.py
@@ -3,7 +3,7 @@
# docs: file://./../../../../docs/source/commands.rst
import logging
import subprocess
-from typing import Any, Tuple
+from typing import Any
import click
@@ -13,15 +13,13 @@
LOGGER = logging.getLogger(__name__.replace("._", "."))
-@click.command(
- "run", short_help="run kubectl", context_settings={"ignore_unknown_options": True}
-)
+@click.command("run", short_help="run kubectl", context_settings={"ignore_unknown_options": True})
@click.argument("args", metavar="", nargs=-1, required=True)
@options.debug
@options.no_color
@options.verbose
@click.pass_context
-def run(ctx: click.Context, args: Tuple[str, ...], **_: Any) -> None:
+def run(ctx: click.Context, args: tuple[str, ...], **_: Any) -> None:
"""Run a kubectl command.
Uses the version of kubectl specified in the ".kubectl-version" file
@@ -31,4 +29,4 @@ def run(ctx: click.Context, args: Tuple[str, ...], **_: Any) -> None:
before the kubectl command.
"""
- ctx.exit(subprocess.call([KBEnvManager().install()] + list(args)))
+ ctx.exit(subprocess.call([KBEnvManager().install(), *list(args)]))
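The last line above trades list concatenation for iterable unpacking inside the list literal (the spelling ruff's RUF005 rule prefers, since it avoids building a throwaway intermediate list). A minimal sketch of the equivalence, with a hypothetical kubectl path standing in for the value returned by `KBEnvManager().install()`:

kubectl = "/usr/local/bin/kubectl"  # hypothetical path; install() returns a real one
args = ("get", "pods")

argv_concat = [kubectl] + list(args)  # old form: concatenate two lists
argv_unpack = [kubectl, *args]        # new form: unpack in place
assert argv_concat == argv_unpack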
diff --git a/runway/_cli/commands/_kbenv/_uninstall.py b/runway/_cli/commands/_kbenv/_uninstall.py
index 5e76b1c0a..08a05c7b7 100644
--- a/runway/_cli/commands/_kbenv/_uninstall.py
+++ b/runway/_cli/commands/_kbenv/_uninstall.py
@@ -1,8 +1,10 @@
"""Uninstall kubectl version(s) that were installed by Runway and/or kbenv."""
# docs: file://./../../../../docs/source/commands.rst
+from __future__ import annotations
+
import logging
-from typing import TYPE_CHECKING, Any, Optional, cast
+from typing import TYPE_CHECKING, Any, cast
import click
@@ -33,7 +35,7 @@
def uninstall(
ctx: click.Context,
*,
- version: Optional[str] = None,
+ version: str | None = None,
all_versions: bool = False,
**_: Any,
) -> None:
@@ -45,10 +47,7 @@ def uninstall(
"""
kbenv = KBEnvManager()
version = version or (str(kbenv.version) if kbenv.version else None)
- if version:
- version_tuple = KBEnvManager.parse_version_string(version)
- else:
- version_tuple = kbenv.version
+ version_tuple = KBEnvManager.parse_version_string(version) if version else kbenv.version
if version_tuple and not all_versions:
if not kbenv.uninstall(version_tuple):
ctx.exit(1)
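The `from __future__ import annotations` import added at the top of this file is what lets PEP 604 unions like `str | None` appear in signatures while Python 3.9 is still supported: annotations become lazily evaluated strings, so the union expression is never executed at runtime. A minimal sketch under that assumption (the helper is hypothetical, loosely mirroring the signature style used here):

from __future__ import annotations


def parse_version_string(version: str | None = None) -> tuple[int, ...] | None:
    """Parse a 'vX.Y.Z' string into a tuple of ints (hypothetical helper)."""
    if version is None:
        return None
    return tuple(int(part) for part in version.lstrip("v").split("."))


print(parse_version_string("v1.21.3"))  # (1, 21, 3)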
diff --git a/runway/_cli/commands/_new.py b/runway/_cli/commands/_new.py
index 180d183a5..e09aa4c1c 100644
--- a/runway/_cli/commands/_new.py
+++ b/runway/_cli/commands/_new.py
@@ -37,14 +37,10 @@ def new(ctx: click.Context, **_: Any) -> None:
LOGGER.verbose("checking for preexisting runway.yml file...")
if runway_yml.is_file():
- LOGGER.error(
- "There is already a %s file in the current directory", runway_yml.name
- )
+ LOGGER.error("There is already a %s file in the current directory", runway_yml.name)
ctx.exit(1)
- runway_yml.write_text(
- RUNWAY_YML, encoding=locale.getpreferredencoding(do_setlocale=False)
- )
+ runway_yml.write_text(RUNWAY_YML, encoding=locale.getpreferredencoding(do_setlocale=False))
LOGGER.success("runway.yml generated")
LOGGER.notice(
"See addition getting started information at "
diff --git a/runway/_cli/commands/_plan.py b/runway/_cli/commands/_plan.py
index ff9c10f2a..b78fd50c0 100644
--- a/runway/_cli/commands/_plan.py
+++ b/runway/_cli/commands/_plan.py
@@ -2,7 +2,7 @@
# docs: file://./../../../docs/source/commands.rst
import logging
-from typing import Any, Tuple
+from typing import Any
import click
from pydantic import ValidationError
@@ -23,7 +23,7 @@
@options.tags
@options.verbose
@click.pass_context
-def plan(ctx: click.Context, debug: bool, tags: Tuple[str, ...], **_: Any) -> None:
+def plan(ctx: click.Context, debug: bool, tags: tuple[str, ...], **_: Any) -> None:
"""Determine what infrastructure changes will occur during the next deploy.
\b
diff --git a/runway/_cli/commands/_run_python.py b/runway/_cli/commands/_run_python.py
index 336a1b2e0..ff3c91abb 100644
--- a/runway/_cli/commands/_run_python.py
+++ b/runway/_cli/commands/_run_python.py
@@ -33,10 +33,8 @@ def run_python(filename: str, **_: Any) -> None:
execglobals = globals().copy()
# override name & file so script operates as if it were invoked directly
execglobals.update({"__name__": "__main__", "__file__": filename})
- exec( # pylint: disable=exec-used
- Path(filename).read_text(
- encoding=locale.getpreferredencoding(do_setlocale=False)
- ),
+ exec( # noqa: S102
+ Path(filename).read_text(encoding=locale.getpreferredencoding(do_setlocale=False)),
execglobals,
execglobals,
)
diff --git a/runway/_cli/commands/_tfenv/_install.py b/runway/_cli/commands/_tfenv/_install.py
index 4bfa49064..ffc38c774 100644
--- a/runway/_cli/commands/_tfenv/_install.py
+++ b/runway/_cli/commands/_tfenv/_install.py
@@ -1,8 +1,10 @@
"""Install a version of Terraform."""
# docs: file://./../../../../docs/source/commands.rst
+from __future__ import annotations
+
import logging
-from typing import Any, Optional
+from typing import Any
import click
@@ -19,7 +21,7 @@
@options.no_color
@options.verbose
@click.pass_context
-def install(ctx: click.Context, version: Optional[str] = None, **_: Any) -> None:
+def install(ctx: click.Context, version: str | None = None, **_: Any) -> None:
"""Install the specified of Terraform (e.g. 0.12.0).
If no version is specified, Runway will attempt to find and read a
@@ -28,13 +30,11 @@ def install(ctx: click.Context, version: Optional[str] = None, **_: Any) -> None
"""
try:
- LOGGER.debug(
- "terraform path: %s", TFEnvManager().install(version_requested=version)
- )
+ LOGGER.debug("terraform path: %s", TFEnvManager().install(version_requested=version))
except ValueError as err:
LOGGER.debug("terraform install failed", exc_info=True)
if "unable to find" not in str(err):
- LOGGER.error(
+ LOGGER.error( # noqa: G201
"unexpected error encountered when trying to install Terraform",
exc_info=True,
)
diff --git a/runway/_cli/commands/_tfenv/_list.py b/runway/_cli/commands/_tfenv/_list.py
index a9a46685c..e54a41b5d 100644
--- a/runway/_cli/commands/_tfenv/_list.py
+++ b/runway/_cli/commands/_tfenv/_list.py
@@ -26,6 +26,4 @@ def list_installed(**_: Any) -> None:
LOGGER.info("Terraform versions installed:")
click.echo("\n".join(v.name for v in versions))
else:
- LOGGER.warning(
- "no versions of Terraform installed at path %s", tfenv.versions_dir
- )
+ LOGGER.warning("no versions of Terraform installed at path %s", tfenv.versions_dir)
diff --git a/runway/_cli/commands/_tfenv/_run.py b/runway/_cli/commands/_tfenv/_run.py
index 31465c776..031f15602 100644
--- a/runway/_cli/commands/_tfenv/_run.py
+++ b/runway/_cli/commands/_tfenv/_run.py
@@ -3,7 +3,7 @@
# docs: file://./../../../../docs/source/commands.rst
import logging
import subprocess
-from typing import Any, Tuple
+from typing import Any
import click
@@ -14,15 +14,13 @@
LOGGER = logging.getLogger(__name__.replace("._", "."))
-@click.command(
- "run", short_help="run terraform", context_settings={"ignore_unknown_options": True}
-)
+@click.command("run", short_help="run terraform", context_settings={"ignore_unknown_options": True})
@click.argument("args", metavar="", nargs=-1, required=True)
@options.debug
@options.no_color
@options.verbose
@click.pass_context
-def run(ctx: click.Context, args: Tuple[str, ...], **_: Any) -> None:
+def run(ctx: click.Context, args: tuple[str, ...], **_: Any) -> None:
"""Run a Terraform command.
Uses the version of Terraform specified in the ".terraform-version" file
@@ -33,11 +31,11 @@ def run(ctx: click.Context, args: Tuple[str, ...], **_: Any) -> None:
"""
try:
- ctx.exit(subprocess.call([TFEnvManager().install()] + list(args)))
+ ctx.exit(subprocess.call([TFEnvManager().install(), *list(args)]))
except ValueError as err:
LOGGER.debug("terraform install failed", exc_info=True)
if "unable to find" not in str(err):
- LOGGER.error(
+ LOGGER.error( # noqa: G201
"unexpected error encountered when trying to install Terraform",
exc_info=True,
)
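The `[TFEnvManager().install(), *list(args)]` rewrite is ruff's RUF005 fix: build the argv list with unpacking instead of concatenation. The technique in isolation (the command is illustrative):

    from __future__ import annotations

    import subprocess

    def run_tool(exe: str, args: tuple[str, ...]) -> int:
        # Equivalent to `[exe] + list(args)`, but built in one list display (RUF005).
        return subprocess.call([exe, *args])

    # e.g. run_tool("terraform", ("version",))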
diff --git a/runway/_cli/logs.py b/runway/_cli/logs.py
index 9abf1b8f1..93b49474c 100644
--- a/runway/_cli/logs.py
+++ b/runway/_cli/logs.py
@@ -2,7 +2,7 @@
import logging
import os
-from typing import Any, Dict
+from typing import Any
import coloredlogs
@@ -15,7 +15,7 @@
LOG_FORMAT = "[runway] %(message)s"
LOG_FORMAT_VERBOSE = logging.BASIC_FORMAT
-LOG_FIELD_STYLES: Dict[str, Dict[str, Any]] = {
+LOG_FIELD_STYLES: dict[str, dict[str, Any]] = {
"asctime": {},
"hostname": {},
"levelname": {},
@@ -24,7 +24,7 @@
"prefix": {},
"programname": {},
}
-LOG_LEVEL_STYLES: Dict[str, Dict[str, Any]] = {
+LOG_LEVEL_STYLES: dict[str, dict[str, Any]] = {
"critical": {"color": "red", "bold": True},
"debug": {"color": "green"},
"error": {"color": "red"},
@@ -46,9 +46,7 @@ class LogSettings:
"level_styles": os.getenv("RUNWAY_LOG_LEVEL_STYLES"),
}
- def __init__(
- self, *, debug: int = 0, no_color: bool = False, verbose: bool = False
- ):
+ def __init__(self, *, debug: int = 0, no_color: bool = False, verbose: bool = False) -> None:
"""Instantiate class.
Args:
@@ -62,7 +60,7 @@ def __init__(
self.verbose = verbose
@property
- def coloredlogs(self) -> Dict[str, Any]:
+ def coloredlogs(self) -> dict[str, Any]:
"""Return settings for coloredlogs."""
return {
"fmt": self.fmt,
@@ -85,7 +83,7 @@ def fmt(self) -> str:
return LOG_FORMAT
@cached_property
- def field_styles(self) -> Dict[str, Any]:
+ def field_styles(self) -> dict[str, Any]:
"""Return log field styles.
If "RUNWAY_LOG_FIELD_STYLES" exists in the environment, it will be
@@ -98,14 +96,12 @@ def field_styles(self) -> Dict[str, Any]:
result = LOG_FIELD_STYLES.copy()
if self.ENV["field_styles"]:
result.update(
- coloredlogs.parse_encoded_styles( # type: ignore
- self.ENV["field_styles"]
- )
+ coloredlogs.parse_encoded_styles(self.ENV["field_styles"]) # type: ignore
)
return result
@cached_property
- def level_styles(self) -> Dict[str, Any]:
+ def level_styles(self) -> dict[str, Any]:
"""Return log level styles.
If "RUNWAY_LOG_LEVEL_STYLES" exists in the environment, it will be
@@ -118,9 +114,7 @@ def level_styles(self) -> Dict[str, Any]:
result = LOG_LEVEL_STYLES.copy()
if self.ENV["level_styles"]:
result.update(
- coloredlogs.parse_encoded_styles( # type: ignore
- self.ENV["level_styles"]
- )
+ coloredlogs.parse_encoded_styles(self.ENV["level_styles"]) # type: ignore
)
return result
@@ -134,9 +128,7 @@ def log_level(self) -> LogLevels:
return LogLevels.INFO
-def setup_logging(
- *, debug: int = 0, no_color: bool = False, verbose: bool = False
-) -> None:
+def setup_logging(*, debug: int = 0, no_color: bool = False, verbose: bool = False) -> None:
"""Configure log settings for Runway CLI.
Keyword Args:
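`logs.py` merges environment-supplied styles over the defaults with `coloredlogs.parse_encoded_styles`. A sketch of that merge, assuming the encoded syntax coloredlogs documents (`level=color[,attr];...`):

    from __future__ import annotations

    import os
    from typing import Any

    import coloredlogs  # third-party dependency already used above

    DEFAULT_LEVEL_STYLES: dict[str, dict[str, Any]] = {
        "error": {"color": "red"},
        "info": {},
    }

    def level_styles() -> dict[str, Any]:
        # Copy the defaults, then overlay anything encoded in the environment,
        # e.g. RUNWAY_LOG_LEVEL_STYLES="error=red,bold;info=green".
        result: dict[str, Any] = dict(DEFAULT_LEVEL_STYLES)
        encoded = os.getenv("RUNWAY_LOG_LEVEL_STYLES")
        if encoded:
            result.update(coloredlogs.parse_encoded_styles(encoded))
        return result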
diff --git a/runway/_cli/main.py b/runway/_cli/main.py
index c6ef0dec1..0250008b3 100644
--- a/runway/_cli/main.py
+++ b/runway/_cli/main.py
@@ -3,7 +3,7 @@
import argparse
import logging
import os
-from typing import Any, Dict
+from typing import Any
import click
@@ -15,7 +15,7 @@
LOGGER = logging.getLogger("runway.cli")
-CLICK_CONTEXT_SETTINGS: Dict[str, Any] = {
+CLICK_CONTEXT_SETTINGS: dict[str, Any] = {
"help_option_names": ["-h", "--help"],
"max_content_width": 999,
}
@@ -34,7 +34,7 @@ def invoke(self, ctx: click.Context) -> Any:
return super().invoke(ctx)
@staticmethod
- def __parse_global_options(ctx: click.Context) -> Dict[str, Any]:
+ def __parse_global_options(ctx: click.Context) -> dict[str, Any]:
"""Parse global options.
These options are passed to subcommands but should be parsed by the
@@ -44,20 +44,14 @@ def __parse_global_options(ctx: click.Context) -> Dict[str, Any]:
"""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("--ci", action="store_true", default=bool(os.getenv("CI")))
- parser.add_argument(
- "--debug", default=int(os.getenv("DEBUG", "0")), action="count"
- )
- parser.add_argument(
- "-e", "--deploy-environment", default=os.getenv("DEPLOY_ENVIRONMENT")
- )
+ parser.add_argument("--debug", default=int(os.getenv("DEBUG", "0")), action="count")
+ parser.add_argument("-e", "--deploy-environment", default=os.getenv("DEPLOY_ENVIRONMENT"))
parser.add_argument(
"--no-color",
action="store_true",
default=bool(os.getenv("RUNWAY_NO_COLOR")),
)
- parser.add_argument(
- "--verbose", action="store_true", default=bool(os.getenv("VERBOSE"))
- )
+ parser.add_argument("--verbose", action="store_true", default=bool(os.getenv("VERBOSE")))
args, _ = parser.parse_known_args(list(ctx.args))
return vars(args)
@@ -75,9 +69,7 @@ def cli(ctx: click.Context, **_: Any) -> None:
"""
opts = ctx.meta["global.options"]
- setup_logging(
- debug=opts["debug"], no_color=opts["no_color"], verbose=opts["verbose"]
- )
+ setup_logging(debug=opts["debug"], no_color=opts["no_color"], verbose=opts["verbose"])
ctx.obj = CliContext(**opts)
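`__parse_global_options` above leans on `argparse.parse_known_args` so global flags can be read before click dispatches to a subcommand. The technique on its own:

    from __future__ import annotations

    import argparse
    from typing import Any

    def parse_global_options(argv: list[str]) -> dict[str, Any]:
        parser = argparse.ArgumentParser(add_help=False)
        parser.add_argument("--debug", action="count", default=0)
        parser.add_argument("--no-color", action="store_true")
        # parse_known_args ignores anything it does not recognize, so
        # subcommand-specific options pass through untouched.
        args, _unknown = parser.parse_known_args(argv)
        return vars(args)

    print(parse_global_options(["deploy", "--debug", "--tag", "app"]))
    # -> {'debug': 1, 'no_color': False}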
diff --git a/runway/_cli/options.py b/runway/_cli/options.py
index 463ca6630..2844fa4f6 100644
--- a/runway/_cli/options.py
+++ b/runway/_cli/options.py
@@ -14,8 +14,7 @@
"--debug",
count=True,
envvar="DEBUG",
- help="Supply once to display Runway debug logs. "
- "Supply twice to display all debug logs.",
+ help="Supply once to display Runway debug logs. Supply twice to display all debug logs.",
)
deploy_environment = click.option(
diff --git a/runway/_cli/utils.py b/runway/_cli/utils.py
index be895ed40..ca6f4ccce 100644
--- a/runway/_cli/utils.py
+++ b/runway/_cli/utils.py
@@ -6,21 +6,25 @@
import os
import sys
from pathlib import Path
-from typing import Any, Iterator, List, Optional, Tuple
+from typing import TYPE_CHECKING, Any
import click
import yaml
from ..compat import cached_property
from ..config import RunwayConfig
-from ..config.components.runway import (
- RunwayDeploymentDefinition,
- RunwayModuleDefinition,
-)
from ..context import RunwayContext
from ..core.components import DeployEnvironment
from ..exceptions import ConfigNotFound
+if TYPE_CHECKING:
+ from collections.abc import Iterator
+
+ from ..config.components.runway import (
+ RunwayDeploymentDefinition,
+ RunwayModuleDefinition,
+ )
+
LOGGER = logging.getLogger(__name__)
@@ -32,7 +36,7 @@ def __init__(
*,
ci: bool = False,
debug: int = 0,
- deploy_environment: Optional[str] = None,
+ deploy_environment: str | None = None,
verbose: bool = False,
**_: Any,
) -> None:
@@ -95,7 +99,7 @@ def runway_config_path(self) -> Path:
sys.exit(1)
def get_runway_context(
- self, deploy_environment: Optional[DeployEnvironment] = None
+ self, deploy_environment: DeployEnvironment | None = None
) -> RunwayContext:
"""Get a Runway context object.
@@ -104,7 +108,7 @@ def get_runway_context(
Args:
deploy_environment: Object representing the current deploy environment.
- Returns
+ Returns:
RunwayContext
"""
@@ -168,9 +172,9 @@ def __str__(self) -> str:
def select_deployments(
ctx: click.Context,
- deployments: List[RunwayDeploymentDefinition],
- tags: Optional[Tuple[str, ...]] = None,
-) -> List[RunwayDeploymentDefinition]:
+ deployments: list[RunwayDeploymentDefinition],
+ tags: tuple[str, ...] | None = None,
+) -> list[RunwayDeploymentDefinition]:
"""Select which deployments to run.
Uses tags, interactive prompts, or selects all.
@@ -178,6 +182,7 @@ def select_deployments(
Args:
ctx: Current click context.
deployments: List of deployment(s) to choose from.
+ tags: Deployment tags to filter.
Returns:
Selected deployment(s).
@@ -192,9 +197,7 @@ def select_deployments(
LOGGER.debug("only one deployment detected; no selection necessary")
else:
# build the menu before displaying it so debug logs don't break up what is printed
- deployment_menu = yaml.safe_dump(
- {i + 1: d.menu_entry for i, d in enumerate(deployments)}
- )
+ deployment_menu = yaml.safe_dump({i + 1: d.menu_entry for i, d in enumerate(deployments)})
click.secho("\nConfigured deployments\n", bold=True, underline=True)
click.echo(deployment_menu)
if ctx.command.name == "destroy":
@@ -206,9 +209,7 @@ def select_deployments(
'Enter number of deployment to run (or "all")',
default="all",
show_choices=False,
- type=click.Choice(
- [str(n) for n in range(1, len(deployments) + 1)] + ["all"]
- ),
+ type=click.Choice([str(n) for n in range(1, len(deployments) + 1)] + ["all"]),
)
if choice != "all":
deployments = [deployments[int(choice) - 1]]
@@ -217,8 +218,8 @@ def select_deployments(
def select_modules(
- ctx: click.Context, modules: List[RunwayModuleDefinition]
-) -> List[RunwayModuleDefinition]:
+ ctx: click.Context, modules: list[RunwayModuleDefinition]
+) -> list[RunwayModuleDefinition]:
"""Interactively select which modules to run.
Args:
@@ -233,8 +234,7 @@ def select_modules(
LOGGER.debug("only one module detected; no selection necessary")
if ctx.command.name == "destroy":
LOGGER.info(
- "Only one module detected; all modules "
- "automatically selected for deletion."
+ "Only one module detected; all modules automatically selected for deletion."
)
if not click.confirm("Proceed?"):
ctx.exit(0)
@@ -243,8 +243,7 @@ def select_modules(
click.echo(yaml.safe_dump({i + 1: m.menu_entry for i, m in enumerate(modules)}))
if ctx.command.name == "destroy":
click.echo(
- '(operating in destroy mode -- "all" will destroy all '
- "modules in reverse order)\n"
+ '(operating in destroy mode -- "all" will destroy all modules in reverse order)\n'
)
choice = click.prompt(
'Enter number of module to run (or "all")',
@@ -263,9 +262,9 @@ def select_modules(
def select_modules_using_tags(
ctx: click.Context,
- deployments: List[RunwayDeploymentDefinition],
- tags: Tuple[str, ...],
-) -> List[RunwayDeploymentDefinition]:
+ deployments: list[RunwayDeploymentDefinition],
+ tags: tuple[str, ...],
+) -> list[RunwayDeploymentDefinition]:
"""Select modules to run using tags.
Args:
@@ -277,9 +276,9 @@ def select_modules_using_tags(
List of selected deployments with selected modules.
"""
- deployments_to_run: List[RunwayDeploymentDefinition] = []
+ deployments_to_run: list[RunwayDeploymentDefinition] = []
for deployment in deployments:
- modules_to_run: List[RunwayModuleDefinition] = []
+ modules_to_run: list[RunwayModuleDefinition] = []
for module in deployment.modules:
if module.child_modules:
module.child_modules = [
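Moving `Iterator` and the config models under `if TYPE_CHECKING:` (as `utils.py` now does) keeps typing-only imports out of the runtime import graph, avoiding import cycles and startup cost. The general shape:

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Imported only while a type checker runs; never at runtime.
        from collections.abc import Iterator

    def count_up(limit: int) -> Iterator[int]:
        # The annotation stays a string thanks to the __future__ import, so
        # Iterator does not need to exist at runtime.
        yield from range(limit)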
diff --git a/runway/_logging.py b/runway/_logging.py
index b80df2b67..15ea7592f 100644
--- a/runway/_logging.py
+++ b/runway/_logging.py
@@ -1,8 +1,13 @@
"""Runway logging."""
+from __future__ import annotations
+
import logging
from enum import IntEnum
-from typing import Any, MutableMapping, Tuple, Union
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+ from collections.abc import MutableMapping
class LogLevels(IntEnum):
@@ -21,7 +26,7 @@ class LogLevels(IntEnum):
@classmethod
def has_value(cls, value: int) -> bool:
"""Check if IntEnum has a value."""
- return value in cls._value2member_map_ # pylint: disable=no-member
+ return value in cls._value2member_map_
# Issue with this version of LoggerAdapter https://github.com/python/typeshed/issues/7855
@@ -54,18 +59,20 @@ def __init__(
self.prefix = prefix
self.prefix_template = prefix_template
- def notice(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None:
+ def notice(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None:
"""Delegate a notice call to the underlying logger.
Args:
msg: String template or exception to use for the log record.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.log(LogLevels.NOTICE, msg, *args, **kwargs)
def process(
- self, msg: Union[Exception, str], kwargs: MutableMapping[str, Any]
- ) -> Tuple[str, MutableMapping[str, Any]]:
+ self, msg: Exception | str, kwargs: MutableMapping[str, Any]
+ ) -> tuple[str, MutableMapping[str, Any]]:
"""Process the message to append the prefix.
Args:
@@ -75,7 +82,7 @@ def process(
"""
return self.prefix_template.format(prefix=self.prefix, msg=msg), kwargs
- def setLevel(self, level: Union[int, str]) -> None: # noqa
+ def setLevel(self, level: int | str) -> None: # noqa: N802
"""Set the specified level on the underlying logger.
Python 2 backport.
@@ -83,20 +90,24 @@ def setLevel(self, level: Union[int, str]) -> None: # noqa
"""
self.logger.setLevel(level)
- def success(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None:
+ def success(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None:
"""Delegate a success call to the underlying logger.
Args:
msg: String template or exception to use for the log record.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.log(LogLevels.SUCCESS, msg, *args, **kwargs)
- def verbose(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None:
+ def verbose(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None:
"""Delegate a verbose call to the underlying logger.
Args:
msg: String template or exception to use for the log record.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.log(LogLevels.VERBOSE, msg, *args, **kwargs)
@@ -105,7 +116,7 @@ def verbose(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None
class RunwayLogger(logging.Logger):
"""Extend built-in logger with additional levels."""
- def __init__(self, name: str, level: Union[int, str] = logging.NOTSET) -> None:
+ def __init__(self, name: str, level: int | str = logging.NOTSET) -> None:
"""Instantiate the class.
Args:
@@ -118,31 +129,37 @@ def __init__(self, name: str, level: Union[int, str] = logging.NOTSET) -> None:
logging.addLevelName(LogLevels.NOTICE, LogLevels.NOTICE.name)
logging.addLevelName(LogLevels.SUCCESS, LogLevels.SUCCESS.name)
- def notice(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None:
+ def notice(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None:
"""Log 'msg % args' with severity `NOTICE`.
Args:
msg: String template or exception to use for the log record.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
if self.isEnabledFor(LogLevels.NOTICE):
self._log(LogLevels.NOTICE, msg, args, **kwargs)
- def success(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None:
+ def success(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None:
"""Log 'msg % args' with severity `SUCCESS`.
Args:
msg: String template or exception to use for the log record.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
if self.isEnabledFor(LogLevels.SUCCESS):
self._log(LogLevels.SUCCESS, msg, args, **kwargs)
- def verbose(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None:
+ def verbose(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None:
"""Log 'msg % args' with severity `VERBOSE`.
Args:
msg: String template or exception to use for the log record.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
if self.isEnabledFor(LogLevels.VERBOSE):
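`RunwayLogger` builds on two stdlib hooks: register the level name with `logging.addLevelName`, then gate on `isEnabledFor` before calling `_log`. A minimal sketch with an illustrative SUCCESS level (the numeric value here is an assumption, not Runway's):

    import logging
    from typing import Any

    SUCCESS = 35  # illustrative: between WARNING (30) and ERROR (40)
    logging.addLevelName(SUCCESS, "SUCCESS")

    class DemoLogger(logging.Logger):
        def success(self, msg: str, *args: Any, **kwargs: Any) -> None:
            # Same gate-then-_log shape as RunwayLogger.notice/success/verbose.
            if self.isEnabledFor(SUCCESS):
                self._log(SUCCESS, msg, args, **kwargs)

    logging.setLoggerClass(DemoLogger)
    log = logging.getLogger("demo.success")
    log.success("deployment complete")  # type: ignore[attr-defined]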
diff --git a/runway/aws_sso_botocore/credentials.py b/runway/aws_sso_botocore/credentials.py
index e842a9767..ee532ed4d 100644
--- a/runway/aws_sso_botocore/credentials.py
+++ b/runway/aws_sso_botocore/credentials.py
@@ -248,7 +248,6 @@ class SSOProvider(CredentialProvider):
"sso_account_id",
]
- # pylint: disable=super-init-not-called
def __init__(
self, load_config, client_creator, profile_name, cache=None, token_cache=None
):
diff --git a/runway/blueprints/k8s/k8s_iam.py b/runway/blueprints/k8s/k8s_iam.py
index 75c33443d..57397dd71 100755
--- a/runway/blueprints/k8s/k8s_iam.py
+++ b/runway/blueprints/k8s/k8s_iam.py
@@ -52,9 +52,7 @@ def create_template(self) -> None:
)
nodeinstanceprofile = template.add_resource(
- iam.InstanceProfile(
- "NodeInstanceProfile", Path="/", Roles=[nodeinstancerole.ref()]
- )
+ iam.InstanceProfile("NodeInstanceProfile", Path="/", Roles=[nodeinstancerole.ref()])
)
template.add_output(
Output(
@@ -105,6 +103,4 @@ def create_template(self) -> None:
if __name__ == "__main__":
from runway.context import CfnginContext
- print( # noqa: T201
- Iam("test", CfnginContext(parameters={"namespace": "test"})).to_json()
- )
+ print(Iam("test", CfnginContext(parameters={"namespace": "test"})).to_json()) # noqa: T201
diff --git a/runway/blueprints/k8s/k8s_master.py b/runway/blueprints/k8s/k8s_master.py
index d253f2457..7fc8c7318 100755
--- a/runway/blueprints/k8s/k8s_master.py
+++ b/runway/blueprints/k8s/k8s_master.py
@@ -159,6 +159,4 @@ def create_template(self) -> None:
if __name__ == "__main__":
from runway.context import CfnginContext
- print( # noqa: T201
- Cluster("test", CfnginContext(parameters={"namespace": "test"})).to_json()
- )
+ print(Cluster("test", CfnginContext(parameters={"namespace": "test"})).to_json()) # noqa: T201
diff --git a/runway/blueprints/k8s/k8s_workers.py b/runway/blueprints/k8s/k8s_workers.py
index 8181b5749..18caa7c55 100755
--- a/runway/blueprints/k8s/k8s_workers.py
+++ b/runway/blueprints/k8s/k8s_workers.py
@@ -22,18 +22,18 @@
def get_valid_instance_types() -> Any:
"""Return list of instance types from either a JSON or gzipped JSON file."""
- base_path = os.path.join(
- os.path.dirname(botocore.__file__), "data", "ec2", "2016-11-15"
+ base_path = os.path.join( # noqa: PTH118
+ os.path.dirname(botocore.__file__), "data", "ec2", "2016-11-15" # noqa: PTH120
)
- json_path = os.path.join(base_path, "service-2.json")
- gzip_path = os.path.join(base_path, "service-2.json.gz")
+ json_path = os.path.join(base_path, "service-2.json") # noqa: PTH118
+ gzip_path = os.path.join(base_path, "service-2.json.gz") # noqa: PTH118
- if os.path.exists(gzip_path):
+ if os.path.exists(gzip_path): # noqa: PTH110
with gzip.open(gzip_path, "rt", encoding="utf-8") as stream:
data = json.load(stream)
- elif os.path.exists(json_path):
- with open(json_path, "r", encoding="utf-8") as stream:
+ elif os.path.exists(json_path): # noqa: PTH110
+ with open(json_path, encoding="utf-8") as stream: # noqa: PTH123
data = json.load(stream)
else:
raise FileNotFoundError("Neither JSON nor gzipped JSON file found.")
@@ -47,8 +47,7 @@ class NodeGroup(Blueprint):
VARIABLES = {
"KeyName": {
"type": CFNString, # string to allow it to be unset
- "description": "(Optional) EC2 Key Pair to allow SSH "
- "access to the instances",
+ "description": "(Optional) EC2 Key Pair to allow SSH access to the instances",
"default": "",
},
"NodeImageId": {
@@ -57,10 +56,10 @@ class NodeGroup(Blueprint):
},
"NodeInstanceType": {
"type": CFNString,
- "description": "EC2 instance type for the node " "instances",
+ "description": "EC2 instance type for the node instances",
"default": "t2.medium",
"allowed_values": get_valid_instance_types(),
- "constraint_description": "Must be a valid EC2 " "instance type",
+ "constraint_description": "Must be a valid EC2 instance type",
},
"NodeInstanceProfile": {
"type": CFNString,
@@ -68,12 +67,12 @@ class NodeGroup(Blueprint):
},
"NodeAutoScalingGroupMinSize": {
"type": CFNNumber,
- "description": "Minimum size of Node " "Group ASG.",
+ "description": "Minimum size of Node Group ASG.",
"default": 1,
},
"NodeAutoScalingGroupMaxSize": {
"type": CFNNumber,
- "description": "Maximum size of Node " "Group ASG.",
+ "description": "Maximum size of Node Group ASG.",
"default": 3,
},
"NodeVolumeSize": {
@@ -98,16 +97,16 @@ class NodeGroup(Blueprint):
},
"NodeGroupName": {
"type": CFNString,
- "description": "Unique identifier for the Node " "Group.",
+ "description": "Unique identifier for the Node Group.",
},
"ClusterControlPlaneSecurityGroup": {
"type": EC2SecurityGroupId,
- "description": "The security " "group of the " "cluster control " "plane.",
+ "description": "The security group of the cluster control plane.",
},
"VpcId": {"type": EC2VPCId, "description": "The VPC of the worker instances"},
"Subnets": {
"type": EC2SubnetIdList,
- "description": "The subnets where workers can be " "created.",
+ "description": "The subnets where workers can be created.",
},
"UseDesiredInstanceCount": {
"type": CFNString,
@@ -120,8 +119,7 @@ def create_template(self) -> None:
template = self.template
template.set_version("2010-09-09")
template.set_description(
- "Kubernetes workers via EKS - V1.0.0 "
- "- compatible with amazon-eks-node-v23+"
+ "Kubernetes workers via EKS - V1.0.0 - compatible with amazon-eks-node-v23+"
)
# Metadata
@@ -159,9 +157,7 @@ def create_template(self) -> None:
},
{
"Label": {"default": "Worker Network Configuration"},
- "Parameters": [
- self.variables[i].name for i in ["VpcId", "Subnets"]
- ],
+ "Parameters": [self.variables[i].name for i in ["VpcId", "Subnets"]],
},
]
}
@@ -173,9 +169,7 @@ def create_template(self) -> None:
"DesiredInstanceCountSpecified",
Equals(self.variables["UseDesiredInstanceCount"].ref, "true"),
)
- template.add_condition(
- "KeyNameSpecified", Not(Equals(self.variables["KeyName"].ref, ""))
- )
+ template.add_condition("KeyNameSpecified", Not(Equals(self.variables["KeyName"].ref, "")))
# Resources
nodesecuritygroup = template.add_resource(
@@ -215,9 +209,7 @@ def create_template(self) -> None:
Description="Allow worker Kubelets and pods to receive "
"communication from the cluster control plane",
GroupId=nodesecuritygroup.ref(),
- SourceSecurityGroupId=self.variables[
- "ClusterControlPlaneSecurityGroup"
- ].ref,
+ SourceSecurityGroupId=self.variables["ClusterControlPlaneSecurityGroup"].ref,
IpProtocol="tcp",
FromPort=1025,
ToPort=65535,
@@ -242,9 +234,7 @@ def create_template(self) -> None:
"443 to receive communication from cluster "
"control plane",
GroupId=nodesecuritygroup.ref(),
- SourceSecurityGroupId=self.variables[
- "ClusterControlPlaneSecurityGroup"
- ].ref, # noqa
+ SourceSecurityGroupId=self.variables["ClusterControlPlaneSecurityGroup"].ref,
IpProtocol="tcp",
FromPort=443,
ToPort=443,
@@ -266,7 +256,7 @@ def create_template(self) -> None:
template.add_resource(
ec2.SecurityGroupIngress(
"ClusterControlPlaneSecurityGroupIngress",
- Description="Allow pods to communicate with the cluster API " "Server",
+ Description="Allow pods to communicate with the cluster API Server",
GroupId=self.variables["ClusterControlPlaneSecurityGroup"].ref,
SourceSecurityGroupId=nodesecuritygroup.ref(),
IpProtocol="tcp",
@@ -294,9 +284,7 @@ def create_template(self) -> None:
),
ImageId=self.variables["NodeImageId"].ref,
InstanceType=self.variables["NodeInstanceType"].ref,
- KeyName=If(
- "KeyNameSpecified", self.variables["KeyName"].ref, NoValue
- ),
+ KeyName=If("KeyNameSpecified", self.variables["KeyName"].ref, NoValue),
MetadataOptions=ec2.MetadataOptions(
HttpPutResponseHopLimit=2,
HttpEndpoint="enabled",
@@ -305,7 +293,7 @@ def create_template(self) -> None:
SecurityGroupIds=[nodesecuritygroup.ref()],
UserData=Base64(
Sub(
- "\n".join(
+ "\n".join( # noqa: FLY002
[
"#!/bin/bash",
"set -o xtrace",
@@ -342,12 +330,8 @@ def create_template(self) -> None:
MinSize=self.variables["NodeAutoScalingGroupMinSize"].ref,
MaxSize=self.variables["NodeAutoScalingGroupMaxSize"].ref,
Tags=[
- autoscaling.Tag(
- "Name", Sub("${ClusterName}-${NodeGroupName}-Node"), True
- ),
- autoscaling.Tag(
- Sub("kubernetes.io/cluster/${ClusterName}"), "owned", True
- ),
+ autoscaling.Tag("Name", Sub("${ClusterName}-${NodeGroupName}-Node"), True),
+ autoscaling.Tag(Sub("kubernetes.io/cluster/${ClusterName}"), "owned", True),
],
VPCZoneIdentifier=self.variables["Subnets"].ref,
UpdatePolicy=UpdatePolicy(
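The `# noqa: PTH*` comments above mark `os.path` calls that ruff would migrate to `pathlib`. For reference, the pathlib equivalents of what `get_valid_instance_types` keeps suppressed:

    from __future__ import annotations

    import json
    from pathlib import Path
    from typing import Any

    def load_service_json(base: Path) -> dict[str, Any]:
        json_path = base / "service-2.json"  # replaces os.path.join (PTH118)
        if json_path.exists():               # replaces os.path.exists (PTH110)
            # replaces open(...) (PTH123)
            return json.loads(json_path.read_text(encoding="utf-8"))
        raise FileNotFoundError(json_path)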
diff --git a/runway/blueprints/staticsite/auth_at_edge.py b/runway/blueprints/staticsite/auth_at_edge.py
index 04ed244b6..1e7bbeb36 100644
--- a/runway/blueprints/staticsite/auth_at_edge.py
+++ b/runway/blueprints/staticsite/auth_at_edge.py
@@ -8,7 +8,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
import awacs.logs
import awacs.s3
@@ -27,7 +27,7 @@
class AuthAtEdge(StaticSite):
"""Auth@Edge Blueprint."""
- AUTH_VARIABLES: Dict[str, BlueprintVariableTypeDef] = {
+ AUTH_VARIABLES: dict[str, BlueprintVariableTypeDef] = {
"OAuthScopes": {"type": list, "default": [], "description": "OAuth2 Scopes"},
"PriceClass": {
"type": str,
@@ -44,8 +44,7 @@ class AuthAtEdge(StaticSite):
"RedirectPathAuthRefresh": {
"type": str,
"default": "/refreshauth",
- "description": "The URL path that should "
- "handle the JWT refresh request.",
+ "description": "The URL path that should handle the JWT refresh request.",
},
"NonSPAMode": {
"type": bool,
@@ -59,13 +58,13 @@ class AuthAtEdge(StaticSite):
},
}
IAM_ARN_PREFIX = "arn:aws:iam::aws:policy/service-role/"
- VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {}
+ VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {}
def __init__(
self,
name: str,
context: CfnginContext,
- mappings: Optional[Dict[str, Dict[str, Any]]] = None,
+ mappings: Optional[dict[str, dict[str, Any]]] = None,
description: Optional[str] = None,
) -> None:
"""Initialize the Blueprint.
@@ -77,9 +76,7 @@ def __init__(
description: Used to describe the resulting CloudFormation template.
"""
- super().__init__(
- name=name, context=context, description=description, mappings=mappings
- )
+ super().__init__(name=name, context=context, description=description, mappings=mappings)
self.VARIABLES.update(StaticSite.VARIABLES)
self.VARIABLES.update(self.AUTH_VARIABLES)
@@ -94,8 +91,8 @@ def create_template(self) -> None:
bucket = self.add_bucket()
oai = self.add_origin_access_identity()
bucket_policy = self.add_cloudfront_bucket_policy(bucket, oai)
- # TODO Make this available in Auth@Edge
- lambda_function_associations: List[cloudfront.LambdaFunctionAssociation] = []
+ # TODO (kyle): make this available in Auth@Edge
+ lambda_function_associations: list[cloudfront.LambdaFunctionAssociation] = []
if self.directory_index_specified:
index_rewrite = self._get_index_rewrite_role_function_and_version()
@@ -109,36 +106,28 @@ def create_template(self) -> None:
check_auth_name,
"Check Authorization information for request",
"check_auth",
- self.add_lambda_execution_role(
- "CheckAuthLambdaExecutionRole", check_auth_name
- ),
+ self.add_lambda_execution_role("CheckAuthLambdaExecutionRole", check_auth_name),
)
http_headers_name = "HttpHeaders"
http_headers_lambda = self.get_auth_at_edge_lambda_and_ver(
http_headers_name,
"Additional Headers added to every response",
"http_headers",
- self.add_lambda_execution_role(
- "HttpHeadersLambdaExecutionRole", http_headers_name
- ),
+ self.add_lambda_execution_role("HttpHeadersLambdaExecutionRole", http_headers_name),
)
parse_auth_name = "ParseAuth"
parse_auth_lambda = self.get_auth_at_edge_lambda_and_ver(
parse_auth_name,
"Parse the Authorization Headers/Cookies for the request",
"parse_auth",
- self.add_lambda_execution_role(
- "ParseAuthLambdaExecutionRole", parse_auth_name
- ),
+ self.add_lambda_execution_role("ParseAuthLambdaExecutionRole", parse_auth_name),
)
refresh_auth_name = "RefreshAuth"
refresh_auth_lambda = self.get_auth_at_edge_lambda_and_ver(
refresh_auth_name,
"Refresh the Authorization information when expired",
"refresh_auth",
- self.add_lambda_execution_role(
- "RefreshAuthLambdaExecutionRole", refresh_auth_name
- ),
+ self.add_lambda_execution_role("RefreshAuthLambdaExecutionRole", refresh_auth_name),
)
sign_out_name = "SignOut"
sign_out_lambda = self.get_auth_at_edge_lambda_and_ver(
@@ -163,7 +152,7 @@ def create_template(self) -> None:
def get_auth_at_edge_lambda_and_ver(
self, title: str, description: str, handle: str, role: iam.Role
- ) -> Dict[str, Any]:
+ ) -> dict[str, Any]:
"""Create a lambda function and its version.
Args:
@@ -217,9 +206,7 @@ def get_auth_at_edge_lambda(
return lamb
- def add_version(
- self, title: str, lambda_function: awslambda.Function
- ) -> awslambda.Version:
+ def add_version(self, title: str, lambda_function: awslambda.Function) -> awslambda.Version:
"""Create a version association with a Lambda@Edge function.
In order to ensure different versions of the function
@@ -235,22 +222,20 @@ def add_version(
s3_key = lambda_function.properties["Code"].to_dict()["S3Key"]
code_hash = s3_key.split(".")[0].split("-")[-1]
return self.template.add_resource(
- awslambda.Version(
- title + "Ver" + code_hash, FunctionName=lambda_function.ref()
- )
+ awslambda.Version(title + "Ver" + code_hash, FunctionName=lambda_function.ref())
)
def get_distribution_options(
self,
bucket: s3.Bucket,
oai: cloudfront.CloudFrontOriginAccessIdentity,
- lambda_funcs: List[cloudfront.LambdaFunctionAssociation],
+ lambda_funcs: list[cloudfront.LambdaFunctionAssociation],
check_auth_lambda_version: awslambda.Version,
http_headers_lambda_version: awslambda.Version,
parse_auth_lambda_version: awslambda.Version,
refresh_auth_lambda_version: awslambda.Version,
sign_out_lambda_version: awslambda.Version,
- ) -> Dict[str, Any]:
+ ) -> dict[str, Any]:
"""Retrieve the options for our CloudFront distribution.
Keyword Args:
@@ -353,7 +338,7 @@ def get_distribution_options(
"ViewerCertificate": self.add_acm_cert(),
}
- def _get_error_responses(self) -> List[cloudfront.CustomErrorResponse]:
+ def _get_error_responses(self) -> list[cloudfront.CustomErrorResponse]:
"""Return error response based on site stack variables.
When custom_error_responses are defined return those, if running
@@ -379,9 +364,9 @@ def _get_error_responses(self) -> List[cloudfront.CustomErrorResponse]:
]
# pyright: reportIncompatibleMethodOverride=none
- def _get_cloudfront_bucket_policy_statements( # pylint: disable=arguments-differ
+ def _get_cloudfront_bucket_policy_statements(
self, bucket: s3.Bucket, oai: cloudfront.CloudFrontOriginAccessIdentity
- ) -> List[Statement]:
+ ) -> list[Statement]:
return [
Statement(
Action=[awacs.s3.GetObject],
diff --git a/runway/blueprints/staticsite/dependencies.py b/runway/blueprints/staticsite/dependencies.py
index 595de7622..f055462b2 100755
--- a/runway/blueprints/staticsite/dependencies.py
+++ b/runway/blueprints/staticsite/dependencies.py
@@ -98,12 +98,8 @@ def create_template(self) -> None:
Statement(
Action=[awacs.s3.PutObject],
Effect=Allow,
- Principal=AWSPrincipal(
- Join(":", ["arn:aws:iam:", AccountId, "root"])
- ),
- Resource=[
- Join("", ["arn:aws:s3:::", awslogbucket.ref(), "/*"])
- ],
+ Principal=AWSPrincipal(Join(":", ["arn:aws:iam:", AccountId, "root"])),
+ Resource=[Join("", ["arn:aws:s3:::", awslogbucket.ref(), "/*"])],
)
],
),
@@ -114,11 +110,7 @@ def create_template(self) -> None:
"Artifacts",
AccessControl=s3.Private,
LifecycleConfiguration=s3.LifecycleConfiguration(
- Rules=[
- s3.LifecycleRule(
- NoncurrentVersionExpirationInDays=90, Status="Enabled"
- )
- ]
+ Rules=[s3.LifecycleRule(NoncurrentVersionExpirationInDays=90, Status="Enabled")]
),
VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
)
@@ -142,11 +134,8 @@ def create_template(self) -> None:
"SupportedIdentityProviders"
]
- redirect_domains = [
- add_url_scheme(x) for x in self.variables["Aliases"]
- ] + [
- add_url_scheme(x)
- for x in self.variables["AdditionalRedirectDomains"]
+ redirect_domains = [add_url_scheme(x) for x in self.variables["Aliases"]] + [
+ add_url_scheme(x) for x in self.variables["AdditionalRedirectDomains"]
]
redirect_uris = get_redirect_uris(
redirect_domains,
@@ -161,9 +150,7 @@ def create_template(self) -> None:
]["callback_urls"]
if self.variables["CreateUserPool"]:
- user_pool = template.add_resource(
- cognito.UserPool("AuthAtEdgeUserPool")
- )
+ user_pool = template.add_resource(cognito.UserPool("AuthAtEdgeUserPool"))
user_pool_id = user_pool.ref()
@@ -175,9 +162,7 @@ def create_template(self) -> None:
)
)
else:
- user_pool_id = self.context.hook_data["aae_user_pool_id_retriever"][
- "id"
- ]
+ user_pool_id = self.context.hook_data["aae_user_pool_id_retriever"]["id"]
userpool_client_params["UserPoolId"] = user_pool_id
client = template.add_resource(
diff --git a/runway/blueprints/staticsite/staticsite.py b/runway/blueprints/staticsite/staticsite.py
index 1cd91c701..9722a7383 100755
--- a/runway/blueprints/staticsite/staticsite.py
+++ b/runway/blueprints/staticsite/staticsite.py
@@ -5,7 +5,7 @@
import hashlib
import logging
import os
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Union
+from typing import TYPE_CHECKING, Any, ClassVar, Union
import awacs.awslambda
import awacs.iam
@@ -52,7 +52,7 @@ class _IndexRewriteFunctionInfoTypeDef(TypedDict):
class StaticSite(Blueprint):
"""CFNgin blueprint for creating S3 bucket and CloudFront distribution."""
- VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {
+ VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {
"AcmCertificateArn": {
"type": str,
"default": "",
@@ -61,7 +61,7 @@ class StaticSite(Blueprint):
"Aliases": {
"type": list,
"default": [],
- "description": "(Optional) Domain aliases the " "distribution",
+ "description": "(Optional) Domain aliases the distribution",
},
"Compress": {
"type": bool,
@@ -87,9 +87,7 @@ class StaticSite(Blueprint):
"RewriteDirectoryIndex": {
"type": str,
"default": "",
- "description": "(Optional) File name to "
- "append to directory "
- "requests.",
+ "description": "(Optional) File name to append to directory requests.",
},
"RoleBoundaryArn": {
"type": str,
@@ -101,17 +99,17 @@ class StaticSite(Blueprint):
"WAFWebACL": {
"type": str,
"default": "",
- "description": "(Optional) WAF id to associate with the " "distribution.",
+ "description": "(Optional) WAF id to associate with the distribution.",
},
"custom_error_responses": {
"type": list,
"default": [],
- "description": "(Optional) Custom error " "responses.",
+ "description": "(Optional) Custom error responses.",
},
"lambda_function_associations": {
"type": list,
"default": [],
- "description": "(Optional) Lambda " "function " "associations.",
+ "description": "(Optional) Lambda function associations.",
},
}
@@ -165,10 +163,8 @@ def create_template(self) -> None:
if self.directory_index_specified:
index_rewrite = self._get_index_rewrite_role_function_and_version()
- lambda_function_associations = (
- self.get_directory_index_lambda_association(
- lambda_function_associations, index_rewrite["version"]
- )
+ lambda_function_associations = self.get_directory_index_lambda_association(
+ lambda_function_associations, index_rewrite["version"]
)
distribution_options = self.get_cloudfront_distribution_options(
@@ -178,7 +174,7 @@ def create_template(self) -> None:
else:
self.add_bucket_policy(bucket)
- def get_lambda_associations(self) -> List[cloudfront.LambdaFunctionAssociation]:
+ def get_lambda_associations(self) -> list[cloudfront.LambdaFunctionAssociation]:
"""Retrieve any lambda associations from the instance variables."""
# If custom associations defined, use them
if self.variables["lambda_function_associations"]:
@@ -192,9 +188,9 @@ def get_lambda_associations(self) -> List[cloudfront.LambdaFunctionAssociation]:
@staticmethod
def get_directory_index_lambda_association(
- lambda_associations: List[cloudfront.LambdaFunctionAssociation],
+ lambda_associations: list[cloudfront.LambdaFunctionAssociation],
directory_index_rewrite_version: awslambda.Version,
- ) -> List[cloudfront.LambdaFunctionAssociation]:
+ ) -> list[cloudfront.LambdaFunctionAssociation]:
"""Retrieve the directory index lambda associations with the added rewriter.
Args:
@@ -214,8 +210,8 @@ def get_cloudfront_distribution_options(
self,
bucket: s3.Bucket,
oai: cloudfront.CloudFrontOriginAccessIdentity,
- lambda_function_associations: List[cloudfront.LambdaFunctionAssociation],
- ) -> Dict[str, Any]:
+ lambda_function_associations: list[cloudfront.LambdaFunctionAssociation],
+ ) -> dict[str, Any]:
"""Retrieve the options for our CloudFront distribution.
Args:
@@ -275,7 +271,7 @@ def get_cloudfront_distribution_options(
"ViewerCertificate": self.add_acm_cert(),
}
- def add_aliases(self) -> Union[List[str], Ref]:
+ def add_aliases(self) -> Union[list[str], Ref]:
"""Add aliases."""
if self.aliases_specified:
return self.variables["Aliases"]
@@ -309,7 +305,7 @@ def add_origin_access_identity(self) -> cloudfront.CloudFrontOriginAccessIdentit
return self.template.add_resource(
cloudfront.CloudFrontOriginAccessIdentity(
"OAI",
- CloudFrontOriginAccessIdentityConfig=cloudfront.CloudFrontOriginAccessIdentityConfig( # noqa
+ CloudFrontOriginAccessIdentityConfig=cloudfront.CloudFrontOriginAccessIdentityConfig(
Comment="CF access to website"
),
)
@@ -364,19 +360,13 @@ def add_bucket(self) -> s3.Bucket:
Rules=[s3.OwnershipControlsRule(ObjectOwnership="ObjectWriter")]
),
LifecycleConfiguration=s3.LifecycleConfiguration(
- Rules=[
- s3.LifecycleRule(
- NoncurrentVersionExpirationInDays=90, Status="Enabled"
- )
- ]
+ Rules=[s3.LifecycleRule(NoncurrentVersionExpirationInDays=90, Status="Enabled")]
),
VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
)
)
self.template.add_output(
- Output(
- "BucketName", Description="Name of website bucket", Value=bucket.ref()
- )
+ Output("BucketName", Description="Name of website bucket", Value=bucket.ref())
)
if not self.cf_enabled:
@@ -413,9 +403,7 @@ def add_cloudfront_bucket_policy(
Bucket=bucket.ref(),
PolicyDocument=PolicyDocument(
Version="2012-10-17",
- Statement=self._get_cloudfront_bucket_policy_statements(
- bucket, oai
- ),
+ Statement=self._get_cloudfront_bucket_policy_statements(bucket, oai),
),
)
)
@@ -464,9 +452,7 @@ def add_lambda_execution_role(
"lambda.amazonaws.com", "edgelambda.amazonaws.com"
),
PermissionsBoundary=(
- self.variables["RoleBoundaryArn"]
- if self.role_boundary_specified
- else NoValue
+ self.variables["RoleBoundaryArn"] if self.role_boundary_specified else NoValue
),
Policies=[
iam.Policy(
@@ -490,9 +476,7 @@ def add_lambda_execution_role(
)
)
- def add_cloudfront_directory_index_rewrite(
- self, role: iam.Role
- ) -> awslambda.Function:
+ def add_cloudfront_directory_index_rewrite(self, role: iam.Role) -> awslambda.Function:
"""Add an index CloudFront directory index rewrite lambda function to the template.
Keyword Args:
@@ -503,11 +487,11 @@ def add_cloudfront_directory_index_rewrite(
"""
code_str = ""
- path = os.path.join(
- os.path.dirname(__file__),
+ path = os.path.join( # noqa: PTH118
+ os.path.dirname(__file__), # noqa: PTH120
"templates/cf_directory_index_rewrite.template.js",
)
- with open(path, encoding="utf-8") as file_:
+ with open(path, encoding="utf-8") as file_: # noqa: PTH123
code_str = file_.read().replace(
"{{RewriteDirectoryIndex}}", self.variables["RewriteDirectoryIndex"]
)
@@ -546,10 +530,8 @@ def add_cloudfront_directory_index_rewrite_version(
The CloudFront directory index rewrite version.
"""
- code_hash = hashlib.md5(
- str(
- directory_index_rewrite.properties["Code"].properties["ZipFile"]
- ).encode()
+ code_hash = hashlib.md5( # noqa: S324
+ str(directory_index_rewrite.properties["Code"].properties["ZipFile"]).encode()
).hexdigest()
return self.template.add_resource(
@@ -562,7 +544,7 @@ def add_cloudfront_directory_index_rewrite_version(
def add_cloudfront_distribution(
self,
bucket_policy: s3.BucketPolicy,
- cloudfront_distribution_options: Dict[str, Any],
+ cloudfront_distribution_options: dict[str, Any],
) -> cloudfront.Distribution:
"""Add the CloudFront distribution to the template / output the id and domain name.
@@ -578,9 +560,7 @@ def add_cloudfront_distribution(
cloudfront.Distribution(
"CFDistribution",
DependsOn=bucket_policy.title,
- DistributionConfig=cloudfront.DistributionConfig(
- **cloudfront_distribution_options
- ),
+ DistributionConfig=cloudfront.DistributionConfig(**cloudfront_distribution_options),
)
)
self.template.add_output(
@@ -602,7 +582,7 @@ def add_cloudfront_distribution(
@staticmethod
def _get_cloudfront_bucket_policy_statements(
bucket: s3.Bucket, oai: cloudfront.CloudFrontOriginAccessIdentity
- ) -> List[Statement]:
+ ) -> list[Statement]:
return [
Statement(
Action=[awacs.s3.GetObject],
@@ -621,9 +601,7 @@ def _get_index_rewrite_role_function_and_version(
)
function = self.add_cloudfront_directory_index_rewrite(role)
version = self.add_cloudfront_directory_index_rewrite_version(function)
- return _IndexRewriteFunctionInfoTypeDef(
- function=function, role=role, version=version
- )
+ return _IndexRewriteFunctionInfoTypeDef(function=function, role=role, version=version)
# Helper section to enable easy blueprint -> template generation
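The `# noqa: S324` added in `staticsite.py` above acknowledges that `hashlib.md5` is weak for security; here the digest only produces a stable suffix so a new Lambda `Version` resource is created whenever the inline code changes. That use, isolated:

    import hashlib

    def version_suffix(zip_file_body: str) -> str:
        # md5 names a CloudFormation resource; it is not used for anything
        # security-sensitive, which is why `# noqa: S324` is appropriate.
        return hashlib.md5(zip_file_body.encode()).hexdigest()  # noqa: S324

    # e.g. "CFDirectoryIndexRewriteVer" + version_suffix(code_str)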
diff --git a/runway/blueprints/tf_state.py b/runway/blueprints/tf_state.py
index d6417aaba..7adc14424 100755
--- a/runway/blueprints/tf_state.py
+++ b/runway/blueprints/tf_state.py
@@ -2,7 +2,7 @@
"""Module with Terraform state resources."""
from __future__ import annotations
-from typing import TYPE_CHECKING, ClassVar, Dict
+from typing import TYPE_CHECKING, ClassVar
import awacs.dynamodb
import awacs.s3
@@ -19,7 +19,7 @@
class TfState(Blueprint):
"""CFNgin blueprint for creating Terraform state resources."""
- VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {
+ VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {
"BucketDeletionPolicy": {
"type": str,
"allowed_values": ["Delete", "Retain"],
@@ -58,17 +58,13 @@ def create_template(self) -> None:
dynamodb.Table(
"TerraformStateTable",
AttributeDefinitions=[
- dynamodb.AttributeDefinition(
- AttributeName="LockID", AttributeType="S"
- )
+ dynamodb.AttributeDefinition(AttributeName="LockID", AttributeType="S")
],
KeySchema=[dynamodb.KeySchema(AttributeName="LockID", KeyType="HASH")],
ProvisionedThroughput=dynamodb.ProvisionedThroughput(
ReadCapacityUnits=2, WriteCapacityUnits=2
),
- TableName=If(
- "TableNameOmitted", NoValue, self.variables["TableName"].ref
- ),
+ TableName=If("TableNameOmitted", NoValue, self.variables["TableName"].ref),
)
)
self.template.add_output(
@@ -84,15 +80,9 @@ def create_template(self) -> None:
"TerraformStateBucket",
DeletionPolicy=self.variables["BucketDeletionPolicy"],
AccessControl=s3.Private,
- BucketName=If(
- "BucketNameOmitted", NoValue, self.variables["BucketName"].ref
- ),
+ BucketName=If("BucketNameOmitted", NoValue, self.variables["BucketName"].ref),
LifecycleConfiguration=s3.LifecycleConfiguration(
- Rules=[
- s3.LifecycleRule(
- NoncurrentVersionExpirationInDays=90, Status="Enabled"
- )
- ]
+ Rules=[s3.LifecycleRule(NoncurrentVersionExpirationInDays=90, Status="Enabled")]
),
VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
)
@@ -129,9 +119,7 @@ def create_template(self) -> None:
Statement(
Action=[awacs.s3.GetObject, awacs.s3.PutObject],
Effect=Allow,
- Resource=[
- Join("", [terraformstatebucket.get_att("Arn"), "/*"])
- ],
+ Resource=[Join("", [terraformstatebucket.get_att("Arn"), "/*"])],
),
Statement(
Action=[
@@ -160,6 +148,4 @@ def create_template(self) -> None:
if __name__ == "__main__":
from runway.context import CfnginContext
- print( # noqa: T201
- TfState("test", CfnginContext(parameters={"namespace": "test"})).to_json()
- )
+ print(TfState("test", CfnginContext(parameters={"namespace": "test"})).to_json()) # noqa: T201
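`tf_state.py` relies on troposphere's `If(..., NoValue, ...)` idiom: when the condition holds, `NoValue` removes the property entirely so CloudFormation falls back to a generated name. A pared-down sketch of that pattern (resource titles are illustrative):

    from troposphere import Equals, If, NoValue, Parameter, Ref, Template, s3

    template = Template()
    bucket_name = template.add_parameter(Parameter("BucketName", Type="String", Default=""))
    template.add_condition("BucketNameOmitted", Equals(Ref(bucket_name), ""))
    template.add_resource(
        s3.Bucket(
            "StateBucket",
            # NoValue drops BucketName when the parameter is empty, letting
            # CloudFormation generate a unique name instead.
            BucketName=If("BucketNameOmitted", NoValue, Ref(bucket_name)),
        )
    )
    print(template.to_json())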
diff --git a/runway/cfngin/actions/base.py b/runway/cfngin/actions/base.py
index 5e98739af..522a9c330 100644
--- a/runway/cfngin/actions/base.py
+++ b/runway/cfngin/actions/base.py
@@ -6,7 +6,7 @@
import os
import sys
import threading
-from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Union
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Union
import botocore.exceptions
@@ -66,7 +66,7 @@ def build_walker(concurrency: int) -> Callable[..., Any]:
return ThreadedWalker(semaphore).walk
-def stack_template_url(bucket_name: str, blueprint: Blueprint, endpoint: str):
+def stack_template_url(bucket_name: str, blueprint: Blueprint, endpoint: str) -> str:
"""Produce an s3 url for a given blueprint.
Args:
@@ -99,21 +99,21 @@ class BaseAction:
"""
DESCRIPTION: ClassVar[str] = "Base action"
- NAME: ClassVar[Optional[str]] = None
+ NAME: ClassVar[str | None] = None
- bucket_name: Optional[str]
- bucket_region: Optional[str]
+ bucket_name: str | None
+ bucket_region: str | None
cancel: threading.Event
context: CfnginContext
- provider_builder: Optional[ProviderBuilder]
+ provider_builder: ProviderBuilder | None
s3_conn: S3Client
def __init__(
self,
context: CfnginContext,
- provider_builder: Optional[ProviderBuilder] = None,
- cancel: Optional[threading.Event] = None,
- ):
+ provider_builder: ProviderBuilder | None = None,
+ cancel: threading.Event | None = None,
+ ) -> None:
"""Instantiate class.
Args:
@@ -158,9 +158,7 @@ def ensure_cfn_bucket(self) -> None:
"""CloudFormation bucket where templates will be stored."""
if self.bucket_name:
try:
- ensure_s3_bucket(
- self.s3_conn, self.bucket_name, self.bucket_region, create=False
- )
+ ensure_s3_bucket(self.s3_conn, self.bucket_name, self.bucket_region, create=False)
except botocore.exceptions.ClientError:
raise CfnginBucketNotFound(bucket_name=self.bucket_name) from None
@@ -214,8 +212,7 @@ def s3_stack_push(self, blueprint: Blueprint, force: bool = False) -> str:
template_url = self.stack_template_url(blueprint)
try:
template_exists = (
- self.s3_conn.head_object(Bucket=self.bucket_name, Key=key_name)
- is not None
+ self.s3_conn.head_object(Bucket=self.bucket_name, Key=key_name) is not None
)
except botocore.exceptions.ClientError as err:
if err.response["Error"]["Code"] == "404":
@@ -240,9 +237,7 @@ def stack_template_url(self, blueprint: Blueprint) -> str:
"""S3 URL for CloudFormation template object."""
if not self.bucket_name:
raise ValueError("bucket_name required")
- return stack_template_url(
- self.bucket_name, blueprint, get_s3_endpoint(self.s3_conn)
- )
+ return stack_template_url(self.bucket_name, blueprint, get_s3_endpoint(self.s3_conn))
def _generate_plan(
self,
@@ -266,8 +261,7 @@ def _generate_plan(
tail_fn = self._tail_stack if tail else None
steps = [
- Step(stack, fn=self._stack_action, watch_func=tail_fn)
- for stack in self.context.stacks
+ Step(stack, fn=self._stack_action, watch_func=tail_fn) for stack in self.context.stacks
]
graph = Graph.from_steps(steps)
@@ -295,6 +289,4 @@ def _tail_stack(
) -> None:
"""Tail a stack's event stream."""
provider = self.build_provider()
- return provider.tail_stack(
- stack, cancel, action=self.NAME, retries=retries, **kwargs
- )
+ return provider.tail_stack(stack, cancel, action=self.NAME, retries=retries, **kwargs)
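The `s3_stack_push` hunk above treats a `head_object` ClientError with code "404" as "template not uploaded yet". That boto3 existence check, sketched on its own (bucket and key are hypothetical):

    import boto3
    import botocore.exceptions

    def object_exists(bucket: str, key: str) -> bool:
        s3_client = boto3.client("s3")
        try:
            s3_client.head_object(Bucket=bucket, Key=key)
        except botocore.exceptions.ClientError as err:
            if err.response["Error"]["Code"] == "404":
                return False
            raise  # 403, throttling, etc. should propagate
        return True

    # e.g. object_exists("my-cfngin-bucket", "stack_templates/demo/demo-abc123.json")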
diff --git a/runway/cfngin/actions/deploy.py b/runway/cfngin/actions/deploy.py
index 8b4081315..cdb031669 100644
--- a/runway/cfngin/actions/deploy.py
+++ b/runway/cfngin/actions/deploy.py
@@ -3,9 +3,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
-
-from typing_extensions import Literal
+from typing import TYPE_CHECKING, Any, Callable, Union
from ..exceptions import (
CancelExecution,
@@ -35,6 +33,7 @@
if TYPE_CHECKING:
from mypy_boto3_cloudformation.type_defs import ParameterTypeDef, StackTypeDef
+ from typing_extensions import Literal
from ...config.models.cfngin import CfnginHookDefinitionModel
from ...context import CfnginContext
@@ -50,7 +49,7 @@
DESTROYING_STATUS = SubmittedStatus("submitted for destruction")
-def build_stack_tags(stack: Stack) -> List[TagTypeDef]:
+def build_stack_tags(stack: Stack) -> list[TagTypeDef]:
"""Build a common set of tags to attach to a stack."""
return [{"Key": t[0], "Value": t[1]} for t in stack.tags.items()]
@@ -64,9 +63,7 @@ def should_update(stack: Stack) -> bool:
"""
if stack.locked:
if not stack.force:
- LOGGER.debug(
- "%s:locked and not in --force list; refusing to update", stack.name
- )
+ LOGGER.debug("%s:locked and not in --force list; refusing to update", stack.name)
return False
LOGGER.debug("%s:locked but is in --force list", stack.name)
return True
@@ -100,9 +97,7 @@ def should_ensure_cfn_bucket(outline: bool, dump: bool) -> bool:
return not outline and not dump
-def _resolve_parameters(
- parameters: Dict[str, Any], blueprint: Blueprint
-) -> Dict[str, Any]:
+def _resolve_parameters(parameters: dict[str, Any], blueprint: Blueprint) -> dict[str, Any]:
"""Resolve CloudFormation Parameters for a given blueprint.
Given a list of parameters, handles:
@@ -118,7 +113,7 @@ def _resolve_parameters(
The resolved parameters.
"""
- params: Dict[str, Any] = {}
+ params: dict[str, Any] = {}
for key, value in parameters.items():
if key not in blueprint.parameter_definitions:
LOGGER.debug("blueprint %s does not use parameter %s", blueprint.name, key)
@@ -132,7 +127,7 @@ def _resolve_parameters(
continue
if isinstance(value, bool):
LOGGER.debug('converting parameter %s boolean "%s" to string', key, value)
- value = str(value).lower()
+ value = str(value).lower() # noqa: PLW2901
params[key] = value
return params
@@ -142,11 +137,11 @@ class UsePreviousParameterValue:
def _handle_missing_parameters(
- parameter_values: Dict[str, Any],
- all_params: List[str],
- required_params: List[str],
- existing_stack: Optional[StackTypeDef] = None,
-) -> List[Tuple[str, Any]]:
+ parameter_values: dict[str, Any],
+ all_params: list[str],
+ required_params: list[str],
+ existing_stack: StackTypeDef | None = None,
+) -> list[tuple[str, Any]]:
"""Handle any missing parameters.
If an existing_stack is provided, look up missing parameters there.
@@ -175,9 +170,7 @@ def _handle_missing_parameters(
]
for param in missing_params:
if param in stack_parameters:
- LOGGER.debug(
- "using previous value for parameter %s from existing stack", param
- )
+ LOGGER.debug("using previous value for parameter %s from existing stack", param)
parameter_values[param] = UsePreviousParameterValue
final_missing = list(set(required_params) - set(parameter_values.keys()))
if final_missing:
@@ -188,7 +181,7 @@ def _handle_missing_parameters(
def handle_hooks(
stage: Literal["post_deploy", "pre_deploy"],
- hooks: List[CfnginHookDefinitionModel],
+ hooks: list[CfnginHookDefinitionModel],
provider: Provider,
context: CfnginContext,
*,
@@ -239,9 +232,7 @@ def upload_disabled(self) -> bool:
"""Whether the CloudFormation template should be uploaded to S3."""
if self.upload_explicitly_disabled:
return True
- if not self.bucket_name:
- return True
- return False
+ return bool(not self.bucket_name)
@upload_disabled.setter
def upload_disabled(self, value: bool) -> None:
@@ -261,8 +252,8 @@ def upload_disabled(self, value: bool) -> None:
@staticmethod
def build_parameters(
- stack: Stack, provider_stack: Optional[StackTypeDef] = None
- ) -> List[ParameterTypeDef]:
+ stack: Stack, provider_stack: StackTypeDef | None = None
+ ) -> list[ParameterTypeDef]:
"""Build the CloudFormation Parameters for our stack.
Args:
@@ -280,7 +271,7 @@ def build_parameters(
resolved, all_parameters, required_parameters, provider_stack
)
- param_list: List[ParameterTypeDef] = []
+ param_list: list[ParameterTypeDef] = []
for key, value in parameters:
param_dict: ParameterTypeDef = {"ParameterKey": key}
@@ -293,9 +284,7 @@ def build_parameters(
return param_list
- def _destroy_stack(
- self, stack: Stack, *, status: Optional[Status] = None, **_: Any
- ) -> Status:
+ def _destroy_stack(self, stack: Stack, *, status: Status | None = None, **_: Any) -> Status:
"""Delete a CloudFormation stack.
Used to remove stacks that exist in the persistent graph but not
@@ -344,9 +333,10 @@ def _destroy_stack(
except CancelExecution:
return SkippedStatus(reason="canceled execution")
- # TODO refactor long if, elif, else block
- # pylint: disable=too-many-return-statements,too-many-branches,too-many-statements
- def _launch_stack(self, stack: Stack, *, status: Status, **_: Any) -> Status:
+ # TODO (kyle): refactor long if, elif, else block
+ def _launch_stack( # noqa: C901, PLR0911, PLR0915, PLR0912
+ self, stack: Stack, *, status: Status, **_: Any
+ ) -> Status:
"""Handle the creating or updating of a stack in CloudFormation.
Also makes sure that we don't try to create or update a stack while
@@ -383,9 +373,7 @@ def _launch_stack(self, stack: Stack, *, status: Status, **_: Any) -> Status:
provider.get_stack_status(provider_stack),
)
- if provider.is_stack_rolling_back( # pylint: disable=no-else-return
- provider_stack
- ):
+ if provider.is_stack_rolling_back(provider_stack):
if status.reason and "rolling back" in status.reason:
return status
@@ -396,10 +384,10 @@ def _launch_stack(self, stack: Stack, *, status: Status, **_: Any) -> Status:
reason = "rolling back new stack"
return SubmittedStatus(reason)
- elif provider.is_stack_in_progress(provider_stack):
+ if provider.is_stack_in_progress(provider_stack):
LOGGER.debug("%s:in progress", stack.fqn)
return status
- elif provider.is_stack_destroyed(provider_stack):
+ if provider.is_stack_destroyed(provider_stack):
LOGGER.debug("%s:finished deleting", stack.fqn)
recreate = True
# Continue with creation afterwards
@@ -502,7 +490,7 @@ def _template(self, blueprint: Blueprint) -> Template:
return Template(url=self.s3_stack_push(blueprint))
@staticmethod
- def _stack_policy(stack: Stack) -> Optional[Template]:
+ def _stack_policy(stack: Stack) -> Template | None:
"""Return a Template object for the stacks stack policy."""
return Template(body=stack.stack_policy) if stack.stack_policy else None
@@ -523,7 +511,7 @@ def __generate_plan(self, tail: bool = False) -> Plan:
graph = Graph()
config_stack_names = [stack.name for stack in self.context.stacks]
- inverse_steps: List[Step] = []
+ inverse_steps: list[Step] = []
persist_graph = self.context.persistent_graph.transposed()
for ind_node, dep_nodes in persist_graph.dag.graph.items():
@@ -556,9 +544,7 @@ def __generate_plan(self, tail: bool = False) -> Plan:
return Plan(context=self.context, description=self.DESCRIPTION, graph=graph)
- def pre_run(
- self, *, dump: Union[bool, str] = False, outline: bool = False, **_: Any
- ) -> None:
+ def pre_run(self, *, dump: bool | str = False, outline: bool = False, **_: Any) -> None:
"""Any steps that need to be taken prior to running the action."""
if should_ensure_cfn_bucket(outline, bool(dump)):
self.ensure_cfn_bucket()
@@ -575,8 +561,8 @@ def run(
self,
*,
concurrency: int = 0,
- dump: Union[bool, str] = False,
- force: bool = False, # pylint: disable=unused-argument
+ dump: bool | str = False,
+ force: bool = False, # noqa: ARG002
outline: bool = False,
tail: bool = False,
upload_disabled: bool = False,
@@ -616,9 +602,7 @@ def run(
if isinstance(dump, str):
plan.dump(directory=dump, context=self.context, provider=self.provider)
- def post_run(
- self, *, dump: Union[bool, str] = False, outline: bool = False, **_: Any
- ) -> None:
+ def post_run(self, *, dump: bool | str = False, outline: bool = False, **_: Any) -> None:
"""Any steps that need to be taken after running the action."""
handle_hooks(
"post_deploy",
diff --git a/runway/cfngin/actions/destroy.py b/runway/cfngin/actions/destroy.py
index e83dae0c5..7396327dc 100644
--- a/runway/cfngin/actions/destroy.py
+++ b/runway/cfngin/actions/destroy.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Callable, Optional, Union
+from typing import TYPE_CHECKING, Any, Callable
from ..exceptions import StackDoesNotExist
from ..hooks.utils import handle_hooks
@@ -49,9 +49,7 @@ def _stack_action(self) -> Callable[..., Status]:
"""Run against a step."""
return self._destroy_stack
- def _destroy_stack(
- self, stack: Stack, *, status: Optional[Status], **_: Any
- ) -> Status:
+ def _destroy_stack(self, stack: Stack, *, status: Status | None, **_: Any) -> Status:
wait_time = 0 if status is PENDING else STACK_POLL_TIME
if self.cancel.wait(wait_time):
return INTERRUPTED
@@ -82,17 +80,15 @@ def _destroy_stack(
LOGGER.debug("%s:destroying stack", stack.fqn)
provider.destroy_stack(stack_data)
return DESTROYING_STATUS
- LOGGER.critical(
- "%s: %s", stack.fqn, provider.get_delete_failed_status_reason(stack.fqn)
- )
+ LOGGER.critical("%s: %s", stack.fqn, provider.get_delete_failed_status_reason(stack.fqn))
return FailedStatus(provider.get_stack_status_reason(stack_data))
def pre_run(
self,
*,
- dump: Union[bool, str] = False, # pylint: disable=unused-argument
+ dump: bool | str = False, # noqa: ARG002
outline: bool = False,
- **__kwargs: Any,
+ **_kwargs: Any,
) -> None:
"""Any steps that need to be taken prior to running the action."""
pre_destroy = self.context.config.pre_destroy
@@ -108,17 +104,15 @@ def run(
self,
*,
concurrency: int = 0,
- dump: Union[bool, str] = False, # pylint: disable=unused-argument
+ dump: bool | str = False, # noqa: ARG002
force: bool = False,
- outline: bool = False, # pylint: disable=unused-argument
+ outline: bool = False, # noqa: ARG002
tail: bool = False,
- upload_disabled: bool = False, # pylint: disable=unused-argument
+ upload_disabled: bool = False, # noqa: ARG002
**_kwargs: Any,
) -> None:
"""Kicks off the destruction of the stacks in the stack_definitions."""
- plan = self._generate_plan(
- tail=tail, reverse=True, include_persistent_graph=True
- )
+ plan = self._generate_plan(tail=tail, reverse=True, include_persistent_graph=True)
if not plan.keys():
LOGGER.warning("no stacks detected (error in config?)")
if force:
@@ -137,9 +131,9 @@ def run(
def post_run(
self,
*,
- dump: Union[bool, str] = False, # pylint: disable=unused-argument
+ dump: bool | str = False, # noqa: ARG002
outline: bool = False,
- **__kwargs: Any,
+ **_kwargs: Any,
) -> None:
"""Any steps that need to be taken after running the action."""
if not outline and self.context.config.post_destroy:
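The two mechanical conversions visible above repeat across every file in this patch: PEP 604 unions / PEP 585 builtin generics in annotations, and ruff `noqa` codes replacing pylint pragmas. A minimal sketch of the target style; names here are illustrative, and the syntax is safe on pre-3.10 interpreters only because `from __future__ import annotations` keeps annotations unevaluated:

```python
# Sketch of both conversions: `bool | str` replacing Union[bool, str], and
# `# noqa: ARG002` (ruff's "unused method argument" rule) replacing
# `# pylint: disable=unused-argument`.
from __future__ import annotations

from typing import Any


class Action:
    def run(
        self,
        *,
        dump: bool | str = False,  # was: Union[bool, str]
        force: bool = False,  # noqa: ARG002  # kept for interface compatibility
        **_kwargs: Any,
    ) -> None:
        print("run", dump)


Action().run(dump="./out", force=True)
```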
diff --git a/runway/cfngin/actions/diff.py b/runway/cfngin/actions/diff.py
index 620027742..e63ec16d5 100644
--- a/runway/cfngin/actions/diff.py
+++ b/runway/cfngin/actions/diff.py
@@ -9,10 +9,7 @@
TYPE_CHECKING,
Any,
Callable,
- Dict,
Generic,
- List,
- Tuple,
TypeVar,
Union,
cast,
@@ -64,14 +61,14 @@ def __eq__(self, other: object) -> bool:
"""Compare if self is equal to another object."""
return self.__dict__ == other.__dict__
- def changes(self) -> List[str]:
+ def changes(self) -> list[str]:
"""Return changes to represent the diff between old and new value.
Returns:
Representation of the change (if any) between old and new value.
"""
- output: List[str] = []
+ output: list[str] = []
if self.status() is self.UNMODIFIED:
output = [self.formatter % (" ", self.key, self.old_value)]
elif self.status() is self.ADDED:
@@ -95,8 +92,8 @@ def status(self) -> str:
def diff_dictionaries(
- old_dict: Dict[str, _OV], new_dict: Dict[str, _NV]
-) -> Tuple[int, List[DictValue[_OV, _NV]]]:
+ old_dict: dict[str, _OV], new_dict: dict[str, _NV]
+) -> tuple[int, list[DictValue[_OV, _NV]]]:
"""Calculate the diff of two single dimension dictionaries.
Args:
@@ -116,7 +113,7 @@ def diff_dictionaries(
common_set = old_set & new_set
changes = 0
- output: List[DictValue[Any, Any]] = []
+ output: list[DictValue[Any, Any]] = []
for key in added_set:
changes += 1
output.append(DictValue(key, None, new_dict[key]))
@@ -134,7 +131,7 @@ def diff_dictionaries(
return changes, output
-def format_params_diff(parameter_diff: List[DictValue[Any, Any]]) -> str:
+def format_params_diff(parameter_diff: list[DictValue[Any, Any]]) -> str:
"""Handle the formatting of differences in parameters.
Args:
@@ -155,8 +152,8 @@ def format_params_diff(parameter_diff: List[DictValue[Any, Any]]) -> str:
def diff_parameters(
- old_params: Dict[str, _OV], new_params: Dict[str, _NV]
-) -> List[DictValue[_OV, _NV]]:
+ old_params: dict[str, _OV], new_params: dict[str, _NV]
+) -> list[DictValue[_OV, _NV]]:
"""Compare the old vs. new parameters and return a "diff".
If there are no changes, we return an empty list.
@@ -195,7 +192,7 @@ def _stack_action(self) -> Callable[..., Status]:
"""Run against a step."""
return self._diff_stack
- def _diff_stack(self, stack: Stack, **_: Any) -> Status:
+ def _diff_stack(self, stack: Stack, **_: Any) -> Status: # noqa: C901
"""Handle diffing a stack in CloudFormation vs our config."""
if self.cancel.wait(0):
return INTERRUPTED
@@ -228,15 +225,10 @@ def _diff_stack(self, stack: Stack, **_: Any) -> Status:
stack.set_outputs(provider.get_outputs(stack.fqn))
except exceptions.StackDoesNotExist:
if self.context.persistent_graph:
- return SkippedStatus(
- "persistent graph: stack does not exist, will be removed"
- )
+ return SkippedStatus("persistent graph: stack does not exist, will be removed")
return DoesNotExistInCloudFormation()
except AttributeError as err:
- if (
- self.context.persistent_graph
- and "defined class or template path" in str(err)
- ):
+ if self.context.persistent_graph and "defined class or template path" in str(err):
return SkippedStatus("persistent graph: will be destroyed")
raise
except ClientError as err:
@@ -245,8 +237,7 @@ def _diff_stack(self, stack: Stack, **_: Any) -> Status:
and "length less than or equal to" in err.response["Error"]["Message"]
):
LOGGER.error(
- "%s:template is too large to provide directly to the API; "
- "S3 must be used",
+ "%s:template is too large to provide directly to the API; S3 must be used",
stack.name,
)
return SkippedStatus("cfngin_bucket: existing bucket required")
@@ -257,17 +248,15 @@ def run(
self,
*,
concurrency: int = 0,
- dump: Union[bool, str] = False, # pylint: disable=unused-argument
- force: bool = False, # pylint: disable=unused-argument
- outline: bool = False, # pylint: disable=unused-argument
- tail: bool = False, # pylint: disable=unused-argument
- upload_disabled: bool = False, # pylint: disable=unused-argument
+ dump: bool | str = False, # noqa: ARG002
+ force: bool = False, # noqa: ARG002
+ outline: bool = False, # noqa: ARG002
+ tail: bool = False, # noqa: ARG002
+ upload_disabled: bool = False, # noqa: ARG002
**_kwargs: Any,
) -> None:
"""Kicks off the diffing of the stacks in the stack_definitions."""
- plan = self._generate_plan(
- require_unlocked=False, include_persistent_graph=True
- )
+ plan = self._generate_plan(require_unlocked=False, include_persistent_graph=True)
plan.outline(logging.DEBUG)
if plan.keys():
LOGGER.info("diffing stacks: %s", ", ".join(plan.keys()))
@@ -279,9 +268,9 @@ def run(
def pre_run(
self,
*,
- dump: Union[bool, str] = False, # pylint: disable=unused-argument
- outline: bool = False, # pylint: disable=unused-argument
- **__kwargs: Any,
+ dump: bool | str = False, # noqa: ARG002
+ outline: bool = False, # noqa: ARG002
+ **_kwargs: Any,
) -> None:
"""Any steps that need to be taken prior to running the action.
@@ -296,8 +285,7 @@ def pre_run(
sys.exit(1)
if bucket.not_found:
LOGGER.warning(
- 'cfngin_bucket "%s" does not exist and will be creating '
- "during the next deploy",
+ 'cfngin_bucket "%s" does not exist and will be created during the next deploy',
bucket.name,
)
LOGGER.verbose("proceeding without a cfngin_bucket...")
diff --git a/runway/cfngin/actions/graph.py b/runway/cfngin/actions/graph.py
index aa44e5acc..2f74c020d 100644
--- a/runway/cfngin/actions/graph.py
+++ b/runway/cfngin/actions/graph.py
@@ -5,18 +5,20 @@
import json
import logging
import sys
-from typing import TYPE_CHECKING, Any, Iterable, List, TextIO, Tuple, Union
+from typing import TYPE_CHECKING, Any, TextIO
from ..plan import merge_graphs
from .base import BaseAction
if TYPE_CHECKING:
+ from collections.abc import Iterable
+
from ..plan import Graph, Step
LOGGER = logging.getLogger(__name__)
-def each_step(graph: Graph) -> Iterable[Tuple[Step, List[Step]]]:
+def each_step(graph: Graph) -> Iterable[tuple[Step, list[Step]]]:
"""Yield each step and its direct dependencies.
Args:
@@ -56,10 +58,7 @@ def json_format(out: TextIO, graph: Graph) -> None:
graph: Graph to be output.
"""
- steps = {
- step.name: {"deps": [dep.name for dep in deps]}
- for step, deps in each_step(graph)
- }
+ steps = {step.name: {"deps": [dep.name for dep in deps]} for step, deps in each_step(graph)}
json.dump({"steps": steps}, out, indent=4)
out.write("\n")
@@ -85,18 +84,16 @@ def _stack_action(self) -> Any:
def run(
self,
*,
- concurrency: int = 0, # pylint: disable=unused-argument
- dump: Union[bool, str] = False, # pylint: disable=unused-argument
- force: bool = False, # pylint: disable=unused-argument
- outline: bool = False, # pylint: disable=unused-argument
- tail: bool = False, # pylint: disable=unused-argument
- upload_disabled: bool = False, # pylint: disable=unused-argument
+ concurrency: int = 0, # noqa: ARG002
+ dump: bool | str = False, # noqa: ARG002
+ force: bool = False, # noqa: ARG002
+ outline: bool = False, # noqa: ARG002
+ tail: bool = False, # noqa: ARG002
+ upload_disabled: bool = False, # noqa: ARG002
**kwargs: Any,
) -> None:
"""Generate the underlying graph and prints it."""
- graph = self._generate_plan(
- require_unlocked=False, include_persistent_graph=True
- ).graph
+ graph = self._generate_plan(require_unlocked=False, include_persistent_graph=True).graph
if self.context.persistent_graph:
graph = merge_graphs(self.context.persistent_graph, graph)
if kwargs.get("reduce"):
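`json_format` above serializes the graph as a `{"steps": {name: {"deps": [...]}}}` document. A sketch of that shape with a plain adjacency dict standing in for the `Graph`/`Step` objects:

```python
# The JSON shape emitted by json_format, built from a toy step -> deps mapping.
import json

graph = {"vpc": [], "database": ["vpc"], "app": ["vpc", "database"]}
steps = {name: {"deps": deps} for name, deps in graph.items()}
print(json.dumps({"steps": steps}, indent=4))
```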
diff --git a/runway/cfngin/actions/info.py b/runway/cfngin/actions/info.py
index 2d0770911..81ca4eb93 100644
--- a/runway/cfngin/actions/info.py
+++ b/runway/cfngin/actions/info.py
@@ -40,6 +40,4 @@ def run(self, *_args: Any, **_kwargs: Any) -> None:
LOGGER.info("%s:", stack.fqn)
if "Outputs" in provider_stack:
for output in provider_stack["Outputs"]:
- LOGGER.info(
- "\t%s: %s", output.get("OutputKey"), output.get("OutputValue")
- )
+ LOGGER.info("\t%s: %s", output.get("OutputKey"), output.get("OutputValue"))
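An aside on the logging style preserved by these joins: the `%s` placeholders with trailing arguments defer string formatting until a handler actually emits the record, which is why the patch keeps them rather than switching to f-strings. A runnable sketch with a toy output list:

```python
# Lazy %-style logging: formatting happens only if the record is emitted.
import logging

logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger("cfngin.info")

outputs = [{"OutputKey": "BucketName", "OutputValue": "my-bucket"}]
for output in outputs:
    LOGGER.info("\t%s: %s", output.get("OutputKey"), output.get("OutputValue"))
```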
diff --git a/runway/cfngin/actions/init.py b/runway/cfngin/actions/init.py
index 9fec7f05d..9d59de217 100644
--- a/runway/cfngin/actions/init.py
+++ b/runway/cfngin/actions/init.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, cast
from ...compat import cached_property
from ...config.models.cfngin import CfnginStackDefinitionModel
@@ -31,9 +31,9 @@ class Action(BaseAction):
def __init__(
self,
context: CfnginContext,
- provider_builder: Optional[ProviderBuilder] = None,
- cancel: Optional[threading.Event] = None,
- ):
+ provider_builder: ProviderBuilder | None = None,
+ cancel: threading.Event | None = None,
+ ) -> None:
"""Instantiate class.
This class creates a copy of the context object prior to initialization
@@ -46,9 +46,7 @@ def __init__(
cancel: Cancel handler.
"""
- super().__init__(
- context=context.copy(), provider_builder=provider_builder, cancel=cancel
- )
+ super().__init__(context=context.copy(), provider_builder=provider_builder, cancel=cancel)
@property
def _stack_action(self) -> Any:
@@ -56,7 +54,7 @@ def _stack_action(self) -> Any:
return None
@cached_property
- def cfngin_bucket(self) -> Optional[Bucket]:
+ def cfngin_bucket(self) -> Bucket | None:
"""CFNgin bucket.
Raises:
@@ -86,11 +84,11 @@ def run(
self,
*,
concurrency: int = 0,
- dump: Union[bool, str] = False, # pylint: disable=unused-argument
- force: bool = False, # pylint: disable=unused-argument
- outline: bool = False, # pylint: disable=unused-argument
+ dump: bool | str = False, # noqa: ARG002
+ force: bool = False, # noqa: ARG002
+ outline: bool = False, # noqa: ARG002
tail: bool = False,
- upload_disabled: bool = True, # pylint: disable=unused-argument
+ upload_disabled: bool = True, # noqa: ARG002
**_kwargs: Any,
) -> None:
"""Run the action.
@@ -125,9 +123,7 @@ def run(
LOGGER.notice("using default blueprint to create cfngin_bucket...")
self.context.config.stacks = [self.default_cfngin_bucket_stack]
# clear cached values that were populated by checking the previous condition
- self.context._del_cached_property( # pylint: disable=protected-access
- "stacks", "stacks_dict"
- )
+ self.context._del_cached_property("stacks", "stacks_dict") # noqa: SLF001
if self.provider_builder:
self.provider_builder.region = self.context.bucket_region
deploy.Action(
@@ -144,7 +140,7 @@ def run(
def pre_run(
self,
*,
- dump: Union[bool, str] = False,
+ dump: bool | str = False,
outline: bool = False,
**__kwargs: Any,
) -> None:
@@ -153,7 +149,7 @@ def pre_run(
def post_run(
self,
*,
- dump: Union[bool, str] = False,
+ dump: bool | str = False,
outline: bool = False,
**__kwargs: Any,
) -> None:
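A recurring import reshuffle in this patch, visible in init.py and the files below: abstract container types move from `typing` to `collections.abc`, and those used only in annotations move under `if TYPE_CHECKING:` so they cost nothing at runtime. A sketch of both halves:

```python
# Names needed at runtime (isinstance checks, base classes) import from
# collections.abc directly; annotation-only names import under TYPE_CHECKING.
from __future__ import annotations

from collections.abc import MutableMapping  # used at runtime below
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from collections.abc import Iterable  # used only in annotations


def first(items: Iterable[str]) -> str:
    return next(iter(items))


print(first(["a", "b"]), isinstance({}, MutableMapping))
```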
diff --git a/runway/cfngin/awscli_yamlhelper.py b/runway/cfngin/awscli_yamlhelper.py
index 723cb7b79..cfe1b5db0 100644
--- a/runway/cfngin/awscli_yamlhelper.py
+++ b/runway/cfngin/awscli_yamlhelper.py
@@ -15,14 +15,15 @@
from __future__ import annotations
import json
-from typing import Any, Dict, MutableMapping, MutableSequence, cast
+from collections.abc import MutableMapping, MutableSequence
+from typing import Any, cast
import yaml
-def intrinsics_multi_constructor( # pylint: disable=unused-argument
- loader: yaml.Loader, tag_prefix: str, node: yaml.Node
-) -> Dict[str, Any]:
+def intrinsics_multi_constructor(
+ loader: yaml.Loader, tag_prefix: str, node: yaml.Node # noqa: ARG001
+) -> dict[str, Any]:
"""YAML constructor to parse CloudFormation intrinsics.
This will return a dictionary with key being the intrinsic name
@@ -59,12 +60,12 @@ def intrinsics_multi_constructor( # pylint: disable=unused-argument
return {cfntag: value}
-def yaml_dump(dict_to_dump: Dict[str, Any]) -> str:
+def yaml_dump(dict_to_dump: dict[str, Any]) -> str:
"""Dump the dictionary as a YAML document."""
return yaml.safe_dump(dict_to_dump, default_flow_style=False)
-def yaml_parse(yamlstr: str) -> Dict[str, Any]:
+def yaml_parse(yamlstr: str) -> dict[str, Any]:
"""Parse a yaml string."""
try:
# PyYAML doesn't support json as well as it should, so if the input
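`intrinsics_multi_constructor` is a PyYAML multi-constructor: registered for the `!` tag prefix, it receives the tag suffix (`Ref`, `GetAtt`, ...) and rewrites short-form CloudFormation intrinsics into plain dicts. A simplified sketch of the mechanism; the constructor body here handles scalar nodes only and is illustrative, not the module's full logic (the real function also covers sequences, mappings, and the `GetAtt` special case):

```python
# Registering a multi-constructor for the "!" prefix; the callback gets the
# tag suffix and the node, and returns the long-form intrinsic dict.
from typing import Any

import yaml


def cfn_tag(loader: yaml.Loader, tag_suffix: str, node: yaml.Node) -> dict[str, Any]:
    key = tag_suffix if tag_suffix == "Ref" else f"Fn::{tag_suffix}"
    return {key: loader.construct_scalar(node)}  # assumes a scalar node


yaml.SafeLoader.add_multi_constructor("!", cfn_tag)
print(yaml.safe_load("Bucket: !Ref MyBucket"))  # {'Bucket': {'Ref': 'MyBucket'}}
```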
diff --git a/runway/cfngin/blueprints/base.py b/runway/cfngin/blueprints/base.py
index f404a987c..5f90d9063 100644
--- a/runway/cfngin/blueprints/base.py
+++ b/runway/cfngin/blueprints/base.py
@@ -6,18 +6,7 @@
import hashlib
import logging
import string
-from typing import (
- TYPE_CHECKING,
- Any,
- ClassVar,
- Dict,
- List,
- Optional,
- Tuple,
- Type,
- TypeVar,
- Union,
-)
+from typing import TYPE_CHECKING, Any, ClassVar
from troposphere import Output, Parameter, Ref, Template
@@ -54,15 +43,11 @@
"constraint_description": "ConstraintDescription",
}
-_T = TypeVar("_T")
-
class CFNParameter:
"""Wrapper around a value to indicate a CloudFormation Parameter."""
- def __init__(
- self, name: str, value: Union[bool, float, int, List[Any], str, Any]
- ) -> None:
+ def __init__(self, name: str, value: bool | float | int | list[Any] | str | Any) -> None:
"""Instantiate class.
Args:
@@ -82,7 +67,7 @@ def __init__(
else:
raise TypeError(
f"CFNParameter ({name}) value must be one of bool, float, int, str, "
- f"List[str] but got: {type(value)}"
+ f"list[str] but got: {type(value)}"
)
def __repr__(self) -> str:
@@ -94,7 +79,7 @@ def ref(self) -> Ref:
"""Ref the value of a parameter."""
return Ref(self.name)
- def to_parameter_value(self) -> Union[List[Any], str]:
+ def to_parameter_value(self) -> list[Any] | str:
"""Return the value to be submitted to CloudFormation."""
return self.value
@@ -110,7 +95,7 @@ def build_parameter(name: str, properties: BlueprintVariableTypeDef) -> Paramete
Returns:
The created parameter object.
- """ # noqa: E501
+ """
param = Parameter(name, Type=properties.get("type"))
for name_, attr in PARAMETER_PROPERTIES.items():
if name_ in properties:
@@ -120,7 +105,7 @@ def build_parameter(name: str, properties: BlueprintVariableTypeDef) -> Paramete
def validate_variable_type(
var_name: str,
- var_type: Union[Type[CFNType], TroposphereType[Any], type],
+ var_type: type[CFNType] | TroposphereType[Any] | type,
value: Any,
) -> Any:
"""Ensure the value is the correct variable type.
@@ -143,21 +128,18 @@ def validate_variable_type(
try:
value = var_type.create(value)
except Exception as exc:
- raise ValidatorError(
- var_name, f"{var_type.resource_name}.create", value, exc
- ) from exc
+ raise ValidatorError(var_name, f"{var_type.resource_name}.create", value, exc) from exc
elif issubclass(var_type, CFNType):
value = CFNParameter(name=var_name, value=value)
- else:
- if not isinstance(value, var_type):
- raise TypeError(
- f"Value for variable {var_name} must be of type {var_type}. Actual "
- f"type: {type(value)}"
- )
+ elif not isinstance(value, var_type):
+ raise TypeError(
+ f"Value for variable {var_name} must be of type {var_type}. Actual "
+ f"type: {type(value)}"
+ )
return value
-def validate_allowed_values(allowed_values: Optional[List[Any]], value: Any) -> bool:
+def validate_allowed_values(allowed_values: list[Any] | None, value: Any) -> bool:
"""Support a variable defining which values it allows.
Args:
@@ -177,7 +159,7 @@ def validate_allowed_values(allowed_values: Optional[List[Any]], value: Any) ->
def resolve_variable(
var_name: str,
var_def: BlueprintVariableTypeDef,
- provided_variable: Optional[Variable],
+ provided_variable: Variable | None,
blueprint_name: str,
) -> Any:
"""Resolve a provided variable value against the variable definition.
@@ -241,9 +223,7 @@ def resolve_variable(
return value
-def parse_user_data(
- variables: Dict[str, Any], raw_user_data: str, blueprint_name: str
-) -> str:
+def parse_user_data(variables: dict[str, Any], raw_user_data: str, blueprint_name: str) -> str:
"""Parse the given user data and render it as a template.
It supports referencing template variables to create userdata
@@ -271,7 +251,7 @@ def parse_user_data(
is not given in the blueprint
"""
- variable_values: Dict[str, Any] = {}
+ variable_values: dict[str, Any] = {}
for key, value in variables.items():
if isinstance(value, CFNParameter):
@@ -311,11 +291,11 @@ class Blueprint(DelCachedPropMixin):
"""
- VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {}
+ VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {}
context: CfnginContext
- description: Optional[str]
- mappings: Optional[Dict[str, Dict[str, Any]]]
+ description: str | None
+ mappings: dict[str, dict[str, Any]] | None
name: str
template: Template
@@ -324,11 +304,11 @@ def __init__(
name: str,
context: CfnginContext,
*,
- description: Optional[str] = None,
- mappings: Optional[Dict[str, Dict[str, Any]]] = None,
- template: Optional[Template] = None,
+ description: str | None = None,
+ mappings: dict[str, dict[str, Any]] | None = None,
+ template: Template | None = None,
**_: Any,
- ):
+ ) -> None:
"""Instantiate class.
Args:
@@ -349,7 +329,7 @@ def __init__(
"""
self._rendered = None
- self._resolved_variables: Optional[Dict[str, Any]] = None
+ self._resolved_variables: dict[str, Any] | None = None
self._version = None
self.context = context
self.description = description
@@ -368,7 +348,7 @@ def __init__(
)
@cached_property
- def cfn_parameters(self) -> Dict[str, Union[List[Any], str]]:
+ def cfn_parameters(self) -> dict[str, list[Any] | str]:
"""Return a dict of variables with type :class:`~runway.cfngin.blueprints.variables.types.CFNType`.
.. versionadded:: 2.0.0
@@ -376,8 +356,8 @@ def cfn_parameters(self) -> Dict[str, Union[List[Any], str]]:
Returns:
Variables that need to be submitted as CloudFormation Parameters.
- """ # noqa
- output: Dict[str, Union[List[Any], str]] = {}
+ """
+ output: dict[str, list[Any] | str] = {}
for key, value in self.variables.items():
if hasattr(value, "to_parameter_value"):
output[key] = value.to_parameter_value()
@@ -388,7 +368,7 @@ def create_template(self) -> None:
raise NotImplementedError
@property
- def defined_variables(self) -> Dict[str, BlueprintVariableTypeDef]:
+ def defined_variables(self) -> dict[str, BlueprintVariableTypeDef]:
"""Return a copy of :attr:`VARIABLES` to avoid accidental modification of the ClassVar.
.. versionchanged:: 2.0.0
@@ -398,7 +378,7 @@ def defined_variables(self) -> Dict[str, BlueprintVariableTypeDef]:
return copy.deepcopy(self.VARIABLES)
@property
- def output_definitions(self) -> Dict[str, Dict[str, Any]]:
+ def output_definitions(self) -> dict[str, dict[str, Any]]:
"""Get the output definitions.
.. versionadded:: 2.0.0
@@ -411,7 +391,7 @@ def output_definitions(self) -> Dict[str, Dict[str, Any]]:
return {k: output.to_dict() for k, output in self.template.outputs.items()}
@cached_property
- def parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]:
+ def parameter_definitions(self) -> dict[str, BlueprintVariableTypeDef]:
"""Get the parameter definitions to submit to CloudFormation.
Any variable definition whose type is an instance of
@@ -425,7 +405,7 @@ def parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]:
containing key/values for various parameter properties.
"""
- output: Dict[str, BlueprintVariableTypeDef] = {}
+ output: dict[str, BlueprintVariableTypeDef] = {}
for var_name, attrs in self.defined_variables.items():
var_type = attrs.get("type")
if isinstance(var_type, type) and issubclass(var_type, CFNType):
@@ -435,7 +415,7 @@ def parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]:
return output
@cached_property
- def parameter_values(self) -> Dict[str, Union[List[Any], str]]:
+ def parameter_values(self) -> dict[str, list[Any] | str]:
"""Return a dict of variables with type :class:`~runway.cfngin.blueprints.variables.types.CFNType`.
.. versionadded:: 2.0.0
@@ -444,8 +424,8 @@ def parameter_values(self) -> Dict[str, Union[List[Any], str]]:
Variables that need to be submitted as CloudFormation Parameters.
Will be a dictionary of <parameter name>: <parameter value>.
- """ # noqa
- output: Dict[str, Any] = {}
+ """
+ output: dict[str, Any] = {}
for key, value in self.variables.items():
try:
output[key] = value.to_parameter_value()
@@ -461,7 +441,7 @@ def rendered(self) -> str:
return self._rendered
@cached_property
- def required_parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]:
+ def required_parameter_definitions(self) -> dict[str, BlueprintVariableTypeDef]:
"""Return all template parameters that do not have a default value.
.. versionadded:: 2.0.0
@@ -483,7 +463,7 @@ def requires_change_set(self) -> bool:
return self.template.transform is not None
@property
- def variables(self) -> Dict[str, Any]:
+ def variables(self) -> dict[str, Any]:
"""Return a Dict of variables available to the Template.
These variables will have been defined within :attr:`VARIABLES` or
@@ -504,7 +484,7 @@ def variables(self) -> Dict[str, Any]:
return self._resolved_variables
@variables.setter
- def variables(self, value: Dict[str, Any]) -> None:
+ def variables(self, value: dict[str, Any]) -> None:
"""Setter for :meth:`variables`.
.. versionadded:: 2.0.0
@@ -533,7 +513,7 @@ def add_output(self, name: str, value: Any) -> None:
"""
self.template.add_output(Output(name, Value=value))
- def get_cfn_parameters(self) -> Dict[str, Union[List[Any], str]]:
+ def get_cfn_parameters(self) -> dict[str, list[Any] | str]:
"""Return a dictionary of variables with `type` :class:`CFNType`.
.. deprecated:: 2.0.0
@@ -549,7 +529,7 @@ def get_cfn_parameters(self) -> Dict[str, Union[List[Any], str]]:
)
return self.cfn_parameters
- def get_output_definitions(self) -> Dict[str, Dict[str, Any]]:
+ def get_output_definitions(self) -> dict[str, dict[str, Any]]:
"""Get the output definitions.
.. deprecated:: 2.0.0
@@ -566,7 +546,7 @@ def get_output_definitions(self) -> Dict[str, Dict[str, Any]]:
)
return self.output_definitions
- def get_parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]:
+ def get_parameter_definitions(self) -> dict[str, BlueprintVariableTypeDef]:
"""Get the parameter definitions to submit to CloudFormation.
Any variable definition whose `type` is an instance of
@@ -587,7 +567,7 @@ def get_parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]:
)
return self.parameter_definitions
- def get_parameter_values(self) -> Dict[str, Union[List[Any], str]]:
+ def get_parameter_values(self) -> dict[str, list[Any] | str]:
"""Return a dict of variables with type :class:`~runway.cfngin.blueprints.variables.types.CFNType`.
.. deprecated:: 2.0.0
@@ -597,14 +577,14 @@ def get_parameter_values(self) -> Dict[str, Union[List[Any], str]]:
Variables that need to be submitted as CloudFormation Parameters.
Will be a dictionary of <parameter name>: <parameter value>.
- """ # noqa
+ """
LOGGER.warning(
"%s.get_parameter_values is deprecated and will be removed in a future release",
self.__class__.__name__,
)
return self.parameter_values
- def get_required_parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]:
+ def get_required_parameter_definitions(self) -> dict[str, BlueprintVariableTypeDef]:
"""Return all template parameters that do not have a default value.
.. deprecated:: 2.0.0
@@ -622,7 +602,7 @@ def get_required_parameter_definitions(self) -> Dict[str, BlueprintVariableTypeD
)
return self.required_parameter_definitions
- def get_variables(self) -> Dict[str, Any]:
+ def get_variables(self) -> dict[str, Any]:
"""Return a dictionary of variables available to the template.
These variables will have been defined within `VARIABLES` or
@@ -664,7 +644,7 @@ def read_user_data(self, user_data_path: str) -> str:
raw_user_data = read_value_from_path(user_data_path)
return parse_user_data(self.variables, raw_user_data, self.name)
- def render_template(self) -> Tuple[str, str]:
+ def render_template(self) -> tuple[str, str]:
"""Render the Blueprint to a CloudFormation template."""
self.import_mappings()
self.create_template()
@@ -672,7 +652,7 @@ def render_template(self) -> Tuple[str, str]:
self.set_template_description(self.description)
self.setup_parameters()
rendered = self.template.to_json(indent=self.context.template_indent)
- version = hashlib.md5(rendered.encode()).hexdigest()[:8]
+ version = hashlib.md5(rendered.encode()).hexdigest()[:8] # noqa: S324
return version, rendered
def reset_template(self) -> None:
@@ -681,7 +661,7 @@ def reset_template(self) -> None:
self._rendered = None
self._version = None
- def resolve_variables(self, provided_variables: List[Variable]) -> None:
+ def resolve_variables(self, provided_variables: list[Variable]) -> None:
"""Resolve the values of the blueprint variables.
This will resolve the values of the `VARIABLES` with values from the
@@ -694,9 +674,7 @@ def resolve_variables(self, provided_variables: List[Variable]) -> None:
self._resolved_variables = {}
variable_dict = {var.name: var for var in provided_variables}
for var_name, var_def in self.defined_variables.items():
- value = resolve_variable(
- var_name, var_def, variable_dict.get(var_name), self.name
- )
+ value = resolve_variable(var_name, var_def, variable_dict.get(var_name), self.name)
self._resolved_variables[var_name] = value
def set_template_description(self, description: str) -> None:
@@ -720,14 +698,14 @@ def setup_parameters(self) -> None:
built_param = build_parameter(name, attrs)
template.add_parameter(built_param)
- def to_json(self, variables: Optional[Dict[str, Any]] = None) -> str:
+ def to_json(self, variables: dict[str, Any] | None = None) -> str:
"""Render the blueprint and return the template in json form.
Args:
variables: Dictionary providing/overriding variable values.
"""
- variables_to_resolve: List[Variable] = []
+ variables_to_resolve: list[Variable] = []
if variables:
for key, value in variables.items():
variables_to_resolve.append(Variable(key, value, "cfngin"))
@@ -736,7 +714,7 @@ def to_json(self, variables: Optional[Dict[str, Any]] = None) -> str:
# The provided value for a CFN parameter has no effect in this
# context (generating the CFN template), so any string can be
# provided for its value - just needs to be something
- variables_to_resolve.append(Variable(k, "unused_value", "cfngin"))
+ variables_to_resolve.append(Variable(k, "unused_value", "cfngin")) # noqa: PERF401
self.resolve_variables(variables_to_resolve)
return self.render_template()[1]
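For context on the `Blueprint` API being retyped here, a minimal subclass sketch: declare `VARIABLES`, implement `create_template()`, and read resolved values from `self.variables` (CFN-typed values arrive wrapped as `CFNParameter`, so `.ref` is available). Treat this as assumed-typical usage, not an excerpt from the repo:

```python
# Hypothetical Blueprint subclass; the import paths and CFNString type follow
# the code in this file but are assumptions about typical usage.
from __future__ import annotations

from typing import Any, ClassVar

from troposphere import s3

from runway.cfngin.blueprints.base import Blueprint
from runway.cfngin.blueprints.variables.types import CFNString


class MyBucket(Blueprint):
    VARIABLES: ClassVar[dict[str, Any]] = {
        "BucketName": {"type": CFNString, "description": "Name of the bucket."},
    }

    def create_template(self) -> None:
        bucket = self.template.add_resource(
            s3.Bucket("Bucket", BucketName=self.variables["BucketName"].ref)
        )
        self.add_output("BucketName", bucket.ref())
```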
diff --git a/runway/cfngin/blueprints/cfngin_bucket.py b/runway/cfngin/blueprints/cfngin_bucket.py
index 2e528d302..ebd9dff8e 100644
--- a/runway/cfngin/blueprints/cfngin_bucket.py
+++ b/runway/cfngin/blueprints/cfngin_bucket.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, ClassVar, Dict, Union
+from typing import TYPE_CHECKING, ClassVar, Union
from troposphere import Equals, If, Not, NoValue, Or, Tag, Tags, s3
@@ -21,7 +21,7 @@ class CfnginBucket(Blueprint):
"""CFNgin Bucket Blueprint."""
DESCRIPTION: ClassVar[str] = f"{__name__}.CFNginBucket (v{__version__})"
- VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {
+ VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {
"AccessControl": {
"allowed_values": [
"AuthenticatedRead",
@@ -66,9 +66,7 @@ def bucket(self) -> s3.Bucket:
self.add_output("BucketArn", bucket.get_att("Arn"))
self.add_output("BucketDomainName", bucket.get_att("DomainName"))
self.add_output("BucketName", bucket.ref())
- self.add_output(
- "BucketRegionalDomainName", bucket.get_att("RegionalDomainName")
- )
+ self.add_output("BucketRegionalDomainName", bucket.get_att("RegionalDomainName"))
return bucket
@cached_property
diff --git a/runway/cfngin/blueprints/raw.py b/runway/cfngin/blueprints/raw.py
index 8a5e141a2..3218eba77 100644
--- a/runway/cfngin/blueprints/raw.py
+++ b/runway/cfngin/blueprints/raw.py
@@ -5,10 +5,9 @@
import hashlib
import json
import logging
-import os
import sys
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Union
from jinja2 import Environment, FileSystemLoader
@@ -24,7 +23,7 @@
LOGGER = logging.getLogger(__name__)
-def get_template_path(file_path: Path) -> Optional[Path]:
+def get_template_path(file_path: Path) -> Path | None:
"""Find raw template in working directory or in sys.path.
template_path from config may refer to templates co-located with the CFNgin
@@ -32,7 +31,7 @@ def get_template_path(file_path: Path) -> Optional[Path]:
loading to find the path to the template.
Args:
- filename: Template path.
+ file_path: Template path.
Returns:
Path to file, or None if no file found
@@ -47,7 +46,7 @@ def get_template_path(file_path: Path) -> Optional[Path]:
return None
-def resolve_variable(provided_variable: Optional[Variable], blueprint_name: str) -> Any:
+def resolve_variable(provided_variable: Variable | None, blueprint_name: str) -> Any:
"""Resolve a provided variable value against the variable definition.
This acts as a subset of resolve_variable logic in the base module, leaving
@@ -73,7 +72,7 @@ def resolve_variable(provided_variable: Optional[Variable], blueprint_name: str)
return value
-class RawTemplateBlueprint(Blueprint): # pylint: disable=abstract-method
+class RawTemplateBlueprint(Blueprint):
"""Blueprint class for blueprints auto-generated from raw templates.
Attributes:
@@ -89,13 +88,13 @@ class RawTemplateBlueprint(Blueprint): # pylint: disable=abstract-method
raw_template_path: Path
- def __init__( # pylint: disable=super-init-not-called
+ def __init__(
self,
name: str,
context: CfnginContext,
*,
- description: Optional[str] = None,
- mappings: Optional[Dict[str, Any]] = None,
+ description: str | None = None,
+ mappings: dict[str, Any] | None = None,
raw_template_path: Path,
**_: Any,
) -> None:
@@ -116,7 +115,7 @@ def __init__( # pylint: disable=super-init-not-called
self.raw_template_path = raw_template_path
@property
- def output_definitions(self) -> Dict[str, Dict[str, Any]]:
+ def output_definitions(self) -> dict[str, dict[str, Any]]:
"""Get the output definitions.
.. versionadded:: 2.0.0
@@ -129,7 +128,7 @@ def output_definitions(self) -> Dict[str, Dict[str, Any]]:
return self.to_dict().get("Outputs", {})
@cached_property
- def parameter_definitions(self) -> Dict[str, Any]:
+ def parameter_definitions(self) -> dict[str, Any]:
"""Get the parameter definitions to submit to CloudFormation.
.. versionadded:: 2.0.0
@@ -142,7 +141,7 @@ def parameter_definitions(self) -> Dict[str, Any]:
return self.to_dict().get("Parameters", {})
@cached_property
- def parameter_values(self) -> Dict[str, Union[List[Any], str]]:
+ def parameter_values(self) -> dict[str, Union[list[Any], str]]:
"""Return a dict of variables with type :class:`~runway.cfngin.blueprints.variables.types.CFNType`.
.. versionadded:: 2.0.0
@@ -151,25 +150,21 @@ def parameter_values(self) -> Dict[str, Union[List[Any], str]]:
Variables that need to be submitted as CloudFormation Parameters.
Will be a dictionary of ``<parameter name>: <parameter value>``.
- """ # noqa
+ """
return self._resolved_variables or {}
@property
def rendered(self) -> str:
- """Return (generating first if needed) rendered template."""
+ """Return (generating first if needed) rendered Template."""
if not self._rendered:
template_path = get_template_path(self.raw_template_path)
if template_path:
- if len(os.path.splitext(template_path)) == 2 and (
- os.path.splitext(template_path)[1] == ".j2"
- ):
+ if template_path.suffix == ".j2":
self._rendered = (
- Environment(
- loader=FileSystemLoader(
- searchpath=os.path.dirname(template_path)
- )
+ Environment( # noqa: S701
+ loader=FileSystemLoader(searchpath=template_path.parent)
)
- .get_template(os.path.basename(template_path))
+ .get_template(template_path.name)
.render(
context=self.context,
mappings=self.mappings,
@@ -178,10 +173,10 @@ def rendered(self) -> str:
)
)
else:
- with open(template_path, "r", encoding="utf-8") as template:
+ with template_path.open(encoding="utf-8") as template:
self._rendered = template.read()
else:
- raise InvalidConfig(f"Could not find template {self.raw_template_path}")
+ raise InvalidConfig(f"Could not find Template {self.raw_template_path}")
# clear cached properties that rely on this property
self._del_cached_property("parameter_definitions")
@@ -196,10 +191,10 @@ def requires_change_set(self) -> bool:
def version(self) -> str:
"""Return (generating first if needed) version hash."""
if not self._version:
- self._version = hashlib.md5(self.rendered.encode()).hexdigest()[:8]
+ self._version = hashlib.md5(self.rendered.encode()).hexdigest()[:8] # noqa: S324
return self._version
- def to_dict(self) -> Dict[str, Any]:
+ def to_dict(self) -> dict[str, Any]:
"""Return the template as a python dictionary.
Returns:
@@ -208,7 +203,7 @@ def to_dict(self) -> Dict[str, Any]:
"""
return parse_cloudformation_template(self.rendered)
- def to_json(self, variables: Optional[Dict[str, Any]] = None) -> str:
+ def to_json(self, variables: dict[str, Any] | None = None) -> str: # noqa: ARG002
"""Return the template in JSON.
Args:
@@ -218,11 +213,11 @@ def to_json(self, variables: Optional[Dict[str, Any]] = None) -> str:
# load -> dumps will produce json from json or yaml templates
return json.dumps(self.to_dict(), sort_keys=True, indent=4)
- def render_template(self) -> Tuple[str, str]:
+ def render_template(self) -> tuple[str, str]:
"""Load template and generate its md5 hash."""
return (self.version, self.rendered)
- def resolve_variables(self, provided_variables: List[Variable]) -> None:
+ def resolve_variables(self, provided_variables: list[Variable]) -> None:
"""Resolve the values of the blueprint variables.
This will resolve the values of the template parameters with values
@@ -237,7 +232,7 @@ def resolve_variables(self, provided_variables: List[Variable]) -> None:
# Pass 1 to set resolved_variables to provided variables
self._resolved_variables = {}
variable_dict = {var.name: var for var in provided_variables}
- for var_name, _var_def in variable_dict.items():
+ for var_name in variable_dict:
value = resolve_variable(variable_dict.get(var_name), self.name)
if value is not None:
self._resolved_variables[var_name] = value
@@ -248,7 +243,7 @@ def resolve_variables(self, provided_variables: List[Variable]) -> None:
defined_variables = self.parameter_definitions.copy()
self._resolved_variables = {}
variable_dict = {var.name: var for var in provided_variables}
- for var_name, _var_def in defined_variables.items():
+ for var_name in defined_variables:
value = resolve_variable(variable_dict.get(var_name), self.name)
if value is not None:
self._resolved_variables[var_name] = value
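The rewritten `rendered` property above replaces the `os.path.splitext` checks with `Path.suffix` and hands `Path` objects straight to Jinja2 (which accepts path-like search paths). A standalone sketch of that branch; the template path is hypothetical, and the missing-autoescape warning is why the diff adds `# noqa: S701`:

```python
# Path.suffix picks the .j2 branch; FileSystemLoader takes the Path parent.
from pathlib import Path

from jinja2 import Environment, FileSystemLoader

template_path = Path("templates/stack.yaml.j2")  # hypothetical file
if template_path.suffix == ".j2":
    rendered = (
        Environment(loader=FileSystemLoader(searchpath=template_path.parent))
        .get_template(template_path.name)
        .render(env="test")  # the real call passes context/mappings/variables
    )
else:
    rendered = template_path.read_text(encoding="utf-8")
```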
diff --git a/runway/cfngin/blueprints/testutil.py b/runway/cfngin/blueprints/testutil.py
index b2d943bbd..363b11944 100644
--- a/runway/cfngin/blueprints/testutil.py
+++ b/runway/cfngin/blueprints/testutil.py
@@ -8,7 +8,7 @@
import unittest
from glob import glob
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Iterator, List, Optional, Type, cast
+from typing import TYPE_CHECKING, Any, cast
from ...config import CfnginConfig
from ...context import CfnginContext
@@ -16,15 +16,15 @@
from ...variables import Variable
if TYPE_CHECKING:
+ from collections.abc import Iterator
+
from ...config.models.cfngin import CfnginStackDefinitionModel
from .base import Blueprint
def diff(first: str, second: str) -> str:
"""Human readable differ."""
- return "\n".join(
- list(difflib.Differ().compare(first.splitlines(), second.splitlines()))
- )
+ return "\n".join(list(difflib.Differ().compare(first.splitlines(), second.splitlines())))
class BlueprintTestCase(unittest.TestCase):
@@ -32,9 +32,7 @@ class BlueprintTestCase(unittest.TestCase):
OUTPUT_PATH: str = "tests/fixtures/blueprints"
- def assertRenderedBlueprint( # noqa: N802 pylint: disable=invalid-name
- self, blueprint: Blueprint
- ) -> None:
+ def assertRenderedBlueprint(self, blueprint: Blueprint) -> None: # noqa: N802
"""Test that the rendered blueprint json matches the expected result.
Result files are to be stored in the repo as
@@ -46,18 +44,16 @@ def assertRenderedBlueprint( # noqa: N802 pylint: disable=invalid-name
rendered_dict = blueprint.template.to_dict()
rendered_text = json.dumps(rendered_dict, indent=4, sort_keys=True)
- with open(
+ with open( # noqa: PTH123
expected_output + "-result", "w", encoding="utf-8"
) as expected_output_file:
expected_output_file.write(rendered_text)
- with open(expected_output, encoding="utf-8") as expected_output_file:
+ with open(expected_output, encoding="utf-8") as expected_output_file: # noqa: PTH123
expected_dict = json.loads(expected_output_file.read())
expected_text = json.dumps(expected_dict, indent=4, sort_keys=True)
- self.assertEqual(
- rendered_dict, expected_dict, diff(rendered_text, expected_text)
- )
+ assert rendered_dict == expected_dict, diff(rendered_text, expected_text) # noqa: S101
class YamlDirTestGenerator:
@@ -107,17 +103,17 @@ class YamlDirTestGenerator:
def __init__(self) -> None:
"""Instantiate class."""
self.classdir = os.path.relpath(self.__class__.__module__.replace(".", "/"))
- if not os.path.isdir(self.classdir):
- self.classdir = os.path.dirname(self.classdir)
+ if not os.path.isdir(self.classdir): # noqa: PTH112
+ self.classdir = os.path.dirname(self.classdir) # noqa: PTH120
# These properties can be overridden from the test generator subclass.
@property
- def base_class(self) -> Type[BlueprintTestCase]:
+ def base_class(self) -> type[BlueprintTestCase]:
"""Return the baseclass."""
return BlueprintTestCase
@property
- def yaml_dirs(self) -> List[str]:
+ def yaml_dirs(self) -> list[str]:
"""Yaml directories."""
return ["."]
@@ -126,22 +122,23 @@ def yaml_filename(self) -> str:
"""Yaml filename."""
return "test_*.yaml"
- # pylint incorrectly detects this
def test_generator(
self,
) -> Iterator[BlueprintTestCase]:
"""Test generator."""
# Search for tests in given paths
- configs: List[str] = []
+ configs: list[str] = []
for directory in self.yaml_dirs:
- configs.extend(glob(f"{self.classdir}/{directory}/{self.yaml_filename}"))
+ configs.extend(
+ glob(f"{self.classdir}/{directory}/{self.yaml_filename}") # noqa: PTH207
+ )
class ConfigTest(self.base_class): # type: ignore
"""Config test."""
context: CfnginContext
- def __init__( # pylint: disable=super-init-not-called
+ def __init__(
self,
config: CfnginConfig,
stack: CfnginStackDefinitionModel,
@@ -152,23 +149,19 @@ def __init__( # pylint: disable=super-init-not-called
self.stack = stack
self.description = f"{stack.name} ({filepath})"
- def __call__(self) -> None: # pylint: disable=arguments-differ
+ def __call__(self) -> None:
"""Run when the class instance is called directly."""
# Use the context property of the baseclass, if present.
# If not, default to a basic context.
try:
ctx = self.context
except AttributeError:
- ctx = CfnginContext(
- config=self.config, parameters={"environment": "test"}
- )
+ ctx = CfnginContext(config=self.config, parameters={"environment": "test"})
configvars = self.stack.variables or {}
variables = [Variable(k, v, "cfngin") for k, v in configvars.items()]
- blueprint_class = load_object_from_string(
- cast(str, self.stack.class_path)
- )
+ blueprint_class = load_object_from_string(cast(str, self.stack.class_path))
blueprint = blueprint_class(self.stack.name, ctx)
blueprint.resolve_variables(variables or [])
blueprint.setup_parameters()
@@ -176,14 +169,14 @@ def __call__(self) -> None: # pylint: disable=arguments-differ
self.assertRenderedBlueprint(blueprint)
def assertEqual( # noqa: N802
- self, first: Any, second: Any, msg: Optional[str] = None
+ self, first: Any, second: Any, msg: str | None = None
) -> None:
"""Test that first and second are equal.
If the values do not compare equal, the test will fail.
"""
- assert first == second, msg
+ assert first == second, msg # noqa: S101
for config_file in configs:
config_path = Path(config_file)
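`assertRenderedBlueprint` now ends in a bare `assert` (hence `# noqa: S101`, since ruff flags asserts outside test-framework conventions). Condensed, the fixture flow looks like the sketch below; the helper is a simplification of the test-case method:

```python
# Render to normalized JSON, persist a "-result" copy for inspection, then
# compare against the stored fixture.
import json
from pathlib import Path


def compare_with_fixture(rendered: dict, fixture: Path) -> None:
    rendered_text = json.dumps(rendered, indent=4, sort_keys=True)
    Path(f"{fixture}-result").write_text(rendered_text, encoding="utf-8")
    expected = json.loads(fixture.read_text(encoding="utf-8"))
    assert rendered == expected, "rendered template does not match fixture"
```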
diff --git a/runway/cfngin/blueprints/type_defs.py b/runway/cfngin/blueprints/type_defs.py
index 9bfa9d678..30d5a1469 100644
--- a/runway/cfngin/blueprints/type_defs.py
+++ b/runway/cfngin/blueprints/type_defs.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import Any, Callable, List
+from typing import Any, Callable
from typing_extensions import TypedDict
@@ -17,7 +17,7 @@ class _OptionalBlueprintVariableTypeDef(TypedDict, total=False):
"""Type definition for runway.cfngin.blueprints.base.Blueprint.VARIABLES items."""
allowed_pattern: str
- allowed_values: List[Any]
+ allowed_values: list[Any]
constraint_description: str
default: Any
description: str
@@ -74,4 +74,4 @@ class BlueprintVariableTypeDef(
If there is an issue validating the value, an exception
(``ValueError``, ``TypeError``, etc) should be raised by the function.
- """ # noqa
+ """
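The variable-definition types here build on `TypedDict` with `total=False`, meaning every declared key is optional and any subset is a valid value. A minimal sketch:

```python
# total=False makes every key optional; names here are illustrative.
from typing import Any

from typing_extensions import TypedDict


class VariableTypeDef(TypedDict, total=False):
    allowed_values: list[Any]
    default: Any
    description: str


var_def: VariableTypeDef = {"description": "any subset of keys is valid"}
print(var_def)
```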
diff --git a/runway/cfngin/blueprints/variables/types.py b/runway/cfngin/blueprints/variables/types.py
index ce1ea3f11..d4b027f11 100644
--- a/runway/cfngin/blueprints/variables/types.py
+++ b/runway/cfngin/blueprints/variables/types.py
@@ -2,19 +2,7 @@
from __future__ import annotations
-from typing import (
- TYPE_CHECKING,
- Any,
- ClassVar,
- Dict,
- Generic,
- List,
- Optional,
- Type,
- TypeVar,
- Union,
- overload,
-)
+from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypeVar, overload
from troposphere import BaseAWSObject
@@ -22,7 +10,6 @@
from typing_extensions import Literal
TroposphereT = TypeVar("TroposphereT", bound=BaseAWSObject)
-# https://github.com/PyCQA/pylint/issues/6003
class TroposphereType(Generic[TroposphereT]):
@@ -46,7 +33,7 @@ class TroposphereType(Generic[TroposphereT]):
def __init__(
self,
- defined_type: Type[TroposphereT],
+ defined_type: type[TroposphereT],
*,
many: bool = False,
optional: bool = False,
@@ -78,7 +65,7 @@ def __init__(
self._validate = validate
@staticmethod
- def _validate_type(defined_type: Type[TroposphereT]) -> None:
+ def _validate_type(defined_type: type[TroposphereT]) -> None:
if not hasattr(defined_type, "from_dict"):
raise ValueError("Type must have `from_dict` attribute")
@@ -88,17 +75,17 @@ def resource_name(self) -> str:
return str(getattr(self._type, "resource_name", None) or self._type.__name__)
@overload
- def create(self, value: Dict[str, Any]) -> TroposphereT: ...
+ def create(self, value: dict[str, Any]) -> TroposphereT: ...
@overload
- def create(self, value: List[Dict[str, Any]]) -> List[TroposphereT]: ...
+ def create(self, value: list[dict[str, Any]]) -> list[TroposphereT]: ...
@overload
def create(self, value: None) -> None: ...
def create(
- self, value: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]]
- ) -> Optional[Union[TroposphereT, List[TroposphereT]]]:
+ self, value: dict[str, Any] | list[dict[str, Any]] | None
+ ) -> TroposphereT | list[TroposphereT] | None:
"""Create the troposphere type from the value.
Args:
@@ -119,33 +106,27 @@ def create(
# Our type is a resource, so ensure we have a dict of title to
# parameters
if not isinstance(value, dict):
- raise ValueError(
- "Resources must be specified as a dict of title to parameters"
- )
+ raise ValueError("Resources must be specified as a dict of title to parameters")
if not self._many and len(value) > 1:
raise ValueError(
- "Only one resource can be provided for this "
- "TroposphereType variable"
+ "Only one resource can be provided for this TroposphereType variable"
)
result = [self._type.from_dict(title, v) for title, v in value.items()]
+ elif self._many and isinstance(value, list):
+ result = [self._type.from_dict(None, v) for v in value]
+ elif not isinstance(value, dict):
+ raise ValueError(
+ "TroposphereType for a single non-resource "
+ "type must be specified as a dict of "
+ "parameters"
+ )
else:
- # Our type is for properties, not a resource, so don't use
- # titles
- if self._many and isinstance(value, list):
- result = [self._type.from_dict(None, v) for v in value]
- elif not isinstance(value, dict):
- raise ValueError(
- "TroposphereType for a single non-resource"
- "type must be specified as a dict of "
- "parameters"
- )
- else:
- result = [self._type.from_dict(None, value)]
+ result = [self._type.from_dict(None, value)]
if self._validate:
for v in result:
- v._validate_props()
+ v._validate_props() # noqa: SLF001
return result[0] if not self._many else result
@@ -235,17 +216,13 @@ class EC2ImageId(CFNType):
class EC2InstanceId(CFNType):
"""An Amazon EC2 instance ID, such as i-1e731a32."""
- parameter_type: ClassVar[Literal["AWS::EC2::Instance::Id"]] = (
- "AWS::EC2::Instance::Id"
- )
+ parameter_type: ClassVar[Literal["AWS::EC2::Instance::Id"]] = "AWS::EC2::Instance::Id"
class EC2KeyPairKeyName(CFNType):
"""An Amazon EC2 key pair name."""
- parameter_type: ClassVar[Literal["AWS::EC2::KeyPair::KeyName"]] = (
- "AWS::EC2::KeyPair::KeyName"
- )
+ parameter_type: ClassVar[Literal["AWS::EC2::KeyPair::KeyName"]] = "AWS::EC2::KeyPair::KeyName"
class EC2SecurityGroupGroupName(CFNType):
@@ -259,9 +236,7 @@ class EC2SecurityGroupGroupName(CFNType):
class EC2SecurityGroupId(CFNType):
"""A security group ID, such as sg-a123fd85."""
- parameter_type: ClassVar[Literal["AWS::EC2::SecurityGroup::Id"]] = (
- "AWS::EC2::SecurityGroup::Id"
- )
+ parameter_type: ClassVar[Literal["AWS::EC2::SecurityGroup::Id"]] = "AWS::EC2::SecurityGroup::Id"
class EC2SubnetId(CFNType):
@@ -306,9 +281,7 @@ class EC2ImageIdList(CFNType):
"""
- parameter_type: ClassVar[Literal["List<AWS::EC2::Image::Id>"]] = (
- "List<AWS::EC2::Image::Id>"
- )
+ parameter_type: ClassVar[Literal["List<AWS::EC2::Image::Id>"]] = "List<AWS::EC2::Image::Id>"
class EC2InstanceIdList(CFNType):
@@ -338,25 +311,19 @@ class EC2SecurityGroupIdList(CFNType):
class EC2SubnetIdList(CFNType):
"""An array of subnet IDs, such as subnet-123a351e, subnet-456b351e."""
- parameter_type: ClassVar[Literal["List<AWS::EC2::Subnet::Id>"]] = (
- "List<AWS::EC2::Subnet::Id>"
- )
+ parameter_type: ClassVar[Literal["List<AWS::EC2::Subnet::Id>"]] = "List<AWS::EC2::Subnet::Id>"
class EC2VolumeIdList(CFNType):
"""An array of Amazon EBS volume IDs, such as vol-3cdd3f56, vol-4cdd3f56."""
- parameter_type: ClassVar[Literal["List<AWS::EC2::Volume::Id>"]] = (
- "List<AWS::EC2::Volume::Id>"
- )
+ parameter_type: ClassVar[Literal["List<AWS::EC2::Volume::Id>"]] = "List<AWS::EC2::Volume::Id>"
class EC2VPCIdList(CFNType):
"""An array of VPC IDs, such as vpc-a123baa3, vpc-b456baa3."""
- parameter_type: ClassVar[Literal["List<AWS::EC2::VPC::Id>"]] = (
- "List<AWS::EC2::VPC::Id>"
- )
+ parameter_type: ClassVar[Literal["List<AWS::EC2::VPC::Id>"]] = "List<AWS::EC2::VPC::Id>"
class Route53HostedZoneIdList(CFNType):
@@ -377,9 +344,7 @@ class SSMParameterName(CFNType):
"""
- parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Name"]] = (
- "AWS::SSM::Parameter::Name"
- )
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Name"]] = "AWS::SSM::Parameter::Name"
class SSMParameterValueString(CFNType):
@@ -413,9 +378,9 @@ class SSMParameterValueCommaDelimitedList(CFNType):
"""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<CommaDelimitedList>"]
- ] = "AWS::SSM::Parameter::Value<CommaDelimitedList>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<CommaDelimitedList>"]] = (
+ "AWS::SSM::Parameter::Value<CommaDelimitedList>"
+ )
class SSMParameterValueEC2AvailabilityZoneName(CFNType):
@@ -429,25 +394,25 @@ class SSMParameterValueEC2AvailabilityZoneName(CFNType):
class SSMParameterValueEC2ImageId(CFNType):
"""A Systems Manager parameter whose value is an AWS-specific parameter type."""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<AWS::EC2::Image::Id>"]
- ] = "AWS::SSM::Parameter::Value<AWS::EC2::Image::Id>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::Image::Id>"]] = (
+ "AWS::SSM::Parameter::Value<AWS::EC2::Image::Id>"
+ )
class SSMParameterValueEC2InstanceId(CFNType):
"""A Systems Manager parameter whose value is an AWS-specific parameter type."""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<AWS::EC2::Instance::Id>"]
- ] = "AWS::SSM::Parameter::Value<AWS::EC2::Instance::Id>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::Instance::Id>"]] = (
+ "AWS::SSM::Parameter::Value<AWS::EC2::Instance::Id>"
+ )
class SSMParameterValueEC2KeyPairKeyName(CFNType):
"""A Systems Manager parameter whose value is an AWS-specific parameter type."""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<AWS::EC2::KeyPair::KeyName>"]
- ] = "AWS::SSM::Parameter::Value<AWS::EC2::KeyPair::KeyName>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::KeyPair::KeyName>"]] = (
+ "AWS::SSM::Parameter::Value<AWS::EC2::KeyPair::KeyName>"
+ )
class SSMParameterValueEC2SecurityGroupGroupName(CFNType):
@@ -461,33 +426,33 @@ class SSMParameterValueEC2SecurityGroupGroupName(CFNType):
class SSMParameterValueEC2SecurityGroupId(CFNType):
"""A Systems Manager parameter whose value is an AWS-specific parameter type."""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<AWS::EC2::SecurityGroup::Id>"]
- ] = "AWS::SSM::Parameter::Value<AWS::EC2::SecurityGroup::Id>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::SecurityGroup::Id>"]] = (
+ "AWS::SSM::Parameter::Value<AWS::EC2::SecurityGroup::Id>"
+ )
class SSMParameterValueEC2SubnetId(CFNType):
"""A Systems Manager parameter whose value is an AWS-specific parameter type."""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<AWS::EC2::Subnet::Id>"]
- ] = "AWS::SSM::Parameter::Value<AWS::EC2::Subnet::Id>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::Subnet::Id>"]] = (
+ "AWS::SSM::Parameter::Value<AWS::EC2::Subnet::Id>"
+ )
class SSMParameterValueEC2VolumeId(CFNType):
"""A Systems Manager parameter whose value is an AWS-specific parameter type."""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<AWS::EC2::Volume::Id>"]
- ] = "AWS::SSM::Parameter::Value<AWS::EC2::Volume::Id>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::Volume::Id>"]] = (
+ "AWS::SSM::Parameter::Value<AWS::EC2::Volume::Id>"
+ )
class SSMParameterValueEC2VPCId(CFNType):
"""A Systems Manager parameter whose value is an AWS-specific parameter type."""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<AWS::EC2::VPC::Id>"]
- ] = "AWS::SSM::Parameter::Value<AWS::EC2::VPC::Id>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::VPC::Id>"]] = (
+ "AWS::SSM::Parameter::Value<AWS::EC2::VPC::Id>"
+ )
class SSMParameterValueRoute53HostedZoneId(CFNType):
@@ -509,9 +474,9 @@ class SSMParameterValueEC2AvailabilityZoneNameList(CFNType):
class SSMParameterValueEC2ImageIdList(CFNType):
"""A Systems Manager parameter whose value is an AWS-specific parameter type."""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<List<AWS::EC2::Image::Id>>"]
- ] = "AWS::SSM::Parameter::Value<List<AWS::EC2::Image::Id>>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<List<AWS::EC2::Image::Id>>"]] = (
+ "AWS::SSM::Parameter::Value<List<AWS::EC2::Image::Id>>"
+ )
class SSMParameterValueEC2InstanceIdList(CFNType):
@@ -541,25 +506,25 @@ class SSMParameterValueEC2SecurityGroupIdList(CFNType):
class SSMParameterValueEC2SubnetIdList(CFNType):
"""A Systems Manager parameter whose value is an AWS-specific parameter type."""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<List<AWS::EC2::Subnet::Id>>"]
- ] = "AWS::SSM::Parameter::Value<List<AWS::EC2::Subnet::Id>>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<List<AWS::EC2::Subnet::Id>>"]] = (
+ "AWS::SSM::Parameter::Value<List<AWS::EC2::Subnet::Id>>"
+ )
class SSMParameterValueEC2VolumeIdList(CFNType):
"""A Systems Manager parameter whose value is an AWS-specific parameter type."""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<List<AWS::EC2::Volume::Id>>"]
- ] = "AWS::SSM::Parameter::Value<List<AWS::EC2::Volume::Id>>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<List<AWS::EC2::Volume::Id>>"]] = (
+ "AWS::SSM::Parameter::Value<List<AWS::EC2::Volume::Id>>"
+ )
class SSMParameterValueEC2VPCIdList(CFNType):
"""A Systems Manager parameter whose value is an AWS-specific parameter type."""
- parameter_type: ClassVar[
- Literal["AWS::SSM::Parameter::Value<List<AWS::EC2::VPC::Id>>"]
- ] = "AWS::SSM::Parameter::Value<List<AWS::EC2::VPC::Id>>"
+ parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<List<AWS::EC2::VPC::Id>>"]] = (
+ "AWS::SSM::Parameter::Value<List<AWS::EC2::VPC::Id>>"
+ )
class SSMParameterValueRoute53HostedZoneIdList(CFNType):
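`TroposphereType.create` above keeps its three `@overload` stubs so callers get a precise return type per input shape (`dict` → one object, `list` → a list, `None` → `None`) while a single implementation serves all of them. A self-contained sketch of the pattern with toy types:

```python
# The @overload pattern: stub signatures for the type checker, one real body.
from __future__ import annotations

from typing import Any, overload


@overload
def create(value: dict[str, Any]) -> str: ...
@overload
def create(value: list[dict[str, Any]]) -> list[str]: ...
@overload
def create(value: None) -> None: ...
def create(value: dict[str, Any] | list[dict[str, Any]] | None) -> str | list[str] | None:
    if value is None:
        return None
    if isinstance(value, list):
        return [str(v) for v in value]
    return str(value)


print(create({"a": 1}), create(None))
```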
diff --git a/runway/cfngin/cfngin.py b/runway/cfngin/cfngin.py
index 0dd876783..fbd9e8597 100644
--- a/runway/cfngin/cfngin.py
+++ b/runway/cfngin/cfngin.py
@@ -3,9 +3,8 @@
from __future__ import annotations
import logging
-import os
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast
+from typing import TYPE_CHECKING, Any, cast
from .._logging import PrefixAdaptor
from ..compat import cached_property
@@ -52,8 +51,8 @@ class and any environment files that are found.
def __init__(
self,
ctx: RunwayContext,
- parameters: Optional[Dict[str, Any]] = None,
- sys_path: Optional[Path] = None,
+ parameters: dict[str, Any] | None = None,
+ sys_path: Path | None = None,
) -> None:
"""Instantiate class.
@@ -84,21 +83,21 @@ def __init__(
@cached_property
def env_file(self) -> MutableMap:
"""Contents of a CFNgin environment file."""
- result: Dict[str, Any] = {}
+ result: dict[str, Any] = {}
supported_names = [
f"{self.__ctx.env.name}.env",
f"{self.__ctx.env.name}-{self.region}.env",
]
for _, file_name in enumerate(supported_names):
- file_path = os.path.join(self.sys_path, file_name)
- if os.path.isfile(file_path):
+ file_path = self.sys_path / file_name
+ if file_path.is_file():
LOGGER.info("found environment file: %s", file_path)
self._env_file_name = file_path
- with open(file_path, "r", encoding="utf-8") as file_:
+ with file_path.open(encoding="utf-8") as file_:
result.update(parse_environment(file_.read()))
return MutableMap(**result)
- def deploy(self, force: bool = False, sys_path: Optional[Path] = None) -> None:
+ def deploy(self, force: bool = False, sys_path: Path | None = None) -> None:
"""Run the CFNgin deploy action.
Args:
@@ -113,24 +112,20 @@ def deploy(self, force: bool = False, sys_path: Optional[Path] = None) -> None:
sys_path = sys_path or self.sys_path
config_file_paths = self.find_config_files(sys_path=sys_path)
- with SafeHaven(
- environ=self.__ctx.env.vars, sys_modules_exclude=["awacs", "troposphere"]
- ):
+ with SafeHaven(environ=self.__ctx.env.vars, sys_modules_exclude=["awacs", "troposphere"]):
for config_path in config_file_paths:
- logger = PrefixAdaptor(os.path.basename(config_path), LOGGER)
+ logger = PrefixAdaptor(config_path.name, LOGGER)
logger.notice("deploy (in progress)")
with SafeHaven(sys_modules_exclude=["awacs", "troposphere"]):
ctx = self.load(config_path)
action = deploy.Action(
context=ctx,
- provider_builder=self._get_provider_builder(
- ctx.config.service_role
- ),
+ provider_builder=self._get_provider_builder(ctx.config.service_role),
)
action.execute(concurrency=self.concurrency, tail=self.tail)
logger.success("deploy (complete)")
- def destroy(self, force: bool = False, sys_path: Optional[Path] = None) -> None:
+ def destroy(self, force: bool = False, sys_path: Path | None = None) -> None:
"""Run the CFNgin destroy action.
Args:
@@ -155,35 +150,27 @@ def destroy(self, force: bool = False, sys_path: Optional[Path] = None) -> None:
ctx = self.load(config_path)
action = destroy.Action(
context=ctx,
- provider_builder=self._get_provider_builder(
- ctx.config.service_role
- ),
- )
- action.execute(
- concurrency=self.concurrency, force=True, tail=self.tail
+ provider_builder=self._get_provider_builder(ctx.config.service_role),
)
+ action.execute(concurrency=self.concurrency, force=True, tail=self.tail)
logger.success("destroy (complete)")
- def init(self, force: bool = False, sys_path: Optional[Path] = None) -> None:
+ def init(self, force: bool = False, sys_path: Path | None = None) -> None:
"""Initialize environment."""
if self.should_skip(force):
return
sys_path = sys_path or self.sys_path
config_file_paths = self.find_config_files(sys_path=sys_path)
- with SafeHaven(
- environ=self.__ctx.env.vars, sys_modules_exclude=["awacs", "troposphere"]
- ):
+ with SafeHaven(environ=self.__ctx.env.vars, sys_modules_exclude=["awacs", "troposphere"]):
for config_path in config_file_paths:
- logger = PrefixAdaptor(os.path.basename(config_path), LOGGER)
+ logger = PrefixAdaptor(config_path.name, LOGGER)
logger.notice("init (in progress)")
with SafeHaven(sys_modules_exclude=["awacs", "troposphere"]):
ctx = self.load(config_path)
action = init.Action(
context=ctx,
- provider_builder=self._get_provider_builder(
- ctx.config.service_role
- ),
+ provider_builder=self._get_provider_builder(ctx.config.service_role),
)
action.execute(concurrency=self.concurrency, tail=self.tail)
logger.success("init (complete)")
@@ -210,7 +197,7 @@ def load(self, config_path: Path) -> CfnginContext:
config.load()
return self._get_context(config, config_path)
- def plan(self, force: bool = False, sys_path: Optional[Path] = None):
+ def plan(self, force: bool = False, sys_path: Path | None = None) -> None:
"""Run the CFNgin plan action.
Args:
@@ -232,9 +219,7 @@ def plan(self, force: bool = False, sys_path: Optional[Path] = None):
ctx = self.load(config_path)
action = diff.Action(
context=ctx,
- provider_builder=self._get_provider_builder(
- ctx.config.service_role
- ),
+ provider_builder=self._get_provider_builder(ctx.config.service_role),
)
action.execute()
logger.success("plan (complete)")
@@ -284,9 +269,7 @@ def _get_context(self, config: CfnginConfig, config_path: Path) -> CfnginContext
work_dir=self.__ctx.work_dir,
)
- def _get_provider_builder(
- self, service_role: Optional[str] = None
- ) -> ProviderBuilder:
+ def _get_provider_builder(self, service_role: str | None = None) -> ProviderBuilder:
"""Initialize provider builder.
Args:
@@ -330,8 +313,8 @@ def _inject_common_parameters(self) -> None:
@classmethod
def find_config_files(
- cls, exclude: Optional[List[str]] = None, sys_path: Optional[Path] = None
- ) -> List[Path]:
+ cls, exclude: list[str] | None = None, sys_path: Path | None = None
+ ) -> list[Path]:
"""Find CFNgin config files.
Args:
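The recurring change in this patch swaps typing.List/Optional/Union for builtin generics (PEP 585) and the X | None union syntax (PEP 604). A minimal before/after sketch, assuming `from __future__ import annotations` keeps the new syntax usable on older interpreters; the function body here is hypothetical and only mirrors the signature above:

from __future__ import annotations

from pathlib import Path

# before: def find_config_files(exclude: Optional[List[str]] = None) -> List[Path]: ...
def find_config_files(exclude: list[str] | None = None) -> list[Path]:
    """Builtin generics (PEP 585) plus the | union syntax (PEP 604)."""
    return [Path(name) for name in (exclude or [])]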
diff --git a/runway/cfngin/dag/__init__.py b/runway/cfngin/dag/__init__.py
index 1966f812f..e962973ed 100644
--- a/runway/cfngin/dag/__init__.py
+++ b/runway/cfngin/dag/__init__.py
@@ -4,25 +4,16 @@
import collections
import collections.abc
+import contextlib
import logging
+from collections import OrderedDict
from copy import copy, deepcopy
from threading import Thread
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- Iterable,
- List,
- OrderedDict,
- Set,
- Tuple,
- Union,
- cast,
-)
+from typing import TYPE_CHECKING, Any, Callable, Union, cast
if TYPE_CHECKING:
import threading
+ from collections.abc import Iterable
LOGGER = logging.getLogger(__name__)
@@ -34,7 +25,7 @@ class DAGValidationError(Exception):
class DAG:
"""Directed acyclic graph implementation."""
- graph: OrderedDict[str, Set[str]]
+ graph: OrderedDict[str, set[str]]
def __init__(self) -> None:
"""Instantiate a new DAG with no nodes or edges."""
@@ -54,7 +45,7 @@ def add_node(self, node_name: str) -> None:
graph = self.graph
if node_name in graph:
raise KeyError(f"node {node_name} already exists")
- graph[node_name] = cast(Set[str], set())
+ graph[node_name] = cast(set[str], set())
def add_node_if_not_exists(self, node_name: str) -> None:
"""Add a node if it does not exist yet, ignoring duplicates.
@@ -63,10 +54,8 @@ def add_node_if_not_exists(self, node_name: str) -> None:
node_name: The name of the node to add.
"""
- try:
+ with contextlib.suppress(KeyError):
self.add_node(node_name)
- except KeyError:
- pass
def delete_node(self, node_name: str) -> None:
"""Delete this node and all edges referencing it.
@@ -83,7 +72,7 @@ def delete_node(self, node_name: str) -> None:
raise KeyError(f"node {node_name} does not exist")
graph.pop(node_name)
- for _node, edges in graph.items():
+ for edges in graph.values():
if node_name in edges:
edges.remove(node_name)
@@ -97,10 +86,8 @@ def delete_node_if_exists(self, node_name: str) -> None:
node_name: The name of the node to delete.
"""
- try:
+ with contextlib.suppress(KeyError):
self.delete_node(node_name)
- except KeyError:
- pass
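contextlib.suppress is behavior-equivalent to the try/except/pass blocks it replaces here; a minimal sketch:

import contextlib

nodes = {"a": set()}

# equivalent to: try: del nodes["b"] / except KeyError: pass
with contextlib.suppress(KeyError):
    del nodes["b"]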
def add_edge(self, ind_node: str, dep_node: str) -> None:
"""Add an edge (dependency) between the specified nodes.
@@ -150,7 +137,7 @@ def transpose(self) -> DAG:
"""Build a new graph with the edges reversed."""
graph = self.graph
transposed = DAG()
- for node, _edges in graph.items():
+ for node in graph:
transposed.add_node(node)
for node, edges in graph.items():
# for each edge A -> B, transpose it so that B -> A
@@ -185,12 +172,12 @@ def transitive_reduction(self) -> None:
See https://en.wikipedia.org/wiki/Transitive_reduction
"""
- combinations: List[List[str]] = []
+ combinations: list[list[str]] = []
for node, edges in self.graph.items():
combinations += [[node, edge] for edge in edges]
while True:
- new_combinations: List[List[str]] = []
+ new_combinations: list[list[str]] = []
for comb1 in combinations:
for comb2 in combinations:
if comb1[-1] != comb2[0]:
@@ -221,25 +208,24 @@ def rename_edges(self, old_node_name: str, new_node_name: str) -> None:
graph[new_node_name] = copy(edges)
del graph[old_node_name]
- else:
- if old_node_name in edges:
- edges.remove(old_node_name)
- edges.add(new_node_name)
+ elif old_node_name in edges:
+ edges.remove(old_node_name)
+ edges.add(new_node_name)
- def predecessors(self, node: str) -> List[str]:
+ def predecessors(self, node: str) -> list[str]:
"""Return a list of all immediate predecessors of the given node.
Args:
node (str): The node whose predecessors you want to find.
Returns:
- List[str]: A list of nodes that are immediate predecessors to node.
+ list[str]: A list of nodes that are immediate predecessors to node.
"""
graph = self.graph
return [key for key in graph if node in graph[key]]
- def downstream(self, node: str) -> List[str]:
+ def downstream(self, node: str) -> list[str]:
"""Return a list of all nodes this node has edges towards.
Args:
@@ -254,7 +240,7 @@ def downstream(self, node: str) -> List[str]:
raise KeyError(f"node {node} is not in graph")
return list(graph[node])
- def all_downstreams(self, node: str) -> List[str]:
+ def all_downstreams(self, node: str) -> list[str]:
"""Return a list of all nodes downstream in topological order.
Args:
@@ -265,7 +251,7 @@ def all_downstreams(self, node: str) -> List[str]:
"""
nodes = [node]
- nodes_seen: Set[str] = set()
+ nodes_seen: set[str] = set()
nodes_iter = nodes
for node__ in nodes_iter:
downstreams = self.downstream(node__)
@@ -275,7 +261,7 @@ def all_downstreams(self, node: str) -> List[str]:
nodes.append(downstream_node)
return [node_ for node_ in self.topological_sort() if node_ in nodes_seen]
- def filter(self, nodes: List[str]) -> DAG:
+ def filter(self, nodes: list[str]) -> DAG:
"""Return a new DAG with only the given nodes and their dependencies.
Args:
@@ -297,12 +283,12 @@ def filter(self, nodes: List[str]) -> DAG:
return filtered_dag
- def all_leaves(self) -> List[str]:
+ def all_leaves(self) -> list[str]:
"""Return a list of all leaves (nodes with no downstreams)."""
graph = self.graph
return [key for key in graph if not graph[key]]
- def from_dict(self, graph_dict: Dict[str, Union[Iterable[str], Any]]) -> None:
+ def from_dict(self, graph_dict: dict[str, Union[Iterable[str], Any]]) -> None:
"""Reset the graph and build it from the passed dictionary.
The dictionary takes the form of {node_name: [directed edges]}
@@ -327,7 +313,7 @@ def reset_graph(self) -> None:
"""Restore the graph to an empty state."""
self.graph = collections.OrderedDict()
- def ind_nodes(self) -> List[str]:
+ def ind_nodes(self) -> list[str]:
"""Return a list of all nodes in the graph with no dependencies."""
graph = self.graph
@@ -335,7 +321,7 @@ def ind_nodes(self) -> List[str]:
return [node_ for node_ in graph if node_ not in dependent_nodes]
- def validate(self) -> Tuple[bool, str]:
+ def validate(self) -> tuple[bool, str]:
"""Return (Boolean, message) of whether DAG is valid."""
if not self.ind_nodes():
return (False, "no independent nodes detected")
@@ -345,7 +331,7 @@ def validate(self) -> Tuple[bool, str]:
return False, str(err)
return True, "valid"
- def topological_sort(self) -> List[str]:
+ def topological_sort(self) -> list[str]:
"""Return a topological ordering of the DAG.
Raises:
@@ -359,12 +345,12 @@ def topological_sort(self) -> List[str]:
for val in graph[node]:
in_degree[val] += 1
- queue: "collections.deque[str]" = collections.deque()
+ queue: collections.deque[str] = collections.deque()
for node, value in in_degree.items():
if value == 0:
queue.appendleft(node)
- sorted_graph: List[str] = []
+ sorted_graph: list[str] = []
while queue:
node = queue.pop()
sorted_graph.append(node)
@@ -404,7 +390,7 @@ def release(self) -> Any:
class ThreadedWalker:
"""Walk a DAG as quickly as the graph topology allows, using threads."""
- def __init__(self, semaphore: Union[threading.Semaphore, UnlimitedSemaphore]):
+ def __init__(self, semaphore: Union[threading.Semaphore, UnlimitedSemaphore]) -> None:
"""Instantiate class.
Args:
@@ -431,11 +417,11 @@ def walk(self, dag: DAG, walk_func: Callable[[str], Any]) -> None:
nodes.reverse()
# This maps a node name to a thread of execution.
- threads: Dict[str, Any] = {}
+ threads: dict[str, Any] = {}
# Blocks until all of the given nodes have completed execution,
# whether successfully or with an error.
- def wait_for(nodes: List[str]):
+ def wait_for(nodes: list[str]) -> None:
"""Wait for nodes."""
for node in nodes:
thread = threads[node]
@@ -447,11 +433,9 @@ def wait_for(nodes: List[str]):
# nodes dependencies have executed.
for node in nodes:
- def _fn(node_: str, deps: List[str]) -> Any:
+ def _fn(node_: str, deps: list[str]) -> Any:
if deps:
- LOGGER.debug(
- "%s waiting for %s to complete", node_, ", ".join(deps)
- )
+ LOGGER.debug("%s waiting for %s to complete", node_, ", ".join(deps))
# Wait for all dependencies to complete.
wait_for(deps)
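For orientation, a minimal usage sketch of the DAG API touched above (node names hypothetical); from_dict takes {node: [directed edges]} and topological_sort returns a dependency-respecting order:

from runway.cfngin.dag import DAG

dag = DAG()
dag.from_dict({"a": ["b", "c"], "b": ["d"], "c": ["d"], "d": []})
assert dag.ind_nodes() == ["a"]              # nothing depends on "a"
order = dag.topological_sort()               # e.g. ["a", "b", "c", "d"]
assert order.index("a") < order.index("d")   # "a" sorts before its downstreams
assert dag.all_downstreams("b") == ["d"]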
diff --git a/runway/cfngin/environment.py b/runway/cfngin/environment.py
index c9a7c3c73..0cf405a5e 100644
--- a/runway/cfngin/environment.py
+++ b/runway/cfngin/environment.py
@@ -1,18 +1,18 @@
"""CFNgin environment file parsing."""
-from typing import Any, Dict
+from typing import Any
-def parse_environment(raw_environment: str) -> Dict[str, Any]:
+def parse_environment(raw_environment: str) -> dict[str, Any]:
"""Parse environment file contents.
Args:
raw_environment: Environment file read into a string.
"""
- environment: Dict[str, Any] = {}
- for line in raw_environment.split("\n"):
- line = line.strip()
+ environment: dict[str, Any] = {}
+ for raw_line in raw_environment.split("\n"):
+ line = raw_line.strip()
if not line:
continue
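parse_environment reads stacker-style environment files, one key: value pair per line, skipping blank lines; a minimal sketch with hypothetical keys, assuming that key: value format:

from runway.cfngin.environment import parse_environment

env = parse_environment("namespace: example\nenvironment: dev\n")
assert env == {"namespace": "example", "environment": "dev"}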
diff --git a/runway/cfngin/exceptions.py b/runway/cfngin/exceptions.py
index e1ebee2be..3edd4d06b 100644
--- a/runway/cfngin/exceptions.py
+++ b/runway/cfngin/exceptions.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from pathlib import Path
-from typing import TYPE_CHECKING, Any, List, Optional, Union
+from typing import TYPE_CHECKING, Any
from ..exceptions import RunwayError
@@ -74,12 +74,10 @@ def __init__(self, *, bucket_name: str) -> None:
class CfnginBucketRequired(CfnginError):
"""CFNgin bucket is required to use a feature but it not provided/disabled."""
- config_path: Optional[Path]
+ config_path: Path | None
message: str
- def __init__(
- self, *, config_path: Optional[AnyPath] = None, reason: Optional[str] = None
- ) -> None:
+ def __init__(self, *, config_path: AnyPath | None = None, reason: str | None = None) -> None:
"""Instantiate class.
Args:
@@ -106,9 +104,7 @@ class CfnginOnlyLookupError(CfnginError):
def __init__(self, lookup_name: str) -> None:
"""Instantiate class."""
self.lookup_name = lookup_name
- self.message = (
- f"attempted to use CFNgin only lookup {lookup_name} outside of CFNgin"
- )
+ self.message = f"attempted to use CFNgin only lookup {lookup_name} outside of CFNgin"
super().__init__()
@@ -165,6 +161,8 @@ def __init__(self, kls: Any, error: Exception, *args: Any, **kwargs: Any) -> Non
Args:
kls: The class that was improperly configured.
error: The exception that was raised when trying to use cls.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.message = f'Class "{kls}" is improperly configured: {error}'
@@ -174,10 +172,10 @@ def __init__(self, kls: Any, error: Exception, *args: Any, **kwargs: Any) -> Non
class InvalidConfig(CfnginError):
"""Provided config file is invalid."""
- errors: Union[str, List[Union[Exception, str]]]
+ errors: str | list[Exception | str]
message: str
- def __init__(self, errors: Union[str, List[Union[Exception, str]]]) -> None:
+ def __init__(self, errors: str | list[Exception | str]) -> None:
"""Instantiate class.
Args:
@@ -227,6 +225,8 @@ def __init__(
blueprint_name: Name of the blueprint with invalid userdata placeholder.
exception_message: Message from the exception that was raised while
parsing the userdata.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.message = (
@@ -246,6 +246,8 @@ def __init__(self, key: str, *args: Any, **kwargs: Any) -> None:
Args:
key: The key that was used but doesn't exist in the environment.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.key = key
@@ -258,17 +260,17 @@ class MissingParameterException(CfnginError):
message: str
- def __init__(self, parameters: List[str], *args: Any, **kwargs: Any) -> None:
+ def __init__(self, parameters: list[str], *args: Any, **kwargs: Any) -> None:
"""Instantiate class.
Args:
parameters: A list of the parameters that are missing.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.parameters = parameters
- self.message = (
- f"Missing required cloudformation parameters: {', '.join(parameters)}"
- )
+ self.message = f"Missing required cloudformation parameters: {', '.join(parameters)}"
super().__init__(*args, **kwargs)
@@ -277,19 +279,17 @@ class MissingVariable(CfnginError):
message: str
- def __init__(
- self, blueprint_name: str, variable_name: str, *args: Any, **kwargs: Any
- ) -> None:
+ def __init__(self, blueprint_name: str, variable_name: str, *args: Any, **kwargs: Any) -> None:
"""Instantiate class.
Args:
blueprint_name: Name of the blueprint.
variable_name: Name of the variable missing a value.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
- self.message = (
- f'Variable "{variable_name}" in blueprint "{blueprint_name}" is missing'
- )
+ self.message = f'Variable "{variable_name}" in blueprint "{blueprint_name}" is missing'
super().__init__(*args, **kwargs)
@@ -339,7 +339,7 @@ class PersistentGraphCannotUnlock(CfnginError):
message: str
- def __init__(self, reason: Union[Exception, str]) -> None:
+ def __init__(self, reason: Exception | str) -> None:
"""Instantiate class."""
self.message = f"Could not unlock persistent graph; {reason}"
super().__init__()
@@ -354,17 +354,12 @@ class PersistentGraphLocked(CfnginError):
message: str
- def __init__(
- self, *, message: Optional[str] = None, reason: Optional[str] = None
- ) -> None:
+ def __init__(self, *, message: str | None = None, reason: str | None = None) -> None:
"""Instantiate class."""
if message:
self.message = message
else:
- reason = (
- reason
- or "This action requires the graph to be unlocked to be executed."
- )
+ reason = reason or "This action requires the graph to be unlocked to be executed."
self.message = f"Persistent graph is locked. {reason}"
super().__init__()
@@ -379,7 +374,7 @@ class PersistentGraphLockCodeMismatch(CfnginError):
message: str
- def __init__(self, provided_code: str, s3_code: Optional[str]) -> None:
+ def __init__(self, provided_code: str, s3_code: str | None) -> None:
"""Instantiate class."""
self.message = (
f"The provided lock code '{provided_code}' does not match the S3 "
@@ -397,16 +392,12 @@ class PersistentGraphUnlocked(CfnginError):
message: str
- def __init__(
- self, message: Optional[str] = None, reason: Optional[str] = None
- ) -> None:
+ def __init__(self, message: str | None = None, reason: str | None = None) -> None:
"""Instantiate class."""
if message:
self.message = message
else:
- reason = (
- reason or "This action requires the graph to be locked to be executed."
- )
+ reason = reason or "This action requires the graph to be locked to be executed."
self.message = f"Persistent graph is unlocked. {reason}"
super().__init__()
@@ -416,11 +407,13 @@ class PlanFailed(CfnginError):
message: str
- def __init__(self, failed_steps: List[Step], *args: Any, **kwargs: Any) -> None:
+ def __init__(self, failed_steps: list[Step], *args: Any, **kwargs: Any) -> None:
"""Instantiate class.
Args:
failed_steps: The steps that failed.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.failed_steps = failed_steps
@@ -447,6 +440,8 @@ def __init__(self, stack_name: str, *args: Any, **kwargs: Any) -> None:
Args:
stack_name: Name of the stack that does not exist.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.message = (
@@ -470,6 +465,8 @@ def __init__(
stack_name: Name of the stack.
stack_status: The stack's status.
reason: The reason for the current status.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.stack_name = stack_name
@@ -491,7 +488,7 @@ class StackFailed(CfnginError):
message: str
- def __init__(self, stack_name: str, status_reason: Optional[str] = None) -> None:
+ def __init__(self, stack_name: str, status_reason: str | None = None) -> None:
"""Instantiate class.
Args:
@@ -513,9 +510,7 @@ class UnableToExecuteChangeSet(CfnginError):
message: str
- def __init__(
- self, stack_name: str, change_set_id: str, execution_status: str
- ) -> None:
+ def __init__(self, stack_name: str, change_set_id: str, execution_status: str) -> None:
"""Instantiate class.
Args:
@@ -575,20 +570,19 @@ class UnresolvedBlueprintVariable(CfnginError):
message: str
- def __init__(
- self, blueprint_name: str, variable: Variable, *args: Any, **kwargs: Any
- ) -> None:
+ def __init__(self, blueprint_name: str, variable: Variable, *args: Any, **kwargs: Any) -> None:
"""Instantiate class.
Args:
blueprint_name: Name of the blueprint that tried to use
the unresolved variables.
variable: The unresolved variable.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.message = (
- f'Variable "{variable.name}" in blueprint "{blueprint_name}" '
- "hasn't been resolved"
+ f'Variable "{variable.name}" in blueprint "{blueprint_name}" hasn\'t been resolved'
)
super().__init__(*args, **kwargs)
@@ -604,6 +598,8 @@ def __init__(self, blueprint_name: str, *args: Any, **kwargs: Any) -> None:
Args:
blueprint_name: Name of the blueprint that tried to use the unresolved
variables.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.message = f"Blueprint: \"{blueprint_name}\" hasn't resolved it's variables"
@@ -620,7 +616,7 @@ def __init__(
variable: str,
validator: str,
value: str,
- exception: Optional[Exception] = None,
+ exception: Exception | None = None,
) -> None:
"""Instantiate class.
@@ -641,12 +637,10 @@ def __init__(
)
if self.exception:
- self.message += (
- f": {self.exception.__class__.__name__}: {str(self.exception)}"
- )
+ self.message += f": {self.exception.__class__.__name__}: {self.exception!s}"
super().__init__()
- def __str__(self):
+ def __str__(self) -> str:
"""Return the exception's message when converting to a string."""
return self.message
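The !s conversion introduced above is just the f-string spelling of str(); a one-line sketch:

err = ValueError("boom")
assert f"{err!s}" == str(err) == "boom"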
@@ -656,18 +650,17 @@ class VariableTypeRequired(CfnginError):
message: str
- def __init__(
- self, blueprint_name: str, variable_name: str, *args: Any, **kwargs: Any
- ) -> None:
+ def __init__(self, blueprint_name: str, variable_name: str, *args: Any, **kwargs: Any) -> None:
"""Instantiate class.
Args:
blueprint_name: Name of the blueprint.
variable_name: Name of the variable missing a type.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
self.message = (
- f'Variable "{variable_name}" in blueprint "{blueprint_name}" '
- "does not have a type"
+ f'Variable "{variable_name}" in blueprint "{blueprint_name}" does not have a type'
)
super().__init__(*args, **kwargs)
diff --git a/runway/cfngin/hooks/acm.py b/runway/cfngin/hooks/acm.py
index fb1cc22cc..5b34c8067 100644
--- a/runway/cfngin/hooks/acm.py
+++ b/runway/cfngin/hooks/acm.py
@@ -4,12 +4,11 @@
import logging
import time
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Type
+from typing import TYPE_CHECKING, Any, ClassVar, List, Optional # noqa: UP035
from botocore.exceptions import ClientError
from troposphere import Ref
from troposphere.certificatemanager import Certificate as CertificateResource
-from typing_extensions import Literal
from ...utils import MutableMap
from ..blueprints.variables.types import CFNString
@@ -23,6 +22,7 @@
from mypy_boto3_acm.type_defs import ResourceRecordTypeDef
from mypy_boto3_route53.client import Route53Client
from mypy_boto3_route53.type_defs import ChangeTypeDef
+ from typing_extensions import Literal
from ...context import CfnginContext
from ..blueprints.base import Blueprint
@@ -36,7 +36,7 @@
class HookArgs(HookArgsBaseModel):
"""Hook arguments."""
- alt_names: List[str] = []
+ alt_names: List[str] = [] # noqa: UP006
domain: str
hosted_zone_id: str
stack_name: Optional[str] = None
@@ -47,7 +47,7 @@ class Certificate(Hook):
r"""Hook for managing a **AWS::CertificateManager::Certificate**.
Keyword Args:
- alt_names (Optional[List[str]]): Additional FQDNs to be included in the
+ alt_names (Optional[list[str]]): Additional FQDNs to be included in the
Subject Alternative Name extension of the ACM certificate. For
example, you can add www.example.net to a certificate for which the
domain field is www.example.com if users can reach your site by
@@ -80,7 +80,7 @@ class Certificate(Hook):
"""
- ARGS_PARSER: ClassVar[Type[HookArgs]] = HookArgs
+ ARGS_PARSER: ClassVar[type[HookArgs]] = HookArgs
acm_client: ACMClient
args: HookArgs
@@ -89,14 +89,13 @@ class Certificate(Hook):
stack: Stack
template_description: str
- def __init__(
- self, context: CfnginContext, provider: Provider, **kwargs: Any
- ) -> None:
+ def __init__(self, context: CfnginContext, provider: Provider, **kwargs: Any) -> None:
"""Instantiate class.
Args:
context: Context instance. (passed in by CFNgin)
provider: Provider instance. (passed in by CFNgin)
+ **kwargs: Arbitrary keyword arguments.
"""
super().__init__(context, provider, **kwargs)
@@ -105,12 +104,10 @@ def __init__(
self.stack_name = self.args.stack_name or self.args.domain.replace(".", "-")
self.properties = MutableMap(
- **{
- "DomainName": self.args.domain,
- "SubjectAlternativeNames": self.args.alt_names,
- "Tags": self.tags,
- "ValidationMethod": "DNS",
- }
+ DomainName=self.args.domain,
+ SubjectAlternativeNames=self.args.alt_names,
+ Tags=self.tags,
+ ValidationMethod="DNS",
)
self.blueprint = self._create_blueprint()
@@ -158,22 +155,16 @@ def domain_changed(self) -> bool:
try:
stack_info = self.provider.get_stack(self.stack.fqn)
if self.provider.is_stack_recreatable(stack_info):
- LOGGER.debug(
- "stack is in a recreatable state; domain change does not matter"
- )
+ LOGGER.debug("stack is in a recreatable state; domain change does not matter")
return False
if self.provider.is_stack_in_progress(
stack_info
) or self.provider.is_stack_rolling_back(stack_info):
LOGGER.debug("stack is in progress; can't check for domain change")
return False
- if (
- self.args.domain
- != self.provider.get_outputs(self.stack.fqn)["DomainName"]
- ):
+ if self.args.domain != self.provider.get_outputs(self.stack.fqn)["DomainName"]:
LOGGER.error(
- '"domain" can\'t be changed for existing '
- 'certificate in stack "%s"',
+ '"domain" can\'t be changed for existing certificate in stack "%s"',
self.stack.fqn,
)
return True
@@ -199,10 +190,9 @@ def get_certificate(self, interval: int = 5) -> str:
response = self.provider.cloudformation.describe_stack_resources(
StackName=self.stack.fqn, LogicalResourceId="Certificate"
)["StackResources"]
- if response:
- # can be returned without having a PhysicalResourceId
- if "PhysicalResourceId" in response[0]:
- return response[0]["PhysicalResourceId"]
+ # can be returned without having a PhysicalResourceId
+ if response and "PhysicalResourceId" in response[0]:
+ return response[0]["PhysicalResourceId"]
LOGGER.debug("waiting for certificate to be created...")
time.sleep(interval)
return self.get_certificate(interval=interval)
@@ -228,25 +218,18 @@ def get_validation_record(
"""
if not cert_arn:
cert_arn = self.get_certificate()
- cert = self.acm_client.describe_certificate(CertificateArn=cert_arn).get(
- "Certificate", {}
- )
+ cert = self.acm_client.describe_certificate(CertificateArn=cert_arn).get("Certificate", {})
try:
domain_validation = [
- opt
- for opt in cert["DomainValidationOptions"]
- if opt["ValidationStatus"] == status
+ opt for opt in cert["DomainValidationOptions"] if opt["ValidationStatus"] == status
]
except KeyError:
LOGGER.debug(
- "waiting for DomainValidationOptions to become "
- "available for the certificate..."
+ "waiting for DomainValidationOptions to become available for the certificate..."
)
time.sleep(interval)
- return self.get_validation_record(
- cert_arn=cert_arn, interval=interval, status=status
- )
+ return self.get_validation_record(cert_arn=cert_arn, interval=interval, status=status)
if not domain_validation:
raise ValueError(
@@ -266,9 +249,7 @@ def get_validation_record(
"to become available for the certificate..."
)
time.sleep(interval)
- return self.get_validation_record(
- cert_arn=cert_arn, interval=interval, status=status
- )
+ return self.get_validation_record(cert_arn=cert_arn, interval=interval, status=status)
def put_record_set(self, record_set: ResourceRecordTypeDef) -> None:
"""Create/update a record set on a Route 53 Hosted Zone.
@@ -277,13 +258,11 @@ def put_record_set(self, record_set: ResourceRecordTypeDef) -> None:
record_set: Record set to be added to Route 53.
"""
- LOGGER.info(
- "adding validation record to hosted zone: %s", self.args.hosted_zone_id
- )
+ LOGGER.info("adding validation record to hosted zone: %s", self.args.hosted_zone_id)
self.__change_record_set("CREATE", [record_set])
def remove_validation_records(
- self, records: Optional[List[ResourceRecordTypeDef]] = None
+ self, records: Optional[list[ResourceRecordTypeDef]] = None
) -> None:
"""Remove all record set entries used to validate an ACM Certificate.
@@ -324,7 +303,7 @@ def update_record_set(self, record_set: ResourceRecordTypeDef) -> None:
def __change_record_set(
self,
action: Literal["CREATE", "DELETE", "UPSERT"],
- record_sets: List[ResourceRecordTypeDef],
+ record_sets: list[ResourceRecordTypeDef],
) -> None:
"""Wrap boto3.client('acm').change_resource_record_sets.
@@ -336,7 +315,7 @@ def __change_record_set(
if not record_sets:
raise ValueError("Must provide one of more record sets")
- changes: List[ChangeTypeDef] = [
+ changes: list[ChangeTypeDef] = [
{
"Action": action,
"ResourceRecordSet": {
@@ -360,7 +339,7 @@ def __change_record_set(
ChangeBatch={"Comment": self.template_description, "Changes": changes},
)
- def deploy(self, status: Optional[Status] = None) -> Dict[str, str]:
+ def deploy(self, status: Optional[Status] = None) -> dict[str, str]:
"""Deploy an ACM Certificate."""
record = None
try:
@@ -425,7 +404,7 @@ def deploy(self, status: Optional[Status] = None) -> Dict[str, str]:
def destroy(
self,
- records: Optional[List[ResourceRecordTypeDef]] = None,
+ records: Optional[list[ResourceRecordTypeDef]] = None,
skip_r53: bool = False,
) -> bool:
"""Destroy an ACM certificate.
@@ -447,23 +426,19 @@ def destroy(
) as err:
# these errors are fine if they happen during destruction but
# could require manual steps to finish cleanup.
- LOGGER.warning(
- "deletion of the validation records failed with error:\n%s", err
- )
+ LOGGER.warning("deletion of the validation records failed with error:\n%s", err)
except ClientError as err:
if err.response["Error"]["Message"] != (
f"Stack with id {self.stack.fqn} does not exist"
):
raise
- LOGGER.warning(
- "deletion of the validation records failed with error:\n%s", err
- )
+ LOGGER.warning("deletion of the validation records failed with error:\n%s", err)
else:
LOGGER.info("deletion of validation records was skipped")
self.destroy_stack(wait=True)
return True
- def post_deploy(self) -> Dict[str, str]:
+ def post_deploy(self) -> dict[str, str]:
"""Run during the **post_deploy** stage."""
return self.deploy()
@@ -471,7 +446,7 @@ def post_destroy(self) -> bool:
"""Run during the **post_destroy** stage."""
return self.destroy()
- def pre_deploy(self) -> Dict[str, str]:
+ def pre_deploy(self) -> dict[str, str]:
"""Run during the **pre_deploy** stage."""
return self.deploy()
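__change_record_set wraps the real boto3 change_resource_record_sets call; a minimal sketch of the ChangeBatch payload it assembles (zone ID and record names hypothetical, and running this would issue a live API call):

import boto3

route53 = boto3.client("route53")
route53.change_resource_record_sets(
    HostedZoneId="Z0000000EXAMPLE",  # hypothetical hosted zone
    ChangeBatch={
        "Comment": "ACM DNS validation",
        "Changes": [
            {
                "Action": "UPSERT",
                "ResourceRecordSet": {
                    "Name": "_x1.example.com.",  # hypothetical validation record
                    "Type": "CNAME",
                    "TTL": 300,
                    "ResourceRecords": [{"Value": "_y1.acm-validations.aws."}],
                },
            }
        ],
    },
)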
diff --git a/runway/cfngin/hooks/aws_lambda.py b/runway/cfngin/hooks/aws_lambda.py
index f8ab897d8..48286851e 100644
--- a/runway/cfngin/hooks/aws_lambda.py
+++ b/runway/cfngin/hooks/aws_lambda.py
@@ -1,6 +1,5 @@
"""AWS Lambda hook."""
-# pylint: disable=too-many-lines
from __future__ import annotations
import hashlib
@@ -13,18 +12,14 @@
import subprocess
import sys
import tempfile
+from collections.abc import Iterable, Iterator
from io import BytesIO as StringIO
from pathlib import Path
from shutil import copyfile
from typing import (
TYPE_CHECKING,
Any,
- Dict,
- Iterable,
- Iterator,
- List,
Optional,
- Tuple,
Union,
cast,
)
@@ -62,9 +57,7 @@
DockerizePipArgTypeDef = Optional[
Union[
bool,
- Literal[
- "false", "False", "no", "No", "non-linux", "true", "True", "yes", "Yes"
- ],
+ Literal["false", "False", "no", "No", "non-linux", "true", "True", "yes", "Yes"],
]
]
@@ -72,8 +65,8 @@
def copydir(
source: str,
destination: str,
- includes: List[str],
- excludes: Optional[List[str]] = None,
+ includes: list[str],
+ excludes: Optional[list[str]] = None,
follow_symlinks: bool = False,
) -> None:
"""Extend the functionality of shutil.
@@ -93,24 +86,24 @@ def copydir(
def _mkdir(dir_name: str) -> None:
"""Recursively create directories."""
- parent = os.path.dirname(dir_name)
- if not os.path.isdir(parent):
+ parent = os.path.dirname(dir_name) # noqa: PTH120
+ if not os.path.isdir(parent): # noqa: PTH112
_mkdir(parent)
LOGGER.debug("creating directory: %s", dir_name)
- os.mkdir(dir_name)
+ os.mkdir(dir_name) # noqa: PTH102
for file_name in files:
- src = os.path.join(source, file_name)
- dest = os.path.join(destination, file_name)
+ src = os.path.join(source, file_name) # noqa: PTH118
+ dest = os.path.join(destination, file_name) # noqa: PTH118
try:
LOGGER.debug('copying file "%s" to "%s"', src, dest)
copyfile(src, dest)
except OSError:
- _mkdir(os.path.dirname(dest))
+ _mkdir(os.path.dirname(dest)) # noqa: PTH120
copyfile(src, dest)
-def find_requirements(root: str) -> Optional[Dict[str, bool]]:
+def find_requirements(root: str) -> Optional[dict[str, bool]]:
"""Identify Python requirement files.
Args:
@@ -122,7 +115,7 @@ def find_requirements(root: str) -> Optional[Dict[str, bool]]:
"""
findings = {
- file_name: os.path.isfile(os.path.join(root, file_name))
+ file_name: os.path.isfile(os.path.join(root, file_name)) # noqa: PTH118, PTH113
for file_name in ["requirements.txt", "Pipfile", "Pipfile.lock"]
}
@@ -151,12 +144,12 @@ def should_use_docker(dockerize_pip: DockerizePipArgTypeDef = None) -> bool:
return False
-def str2bool(v: str):
+def str2bool(v: str) -> bool:
"""Return boolean value of string."""
return v.lower() in ("yes", "true", "t", "1", "on", "y")
-def _zip_files(files: Iterable[str], root: str) -> Tuple[bytes, str]:
+def _zip_files(files: Iterable[str], root: str) -> tuple[bytes, str]:
"""Generate a ZIP file in-memory from a list of files.
Files will be stored in the archive with relative names, and have their
@@ -175,7 +168,7 @@ def _zip_files(files: Iterable[str], root: str) -> Tuple[bytes, str]:
files = list(files) # create copy of list also converts generator to list
with ZipFile(zip_data, "w", ZIP_DEFLATED) as zip_file:
for file_name in files:
- zip_file.write(os.path.join(root, file_name), file_name)
+ zip_file.write(os.path.join(root, file_name), file_name) # noqa: PTH118
# Fix file permissions to avoid any issues - only care whether a file
# is executable or not, choosing between modes 755 and 644 accordingly.
@@ -183,12 +176,8 @@ def _zip_files(files: Iterable[str], root: str) -> Tuple[bytes, str]:
perms = (zip_entry.external_attr & ZIP_PERMS_MASK) >> 16
new_perms = 0o755 if perms & stat.S_IXUSR != 0 else 0o644
if new_perms != perms:
- LOGGER.debug(
- "fixing perms: %s: %o => %o", zip_entry.filename, perms, new_perms
- )
- new_attr = (zip_entry.external_attr & ~ZIP_PERMS_MASK) | (
- new_perms << 16
- )
+ LOGGER.debug("fixing perms: %s: %o => %o", zip_entry.filename, perms, new_perms)
+ new_attr = (zip_entry.external_attr & ~ZIP_PERMS_MASK) | (new_perms << 16)
zip_entry.external_attr = new_attr
contents = zip_data.getvalue()
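The permission fix-up works because ZipInfo.external_attr stores the unix mode in its upper 16 bits, which is why the code masks and shifts by 16; a minimal sketch:

import stat
from zipfile import ZipInfo

info = ZipInfo("handler.py")                 # hypothetical archive member
info.external_attr = 0o755 << 16             # unix perms live in the high bits
assert (info.external_attr >> 16) & 0o777 == 0o755
assert (info.external_attr >> 16) & stat.S_IXUSR  # owner-executable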
@@ -207,25 +196,24 @@ def _calculate_hash(files: Iterable[str], root: str) -> str:
root: base directory to analyze files in.
"""
- file_hash = hashlib.md5()
+ file_hash = hashlib.md5() # noqa: S324
for file_name in sorted(files):
- file_path = os.path.join(root, file_name)
+ file_path = os.path.join(root, file_name) # noqa: PTH118
file_hash.update((file_name + "\0").encode())
- with open(file_path, "rb") as file_:
- # pylint: disable=cell-var-from-loop
+ with open(file_path, "rb") as file_: # noqa: PTH123
for chunk in iter(lambda: file_.read(4096), ""):
if not chunk:
break
file_hash.update(chunk)
- file_hash.update("\0".encode())
+ file_hash.update(b"\0")
return file_hash.hexdigest()
def _find_files(
root: str,
- includes: Union[List[str], str],
- excludes: Optional[List[str]] = None,
+ includes: Union[list[str], str],
+ excludes: Optional[list[str]] = None,
follow_symlinks: bool = False,
) -> Iterator[str]:
"""List files inside a directory based on include and exclude rules.
@@ -249,7 +237,7 @@ def _find_files(
http://www.aviser.asia/formic/doc/index.html
"""
- root = os.path.abspath(root)
+ root = os.path.abspath(root) # noqa: PTH100
file_set = formic.FileSet(
directory=root, include=includes, exclude=excludes, symlinks=follow_symlinks
)
@@ -257,8 +245,8 @@ def _find_files(
def _zip_from_file_patterns(
- root: str, includes: List[str], excludes: List[str], follow_symlinks: bool
-) -> Tuple[bytes, str]:
+ root: str, includes: list[str], excludes: list[str], follow_symlinks: bool
+) -> tuple[bytes, str]:
"""Generate a ZIP file in-memory from file search patterns.
Args:
@@ -296,9 +284,9 @@ def _zip_from_file_patterns(
def handle_requirements(
package_root: str,
dest_path: str,
- requirements: Dict[str, bool],
+ requirements: dict[str, bool],
pipenv_timeout: int = 300,
- python_path: Optional[str] = None,
+ python_path: str | None = None,
use_pipenv: bool = False,
) -> str:
"""Use the correct requirements file.
@@ -330,7 +318,7 @@ def handle_requirements(
)
if requirements["requirements.txt"]:
LOGGER.info("using requirements.txt for dependencies")
- return os.path.join(dest_path, "requirements.txt")
+ return os.path.join(dest_path, "requirements.txt") # noqa: PTH118
if requirements["Pipfile"] or requirements["Pipfile.lock"]:
LOGGER.info("using pipenv for dependencies")
return _handle_use_pipenv(
@@ -368,30 +356,26 @@ def _handle_use_pipenv(
LOGGER.error("pipenv can only be used with python installed from PyPi")
sys.exit(1)
LOGGER.info("creating requirements.txt from Pipfile...")
- req_path = os.path.join(dest_path, "requirements.txt")
+ req_path = os.path.join(dest_path, "requirements.txt") # noqa: PTH118
cmd = ["pipenv", "lock", "--requirements", "--keep-outdated"]
if python_path:
cmd.insert(0, python_path)
cmd.insert(1, "-m")
- with open(req_path, "w", encoding="utf-8") as requirements:
- with subprocess.Popen(
+ with (
+ open(req_path, "w", encoding="utf-8") as requirements, # noqa: PTH123
+ subprocess.Popen(
cmd, cwd=package_root, stdout=requirements, stderr=subprocess.PIPE
- ) as pipenv_process:
- if int(sys.version[0]) > 2:
- _stdout, stderr = pipenv_process.communicate(timeout=timeout)
- else:
- _stdout, stderr = pipenv_process.communicate()
- if pipenv_process.returncode == 0:
- return req_path
- if int(sys.version[0]) > 2:
- stderr = stderr.decode("UTF-8")
- LOGGER.error(
- '"%s" failed with the following output:\n%s', " ".join(cmd), stderr
- )
- raise PipenvError
+ ) as pipenv_process,
+ ):
+ _stdout, stderr = pipenv_process.communicate(timeout=timeout)
+ if pipenv_process.returncode == 0:
+ return req_path
+ stderr = stderr.decode("UTF-8")
+ LOGGER.error('"%s" failed with the following output:\n%s', " ".join(cmd), stderr)
+ raise PipenvError
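The rewrite above relies on parenthesized context managers, one with statement managing both resources (Python 3.10+ syntax); a minimal sketch with hypothetical file names:

# equivalent to two nested `with` statements
with (
    open("a.txt", "w", encoding="utf-8") as left,    # hypothetical files
    open("b.txt", "w", encoding="utf-8") as right,
):
    left.write("first\n")
    right.write("second\n")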
-def dockerized_pip(
+def dockerized_pip( # noqa: C901, PLR0912
work_dir: str,
client: Optional[docker.DockerClient] = None,
runtime: Optional[str] = None,
@@ -414,31 +398,30 @@ def dockerized_pip(
python_dontwritebytecode: Don't write bytecode.
"""
- # TODO use kwargs to pass args to docker for advanced config
+ # TODO (craig): use kwargs to pass args to docker for advanced config
if bool(docker_file) + bool(docker_image) + bool(runtime) != 1:
# exactly one of these is needed. converting to bool will give us a
# 'False' (0) for 'None' and 'True' (1) for anything else.
raise InvalidDockerizePipConfiguration(
- "exactly only one of [docker_file, docker_file, runtime] must be "
- "provided"
+ "exactly only one of [docker_file, docker_file, runtime] must be provided"
)
if not client:
client = docker.from_env()
if docker_file:
- if not os.path.isfile(docker_file):
+ if not os.path.isfile(docker_file): # noqa: PTH113
raise ValueError(f'could not find docker_file "{docker_file}"')
LOGGER.info('building docker image from "%s"', docker_file)
response = cast(
- Union[Image, Tuple[Image, Iterator[Dict[str, str]]]],
+ Union[Image, tuple[Image, Iterator[dict[str, str]]]],
client.images.build(
- path=os.path.dirname(docker_file),
- dockerfile=os.path.basename(docker_file),
+ path=os.path.dirname(docker_file), # noqa: PTH120
+ dockerfile=os.path.basename(docker_file), # noqa: PTH119
forcerm=True,
),
)
- # the response can be either a tuple of (Image, Generator[Dict[str, str]])
+ # the response can be either a tuple of (Image, Generator[dict[str, str]])
# or just Image depending on API version.
if isinstance(response, tuple):
docker_image = response[0].id
@@ -450,26 +433,20 @@ def dockerized_pip(
LOGGER.info('docker image "%s" created', docker_image)
if runtime:
if runtime not in SUPPORTED_RUNTIMES:
- raise ValueError(
- f'invalid runtime "{runtime}" must be one of {SUPPORTED_RUNTIMES}'
- )
+ raise ValueError(f'invalid runtime "{runtime}" must be one of {SUPPORTED_RUNTIMES}')
docker_image = f"lambci/lambda:build-{runtime}"
- LOGGER.debug(
- 'selected docker image "%s" based on provided runtime', docker_image
- )
+ LOGGER.debug('selected docker image "%s" based on provided runtime', docker_image)
if sys.platform.lower() == "win32":
LOGGER.debug("formatted docker mount path for Windows")
work_dir = work_dir.replace("\\", "/")
- work_dir_mount = docker.types.Mount(
- target="/var/task", source=work_dir, type="bind"
- )
+ work_dir_mount = docker.types.Mount(target="/var/task", source=work_dir, type="bind")
pip_cmd = "python -m pip install -t /var/task -r /var/task/requirements.txt"
LOGGER.info('using docker image "%s" to build deployment package...', docker_image)
- docker_run_args: Dict[str, Any] = {}
+ docker_run_args: dict[str, Any] = {}
if python_dontwritebytecode:
docker_run_args["environment"] = "1"
@@ -512,9 +489,7 @@ def _pip_has_no_color_option(python_path: str) -> bool:
[
python_path,
"-c",
- "from __future__ import print_function;"
- "import pip;"
- "print(pip.__version__)",
+ "from __future__ import print_function;import pip;print(pip.__version__)",
]
)
if isinstance(pip_version_string, bytes): # type: ignore
@@ -526,24 +501,24 @@ def _pip_has_no_color_option(python_path: str) -> bool:
return False
-# TODO refactor logic to breakup logic into smaller chunks
-def _zip_package( # pylint: disable=too-many-locals,too-many-statements
+# TODO (kyle): refactor to break up logic into smaller chunks
+def _zip_package( # noqa: PLR0915, PLR0912, C901, D417
package_root: str,
*,
dockerize_pip: DockerizePipArgTypeDef = False,
- excludes: Optional[List[str]] = None,
+ excludes: Optional[list[str]] = None,
follow_symlinks: bool = False,
- includes: List[str],
+ includes: list[str],
pipenv_timeout: int = 300,
python_dontwritebytecode: bool = False,
python_exclude_bin_dir: bool = False,
python_exclude_setuptools_dirs: bool = False,
python_path: Optional[str] = None,
- requirements_files: Dict[str, bool],
+ requirements_files: dict[str, bool],
use_pipenv: bool = False,
work_dir: Path,
**kwargs: Any,
-) -> Tuple[bytes, str]:
+) -> tuple[bytes, str]:
"""Create zip file in memory with package dependencies.
Args:
@@ -578,9 +553,8 @@ def _zip_package( # pylint: disable=too-many-locals,too-many-statements
excludes = excludes or []
excludes.append(".venv/")
- # pylint: disable=consider-using-with
tmpdir = tempfile.TemporaryDirectory(prefix="cfngin", dir=work_dir)
- tmp_req = os.path.join(tmpdir.name, "requirements.txt")
+ tmp_req = os.path.join(tmpdir.name, "requirements.txt") # noqa: PTH118
copydir(package_root, tmpdir.name, includes, excludes, follow_symlinks)
tmp_req = handle_requirements(
package_root=package_root,
@@ -607,7 +581,7 @@ def _zip_package( # pylint: disable=too-many-locals,too-many-statements
"--no-color",
]
- subprocess_args: Dict[str, Any] = {}
+ subprocess_args: dict[str, Any] = {}
if python_dontwritebytecode:
subprocess_args["env"] = dict(os.environ, PYTHONDONTWRITEBYTECODE="1")
@@ -646,14 +620,16 @@ def _zip_package( # pylint: disable=too-many-locals,too-many-statements
if tmp_script.is_file():
tmp_script.unlink()
- if python_exclude_bin_dir and os.path.isdir(os.path.join(tmpdir.name, "bin")):
+ if python_exclude_bin_dir and os.path.isdir( # noqa: PTH112
+ os.path.join(tmpdir.name, "bin") # noqa: PTH118
+ ):
LOGGER.debug("Removing python /bin directory from Lambda files")
- shutil.rmtree(os.path.join(tmpdir.name, "bin"))
+ shutil.rmtree(os.path.join(tmpdir.name, "bin")) # noqa: PTH118
if python_exclude_setuptools_dirs:
for i in os.listdir(tmpdir.name):
- if i.endswith(".egg-info") or i.endswith(".dist-info"):
+ if i.endswith((".egg-info", ".dist-info")):
LOGGER.debug("Removing directory %s from Lambda files", i)
- shutil.rmtree(os.path.join(tmpdir.name, i))
+ shutil.rmtree(os.path.join(tmpdir.name, i)) # noqa: PTH118
req_files = _find_files(tmpdir.name, includes="**", follow_symlinks=False)
contents, content_hash = _zip_files(req_files, tmpdir.name)
@@ -673,9 +649,7 @@ def _zip_package( # pylint: disable=too-many-locals,too-many-statements
return contents, content_hash
-def _head_object(
- s3_conn: S3Client, bucket: str, key: str
-) -> Optional[HeadObjectOutputTypeDef]:
+def _head_object(s3_conn: S3Client, bucket: str, key: str) -> Optional[HeadObjectOutputTypeDef]:
"""Retrieve information about an object in S3 if it exists.
Args:
@@ -753,10 +727,10 @@ def _upload_code(
def _check_pattern_list(
- patterns: Optional[Union[List[str], str]],
+ patterns: Optional[Union[list[str], str]],
key: str,
- default: Optional[List[str]] = None,
-) -> Optional[List[str]]:
+ default: Optional[list[str]] = None,
+) -> Optional[list[str]]:
"""Validate file search patterns from user configuration.
Acceptable input is a string (which will be converted to a singleton list),
@@ -785,9 +759,7 @@ def _check_pattern_list(
if isinstance(patterns, list) and all(isinstance(p, str) for p in patterns): # type: ignore
return patterns
- raise ValueError(
- f"Invalid file patterns in key '{key}': must be a string or " "list of strings"
- )
+ raise ValueError(f"Invalid file patterns in key '{key}': must be a string or list of strings")
class _UploadFunctionOptionsTypeDef(TypedDict):
@@ -802,8 +774,8 @@ class _UploadFunctionOptionsTypeDef(TypedDict):
"""
- exclude: Optional[List[str]]
- include: Optional[List[str]]
+ exclude: Optional[list[str]]
+ include: Optional[list[str]]
path: str
@@ -845,11 +817,9 @@ def _upload_function(
"""
try:
- root = os.path.expanduser(options["path"])
+ root = os.path.expanduser(options["path"]) # noqa: PTH111
except KeyError as exc:
- raise ValueError(
- f"missing required property '{exc.args[0]}' in function '{name}'"
- ) from exc
+ raise ValueError(f"missing required property '{exc.args[0]}' in function '{name}'") from exc
includes = _check_pattern_list(options.get("include"), "include", default=["**"])
excludes = _check_pattern_list(options.get("exclude"), "exclude", default=[])
@@ -858,13 +828,13 @@ def _upload_function(
# os.path.join will ignore other parameters if the right-most one is an
# absolute path, which is exactly what we want.
- if not os.path.isabs(root):
- root = os.path.abspath(os.path.join(sys_path, root))
+ if not os.path.isabs(root): # noqa: PTH117
+ root = os.path.abspath(os.path.join(sys_path, root)) # noqa: PTH118, PTH100
requirements_files = find_requirements(root)
if requirements_files:
zip_contents, content_hash = _zip_package(
root,
- includes=cast(List[str], includes),
+ includes=cast("list[str]", includes),
excludes=excludes,
follow_symlinks=follow_symlinks,
requirements_files=requirements_files,
@@ -873,12 +843,10 @@ def _upload_function(
)
else:
zip_contents, content_hash = _zip_from_file_patterns(
- root, cast(List[str], includes), cast(List[str], excludes), follow_symlinks
+ root, cast("list[str]", includes), cast("list[str]", excludes), follow_symlinks
)
- return _upload_code(
- s3_conn, bucket, prefix, name, zip_contents, content_hash, payload_acl
- )
+ return _upload_code(s3_conn, bucket, prefix, name, zip_contents, content_hash, payload_acl)
def select_bucket_region(
@@ -908,7 +876,9 @@ def select_bucket_region(
return region or provider_region
-def upload_lambda_functions(context: CfnginContext, provider: Provider, **kwargs: Any):
+def upload_lambda_functions( # noqa: D417
+ context: CfnginContext, provider: Provider, **kwargs: Any
+) -> dict[str, Any]:
"""Build Lambda payloads from user configuration and upload them to S3.
Constructs ZIP archives containing files matching specified patterns for
@@ -945,7 +915,7 @@ def upload_lambda_functions(context: CfnginContext, provider: Provider, **kwargs
``False``)
payload_acl (Optional[str]): The canned S3 object ACL to be applied
to the uploaded payload. (*default: private*)
- functions (Dict[str, Any]): Configurations of desired payloads to
+ functions (dict[str, Any]): Configurations of desired payloads to
build. Keys correspond to function names, used to derive key
names for the payload. Each value should itself be a dictionary,
with the following data:
@@ -966,7 +936,7 @@ def upload_lambda_functions(context: CfnginContext, provider: Provider, **kwargs
which will only run on non Linux systems. To use this option
Docker must be installed.
- **exclude (Optional[Union[str, List[str]]])**
+ **exclude (Optional[Union[str, list[str]]])**
Pattern or list of patterns of files to exclude from the
payload. If provided, any files that match will be ignored,
regardless of whether they match an inclusion pattern.
@@ -975,7 +945,7 @@ def upload_lambda_functions(context: CfnginContext, provider: Provider, **kwargs
such as ``.git``, ``.svn``, ``__pycache__``, ``*.pyc``,
``.gitignore``, etc.
- **include (Optional[Union[str, List[str]]])**
+ **include (Optional[Union[str, list[str]]])**
Pattern or list of patterns of files to include in the
payload. If provided, only files that match these
patterns will be included in the payload.
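A minimal sketch of the functions mapping described above, as it would appear in the hook's args (names and paths hypothetical):

functions = {
    "MyFunction": {
        "path": "./lambda_src",            # hypothetical source directory
        "include": ["*.py"],               # only ship matching files
        "exclude": ["tests/", "*.pyc"],    # ignored even when matched by include
    }
}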
@@ -1074,8 +1044,8 @@ def create_template(self):
"see documentation for replacement",
__name__,
)
- # TODO add better handling for misconfiguration (e.g. forgetting function names)
- # TODO support defining dockerize_pip options at the top level of args
+ # TODO (craig): add better handling for misconfiguration (e.g. forgetting function names)
+ # TODO (craig): support defining dockerize_pip options at the top level of args
custom_bucket = cast(str, kwargs.get("bucket", ""))
if not custom_bucket:
if not context.bucket_name:
@@ -1114,11 +1084,11 @@ def create_template(self):
prefix = kwargs.get("prefix", "")
- results: Dict[str, Any] = {}
+ results: dict[str, Any] = {}
for name, options in kwargs["functions"].items():
sys_path = (
- os.path.dirname(context.config_path)
- if os.path.isfile(context.config_path)
+ os.path.dirname(context.config_path) # noqa: PTH120
+ if os.path.isfile(context.config_path) # noqa: PTH113
else context.config_path
)
results[name] = _upload_function(
diff --git a/runway/cfngin/hooks/awslambda/_python_hooks.py b/runway/cfngin/hooks/awslambda/_python_hooks.py
index a6eb1e9ee..303d96995 100644
--- a/runway/cfngin/hooks/awslambda/_python_hooks.py
+++ b/runway/cfngin/hooks/awslambda/_python_hooks.py
@@ -1,7 +1,5 @@
"""Hook for creating an AWS Lambda Function using Python runtime."""
-# pylint errors are python3.7 only
-# pylint: disable=inherit-non-class,no-value-for-parameter
from __future__ import annotations
import logging
diff --git a/runway/cfngin/hooks/awslambda/base_classes.py b/runway/cfngin/hooks/awslambda/base_classes.py
index f4846ab6d..a15e30e4e 100644
--- a/runway/cfngin/hooks/awslambda/base_classes.py
+++ b/runway/cfngin/hooks/awslambda/base_classes.py
@@ -3,23 +3,16 @@
from __future__ import annotations
import logging
-from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Generic,
- List,
- Optional,
- Set,
- Tuple,
TypeVar,
cast,
overload,
)
-from typing_extensions import Literal
-
from ....compat import cached_property
from ..protocols import CfnginHookProtocol
from .exceptions import RuntimeMismatchError
@@ -28,6 +21,10 @@
from .source_code import SourceCode
if TYPE_CHECKING:
+ from pathlib import Path
+
+ from typing_extensions import Literal
+
from ...._logging import RunwayLogger
from ....context import CfnginContext
from ....utils import BaseModel
@@ -54,9 +51,7 @@ class Project(Generic[_AwsLambdaHookArgsTypeVar_co]):
ctx: CfnginContext
"""CFNgin context object."""
- def __init__(
- self, args: _AwsLambdaHookArgsTypeVar_co, context: CfnginContext
- ) -> None:
+ def __init__(self, args: _AwsLambdaHookArgsTypeVar_co, context: CfnginContext) -> None:
"""Instantiate class.
Args:
@@ -78,7 +73,7 @@ def build_directory(self) -> Path:
return result
@cached_property
- def cache_dir(self) -> Optional[Path]:
+ def cache_dir(self) -> Path | None:
"""Directory where a dependency manager's cache data will be stored.
Returns:
@@ -98,12 +93,12 @@ def cache_dir(self) -> Optional[Path]:
return cache_dir
@cached_property
- def compatible_architectures(self) -> Optional[List[str]]:
+ def compatible_architectures(self) -> list[str] | None:
"""List of compatible instruction set architectures."""
return getattr(self.args, "compatible_architectures", None)
@cached_property
- def compatible_runtimes(self) -> Optional[List[str]]:
+ def compatible_runtimes(self) -> list[str] | None:
"""List of compatible runtimes.
Value should be valid Lambda Function runtimes
@@ -114,7 +109,7 @@ def compatible_runtimes(self) -> Optional[List[str]]:
compatible runtimes.
"""
- runtimes = getattr(self.args, "compatible_runtimes", cast(List[str], []))
+ runtimes = getattr(self.args, "compatible_runtimes", cast("list[str]", []))
if runtimes and self.runtime not in runtimes:
raise ValueError(
f"runtime ({self.runtime}) not in compatible runtimes ({', '.join(runtimes)})"
@@ -129,7 +124,7 @@ def dependency_directory(self) -> Path:
return result
@cached_property
- def license(self) -> Optional[str]:
+ def license(self) -> str | None:
"""Software license for the project.
Can be any of the following:
@@ -142,8 +137,8 @@ def license(self) -> Optional[str]:
"""
return getattr(self.args, "license", None)
- @cached_property # pylint error is python3.7 only
- def metadata_files(self) -> Tuple[Path, ...]:
+ @cached_property
+ def metadata_files(self) -> tuple[Path, ...]:
"""Project metadata files (e.g. ``project.json``, ``pyproject.toml``)."""
return ()
@@ -163,9 +158,9 @@ def runtime(self) -> str:
raise ValueError("runtime could not be determined from the build system")
@cached_property
- def _runtime_from_docker(self) -> Optional[str]:
+ def _runtime_from_docker(self) -> str | None:
"""runtime from Docker if class can use Docker."""
- docker: Optional[DockerDependencyInstaller] = getattr(self, "docker", None)
+ docker: DockerDependencyInstaller | None = getattr(self, "docker", None)
if not docker:
return None
return docker.runtime
@@ -226,11 +221,7 @@ def project_root(self) -> Path:
top_lvl_dir = (
self.ctx.config_path.parent
if self.ctx.config_path.is_file()
- else (
- self.ctx.config_path
- if self.ctx.config_path.is_dir()
- else self.args.source_code
- )
+ else (self.ctx.config_path if self.ctx.config_path.is_dir() else self.args.source_code)
)
if top_lvl_dir == self.args.source_code:
return top_lvl_dir
@@ -238,8 +229,7 @@ def project_root(self) -> Path:
parents = list(self.args.source_code.parents)
if top_lvl_dir not in parents:
LOGGER.info(
- "ignoring project directory; "
- "source code located outside of project directory"
+ "ignoring project directory; source code located outside of project directory"
)
return self.args.source_code
@@ -269,8 +259,8 @@ def project_type(self) -> str:
"""
raise NotImplementedError
- @cached_property # pylint error is python3.7 only
- def supported_metadata_files(self) -> Set[str]:
+ @cached_property
+ def supported_metadata_files(self) -> set[str]:
"""Names of all supported metadata files.
Returns:
@@ -326,7 +316,6 @@ class AwsLambdaHook(CfnginHookProtocol, Generic[_ProjectTypeVar]):
ctx: CfnginContext
"""CFNgin context object."""
- # pylint: disable=super-init-not-called
def __init__(self, context: CfnginContext, **_kwargs: Any) -> None:
"""Instantiate class.
@@ -350,19 +339,15 @@ def project(self) -> _ProjectTypeVar:
raise NotImplementedError
@overload
- def build_response(
- self, stage: Literal["deploy"]
- ) -> AwsLambdaHookDeployResponse: ...
+ def build_response(self, stage: Literal["deploy"]) -> AwsLambdaHookDeployResponse: ...
@overload
- def build_response(self, stage: Literal["destroy"]) -> Optional[BaseModel]: ...
+ def build_response(self, stage: Literal["destroy"]) -> BaseModel | None: ...
@overload
def build_response(self, stage: Literal["plan"]) -> AwsLambdaHookDeployResponse: ...
- def build_response(
- self, stage: Literal["deploy", "destroy", "plan"]
- ) -> Optional[BaseModel]:
+ def build_response(self, stage: Literal["deploy", "destroy", "plan"]) -> BaseModel | None:
"""Build response object that will be returned by this hook.
Args:
@@ -390,7 +375,7 @@ def _build_response_deploy(self) -> AwsLambdaHookDeployResponse:
runtime=self.deployment_package.runtime,
)
- def _build_response_destroy(self) -> Optional[BaseModel]:
+ def _build_response_destroy(self) -> BaseModel | None:
"""Build response for destroy stage."""
return None
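The build_response overloads above use Literal stage names so type checkers can narrow the return type per call site; a minimal self-contained sketch of the same pattern with hypothetical types:

from __future__ import annotations

from typing import Literal, overload

@overload
def build_response(stage: Literal["deploy"]) -> dict[str, str]: ...
@overload
def build_response(stage: Literal["destroy"]) -> None: ...
def build_response(stage: Literal["deploy", "destroy"]) -> dict[str, str] | None:
    return {"status": "ok"} if stage == "deploy" else None

deploy_result = build_response("deploy")    # checkers infer dict[str, str]
destroy_result = build_response("destroy")  # checkers infer None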
diff --git a/runway/cfngin/hooks/awslambda/deployment_package.py b/runway/cfngin/hooks/awslambda/deployment_package.py
index 6724c6ea3..9c25b0842 100644
--- a/runway/cfngin/hooks/awslambda/deployment_package.py
+++ b/runway/cfngin/hooks/awslambda/deployment_package.py
@@ -7,26 +7,10 @@
import logging
import mimetypes
import stat
-import sys
import zipfile
-from contextlib import suppress
-from typing import (
- TYPE_CHECKING,
- ClassVar,
- Dict,
- Generic,
- Iterator,
- List,
- Optional,
- TypeVar,
- Union,
- cast,
- overload,
-)
+from typing import TYPE_CHECKING, ClassVar, Final, Generic, TypeVar, cast, overload
from urllib.parse import urlencode
-from typing_extensions import Final, Literal
-
from ....compat import cached_property
from ....core.providers.aws.s3 import Bucket
from ....core.providers.aws.s3.exceptions import (
@@ -42,10 +26,12 @@
from .models.args import AwsLambdaHookArgs
if TYPE_CHECKING:
+ from collections.abc import Iterator
from pathlib import Path
import igittigitt
from mypy_boto3_s3.type_defs import HeadObjectOutputTypeDef, PutObjectOutputTypeDef
+ from typing_extensions import Literal
from ...._logging import RunwayLogger
@@ -64,7 +50,7 @@ class DeploymentPackage(DelCachedPropMixin, Generic[_ProjectTypeVar]):
"""
- META_TAGS: ClassVar[Dict[str, str]] = {
+ META_TAGS: ClassVar[dict[str, str]] = {
"code_sha256": "runway.cfngin:awslambda.code_sha256",
"compatible_architectures": "runway.cfngin:awslambda.compatible_architectures",
"compatible_runtimes": "runway.cfngin:awslambda.compatible_runtimes",
@@ -78,9 +64,7 @@ class DeploymentPackage(DelCachedPropMixin, Generic[_ProjectTypeVar]):
SIZE_EOCD: Final[Literal[22]] = 22
"""Size of a zip file's End of Central Directory Record (empty zip)."""
- ZIPFILE_PERMISSION_MASK: ClassVar[int] = (
- stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
- ) << 16
+ ZIPFILE_PERMISSION_MASK: ClassVar[int] = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) << 16
"""Mask to retrieve unix file permissions from the external attributes
property of a ``zipfile.ZipInfo``.
"""
@@ -91,7 +75,7 @@ class DeploymentPackage(DelCachedPropMixin, Generic[_ProjectTypeVar]):
usage_type: Literal["function", "layer"]
"""How the deployment package can be used by AWS Lambda."""
- _put_object_response: Optional[PutObjectOutputTypeDef] = None
+ _put_object_response: PutObjectOutputTypeDef | None = None
def __init__(
self,
@@ -154,26 +138,24 @@ def code_sha256(self) -> str:
return base64.b64encode(file_hash.digest).decode()
@cached_property
- def compatible_architectures(self) -> Optional[List[str]]:
+ def compatible_architectures(self) -> list[str] | None:
"""List of compatible instruction set architectures."""
return self.project.compatible_architectures
@cached_property
- def compatible_runtimes(self) -> Optional[List[str]]:
+ def compatible_runtimes(self) -> list[str] | None:
"""List of compatible runtimes."""
return self.project.compatible_runtimes
@cached_property
def exists(self) -> bool:
"""Whether the deployment package exists."""
- if self.archive_file.exists():
- return True
- return False
+ return bool(self.archive_file.exists())
@cached_property
def gitignore_filter(
self,
- ) -> Optional[igittigitt.IgnoreParser]:
+ ) -> igittigitt.IgnoreParser | None:
"""Filter to use when zipping dependencies.
This should be overridden by subclasses if a filter should be used.
@@ -182,7 +164,7 @@ def gitignore_filter(
return None
@cached_property
- def license(self) -> Optional[str]:
+ def license(self) -> str | None:
"""Software license for the project."""
return self.project.license
@@ -197,7 +179,7 @@ def md5_checksum(self) -> str:
FileNotFoundError: Property accessed before archive file has been built.
"""
- file_hash = FileHash(hashlib.md5())
+ file_hash = FileHash(hashlib.md5()) # noqa: S324
file_hash.add_file(self.archive_file)
return base64.b64encode(file_hash.digest).decode()
@@ -206,16 +188,14 @@ def object_key(self) -> str:
"""Key to use when upload object to AWS S3."""
prefix = f"awslambda/{self.usage_type}s"
if self.project.args.object_prefix:
- prefix = (
- f"{prefix}/{self.project.args.object_prefix.lstrip('/').rstrip('/')}"
- )
+ prefix = f"{prefix}/{self.project.args.object_prefix.lstrip('/').rstrip('/')}"
return ( # this can't contain runtime - causes a cyclic dependency
f"{prefix}/{self.project.source_code.root_directory.name}."
f"{self.project.source_code.md5_hash}.zip"
)
@cached_property
- def object_version_id(self) -> Optional[str]:
+ def object_version_id(self) -> str | None:
"""S3 object version ID.
Returns:
@@ -223,10 +203,7 @@ def object_version_id(self) -> Optional[str]:
if versioning is enabled on the bucket.
"""
- if (
- not self._put_object_response
- or "VersionId" not in self._put_object_response
- ):
+ if not self._put_object_response or "VersionId" not in self._put_object_response:
return None
return self._put_object_response["VersionId"]
@@ -244,9 +221,7 @@ def build(self) -> Path:
# we need to use runtime BEFORE the build process starts to allow runtime
# errors to be raised early.
LOGGER.info("building %s (%s)...", self.archive_file.name, self.runtime)
- with zipfile.ZipFile(
- self.archive_file, "w", zipfile.ZIP_DEFLATED
- ) as archive_file:
+ with zipfile.ZipFile(self.archive_file, "w", zipfile.ZIP_DEFLATED) as archive_file:
self._build_zip_dependencies(archive_file)
self._build_zip_source_code(archive_file)
self._build_fix_file_permissions(archive_file)
@@ -273,9 +248,7 @@ def _build_fix_file_permissions(self, archive_file: zipfile.ZipFile) -> None:
"""
for file_info in archive_file.filelist:
- current_perms = (
- file_info.external_attr & self.ZIPFILE_PERMISSION_MASK
- ) >> 16
+ current_perms = (file_info.external_attr & self.ZIPFILE_PERMISSION_MASK) >> 16
required_perm = 0o755 if current_perms & stat.S_IXUSR != 0 else 0o644
if current_perms != required_perm:
LOGGER.debug(
@@ -304,9 +277,9 @@ def _build_zip_dependencies(
archive_file.write(
dep,
(
- self.insert_layer_dir(
- dep, self.project.dependency_directory
- ).relative_to(self.project.dependency_directory)
+ self.insert_layer_dir(dep, self.project.dependency_directory).relative_to(
+ self.project.dependency_directory
+ )
if self.usage_type == "layer"
else dep.relative_to(self.project.dependency_directory)
),
@@ -336,14 +309,12 @@ def _build_zip_source_code(self, archive_file: zipfile.ZipFile) -> None:
def build_tag_set(self, *, url_encoded: Literal[True] = ...) -> str: ...
@overload
- def build_tag_set(self, *, url_encoded: Literal[False] = ...) -> Dict[str, str]: ...
+ def build_tag_set(self, *, url_encoded: Literal[False] = ...) -> dict[str, str]: ...
@overload
- def build_tag_set(
- self, *, url_encoded: bool = ...
- ) -> Union[Dict[str, str], str]: ...
+ def build_tag_set(self, *, url_encoded: bool = ...) -> dict[str, str] | str: ...
- def build_tag_set(self, *, url_encoded: bool = True) -> Union[Dict[str, str], str]:
+ def build_tag_set(self, *, url_encoded: bool = True) -> dict[str, str] | str:
"""Build tag set to be applied to the S3 object.
Args:
@@ -382,21 +353,13 @@ def build_tag_set(self, *, url_encoded: bool = True) -> Union[Dict[str, str], st
def delete(self) -> None:
"""Delete deployment package."""
- if sys.version_info < (3, 8): # cov: ignore
- with suppress(FileNotFoundError): # acts the same as `missing_ok=true`
- self.archive_file.unlink() # python3.7 does not support `missing_ok`
- else: # cov: ignore
- self.archive_file.unlink(missing_ok=True)
+ self.archive_file.unlink(missing_ok=True)
LOGGER.verbose("deleted local deployment package %s", self.archive_file)
# clear cached properties so they can recalculate
- self._del_cached_property(
- "code_sha256", "exists", "md5_checksum", "object_version_id"
- )
+ self._del_cached_property("code_sha256", "exists", "md5_checksum", "object_version_id")
@staticmethod
- def insert_layer_dir(
- file_path: Path, relative_to: Path # pylint: disable=unused-argument
- ) -> Path:
+ def insert_layer_dir(file_path: Path, relative_to: Path) -> Path: # noqa: ARG004
"""Insert directory into local file path for layer archive.
If required, this should be overridden by a subclass for language
@@ -518,16 +481,14 @@ def code_sha256(self) -> str:
return self.object_tags[self.META_TAGS["code_sha256"]]
@cached_property
- def compatible_architectures(self) -> Optional[List[str]]:
+ def compatible_architectures(self) -> list[str] | None:
"""List of compatible instruction set architectures."""
if self.META_TAGS["compatible_architectures"] in self.object_tags:
- return self.object_tags[self.META_TAGS["compatible_architectures"]].split(
- "+"
- )
+ return self.object_tags[self.META_TAGS["compatible_architectures"]].split("+")
return None
@cached_property
- def compatible_runtimes(self) -> Optional[List[str]]:
+ def compatible_runtimes(self) -> list[str] | None:
"""List of compatible runtimes."""
if self.META_TAGS["compatible_runtimes"] in self.object_tags:
return self.object_tags[self.META_TAGS["compatible_runtimes"]].split("+")
@@ -536,21 +497,15 @@ def compatible_runtimes(self) -> Optional[List[str]]:
@cached_property
def exists(self) -> bool:
"""Whether the S3 object exists."""
- if self.head and not self.head.get("DeleteMarker", False):
- return True
- return False
+ return bool(self.head and not self.head.get("DeleteMarker", False))
@cached_property
- def head(self) -> Optional[HeadObjectOutputTypeDef]:
+ def head(self) -> HeadObjectOutputTypeDef | None:
"""Response from HeadObject API call."""
try:
- return self.bucket.client.head_object(
- Bucket=self.bucket.name, Key=self.object_key
- )
+ return self.bucket.client.head_object(Bucket=self.bucket.name, Key=self.object_key)
except self.bucket.client.exceptions.ClientError as exc:
- status_code = exc.response.get("ResponseMetadata", {}).get(
- "HTTPStatusCode", 0
- )
+ status_code = exc.response.get("ResponseMetadata", {}).get("HTTPStatusCode", 0)
if status_code == 404:
LOGGER.verbose(
"%s not found",
@@ -566,7 +521,7 @@ def head(self) -> Optional[HeadObjectOutputTypeDef]:
raise
@cached_property
- def license(self) -> Optional[str]:
+ def license(self) -> str | None:
"""Software license for the project."""
if self.META_TAGS["license"] in self.object_tags:
return self.object_tags[self.META_TAGS["license"]]
@@ -591,7 +546,7 @@ def md5_checksum(self) -> str:
return self.object_tags[self.META_TAGS["md5_checksum"]]
@cached_property
- def object_tags(self) -> Dict[str, str]:
+ def object_tags(self) -> dict[str, str]:
"""S3 object tags."""
response = self.bucket.client.get_object_tagging(
Bucket=self.bucket.name, Key=self.object_key
@@ -602,7 +557,7 @@ def object_tags(self) -> Dict[str, str]:
return {t["Key"]: t["Value"] for t in response["TagSet"]}
@cached_property
- def object_version_id(self) -> Optional[str]:
+ def object_version_id(self) -> str | None:
"""S3 object version ID.
Returns:
@@ -649,9 +604,7 @@ def build(self) -> Path:
def delete(self) -> None:
"""Delete deployment package."""
if self.exists:
- self.bucket.client.delete_object(
- Bucket=self.bucket.name, Key=self.object_key
- )
+ self.bucket.client.delete_object(Bucket=self.bucket.name, Key=self.object_key)
LOGGER.verbose(
"deleted deployment package S3 object %s",
self.bucket.format_bucket_path_uri(key=self.object_key),
@@ -682,8 +635,7 @@ def update_tags(self) -> None:
)
LOGGER.info("updated S3 object's tags")
- # pylint: disable=unused-argument
- def upload(self, *, build: bool = True) -> None:
+ def upload(self, *, build: bool = True) -> None: # noqa: ARG002
"""Upload deployment package.
The object should already exist. This method only exists as a "placeholder"
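
Note: the ZIPFILE_PERMISSION_MASK arithmetic above works because a ZipInfo stores the Unix mode in the high 16 bits of external_attr. A small sketch (the archive path is hypothetical):

import stat
import zipfile

MASK = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) << 16

with zipfile.ZipFile("package.zip") as zf:  # hypothetical archive path
    for info in zf.filelist:
        perms = (info.external_attr & MASK) >> 16
        print(f"{info.filename}: {oct(perms)}")  # e.g. handler.py: 0o644
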
diff --git a/runway/cfngin/hooks/awslambda/docker.py b/runway/cfngin/hooks/awslambda/docker.py
index 2612e76b7..1dc1cf6b2 100644
--- a/runway/cfngin/hooks/awslambda/docker.py
+++ b/runway/cfngin/hooks/awslambda/docker.py
@@ -5,23 +5,10 @@
import logging
import os
import platform
-from typing import (
- TYPE_CHECKING,
- Any,
- ClassVar,
- Dict,
- Iterator,
- List,
- Optional,
- Type,
- TypeVar,
- Union,
- cast,
-)
+from typing import TYPE_CHECKING, Any, ClassVar, TypeVar, cast
from docker import DockerClient
from docker.errors import DockerException, ImageNotFound
-from docker.models.images import Image
from docker.types import Mount
from ...._logging import PrefixAdaptor
@@ -30,8 +17,11 @@
from .constants import AWS_SAM_BUILD_IMAGE_PREFIX, DEFAULT_IMAGE_NAME, DEFAULT_IMAGE_TAG
if TYPE_CHECKING:
+ from collections.abc import Iterator
from pathlib import Path
+ from docker.models.images import Image
+
from ...._logging import RunwayLogger
from ....context import CfnginContext, RunwayContext
from .base_classes import Project
@@ -61,7 +51,7 @@ class DockerDependencyInstaller:
client: DockerClient
"""Docker client."""
- ctx: Union[CfnginContext, RunwayContext]
+ ctx: CfnginContext | RunwayContext
"""Context object."""
options: DockerOptions
@@ -71,8 +61,8 @@ def __init__(
self,
project: Project[AwsLambdaHookArgs],
*,
- client: Optional[DockerClient] = None,
- context: Optional[Union[CfnginContext, RunwayContext]] = None,
+ client: DockerClient | None = None,
+ context: CfnginContext | RunwayContext | None = None,
) -> None:
"""Instantiate class.
@@ -96,7 +86,7 @@ def __init__(
self.project = project
@cached_property
- def bind_mounts(self) -> List[Mount]:
+ def bind_mounts(self) -> list[Mount]:
"""Bind mounts that will be used by the container."""
mounts = [
Mount(
@@ -121,7 +111,7 @@ def bind_mounts(self) -> List[Mount]:
return mounts
@cached_property
- def environment_variables(self) -> Dict[str, str]:
+ def environment_variables(self) -> dict[str, str]:
"""Environment variables to pass to the Docker container.
This is a subset of the environment variables stored in the context
@@ -131,7 +121,7 @@ def environment_variables(self) -> Dict[str, str]:
return {k: v for k, v in self.ctx.env.vars.items() if k.startswith("DOCKER")}
@cached_property
- def image(self) -> Union[Image, str]:
+ def image(self) -> Image | str:
"""Docker image that will be used.
Raises:
@@ -149,13 +139,13 @@ def image(self) -> Union[Image, str]:
)
raise ValueError("docker.file, docker.image, or runtime is required")
- @cached_property # pylint error is python3.7 only
- def install_commands(self) -> List[str]:
+ @cached_property
+ def install_commands(self) -> list[str]:
"""Commands to run to install dependencies."""
return []
@cached_property
- def post_install_commands(self) -> List[str]:
+ def post_install_commands(self) -> list[str]:
"""Commands to run after dependencies have been installed."""
cmds = [
*[
@@ -171,7 +161,7 @@ def post_install_commands(self) -> List[str]:
]
if platform.system() != "Windows":
# methods only exist on POSIX systems
- gid, uid = os.getgid(), os.getuid() # pylint: disable=no-member
+ gid, uid = os.getgid(), os.getuid()
cmds.append(
f"chown -R {uid}:{gid} {self.DEPENDENCY_DIR}",
)
@@ -180,7 +170,7 @@ def post_install_commands(self) -> List[str]:
return cmds
@cached_property
- def pre_install_commands(self) -> List[str]:
+ def pre_install_commands(self) -> list[str]:
"""Commands to run before dependencies have been installed."""
cmds = [
f"chown -R 0:0 {self.DEPENDENCY_DIR}",
@@ -189,8 +179,8 @@ def pre_install_commands(self) -> List[str]:
cmds.append(f"chown -R 0:0 {self.CACHE_DIR}")
return cmds
- @cached_property # pylint error is python3.7 only
- def runtime(self) -> Optional[str]:
+ @cached_property
+ def runtime(self) -> str | None:
"""AWS Lambda runtime determined from the Docker container."""
return None
@@ -198,8 +188,8 @@ def build_image(
self,
docker_file: Path,
*,
- name: Optional[str] = None,
- tag: Optional[str] = None,
+ name: str | None = None,
+ tag: str | None = None,
) -> Image:
"""Build Docker image from Dockerfile.
@@ -233,7 +223,7 @@ def build_image(
def log_docker_msg_bytes(
self, stream: Iterator[bytes], *, level: int = logging.INFO
- ) -> List[str]:
+ ) -> list[str]:
"""Log Docker output message from blocking generator that return bytes.
Args:
@@ -244,7 +234,7 @@ def log_docker_msg_bytes(
List of log messages.
"""
- result: List[str] = []
+ result: list[str] = []
for raw_msg in stream:
msg = raw_msg.decode().strip()
result.append(msg)
@@ -252,8 +242,8 @@ def log_docker_msg_bytes(
return result
def log_docker_msg_dict(
- self, stream: Iterator[Dict[str, Any]], *, level: int = logging.INFO
- ) -> List[str]:
+ self, stream: Iterator[dict[str, Any]], *, level: int = logging.INFO
+ ) -> list[str]:
"""Log Docker output message from blocking generator that return dict.
Args:
@@ -264,7 +254,7 @@ def log_docker_msg_dict(
List of log messages.
"""
- result: List[str] = []
+ result: list[str] = []
for raw_msg in stream:
for key in ["stream", "status"]:
if key in raw_msg:
@@ -283,7 +273,7 @@ def install(self) -> None:
- :attr:`~runway.cfngin.hooks.awslambda.docker.DockerDependencyInstaller.install_commands`
- :attr:`~runway.cfngin.hooks.awslambda.docker.DockerDependencyInstaller.post_install_commands`
- """ # noqa
+ """
for cmd in self.pre_install_commands:
self.run_command(cmd)
for cmd in self.install_commands:
@@ -315,7 +305,7 @@ def pull_image(self, name: str, *, force: bool = True) -> Image:
LOGGER.info("image not found; pulling docker image %s...", name)
return self.client.images.pull(repository=name)
- def run_command(self, command: str, *, level: int = logging.INFO) -> List[str]:
+ def run_command(self, command: str, *, level: int = logging.INFO) -> list[str]:
"""Execute equivalent of ``docker container run``.
Args:
@@ -350,9 +340,7 @@ def run_command(self, command: str, *, level: int = logging.INFO) -> List[str]:
raise DockerExecFailedError(response)
@classmethod
- def from_project(
- cls: Type[_T], project: Project[AwsLambdaHookArgs]
- ) -> Optional[_T]:
+ def from_project(cls: type[_T], project: Project[AwsLambdaHookArgs]) -> _T | None:
"""Instantiate class from a project.
High-level method that wraps instantiation in error handling.
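
Note: the `cls: type[_T]` annotation on from_project above is what lets subclasses inherit a correctly typed constructor. A minimal sketch with hypothetical classes:

from __future__ import annotations

from typing import TypeVar

_T = TypeVar("_T", bound="Installer")

class Installer:
    @classmethod
    def from_project(cls: type[_T], available: bool) -> _T | None:
        # returns an instance of whichever subclass this is called on, or None
        return cls() if available else None

class PythonInstaller(Installer): ...

installer = PythonInstaller.from_project(True)  # inferred as PythonInstaller | None
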
diff --git a/runway/cfngin/hooks/awslambda/models/args.py b/runway/cfngin/hooks/awslambda/models/args.py
index 5729a711f..be586a124 100644
--- a/runway/cfngin/hooks/awslambda/models/args.py
+++ b/runway/cfngin/hooks/awslambda/models/args.py
@@ -1,10 +1,10 @@
"""Argument data models."""
-# pylint: disable=no-self-argument
+# ruff: noqa: UP006, UP035
from __future__ import annotations
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast
+from typing import Any, List, Optional
from pydantic import DirectoryPath, Extra, Field, FilePath, validator
@@ -12,9 +12,6 @@
from .....utils import BaseModel
from ...base import HookArgsBaseModel
-if TYPE_CHECKING:
- from typing import Callable
-
class DockerOptions(BaseModel):
"""Docker options."""
@@ -126,10 +123,7 @@ class Config:
extra = Extra.ignore
- _resolve_path_fields = cast(
- "classmethod[Callable[..., Any]]",
- validator("file", allow_reuse=True)(resolve_path_field),
- )
+ _resolve_path_fields = validator("file", allow_reuse=True)(resolve_path_field) # type: ignore
class AwsLambdaHookArgs(HookArgsBaseModel):
@@ -270,15 +264,12 @@ class AwsLambdaHookArgs(HookArgsBaseModel):
use_cache: bool = True
"""Whether to use a cache directory with pip that will persist builds (default ``True``)."""
- _resolve_path_fields = cast(
- "classmethod[Callable[..., Any]]",
- validator("cache_dir", "source_code", allow_reuse=True)(resolve_path_field),
- )
+ _resolve_path_fields = validator("cache_dir", "source_code", allow_reuse=True)(resolve_path_field) # type: ignore
@validator("runtime", always=True, allow_reuse=True)
def _validate_runtime_or_docker(
- cls, v: Optional[str], values: Dict[str, Any]
- ) -> Optional[str]:
+ cls, v: str | None, values: dict[str, Any] # noqa: N805
+ ) -> str | None:
"""Validate that either runtime is provided or Docker image is provided."""
if v: # if runtime was provided, we don't need to check anything else
return v
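
Note: the `noqa: N805` markers appear because pydantic v1 validators take `cls` as the first argument, which ruff's pep8-naming check flags. A hedged sketch of the pattern, assuming pydantic v1 and hypothetical fields:

from typing import Any, Optional

from pydantic import BaseModel, validator

class DemoArgs(BaseModel):  # hypothetical stand-in for AwsLambdaHookArgs
    docker_image: Optional[str] = None
    runtime: Optional[str] = None

    @validator("runtime", always=True, allow_reuse=True)
    def _validate_runtime_or_docker(cls, v: Optional[str], values: dict[str, Any]) -> Optional[str]:  # noqa: N805
        # `cls` (not `self`) is what trips ruff's pep8-naming N805 check here
        if v or values.get("docker_image"):
            return v
        raise ValueError("docker_image or runtime is required")

DemoArgs(runtime="python3.12")  # ok; DemoArgs() would raise a ValidationError
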
diff --git a/runway/cfngin/hooks/awslambda/models/responses.py b/runway/cfngin/hooks/awslambda/models/responses.py
index a2b6a4578..4d2003df7 100644
--- a/runway/cfngin/hooks/awslambda/models/responses.py
+++ b/runway/cfngin/hooks/awslambda/models/responses.py
@@ -1,5 +1,6 @@
"""Response data models."""
+# ruff: noqa: UP006, UP035
from typing import List, Optional
from pydantic import Extra
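
Note: a likely reason the pydantic models keep `typing.List`/`Optional` (hence the `ruff: noqa: UP006, UP035` pragmas) is that pydantic v1 evaluates field annotations at runtime, where the newer spellings are unavailable on the oldest interpreters still supported; annotations elsewhere become plain strings under `from __future__ import annotations` and can use the new syntax freely. A sketch of the distinction:

from __future__ import annotations  # annotations below stay as strings, never evaluated

def helper(tags: list[str] | None = None) -> dict[str, str]:
    # safe on older interpreters: only static type checkers ever read this annotation
    return {t: t for t in tags or []}

# pydantic v1, by contrast, resolves field annotations at runtime (get_type_hints),
# so "list[str] | None" on a model field would raise on interpreters predating
# PEP 585/604 -- presumably why the models keep typing.List/Optional with noqa.
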
diff --git a/runway/cfngin/hooks/awslambda/python_requirements/_deployment_package.py b/runway/cfngin/hooks/awslambda/python_requirements/_deployment_package.py
index 2e625bacd..90938d87b 100644
--- a/runway/cfngin/hooks/awslambda/python_requirements/_deployment_package.py
+++ b/runway/cfngin/hooks/awslambda/python_requirements/_deployment_package.py
@@ -2,8 +2,7 @@
from __future__ import annotations
-from pathlib import Path
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING
from igittigitt import IgnoreParser
@@ -11,6 +10,8 @@
from ..deployment_package import DeploymentPackage
if TYPE_CHECKING:
+ from pathlib import Path
+
from . import PythonProject
@@ -20,7 +21,7 @@ class PythonDeploymentPackage(DeploymentPackage["PythonProject"]):
project: PythonProject
@cached_property
- def gitignore_filter(self) -> Optional[IgnoreParser]:
+ def gitignore_filter(self) -> IgnoreParser | None:
"""Filter to use when zipping dependencies.
This should be overridden by subclasses if a filter should be used.
@@ -28,15 +29,9 @@ def gitignore_filter(self) -> Optional[IgnoreParser]:
"""
if self.project.args.slim:
gitignore_filter = IgnoreParser()
- gitignore_filter.add_rule(
- "**/*.dist-info*", self.project.dependency_directory
- )
- gitignore_filter.add_rule(
- "**/*.py[c|d|i|o]", self.project.dependency_directory
- )
- gitignore_filter.add_rule(
- "**/__pycache__*", self.project.dependency_directory
- )
+ gitignore_filter.add_rule("**/*.dist-info*", self.project.dependency_directory)
+ gitignore_filter.add_rule("**/*.py[c|d|i|o]", self.project.dependency_directory)
+ gitignore_filter.add_rule("**/__pycache__*", self.project.dependency_directory)
if self.project.args.strip:
gitignore_filter.add_rule("**/*.so", self.project.dependency_directory)
return gitignore_filter
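
Note: the slim/strip filtering above builds on igittigitt's gitignore-style matching. A small sketch (the directory is hypothetical):

from pathlib import Path

from igittigitt import IgnoreParser

dep_dir = Path("deps")  # hypothetical dependency directory
parser = IgnoreParser()
parser.add_rule("**/__pycache__*", dep_dir)
parser.add_rule("**/*.dist-info*", dep_dir)

print(parser.match(dep_dir / "pkg" / "__pycache__"))  # True -> skipped when zipping
print(parser.match(dep_dir / "pkg" / "handler.py"))   # False -> included
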
diff --git a/runway/cfngin/hooks/awslambda/python_requirements/_docker.py b/runway/cfngin/hooks/awslambda/python_requirements/_docker.py
index ed968dd0c..a883e87c5 100644
--- a/runway/cfngin/hooks/awslambda/python_requirements/_docker.py
+++ b/runway/cfngin/hooks/awslambda/python_requirements/_docker.py
@@ -4,7 +4,7 @@
import logging
import re
-from typing import TYPE_CHECKING, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Optional, Union
from docker.types.services import Mount
@@ -42,7 +42,7 @@ def __init__(
super().__init__(project, client=client, context=context)
@cached_property
- def bind_mounts(self) -> List[Mount]:
+ def bind_mounts(self) -> list[Mount]:
"""Bind mounts that will be used by the container."""
mounts = [*super().bind_mounts]
if self.project.requirements_txt:
@@ -56,7 +56,7 @@ def bind_mounts(self) -> List[Mount]:
return mounts
@cached_property
- def environment_variables(self) -> Dict[str, str]:
+ def environment_variables(self) -> dict[str, str]:
"""Environment variables to pass to the docker container.
This is a subset of the environment variables stored in the context
@@ -64,13 +64,11 @@ def environment_variables(self) -> Dict[str, str]:
"""
docker_env_vars = super().environment_variables
- pip_env_vars = {
- k: v for k, v in self.ctx.env.vars.items() if k.startswith("PIP")
- }
+ pip_env_vars = {k: v for k, v in self.ctx.env.vars.items() if k.startswith("PIP")}
return {**docker_env_vars, **pip_env_vars}
@cached_property
- def install_commands(self) -> List[str]:
+ def install_commands(self) -> list[str]:
"""Commands to run to install dependencies."""
if self.project.requirements_txt:
return [
diff --git a/runway/cfngin/hooks/awslambda/python_requirements/_project.py b/runway/cfngin/hooks/awslambda/python_requirements/_project.py
index 25eaacd4d..7b66dc6cf 100644
--- a/runway/cfngin/hooks/awslambda/python_requirements/_project.py
+++ b/runway/cfngin/hooks/awslambda/python_requirements/_project.py
@@ -4,9 +4,7 @@
import logging
import shutil
-from typing import TYPE_CHECKING, ClassVar, Optional, Set, Tuple
-
-from typing_extensions import Literal
+from typing import TYPE_CHECKING, ClassVar, Optional
from .....compat import cached_property
from .....dependency_managers import (
@@ -23,6 +21,8 @@
if TYPE_CHECKING:
from pathlib import Path
+ from typing_extensions import Literal
+
LOGGER = logging.getLogger(__name__.replace("._", "."))
@@ -38,24 +38,18 @@ def docker(self) -> Optional[PythonDockerDependencyInstaller]:
return PythonDockerDependencyInstaller.from_project(self)
@cached_property
- def metadata_files(self) -> Tuple[Path, ...]:
+ def metadata_files(self) -> tuple[Path, ...]:
"""Project metadata files.
Files are only included in the return value if they exist.
"""
if self.project_type == "poetry":
- config_files = [
- self.project_root / config_file for config_file in Poetry.CONFIG_FILES
- ]
+ config_files = [self.project_root / config_file for config_file in Poetry.CONFIG_FILES]
elif self.project_type == "pipenv":
- config_files = [
- self.project_root / config_file for config_file in Pipenv.CONFIG_FILES
- ]
+ config_files = [self.project_root / config_file for config_file in Pipenv.CONFIG_FILES]
else:
- config_files = [
- self.project_root / config_file for config_file in Pip.CONFIG_FILES
- ]
+ config_files = [self.project_root / config_file for config_file in Pip.CONFIG_FILES]
return tuple(path for path in config_files if path.exists())
@cached_property
@@ -119,15 +113,11 @@ def project_type(self) -> Literal["pip", "pipenv", "poetry"]:
if Poetry.dir_is_project(self.project_root):
if self.args.use_poetry:
return "poetry"
- LOGGER.warning(
- "poetry project detected but use of poetry is explicitly disabled"
- )
+ LOGGER.warning("poetry project detected but use of poetry is explicitly disabled")
if Pipenv.dir_is_project(self.project_root):
if self.args.use_pipenv:
return "pipenv"
- LOGGER.warning(
- "pipenv project detected but use of pipenv is explicitly disabled"
- )
+ LOGGER.warning("pipenv project detected but use of pipenv is explicitly disabled")
return "pip"
@cached_property
@@ -143,7 +133,7 @@ def requirements_txt(self) -> Optional[Path]:
return None
@cached_property
- def supported_metadata_files(self) -> Set[str]:
+ def supported_metadata_files(self) -> set[str]:
"""Names of all supported metadata files.
Returns:
@@ -190,8 +180,6 @@ def install_dependencies(self) -> None:
requirements=self.requirements_txt,
target=self.dependency_directory,
)
- LOGGER.debug(
- "dependencies successfully installed to %s", self.dependency_directory
- )
+ LOGGER.debug("dependencies successfully installed to %s", self.dependency_directory)
else:
LOGGER.info("skipped installing dependencies; none found")
diff --git a/runway/cfngin/hooks/awslambda/source_code.py b/runway/cfngin/hooks/awslambda/source_code.py
index e815c79ca..c351958e2 100644
--- a/runway/cfngin/hooks/awslambda/source_code.py
+++ b/runway/cfngin/hooks/awslambda/source_code.py
@@ -5,7 +5,7 @@
import hashlib
import logging
from pathlib import Path
-from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Union
+from typing import TYPE_CHECKING
import igittigitt
@@ -13,6 +13,8 @@
from runway.utils import FileHash
if TYPE_CHECKING:
+ from collections.abc import Iterator, Sequence
+
from _typeshed import StrPath
LOGGER = logging.getLogger(__name__)
@@ -41,9 +43,9 @@ def __init__(
self,
root_directory: StrPath,
*,
- gitignore_filter: Optional[igittigitt.IgnoreParser] = None,
- include_files_in_hash: Optional[Sequence[Path]] = None,
- project_root: Optional[StrPath] = None,
+ gitignore_filter: igittigitt.IgnoreParser | None = None,
+ include_files_in_hash: Sequence[Path] | None = None,
+ project_root: StrPath | None = None,
) -> None:
"""Instantiate class.
@@ -88,7 +90,7 @@ def md5_hash(self) -> str:
for include_file in self._include_files_in_hash:
if include_file not in sorted_files:
sorted_files.append(include_file)
- file_hash = FileHash(hashlib.md5())
+ file_hash = FileHash(hashlib.md5()) # noqa: S324
file_hash.add_files(sorted(sorted_files), relative_to=self.project_root)
return file_hash.hexdigest
@@ -101,7 +103,7 @@ def add_filter_rule(self, pattern: str) -> None:
"""
self.gitignore_filter.add_rule(pattern=pattern, base_path=self.root_directory)
- def sorted(self, *, reverse: bool = False) -> List[Path]:
+ def sorted(self, *, reverse: bool = False) -> list[Path]:
"""Sorted list of source code files.
Args:
@@ -120,7 +122,7 @@ def __eq__(self, other: object) -> bool:
return self.root_directory == other.root_directory
return False
- def __fspath__(self) -> Union[str, bytes]:
+ def __fspath__(self) -> str | bytes:
"""Return the file system path representation of the object."""
return str(self.root_directory)
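
Note: `__fspath__` (the os.PathLike protocol) is why an instance like this can be handed to any API that accepts a path. A minimal sketch:

import os

class SourceCodeLike:
    def __init__(self, root: str) -> None:
        self.root = root

    def __fspath__(self) -> str:
        # anything implementing this can be passed to open(), os.fspath(), pathlib, ...
        return self.root

print(os.fspath(SourceCodeLike("./src")))  # ./src
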
diff --git a/runway/cfngin/hooks/awslambda/type_defs.py b/runway/cfngin/hooks/awslambda/type_defs.py
index d3aacb37b..168a86396 100644
--- a/runway/cfngin/hooks/awslambda/type_defs.py
+++ b/runway/cfngin/hooks/awslambda/type_defs.py
@@ -2,16 +2,14 @@
from __future__ import annotations
-from typing import Optional
-
from typing_extensions import TypedDict
class AwsLambdaHookDeployResponseTypedDict(TypedDict):
- """Dict output of :class:`runway.cfngin.hooks.awslambda.models.response.AwsLambdaHookDeployResponse` using aliases.""" # noqa
+ """Dict output of :class:`runway.cfngin.hooks.awslambda.models.response.AwsLambdaHookDeployResponse` using aliases.""" # noqa: E501
CodeSha256: str
Runtime: str
S3Bucket: str
S3Key: str
- S3ObjectVersion: Optional[str]
+ S3ObjectVersion: str | None
diff --git a/runway/cfngin/hooks/base.py b/runway/cfngin/hooks/base.py
index 9c48d517d..e515b16b4 100644
--- a/runway/cfngin/hooks/base.py
+++ b/runway/cfngin/hooks/base.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, Optional, Type, Union, cast
+from typing import TYPE_CHECKING, Any, ClassVar, Dict, cast # noqa: UP035
from troposphere import Tags
@@ -28,7 +28,7 @@
class HookArgsBaseModel(BaseModel):
"""Base model for hook args."""
- tags: Dict[str, str] = {}
+ tags: Dict[str, str] = {} # noqa: UP006
class Hook(CfnginHookProtocol):
@@ -46,24 +46,23 @@ class Hook(CfnginHookProtocol):
"""
- ARGS_PARSER: ClassVar[Type[HookArgsBaseModel]] = HookArgsBaseModel
+ ARGS_PARSER: ClassVar[type[HookArgsBaseModel]] = HookArgsBaseModel
"""Class used to parse arguments passed to the hook."""
args: HookArgsBaseModel
- blueprint: Optional[Blueprint] = None
+ blueprint: Blueprint | None = None
context: CfnginContext
provider: Provider
- stack: Optional[Stack] = None
+ stack: Stack | None = None
stack_name: str = "stack"
- def __init__( # pylint: disable=super-init-not-called
- self, context: CfnginContext, provider: Provider, **kwargs: Any
- ) -> None:
+ def __init__(self, context: CfnginContext, provider: Provider, **kwargs: Any) -> None:
"""Instantiate class.
Args:
context: Context instance. (passed in by CFNgin)
provider: Provider instance. (passed in by CFNgin)
+ **kwargs: Arbitrary keyword arguments.
"""
kwargs.setdefault("tags", {})
@@ -72,7 +71,7 @@ def __init__( # pylint: disable=super-init-not-called
self.args.tags.update(context.tags)
self.context = context
self.provider = provider
- # TODO BREAKING remove these from the primary base class
+ # TODO (kyle): BREAKING remove these from the primary base class
self._deploy_action = HookDeployAction(self.context, self.provider)
self._destroy_action = HookDestroyAction(self.context, self.provider)
@@ -87,10 +86,10 @@ def generate_stack(self, **kwargs: Any) -> Stack:
name=self.stack_name, tags=self.args.tags, **kwargs
)
stack = Stack(definition, self.context)
- stack._blueprint = self.blueprint # pylint: disable=protected-access
+ stack._blueprint = self.blueprint # noqa: SLF001
return stack
- def get_template_description(self, suffix: Optional[str] = None) -> str:
+ def get_template_description(self, suffix: str | None = None) -> str:
"""Generate a template description.
Args:
@@ -104,7 +103,7 @@ def get_template_description(self, suffix: Optional[str] = None) -> str:
return template.format(self.__class__.__module__, suffix)
return template.format(self.__class__.__module__)
- def deploy_stack(self, stack: Optional[Stack] = None, wait: bool = False) -> Status:
+ def deploy_stack(self, stack: Stack | None = None, wait: bool = False) -> Status:
"""Deploy a stack.
Args:
@@ -115,13 +114,9 @@ def deploy_stack(self, stack: Optional[Stack] = None, wait: bool = False) -> Sta
Ending status of the stack.
"""
- return self._run_stack_action(
- action=self._deploy_action, stack=stack, wait=wait
- )
+ return self._run_stack_action(action=self._deploy_action, stack=stack, wait=wait)
- def destroy_stack(
- self, stack: Optional[Stack] = None, wait: bool = False
- ) -> Status:
+ def destroy_stack(self, stack: Stack | None = None, wait: bool = False) -> Status:
"""Destroy a stack.
Args:
@@ -132,9 +127,7 @@ def destroy_stack(
Ending status of the stack.
"""
- return self._run_stack_action(
- action=self._destroy_action, stack=stack, wait=wait
- )
+ return self._run_stack_action(action=self._destroy_action, stack=stack, wait=wait)
def post_deploy(self) -> Any:
"""Run during the **post_deploy** stage."""
@@ -175,8 +168,8 @@ def _log_stack(stack: Stack, status: Status) -> None:
def _run_stack_action(
self,
- action: Union[HookDeployAction, HookDestroyAction],
- stack: Optional[Stack] = None,
+ action: HookDeployAction | HookDestroyAction,
+ stack: Stack | None = None,
wait: bool = False,
) -> Status:
"""Run a CFNgin hook modified for use in hooks.
@@ -197,18 +190,16 @@ def _run_stack_action(
self._log_stack(stack, status)
if wait and status != SKIPPED:
- status = self._wait_for_stack(
- action=action, stack=stack, last_status=status
- )
+ status = self._wait_for_stack(action=action, stack=stack, last_status=status)
return status
def _wait_for_stack(
self,
- action: Union[HookDeployAction, HookDestroyAction],
- last_status: Optional[Status] = None,
- stack: Optional[Stack] = None,
- till_reason: Optional[str] = None,
- ):
+ action: HookDeployAction | HookDestroyAction,
+ last_status: Status | None = None,
+ stack: Stack | None = None,
+ till_reason: str | None = None,
+ ) -> Status:
"""Wait for a CloudFormation stack to complete.
Args:
@@ -249,11 +240,11 @@ def _wait_for_stack(
return status
-# TODO BREAKING find a better place for this - can cause cyclic imports
+# TODO (kyle): BREAKING find a better place for this - can cause cyclic imports
class HookDeployAction(deploy.Action):
"""Deploy action that can be used from hooks."""
- def __init__(self, context: CfnginContext, provider: Provider):
+ def __init__(self, context: CfnginContext, provider: Provider) -> None:
"""Instantiate class.
Args:
diff --git a/runway/cfngin/hooks/cleanup_s3.py b/runway/cfngin/hooks/cleanup_s3.py
index f6fcfad66..860f58e1b 100644
--- a/runway/cfngin/hooks/cleanup_s3.py
+++ b/runway/cfngin/hooks/cleanup_s3.py
@@ -31,9 +31,7 @@ def purge_bucket(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool:
s3_resource.meta.client.head_bucket(Bucket=args.bucket_name)
except ClientError as exc:
if exc.response["Error"]["Code"] == "404":
- LOGGER.info(
- 'bucket "%s" does not exist; unable to complete purge', args.bucket_name
- )
+ LOGGER.info('bucket "%s" does not exist; unable to complete purge', args.bucket_name)
return True
raise
diff --git a/runway/cfngin/hooks/command.py b/runway/cfngin/hooks/command.py
index 110bf5b32..1cf202281 100644
--- a/runway/cfngin/hooks/command.py
+++ b/runway/cfngin/hooks/command.py
@@ -3,7 +3,7 @@
import logging
import os
import subprocess
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union # noqa: UP035
from typing_extensions import TypedDict
@@ -19,10 +19,10 @@ class RunCommandHookArgs(BaseModel):
capture: bool = False
"""If enabled, capture the command's stdout and stderr, and return them in the hook result."""
- command: Union[str, List[str]]
+ command: Union[str, List[str]] # noqa: UP006
"""Command(s) to run."""
- env: Optional[Dict[str, str]] = None
+ env: Optional[Dict[str, str]] = None # noqa: UP006
"""Dictionary of environment variable overrides for the command context.
Will be merged with the current environment.
@@ -55,7 +55,7 @@ class RunCommandResponseTypeDef(TypedDict, total=False):
stdout: str
-def run_command(*__args: Any, **kwargs: Any) -> RunCommandResponseTypeDef:
+def run_command(*_args: Any, **kwargs: Any) -> RunCommandResponseTypeDef: # noqa: C901, PLR0912
"""Run a custom command as a hook.
Arguments not parsed by the data model will be forwarded to the
@@ -106,7 +106,7 @@ def run_command(*__args: Any, **kwargs: Any) -> RunCommandResponseTypeDef:
ValueError("Cannot enable `quiet` and `capture` options simultaneously"),
)
- with open(os.devnull, "wb") as devnull:
+ with open(os.devnull, "wb") as devnull: # noqa: PTH123
if args.quiet:
out_err_type = devnull
elif args.capture:
@@ -147,10 +147,8 @@ def run_command(*__args: Any, **kwargs: Any) -> RunCommandResponseTypeDef:
if LOGGER.isEnabledFor(logging.INFO): # cov: ignore
LOGGER.warning("command failed with returncode %d", status)
else:
- LOGGER.warning(
- "command failed with returncode %d: %s", status, args.command
- )
+ LOGGER.warning("command failed with returncode %d: %s", status, args.command)
return {}
- except Exception: # pylint: disable=broad-except # cov: ignore
+ except Exception: # cov: ignore # noqa: BLE001
return {}
diff --git a/runway/cfngin/hooks/docker/_login.py b/runway/cfngin/hooks/docker/_login.py
index 9eebfbc8f..52d54a8e2 100644
--- a/runway/cfngin/hooks/docker/_login.py
+++ b/runway/cfngin/hooks/docker/_login.py
@@ -1,20 +1,17 @@
"""Docker login hook."""
-# pylint: disable=no-self-argument
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Dict, Optional
+from typing import Any, Optional
from pydantic import Field, validator
+from ....context import CfnginContext
from ....utils import BaseModel
from .data_models import ElasticContainerRegistry
from .hook_data import DockerHookData
-if TYPE_CHECKING:
- from ....context import CfnginContext
-
LOGGER = logging.getLogger(__name__.replace("._", "."))
@@ -42,21 +39,19 @@ class LoginArgs(BaseModel):
"""The registry username."""
@validator("ecr", pre=True, allow_reuse=True)
- def _set_ecr(cls, v: Any, values: Dict[str, Any]) -> Any:
+ def _set_ecr(cls, v: Any, values: dict[str, Any]) -> Any: # noqa: N805
"""Set the value of ``ecr``."""
if v and isinstance(v, dict):
- return ElasticContainerRegistry.parse_obj(
- {"context": values.get("context"), **v}
- )
+ return ElasticContainerRegistry.parse_obj({"context": values.get("context"), **v})
return v
@validator("registry", pre=True, always=True, allow_reuse=True)
- def _set_registry(cls, v: Any, values: Dict[str, Any]) -> Any:
+ def _set_registry(cls, v: Any, values: dict[str, Any]) -> Any: # noqa: N805
"""Set the value of ``registry``."""
if v:
return v
- ecr: Optional[ElasticContainerRegistry] = values.get("ecr")
+ ecr: ElasticContainerRegistry | None = values.get("ecr")
if ecr:
return ecr.fqn
diff --git a/runway/cfngin/hooks/docker/data_models.py b/runway/cfngin/hooks/docker/data_models.py
index d8b97f5fc..c8ffc10f8 100644
--- a/runway/cfngin/hooks/docker/data_models.py
+++ b/runway/cfngin/hooks/docker/data_models.py
@@ -5,24 +5,18 @@
"""
-# pylint: disable=no-self-argument
from __future__ import annotations
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, cast
+from typing import Any, ClassVar, Optional, cast
from docker.models.images import Image
from pydantic import Field, root_validator
+from ....context import CfnginContext
from ....core.providers.aws import AccountDetails
from ....utils import BaseModel, MutableMap
-if TYPE_CHECKING:
- from ....context import CfnginContext
-
-
-ECR_REPO_FQN_TEMPLATE = (
- "{aws_account_id}.dkr.ecr.{aws_region}.amazonaws.com/{repo_name}"
-)
+ECR_REPO_FQN_TEMPLATE = "{aws_account_id}.dkr.ecr.{aws_region}.amazonaws.com/{repo_name}"
class ElasticContainerRegistry(BaseModel):
@@ -51,18 +45,16 @@ def fqn(self) -> str:
"""Fully qualified ECR name."""
if self.public:
return self.PUBLIC_URI_TEMPLATE.format(registry_alias=self.alias)
- return self.URI_TEMPLATE.format(
- aws_account_id=self.account_id, aws_region=self.region
- )
+ return self.URI_TEMPLATE.format(aws_account_id=self.account_id, aws_region=self.region)
@root_validator(allow_reuse=True, pre=True)
- def _set_defaults(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ def _set_defaults(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
"""Set default values based on other values."""
values.setdefault("public", bool(values.get("alias")))
if not values["public"]:
account_id = values.get("account_id")
- ctx: Optional[CfnginContext] = values.get("context")
+ ctx: CfnginContext | None = values.get("context")
region = values.get("aws_region")
if not ctx and not (account_id or region):
raise ValueError("context is required to resolve values")
@@ -106,7 +98,7 @@ def short_id(self) -> str:
return self.image.short_id
@property
- def tags(self) -> List[str]:
+ def tags(self) -> list[str]:
"""List of image tags."""
self.image.reload()
return [uri.split(":")[-1] for uri in self.image.tags]
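
Note: the reflowed ECR_REPO_FQN_TEMPLATE above is a plain format string; for illustration (made-up account and repo):

ECR_REPO_FQN_TEMPLATE = "{aws_account_id}.dkr.ecr.{aws_region}.amazonaws.com/{repo_name}"

print(
    ECR_REPO_FQN_TEMPLATE.format(
        aws_account_id="123456789012", aws_region="us-east-1", repo_name="demo"
    )
)  # 123456789012.dkr.ecr.us-east-1.amazonaws.com/demo
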
diff --git a/runway/cfngin/hooks/docker/hook_data.py b/runway/cfngin/hooks/docker/hook_data.py
index ad58cce04..7c8e45908 100644
--- a/runway/cfngin/hooks/docker/hook_data.py
+++ b/runway/cfngin/hooks/docker/hook_data.py
@@ -17,7 +17,7 @@
class DockerHookData(MutableMap):
"""Docker hook_data object."""
- image: Optional["DockerImage"] = None
+ image: Optional[DockerImage] = None
@cached_property
def client(self) -> DockerClient:
@@ -25,16 +25,12 @@ def client(self) -> DockerClient:
return DockerClient.from_env()
@overload
- def update_context(self, context: CfnginContext = ...) -> DockerHookData: # noqa
- ...
+ def update_context(self, context: CfnginContext = ...) -> DockerHookData: ...
@overload
- def update_context(self, context: None = ...) -> None: # noqa
- ...
+ def update_context(self, context: None = ...) -> None: ...
- def update_context(
- self, context: Optional[CfnginContext] = None
- ) -> Optional[DockerHookData]:
+ def update_context(self, context: Optional[CfnginContext] = None) -> Optional[DockerHookData]:
"""Update context object with new the current DockerHookData."""
if not context:
return None
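
Note: the update_context overloads encode a None-in/None-out contract. A runnable sketch of the same pattern with a hypothetical function:

from __future__ import annotations

from typing import overload

@overload
def normalize(value: str = ...) -> str: ...
@overload
def normalize(value: None = ...) -> None: ...
def normalize(value: str | None = None) -> str | None:
    # callers passing a str get str back; callers passing None get None back
    return value.strip() if value is not None else None
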
diff --git a/runway/cfngin/hooks/docker/image/_build.py b/runway/cfngin/hooks/docker/image/_build.py
index 7cc31eeb1..af1fa2aec 100644
--- a/runway/cfngin/hooks/docker/image/_build.py
+++ b/runway/cfngin/hooks/docker/image/_build.py
@@ -4,26 +4,20 @@
"""
-# pylint: disable=no-self-argument
from __future__ import annotations
import logging
from pathlib import Path
-from typing import (
- TYPE_CHECKING,
+from typing import ( # noqa: UP035
Any,
Dict,
- Iterator,
List,
Optional,
- Tuple,
- Union,
- cast,
)
-from docker.models.images import Image
from pydantic import DirectoryPath, Field, validator
+from .....context import CfnginContext
from .....utils import BaseModel
from ..data_models import (
DockerImage,
@@ -32,22 +26,19 @@
)
from ..hook_data import DockerHookData
-if TYPE_CHECKING:
- from .....context import CfnginContext
-
LOGGER = logging.getLogger(__name__.replace("._", "."))
class DockerImageBuildApiOptions(BaseModel):
"""Options for controlling Docker."""
- buildargs: Dict[str, Any] = {}
+ buildargs: Dict[str, Any] = {} # noqa: UP006
"""Dict of build-time variables that will be passed to Docker."""
custom_context: bool = False
"""Whether to use custom context when providing a file object."""
- extra_hosts: Dict[str, str] = {}
+ extra_hosts: Dict[str, str] = {} # noqa: UP006
"""Extra hosts to add to `/etc/hosts` in the build containers.
Defined as a mapping of hostname to IP address.
@@ -125,14 +116,14 @@ class ImageBuildArgs(BaseModel):
docker: DockerImageBuildApiOptions = DockerImageBuildApiOptions() # depends on repo
"""Options for ``docker image build``."""
- tags: List[str] = ["latest"]
+ tags: List[str] = ["latest"] # noqa: UP006
"""List of tags to apply to the image."""
@validator("docker", pre=True, always=True, allow_reuse=True)
def _set_docker(
- cls,
- v: Union[Dict[str, Any], DockerImageBuildApiOptions, Any],
- values: Dict[str, Any],
+ cls, # noqa: N805
+ v: dict[str, Any] | DockerImageBuildApiOptions | Any,
+ values: dict[str, Any],
) -> Any:
"""Set the value of ``docker``."""
repo = values["repo"]
@@ -144,7 +135,7 @@ def _set_docker(
return v
@validator("ecr_repo", pre=True, allow_reuse=True)
- def _set_ecr_repo(cls, v: Any, values: Dict[str, Any]) -> Any:
+ def _set_ecr_repo(cls, v: Any, values: dict[str, Any]) -> Any: # noqa: N805
"""Set the value of ``ecr_repo``."""
if v and isinstance(v, dict):
return ElasticContainerRegistryRepository.parse_obj(
@@ -163,26 +154,24 @@ def _set_ecr_repo(cls, v: Any, values: Dict[str, Any]) -> Any:
return v
@validator("repo", pre=True, always=True, allow_reuse=True)
- def _set_repo(cls, v: Optional[str], values: Dict[str, Any]) -> Optional[str]:
+ def _set_repo(cls, v: str | None, values: dict[str, Any]) -> str | None: # noqa: N805
"""Set the value of ``repo``."""
if v:
return v
- ecr_repo: Optional[ElasticContainerRegistryRepository] = values.get("ecr_repo")
+ ecr_repo: ElasticContainerRegistryRepository | None = values.get("ecr_repo")
if ecr_repo:
return ecr_repo.fqn
return None
@validator("dockerfile", pre=True, always=True, allow_reuse=True)
- def _validate_dockerfile(cls, v: Any, values: Dict[str, Any]) -> Any:
+ def _validate_dockerfile(cls, v: Any, values: dict[str, Any]) -> Any: # noqa: N805
"""Validate ``dockerfile``."""
path: Path = values["path"]
dockerfile = path / v
if not dockerfile.is_file():
- raise ValueError(
- f"Dockerfile does not exist at path provided: {dockerfile}"
- )
+ raise ValueError(f"Dockerfile does not exist at path provided: {dockerfile}")
return v
@@ -196,10 +185,7 @@ def build(*, context: CfnginContext, **kwargs: Any) -> DockerHookData:
"""
args = ImageBuildArgs.parse_obj({"context": context, **kwargs})
docker_hook_data = DockerHookData.from_cfngin_context(context)
- image, logs = cast(
- Tuple[Image, Iterator[Dict[str, str]]],
- docker_hook_data.client.images.build(path=str(args.path), **args.docker.dict()),
- )
+ image, logs = docker_hook_data.client.images.build(path=str(args.path), **args.docker.dict())
for msg in logs: # iterate through JSON log messages
if "stream" in msg: # log if they contain a message
LOGGER.info(msg["stream"].strip()) # remove any new line characters
diff --git a/runway/cfngin/hooks/docker/image/_push.py b/runway/cfngin/hooks/docker/image/_push.py
index 4f4921afb..cec7fdd01 100644
--- a/runway/cfngin/hooks/docker/image/_push.py
+++ b/runway/cfngin/hooks/docker/image/_push.py
@@ -4,14 +4,14 @@
"""
-# pylint: disable=no-self-argument
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from typing import Any, List, Optional # noqa: UP035
from pydantic import Field, validator
+from .....context import CfnginContext
from .....utils import BaseModel
from ..data_models import (
DockerImage,
@@ -20,9 +20,6 @@
)
from ..hook_data import DockerHookData
-if TYPE_CHECKING:
- from .....context import CfnginContext
-
LOGGER = logging.getLogger(__name__.replace("._", "."))
@@ -47,11 +44,11 @@ class ImagePushArgs(BaseModel):
repo: Optional[str] = None # depends on ecr_repo & image
"""URI of a non Docker Hub repository where the image will be stored."""
- tags: List[str] = [] # depends on image
+ tags: List[str] = [] # depends on image # noqa: UP006
"""List of tags to push."""
@validator("ecr_repo", pre=True, allow_reuse=True)
- def _set_ecr_repo(cls, v: Any, values: Dict[str, Any]) -> Any:
+ def _set_ecr_repo(cls, v: Any, values: dict[str, Any]) -> Any: # noqa: N805
"""Set the value of ``ecr_repo``."""
if v and isinstance(v, dict):
return ElasticContainerRegistryRepository.parse_obj(
@@ -70,28 +67,28 @@ def _set_ecr_repo(cls, v: Any, values: Dict[str, Any]) -> Any:
return v
@validator("repo", pre=True, always=True, allow_reuse=True)
- def _set_repo(cls, v: Optional[str], values: Dict[str, Any]) -> Optional[str]:
+ def _set_repo(cls, v: str | None, values: dict[str, Any]) -> str | None: # noqa: N805
"""Set the value of ``repo``."""
if v:
return v
- image: Optional[DockerImage] = values.get("image")
+ image: DockerImage | None = values.get("image")
if image:
return image.repo
- ecr_repo: Optional[ElasticContainerRegistryRepository] = values.get("ecr_repo")
+ ecr_repo: ElasticContainerRegistryRepository | None = values.get("ecr_repo")
if ecr_repo:
return ecr_repo.fqn
return None
@validator("tags", pre=True, always=True, allow_reuse=True)
- def _set_tags(cls, v: List[str], values: Dict[str, Any]) -> List[str]:
+ def _set_tags(cls, v: list[str], values: dict[str, Any]) -> list[str]: # noqa: N805
"""Set the value of ``tags``."""
if v:
return v
- image: Optional[DockerImage] = values.get("image")
+ image: DockerImage | None = values.get("image")
if image:
return image.tags
diff --git a/runway/cfngin/hooks/docker/image/_remove.py b/runway/cfngin/hooks/docker/image/_remove.py
index d2476c388..5f7a7f289 100644
--- a/runway/cfngin/hooks/docker/image/_remove.py
+++ b/runway/cfngin/hooks/docker/image/_remove.py
@@ -4,15 +4,15 @@
"""
-# pylint: disable=no-self-argument
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from typing import Any, List, Optional # noqa: UP035
from docker.errors import ImageNotFound
from pydantic import Field, validator
+from .....context import CfnginContext
from .....utils import BaseModel
from ..data_models import (
DockerImage,
@@ -21,9 +21,6 @@
)
from ..hook_data import DockerHookData
-if TYPE_CHECKING:
- from .....context import CfnginContext
-
LOGGER = logging.getLogger(__name__.replace("._", "."))
@@ -54,11 +51,11 @@ class ImageRemoveArgs(BaseModel):
repo: Optional[str] = None # depends on ecr_repo & image
"""URI of a non Docker Hub repository where the image will be stored."""
- tags: List[str] = [] # depends on image
+ tags: List[str] = [] # depends on image # noqa: UP006
"""List of tags to remove."""
@validator("ecr_repo", pre=True, allow_reuse=True)
- def _set_ecr_repo(cls, v: Any, values: Dict[str, Any]) -> Any:
+ def _set_ecr_repo(cls, v: Any, values: dict[str, Any]) -> Any: # noqa: N805
"""Set the value of ``ecr_repo``."""
if v and isinstance(v, dict):
return ElasticContainerRegistryRepository.parse_obj(
@@ -77,28 +74,28 @@ def _set_ecr_repo(cls, v: Any, values: Dict[str, Any]) -> Any:
return v
@validator("repo", pre=True, always=True, allow_reuse=True)
- def _set_repo(cls, v: Optional[str], values: Dict[str, Any]) -> Optional[str]:
+ def _set_repo(cls, v: str | None, values: dict[str, Any]) -> str | None: # noqa: N805
"""Set the value of ``repo``."""
if v:
return v
- image: Optional[DockerImage] = values.get("image")
+ image: DockerImage | None = values.get("image")
if image:
return image.repo
- ecr_repo: Optional[ElasticContainerRegistryRepository] = values.get("ecr_repo")
+ ecr_repo: ElasticContainerRegistryRepository | None = values.get("ecr_repo")
if ecr_repo:
return ecr_repo.fqn
return None
@validator("tags", pre=True, always=True, allow_reuse=True)
- def _set_tags(cls, v: List[str], values: Dict[str, Any]) -> List[str]:
+ def _set_tags(cls, v: list[str], values: dict[str, Any]) -> list[str]: # noqa: N805
"""Set the value of ``tags``."""
if v:
return v
- image: Optional[DockerImage] = values.get("image")
+ image: DockerImage | None = values.get("image")
if image:
return image.tags
@@ -125,7 +122,10 @@ def remove(*, context: CfnginContext, **kwargs: Any) -> DockerHookData:
LOGGER.info("successfully removed local image %s", image)
except ImageNotFound:
LOGGER.warning("local image %s does not exist", image)
- if docker_hook_data.image and kwargs.get("image"):
- if kwargs["image"].id == docker_hook_data.image.id:
- docker_hook_data.image = None # clear out the image that was set
+ if (
+ docker_hook_data.image
+ and kwargs.get("image")
+ and kwargs["image"].id == docker_hook_data.image.id
+ ):
+ docker_hook_data.image = None # clear out the image that was set
return docker_hook_data.update_context(context)
diff --git a/runway/cfngin/hooks/ecr/_purge_repository.py b/runway/cfngin/hooks/ecr/_purge_repository.py
index 34ef64a23..013eb543c 100644
--- a/runway/cfngin/hooks/ecr/_purge_repository.py
+++ b/runway/cfngin/hooks/ecr/_purge_repository.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Dict, List
+from typing import TYPE_CHECKING, Any
from ....utils import BaseModel
@@ -25,34 +25,27 @@ class HookArgs(BaseModel):
def delete_ecr_images(
client: ECRClient,
- image_ids: List[ImageIdentifierTypeDef],
+ image_ids: list[ImageIdentifierTypeDef],
repository_name: str,
) -> None:
"""Delete images from an ECR repository."""
- response = client.batch_delete_image(
- repositoryName=repository_name, imageIds=image_ids
- )
- if "failures" in response and response["failures"]:
+ response = client.batch_delete_image(repositoryName=repository_name, imageIds=image_ids)
+ if response.get("failures"):
for msg in response["failures"]:
LOGGER.info(
"failed to delete image %s: (%s) %s",
- msg.get("imageId", {}).get("imageDigest")
- or msg.get("imageId", {}).get("imageTag"),
+ msg.get("imageId", {}).get("imageDigest") or msg.get("imageId", {}).get("imageTag"),
msg.get("failureCode"),
msg.get("failureReason"),
)
raise ValueError("failures present in response")
-def list_ecr_images(
- client: ECRClient, repository_name: str
-) -> List[ImageIdentifierTypeDef]:
+def list_ecr_images(client: ECRClient, repository_name: str) -> list[ImageIdentifierTypeDef]:
"""List all images in an ECR repository."""
- image_ids: List[ImageIdentifierTypeDef] = []
+ image_ids: list[ImageIdentifierTypeDef] = []
try:
- response = client.list_images(
- repositoryName=repository_name, filter={"tagStatus": "ANY"}
- )
+ response = client.list_images(repositoryName=repository_name, filter={"tagStatus": "ANY"})
image_ids.extend(response["imageIds"])
while response.get("nextToken"):
response = client.list_images(
@@ -63,22 +56,19 @@ def list_ecr_images(
image_ids.extend(response["imageIds"])
return [
{"imageDigest": digest}
- for digest in {
- image["imageDigest"] for image in image_ids if image.get("imageDigest")
- }
+ for digest in {image["imageDigest"] for image in image_ids if image.get("imageDigest")}
]
except client.exceptions.RepositoryNotFoundException:
LOGGER.info("repository %s does not exist", repository_name)
return []
-def purge_repository(
- context: CfnginContext, *__args: Any, **kwargs: Any
-) -> Dict[str, str]:
+def purge_repository(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, str]:
"""Purge all images from an ECR repository.
Args:
context: CFNgin context object.
+ **kwargs: Arbitrary keyword arguments.
"""
args = HookArgs.parse_obj(kwargs)
diff --git a/runway/cfngin/hooks/ecs.py b/runway/cfngin/hooks/ecs.py
index c5950b29a..5a44f8786 100644
--- a/runway/cfngin/hooks/ecs.py
+++ b/runway/cfngin/hooks/ecs.py
@@ -1,10 +1,9 @@
"""AWS ECS hook."""
-# pylint: disable=no-self-argument
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Union
+from typing import TYPE_CHECKING, Any, List # noqa: UP035
from pydantic import validator
from typing_extensions import TypedDict
@@ -22,11 +21,11 @@
class CreateClustersHookArgs(BaseModel):
"""Hook arguments for ``create_clusters``."""
- clusters: List[str]
+ clusters: List[str] # noqa: UP006
"""List of cluster names to create."""
@validator("clusters", allow_reuse=True, pre=True)
- def _convert_clusters(cls, v: Union[List[str], str]) -> List[str]:
+ def _convert_clusters(cls, v: list[str] | str) -> list[str]: # noqa: N805
"""Convert value of ``clusters`` from str to list."""
if isinstance(v, str):
return [v]
@@ -36,22 +35,23 @@ def _convert_clusters(cls, v: Union[List[str], str]) -> List[str]:
class CreateClustersResponseTypeDef(TypedDict):
"""Response from create_clusters."""
- clusters: Dict[str, CreateClusterResponseTypeDef]
+ clusters: dict[str, CreateClusterResponseTypeDef]
def create_clusters(
- context: CfnginContext, *__args: Any, **kwargs: Any
+ context: CfnginContext, *_args: Any, **kwargs: Any
) -> CreateClustersResponseTypeDef:
"""Create ECS clusters.
Args:
context: CFNgin context object.
+ **kwargs: Arbitrary keyword arguments.
"""
args = CreateClustersHookArgs.parse_obj(kwargs)
ecs_client = context.get_session().client("ecs")
- cluster_info: Dict[str, Any] = {}
+ cluster_info: dict[str, Any] = {}
for cluster in args.clusters:
LOGGER.debug("creating ECS cluster: %s", cluster)
response = ecs_client.create_cluster(clusterName=cluster)
diff --git a/runway/cfngin/hooks/iam.py b/runway/cfngin/hooks/iam.py
index b23998ba9..edf9a6ab4 100644
--- a/runway/cfngin/hooks/iam.py
+++ b/runway/cfngin/hooks/iam.py
@@ -4,7 +4,7 @@
import copy
import logging
-from typing import TYPE_CHECKING, Any, Dict, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, Optional, cast
from awacs import ecs
from awacs.aws import Allow, Policy, Statement
@@ -69,15 +69,14 @@ class EnsureServerCertExistsHookArgs(BaseModel):
"""Whether to prompt to upload a certificate if one does not exist."""
-def create_ecs_service_role(
- context: CfnginContext, *__args: Any, **kwargs: Any
-) -> bool:
+def create_ecs_service_role(context: CfnginContext, *_args: Any, **kwargs: Any) -> bool:
"""Create ecsServiceRole IAM role.
https://docs.aws.amazon.com/AmazonECS/latest/developerguide/using-service-linked-roles.html
Args:
context: Context instance. (passed in by CFNgin)
+ **kwargs: Arbitrary keyword arguments.
"""
args = CreateEcsServiceRoleHookArgs.parse_obj(kwargs)
@@ -100,9 +99,7 @@ def create_ecs_service_role(
def _get_cert_arn_from_response(
- response: Union[
- GetServerCertificateResponseTypeDef, UploadServerCertificateResponseTypeDef
- ]
+ response: GetServerCertificateResponseTypeDef | UploadServerCertificateResponseTypeDef,
) -> str:
result = copy.deepcopy(response)
# GET response returns this extra key
@@ -117,7 +114,7 @@ def _get_cert_arn_from_response(
)
-def _get_cert_contents(kwargs: Dict[str, Any]) -> Dict[str, Any]:
+def _get_cert_contents(kwargs: dict[str, Any]) -> dict[str, Any]: # noqa: C901
"""Build parameters with server cert file contents.
Args:
@@ -145,7 +142,7 @@ def _get_cert_contents(kwargs: Dict[str, Any]) -> Dict[str, Any]:
paths[key] = path
- parameters: Dict[str, str] = {}
+ parameters: dict[str, str] = {}
for key, path in paths.items():
if not path:
@@ -155,7 +152,7 @@ def _get_cert_contents(kwargs: Dict[str, Any]) -> Dict[str, Any]:
try:
contents = path.read()
except AttributeError:
- with open(utils.full_path(path), encoding="utf-8") as read_file:
+ with open(utils.full_path(path), encoding="utf-8") as read_file: # noqa: PTH123
contents = read_file.read()
if key == "certificate":
@@ -171,13 +168,12 @@ def _get_cert_contents(kwargs: Dict[str, Any]) -> Dict[str, Any]:
return parameters
-def ensure_server_cert_exists(
- context: CfnginContext, *__args: Any, **kwargs: Any
-) -> Dict[str, str]:
+def ensure_server_cert_exists(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, str]:
"""Ensure server cert exists.
Args:
context: CFNgin context object.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Dict containing ``status``, ``cert_name``, and ``cert_arn``.
@@ -193,9 +189,7 @@ def ensure_server_cert_exists(
LOGGER.info("certificate exists: %s (%s)", args.cert_name, cert_arn)
except ClientError:
if args.prompt:
- upload = input(
- f"Certificate '{args.cert_name}' wasn't found. Upload it now? (yes/no) "
- )
+ upload = input(f"Certificate '{args.cert_name}' wasn't found. Upload it now? (yes/no) ")
if upload != "yes":
return {}
diff --git a/runway/cfngin/hooks/keypair.py b/runway/cfngin/hooks/keypair.py
index d9c799384..88bd1f176 100644
--- a/runway/cfngin/hooks/keypair.py
+++ b/runway/cfngin/hooks/keypair.py
@@ -5,7 +5,7 @@
import logging
import sys
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Optional
from botocore.exceptions import ClientError
from typing_extensions import Literal, TypedDict
@@ -142,7 +142,7 @@ def create_key_pair_in_ssm(
keypair = create_key_pair(ec2, keypair_name)
try:
kms_key_label = "default"
- kms_args: Dict[str, Any] = {}
+ kms_args: dict[str, Any] = {}
if kms_key_id:
kms_key_label = kms_key_id
kms_args = {"KeyId": kms_key_id}
@@ -219,7 +219,7 @@ def create_key_pair_local(
def interactive_prompt(
keypair_name: str,
-) -> Tuple[Optional[Literal["create", "import"]], Optional[str]]:
+) -> tuple[Optional[Literal["create", "import"]], Optional[str]]:
"""Interactive prompt."""
if not sys.stdin.isatty():
return None, None
@@ -246,9 +246,7 @@ def interactive_prompt(
return None, None
-def ensure_keypair_exists(
- context: CfnginContext, *__args: Any, **kwargs: Any
-) -> KeyPairInfo:
+def ensure_keypair_exists(context: CfnginContext, *__args: Any, **kwargs: Any) -> KeyPairInfo:
"""Ensure a specific keypair exists within AWS.
If the key doesn't exist, upload it.
@@ -257,10 +255,7 @@ def ensure_keypair_exists(
args = EnsureKeypairExistsHookArgs.parse_obj(kwargs)
if args.public_key_path and args.ssm_parameter_name:
- LOGGER.error(
- "public_key_path and ssm_parameter_name cannot be "
- "specified at the same time"
- )
+ LOGGER.error("public_key_path and ssm_parameter_name cannot be specified at the same time")
return {}
session = context.get_session()
@@ -282,9 +277,7 @@ def ensure_keypair_exists(
else:
action, path = interactive_prompt(args.keypair)
if action == "import" and path:
- keypair_info = create_key_pair_from_public_key_file(
- ec2, args.keypair, Path(path)
- )
+ keypair_info = create_key_pair_from_public_key_file(ec2, args.keypair, Path(path))
elif action == "create" and path:
keypair_info = create_key_pair_local(ec2, args.keypair, Path(path))
else:
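For reference, the dispatch contract around `interactive_prompt` can be read in isolation; a sketch assuming `ec2` is a boto3 EC2 client and reusing the module's helpers:

```python
from pathlib import Path

# interactive_prompt returns (action, path); action is "create",
# "import", or None (no TTY / user declined).
action, path = interactive_prompt("example-keypair")
if action == "import" and path:
    info = create_key_pair_from_public_key_file(ec2, "example-keypair", Path(path))
elif action == "create" and path:
    info = create_key_pair_local(ec2, "example-keypair", Path(path))
else:
    info = {}
```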
diff --git a/runway/cfngin/hooks/protocols.py b/runway/cfngin/hooks/protocols.py
index 68368cc21..9eb3bc43c 100644
--- a/runway/cfngin/hooks/protocols.py
+++ b/runway/cfngin/hooks/protocols.py
@@ -8,7 +8,7 @@
from __future__ import annotations
from abc import abstractmethod
-from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union, overload
+from typing import TYPE_CHECKING, Any, TypeVar, overload
from typing_extensions import Protocol, runtime_checkable
@@ -30,14 +30,14 @@ class CfnginHookArgsProtocol(Protocol):
@overload
@abstractmethod
- def get(self, __name: str) -> Optional[Any]: ...
+ def get(self, __name: str) -> Any | None: ...
@overload
@abstractmethod
- def get(self, __name: str, __default: Union[Any, _T]) -> Union[Any, _T]: ...
+ def get(self, __name: str, __default: Any | _T) -> Any | _T: ...
@abstractmethod
- def get(self, __name: str, __default: Union[Any, _T] = None) -> Union[Any, _T]:
+ def get(self, __name: str, __default: Any | _T = None) -> Any | _T:
"""Safely get the value of an attribute.
Args:
@@ -48,19 +48,19 @@ def get(self, __name: str, __default: Union[Any, _T] = None) -> Union[Any, _T]:
raise NotImplementedError
@abstractmethod
- def __contains__(self, __name: str) -> bool:
+ def __contains__(self, __name: str) -> bool: # noqa: D105
raise NotImplementedError
@abstractmethod
- def __getattribute__(self, __name: str) -> Any:
+ def __getattribute__(self, __name: str) -> Any: # noqa: D105
raise NotImplementedError
@abstractmethod
- def __getitem__(self, __name: str) -> Any:
+ def __getitem__(self, __name: str) -> Any: # noqa: D105
raise NotImplementedError
@abstractmethod
- def __setitem__(self, __name: str, _value: Any) -> None:
+ def __setitem__(self, __name: str, _value: Any) -> None: # noqa: D105
raise NotImplementedError
@@ -79,16 +79,8 @@ class CfnginHookProtocol(Protocol):
"""Arguments passed to the hook and parsed into an object."""
@abstractmethod
- def __init__( # pylint: disable=super-init-not-called
- self, context: CfnginContext, **_kwargs: Any
- ) -> None:
- """Structural __init__ method.
-
- This should not be called. Pylint will erroneously warn about
- "super-init-not-called" if using this class as a subclass. This should
- be disabled in-line until the bug reports for this issue is resolved.
-
- """
+ def __init__(self, context: CfnginContext, **_kwargs: Any) -> None:
+ """Structural __init__ method."""
raise NotImplementedError
@abstractmethod
diff --git a/runway/cfngin/hooks/route53.py b/runway/cfngin/hooks/route53.py
index f5885bfb2..6cffb4a36 100644
--- a/runway/cfngin/hooks/route53.py
+++ b/runway/cfngin/hooks/route53.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Dict
+from typing import TYPE_CHECKING, Any
from ...utils import BaseModel
from ..utils import create_route53_zone
@@ -21,13 +21,12 @@ class CreateDomainHookArgs(BaseModel):
"""Domain name for the Route 53 hosted zone to be created."""
-def create_domain(
- context: CfnginContext, *__args: Any, **kwargs: Any
-) -> Dict[str, str]:
+def create_domain(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, str]:
"""Create a domain within route53.
Args:
context: CFNgin context object.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Dict containing ``domain`` and ``zone_id``.
diff --git a/runway/cfngin/hooks/ssm/parameter.py b/runway/cfngin/hooks/ssm/parameter.py
index e03589b00..4ace0e92c 100644
--- a/runway/cfngin/hooks/ssm/parameter.py
+++ b/runway/cfngin/hooks/ssm/parameter.py
@@ -1,11 +1,10 @@
"""AWS SSM Parameter Store hooks."""
-# pylint: disable=no-self-argument
from __future__ import annotations
import json
import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, List, Optional, cast # noqa: UP035
from pydantic import Extra, validator
from typing_extensions import Literal, TypedDict
@@ -27,10 +26,11 @@
LOGGER = cast("RunwayLogger", logging.getLogger(__name__))
+
# PutParameterResultTypeDef but without metadata
-_PutParameterResultTypeDef = TypedDict(
- "_PutParameterResultTypeDef", {"Tier": ParameterTierType, "Version": int}
-)
+class _PutParameterResultTypeDef(TypedDict):
+ Tier: ParameterTierType
+ Version: int
class ArgsDataModel(BaseModel):
@@ -69,7 +69,7 @@ class ArgsDataModel(BaseModel):
name: str
overwrite: bool = True
policies: Optional[str] = None
- tags: Optional[List[TagDataModel]] = None
+ tags: Optional[List[TagDataModel]] = None # noqa: UP006
tier: ParameterTierType = "Standard"
type: Literal["String", "StringList", "SecureString"]
value: Optional[str] = None
@@ -94,28 +94,25 @@ class Config:
}
@validator("policies", allow_reuse=True, pre=True)
- def _convert_policies(cls, v: Union[List[Dict[str, Any]], str, Any]) -> str:
+ def _convert_policies(cls, v: list[dict[str, Any]] | str | Any) -> str: # noqa: N805
"""Convert policies to acceptable value."""
if isinstance(v, str):
return v
if isinstance(v, list):
return json.dumps(v, cls=JsonEncoder)
- raise TypeError(
- f"unexpected type {type(v)}; permitted: Optional[Union[List[Dict[str, Any]], str]]"
- )
+ raise TypeError(f"unexpected type {type(v)}; permitted: list[dict[str, Any]] | str | None")
@validator("tags", allow_reuse=True, pre=True)
def _convert_tags(
- cls, v: Union[Dict[str, str], List[Dict[str, str]], Any]
- ) -> List[Dict[str, str]]:
+ cls, v: dict[str, str] | list[dict[str, str]] | Any # noqa: N805
+ ) -> list[dict[str, str]]:
"""Convert tags to acceptable value."""
if isinstance(v, list):
return v
if isinstance(v, dict):
return [{"Key": k, "Value": v} for k, v in v.items()]
raise TypeError(
- f"unexpected type {type(v)}; permitted: "
- "Optional[Union[Dict[str, str], List[Dict[str, str]]]"
+ f"unexpected type {type(v)}; permitted: dict[str, str] | list[dict[str, str] | None"
)
@@ -124,14 +121,12 @@ class _Parameter(CfnginHookProtocol):
args: ArgsDataModel
- def __init__( # pylint: disable=super-init-not-called
+ def __init__(
self,
context: CfnginContext,
*,
name: str,
- type: Literal[ # pylint: disable=redefined-builtin
- "String", "StringList", "SecureString"
- ],
+ type: Literal["String", "StringList", "SecureString"], # noqa: A002
**kwargs: Any,
) -> None:
"""Instantiate class.
@@ -141,6 +136,7 @@ def __init__( # pylint: disable=super-init-not-called
name: The fully qualified name of the parameter that you want to add to
the system.
type: The type of parameter.
+ **kwargs: Arbitrary keyword arguments.
"""
self.args = ArgsDataModel.parse_obj({"name": name, "type": type, **kwargs})
@@ -165,14 +161,14 @@ def get(self) -> ParameterTypeDef:
if self.args.force: # bypass getting current value
return {}
try:
- return self.client.get_parameter(
- Name=self.args.name, WithDecryption=True
- ).get("Parameter", {})
+ return self.client.get_parameter(Name=self.args.name, WithDecryption=True).get(
+ "Parameter", {}
+ )
except self.client.exceptions.ParameterNotFound:
LOGGER.verbose("parameter %s does not exist", self.args.name)
return {}
- def get_current_tags(self) -> List[TagTypeDef]:
+ def get_current_tags(self) -> list[TagTypeDef]:
"""Get Tags currently applied to Parameter."""
try:
return self.client.list_tags_for_resource(
@@ -216,9 +212,7 @@ def put(self) -> _PutParameterResultTypeDef:
if current_param.get("Value") != self.args.value:
try:
result = self.client.put_parameter(
- **self.args.dict(
- by_alias=True, exclude_none=True, exclude={"force", "tags"}
- )
+ **self.args.dict(by_alias=True, exclude_none=True, exclude={"force", "tags"})
)
except self.client.exceptions.ParameterAlreadyExists:
LOGGER.warning(
@@ -242,9 +236,7 @@ def update_tags(self) -> None:
"""Update tags."""
current_tags = self.get_current_tags()
if self.args.tags and current_tags:
- diff_tag_keys = list(
- {i["Key"] for i in current_tags} ^ {i.key for i in self.args.tags}
- )
+ diff_tag_keys = list({i["Key"] for i in current_tags} ^ {i.key for i in self.args.tags})
elif self.args.tags:
diff_tag_keys = []
else:
@@ -258,14 +250,11 @@ def update_tags(self) -> None:
ResourceType="Parameter",
TagKeys=diff_tag_keys,
)
- LOGGER.debug(
- "removed tags for parameter %s: %s", self.args.name, diff_tag_keys
- )
+ LOGGER.debug("removed tags for parameter %s: %s", self.args.name, diff_tag_keys)
if self.args.tags:
tags_to_add = [
- cast("TagTypeDef", tag.dict(by_alias=True))
- for tag in self.args.tags
+ cast("TagTypeDef", tag.dict(by_alias=True)) for tag in self.args.tags
]
self.client.add_tags_to_resource(
ResourceId=self.args.name,
@@ -278,9 +267,7 @@ def update_tags(self) -> None:
[tag["Key"] for tag in tags_to_add],
)
except self.client.exceptions.InvalidResourceId:
- LOGGER.info(
- "skipped updating tags; parameter %s does not exist", self.args.name
- )
+ LOGGER.info("skipped updating tags; parameter %s does not exist", self.args.name)
else:
LOGGER.info("updated tags for parameter %s", self.args.name)
@@ -301,6 +288,7 @@ def __init__(
context: CFNgin context object.
name: The fully qualified name of the parameter that you want to add to
the system.
+ **kwargs: Arbitrary keyword arguments.
"""
for k in ["Type", "type"]: # ensure neither of these are set
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/callback_url_retriever.py b/runway/cfngin/hooks/staticsite/auth_at_edge/callback_url_retriever.py
index 9c944e49d..8ecdd37e5 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/callback_url_retriever.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/callback_url_retriever.py
@@ -8,7 +8,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Dict, Optional
+from typing import TYPE_CHECKING, Any, Optional
from ...base import HookArgsBaseModel
@@ -28,7 +28,7 @@ class HookArgs(HookArgsBaseModel):
"""The ARN of the User Pool to check for a client."""
-def get(context: CfnginContext, *__args: Any, **kwargs: Any) -> Dict[str, Any]:
+def get(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, Any]:
"""Retrieve the callback URLs for User Pool Client Creation.
When the User Pool is created a Callback URL is required. During a post
@@ -42,6 +42,7 @@ def get(context: CfnginContext, *__args: Any, **kwargs: Any) -> Dict[str, Any]:
Args:
context: The context instance.
+ **kwargs: Arbitrary keyword arguments.
"""
args = HookArgs.parse_obj(kwargs)
@@ -59,20 +60,14 @@ def get(context: CfnginContext, *__args: Any, **kwargs: Any) -> Dict[str, Any]:
if args.user_pool_arn:
user_pool_id = args.user_pool_arn.split("/")[-1:][0]
else:
- user_pool_id = [
- o["OutputValue"]
- for o in outputs
- if o["OutputKey"] == "AuthAtEdgeUserPoolId"
- ][0]
+ user_pool_id = next(
+ o["OutputValue"] for o in outputs if o["OutputKey"] == "AuthAtEdgeUserPoolId"
+ )
- client_id = [
- o["OutputValue"] for o in outputs if o["OutputKey"] == "AuthAtEdgeClient"
- ][0]
+ client_id = next(o["OutputValue"] for o in outputs if o["OutputKey"] == "AuthAtEdgeClient")
# Poll the user pool client information
- resp = cognito_client.describe_user_pool_client(
- UserPoolId=user_pool_id, ClientId=client_id
- )
+ resp = cognito_client.describe_user_pool_client(UserPoolId=user_pool_id, ClientId=client_id)
# Retrieve the callbacks
callbacks = resp["UserPoolClient"]["CallbackURLs"]
@@ -80,5 +75,5 @@ def get(context: CfnginContext, *__args: Any, **kwargs: Any) -> Dict[str, Any]:
if callbacks:
context_dict["callback_urls"] = callbacks
return context_dict
- except Exception: # pylint: disable=broad-except
+ except Exception: # noqa: BLE001
return context_dict
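One behavioral nuance in the `next(...)` rewrites above: an empty result now raises `StopIteration` rather than `IndexError`, and the generator stops scanning at the first match. With a hypothetical outputs list:

```python
outputs = [{"OutputKey": "AuthAtEdgeClient", "OutputValue": "abc123"}]

# Pre-change: builds the full list, raises IndexError if nothing matches.
client_id = [o["OutputValue"] for o in outputs if o["OutputKey"] == "AuthAtEdgeClient"][0]

# Post-change: lazy, raises StopIteration if nothing matches.
client_id = next(o["OutputValue"] for o in outputs if o["OutputKey"] == "AuthAtEdgeClient")
```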
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/client_updater.py b/runway/cfngin/hooks/staticsite/auth_at_edge/client_updater.py
index d62c6082d..e4be5b8ad 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/client_updater.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/client_updater.py
@@ -8,7 +8,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Dict, List
+from typing import TYPE_CHECKING, Any, List # noqa: UP035
from ...base import HookArgsBaseModel
@@ -21,7 +21,7 @@
class HookArgs(HookArgsBaseModel):
"""Hook arguments."""
- alternate_domains: List[str]
+ alternate_domains: List[str] # noqa: UP006
"""A list of any alternate domains that need to be listed with the primary
distribution domain.
@@ -33,7 +33,7 @@ class HookArgs(HookArgsBaseModel):
distribution_domain: str
"""Distribution domain."""
- oauth_scopes: List[str]
+ oauth_scopes: List[str] # noqa: UP006
"""A list of all available validation scopes for oauth."""
redirect_path_sign_in: str
@@ -42,13 +42,13 @@ class HookArgs(HookArgsBaseModel):
redirect_path_sign_out: str
"""The redirect path after sign out."""
- supported_identity_providers: List[str] = []
+ supported_identity_providers: List[str] = [] # noqa: UP006
"""Supported identity providers."""
def get_redirect_uris(
- domains: List[str], redirect_path_sign_in: str, redirect_path_sign_out: str
-) -> Dict[str, List[str]]:
+ domains: list[str], redirect_path_sign_in: str, redirect_path_sign_out: str
+) -> dict[str, list[str]]:
"""Create dict of redirect URIs for AppClient."""
return {
"sign_in": [f"{domain}{redirect_path_sign_in}" for domain in domains],
@@ -56,7 +56,7 @@ def get_redirect_uris(
}
-def update(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool:
+def update(context: CfnginContext, *_args: Any, **kwargs: Any) -> bool:
"""Update the callback urls for the User Pool Client.
Required to match the redirect_uri being sent which contains
@@ -67,6 +67,7 @@ def update(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool:
Args:
context: The context instance.
+ **kwargs: Arbitrary keyword arguments.
"""
args = HookArgs.parse_obj(kwargs)
@@ -74,7 +75,7 @@ def update(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool:
cognito_client = session.client("cognito-idp")
# Combine alternate domains with main distribution
- redirect_domains = args.alternate_domains + ["https://" + args.distribution_domain]
+ redirect_domains = [*args.alternate_domains, "https://" + args.distribution_domain]
# Create a list of all domains with their redirect paths
redirect_uris = get_redirect_uris(
@@ -93,6 +94,6 @@ def update(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool:
UserPoolId=context.hook_data["aae_user_pool_id_retriever"]["id"],
)
return True
- except Exception: # pylint: disable=broad-except
+ except Exception:
LOGGER.exception("unable to update user pool client callback urls")
return False
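The `redirect_domains` rewrite uses iterable unpacking, which is equivalent to list concatenation here but also accepts any iterable on the left-hand side. With placeholder domains:

```python
alternate_domains = ["https://alt.example.com"]
distribution_domain = "d111111abcdef8.cloudfront.net"

# Both produce the same list; the unpacking form (post-change) also works
# when alternate_domains is a tuple, set, or generator.
concatenated = alternate_domains + ["https://" + distribution_domain]
unpacked = [*alternate_domains, "https://" + distribution_domain]
assert concatenated == unpacked
```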
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/domain_updater.py b/runway/cfngin/hooks/staticsite/auth_at_edge/domain_updater.py
index 8a2e8d10a..eb9754989 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/domain_updater.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/domain_updater.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Dict, Union
+from typing import TYPE_CHECKING, Any
from ...base import HookArgsBaseModel
@@ -20,9 +20,7 @@ class HookArgs(HookArgsBaseModel):
"""The ID of the Cognito User Pool Client."""
-def update(
- context: CfnginContext, *__args: Any, **kwargs: Any
-) -> Union[Dict[str, Any], bool]:
+def update(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, Any] | bool:
"""Retrieve/Update the domain name of the specified client.
A domain name is required in order to make authorization and token
@@ -34,18 +32,17 @@ def update(
Args:
context: The context instance.
+ **kwargs: Arbitrary keyword arguments.
"""
args = HookArgs.parse_obj(kwargs)
session = context.get_session()
cognito_client = session.client("cognito-idp")
- context_dict: Dict[str, Any] = {}
+ context_dict: dict[str, Any] = {}
user_pool_id = context.hook_data["aae_user_pool_id_retriever"]["id"]
- user_pool = cognito_client.describe_user_pool(UserPoolId=user_pool_id).get(
- "UserPool", {}
- )
+ user_pool = cognito_client.describe_user_pool(UserPoolId=user_pool_id).get("UserPool", {})
(user_pool_region, user_pool_hash) = user_pool_id.split("_")
domain_prefix = user_pool.get("CustomDomain", user_pool.get("Domain"))
@@ -58,19 +55,15 @@ def update(
try:
domain_prefix = (f"{user_pool_hash}-{args.client_id}").lower()
- cognito_client.create_user_pool_domain(
- Domain=domain_prefix, UserPoolId=user_pool_id
- )
+ cognito_client.create_user_pool_domain(Domain=domain_prefix, UserPoolId=user_pool_id)
context_dict["domain"] = get_user_pool_domain(domain_prefix, user_pool_region)
return context_dict
- except Exception: # pylint: disable=broad-except
+ except Exception:
LOGGER.exception("could not update user pool domain: %s", user_pool_id)
return False
-def delete(
- context: CfnginContext, *__args: Any, **kwargs: Any
-) -> Union[Dict[str, Any], bool]:
+def delete(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, Any] | bool:
"""Delete the domain if the user pool was created by Runway.
If a User Pool was created by Runway, and populated with a domain, that
@@ -83,6 +76,7 @@ def delete(
Args:
context: The context instance.
+ **kwargs: Arbitrary keyword arguments.
"""
args = HookArgs.parse_obj(kwargs)
@@ -94,14 +88,12 @@ def delete(
domain_prefix = (f"{user_pool_hash}-{args.client_id}").lower()
try:
- cognito_client.delete_user_pool_domain(
- UserPoolId=user_pool_id, Domain=domain_prefix
- )
+ cognito_client.delete_user_pool_domain(UserPoolId=user_pool_id, Domain=domain_prefix)
return True
except cognito_client.exceptions.InvalidParameterException:
LOGGER.info('skipped deletion; no domain with prefix "%s"', domain_prefix)
return True
- except Exception: # pylint: disable=broad-except
+ except Exception:
LOGGER.exception("could not delete user pool domain")
return False
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/lambda_config.py b/runway/cfngin/hooks/staticsite/auth_at_edge/lambda_config.py
index 5e403172a..97e67494a 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/lambda_config.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/lambda_config.py
@@ -9,7 +9,7 @@
import shutil
import tempfile
from tempfile import mkstemp
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from typing import TYPE_CHECKING, Any, Dict, List, Optional # noqa: UP035
from ... import aws_lambda
from ...base import HookArgsBaseModel
@@ -34,16 +34,16 @@ class HookArgs(HookArgsBaseModel):
client_id: str
"""The ID of the Cognito User Pool Client."""
- cookie_settings: Dict[str, Any]
+ cookie_settings: Dict[str, Any] # noqa: UP006
"""The settings for our customized cookies."""
- http_headers: Dict[str, Any]
+ http_headers: Dict[str, Any] # noqa: UP006
"""The additional headers added to our requests."""
nonce_signing_secret_param_name: str
"""SSM param name to store nonce signing secret."""
- oauth_scopes: List[str]
+ oauth_scopes: List[str] # noqa: UP006
"""The validation scopes for our OAuth requests."""
redirect_path_refresh: str
@@ -62,10 +62,9 @@ class HookArgs(HookArgsBaseModel):
"""Optional User Pool group to which access should be restricted."""
-# pylint: disable=too-many-locals
def write(
context: CfnginContext, provider: Provider, *__args: Any, **kwargs: Any
-) -> Dict[str, Any]:
+) -> dict[str, Any]:
"""Writes/Uploads the configured lambdas for Auth@Edge.
Lambda@Edge does not have the ability to allow Environment variables
@@ -96,10 +95,10 @@ def write(
}
# Shared file that contains the method called for configuration data
- path = os.path.join(os.path.dirname(__file__), "templates", "shared.py")
- context_dict: Dict[str, Any] = {}
+ path = os.path.join(os.path.dirname(__file__), "templates", "shared.py") # noqa: PTH120, PTH118
+ context_dict: dict[str, Any] = {}
- with open(path, encoding="utf-8") as file_:
+ with open(path, encoding="utf-8") as file_: # noqa: PTH123
# Dynamically replace our configuration values
# in the shared.py template file with actual
# calculated values
@@ -114,7 +113,7 @@ def write(
filedir, temppath = mkstemp()
# Save the file to a temp path
- with open(temppath, "w", encoding="utf-8") as tmp:
+ with open(temppath, "w", encoding="utf-8") as tmp: # noqa: PTH123
tmp.write(shared)
config = temppath
os.close(filedir)
@@ -127,23 +126,27 @@ def write(
# Copy the template code for the specific Lambda function
# to the temporary folder
shutil.copytree(
- os.path.join(os.path.dirname(__file__), "templates", handler),
+ os.path.join( # noqa: PTH118
+ os.path.dirname(__file__), "templates", handler # noqa: PTH120
+ ),
dirpath,
dirs_exist_ok=True,
)
# Save our dynamic configuration shared file to the
# temporary folder
- with open(config, encoding="utf-8") as shared:
+ with open(config, encoding="utf-8") as shared: # noqa: PTH123
raw = shared.read()
filename = "shared.py"
- with open(os.path.join(dirpath, filename), "wb") as newfile:
+ with open(os.path.join(dirpath, filename), "wb") as newfile: # noqa: PTH118, PTH123
newfile.write(raw.encode())
# Copy the shared jose-dependent util module to the temporary folder
shutil.copyfile(
- os.path.join(os.path.dirname(__file__), "templates", "shared_jose.py"),
- os.path.join(dirpath, "shared_jose.py"),
+ os.path.join( # noqa: PTH118
+ os.path.dirname(__file__), "templates", "shared_jose.py" # noqa: PTH120
+ ),
+ os.path.join(dirpath, "shared_jose.py"), # noqa: PTH118
)
# Upload our temporary folder to our S3 bucket for
@@ -193,7 +196,5 @@ def random_key(length: int = 16) -> str:
length: The length of the random key.
"""
- secret_allowed_chars = (
- "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
- )
+ secret_allowed_chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
return "".join(secrets.choice(secret_allowed_chars) for _ in range(length))
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/check_auth/__init__.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/check_auth/__init__.py
index a2929b225..c96288459 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/check_auth/__init__.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/check_auth/__init__.py
@@ -10,7 +10,6 @@
"""
-# pylint: disable=consider-using-f-string
import base64
import datetime
import hashlib
@@ -20,9 +19,9 @@
import secrets
from urllib.parse import quote_plus, urlencode
-from shared_jose import validate_jwt # noqa pylint: disable=import-error
+from shared_jose import validate_jwt
-from shared import ( # noqa pylint: disable=import-error
+from shared import (
decode_token,
extract_and_parse_cookies,
get_config,
@@ -32,9 +31,7 @@
LOGGER = logging.getLogger(__file__)
-SECRET_ALLOWED_CHARS = (
- "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
-)
+SECRET_ALLOWED_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
NONCE_LENGTH = 16
PKCE_LENGTH = 43
CONFIG = get_config()
@@ -44,7 +41,7 @@ def handler(event, _context):
"""Handle the request passed in.
Args:
- event (Dict[str, Any]): The Lambda Event.
+ event (dict[str, Any]): The Lambda Event.
_context (Any): Lambda context object.
"""
@@ -90,9 +87,7 @@ def handler(event, _context):
% (
domain_name,
CONFIG.get("redirect_path_auth_refresh"),
- urlencode(
- {"requestedUri": requested_uri, "nonce": nonce}
- ),
+ urlencode({"requestedUri": requested_uri, "nonce": nonce}),
),
}
],
@@ -129,7 +124,7 @@ def handler(event, _context):
)
return request
- except Exception: # noqa pylint: disable=broad-except
+ except Exception:
# We need new authorization. Get the user over to Cognito
nonce = generate_nonce()
state = {
@@ -139,8 +134,7 @@ def handler(event, _context):
}
login_query_string = urlencode(
{
- "redirect_uri": "https://%s%s"
- % (domain_name, CONFIG["redirect_path_sign_in"]),
+ "redirect_uri": "https://%s%s" % (domain_name, CONFIG["redirect_path_sign_in"]),
"response_type": "code",
"client_id": CONFIG["client_id"],
"state": base64.urlsafe_b64encode(
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/http_headers/__init__.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/http_headers/__init__.py
index 9e60c44e9..c2b93a849 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/http_headers/__init__.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/http_headers/__init__.py
@@ -1,6 +1,6 @@
"""Add all configured (CloudFront compatible) headers to origin response."""
-from shared import as_cloud_front_headers, get_config # pylint: disable=import-error
+from shared import as_cloud_front_headers, get_config
CONFIG = get_config()
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/parse_auth/__init__.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/parse_auth/__init__.py
index 6d27b1683..7e8cef76d 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/parse_auth/__init__.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/parse_auth/__init__.py
@@ -6,7 +6,6 @@
add to the cookie headers.
"""
-# pylint: disable=consider-using-f-string
import base64
import hmac
import json
@@ -14,12 +13,12 @@
from datetime import datetime
from urllib.parse import parse_qs
-from shared_jose import ( # noqa pylint: disable=import-error
+from shared_jose import (
MissingRequiredGroupError,
validate_and_check_id_token,
)
-from shared import ( # noqa pylint: disable=import-error
+from shared import (
create_error_html,
extract_and_parse_cookies,
generate_cookie_headers,
@@ -44,7 +43,7 @@ def validate_querystring_and_cookies(request, cookies):
Args:
request (Any): Cloudfront request.
- cookies (Dict[str, Any]): Cookies.
+ cookies (dict[str, Any]): Cookies.
"""
qsp = parse_qs(request.get("querystring"))
@@ -107,8 +106,7 @@ def validate_querystring_and_cookies(request, cookies):
calculated_hmac = sign(current_nonce, CONFIG["nonce_signing_secret"])
if not hmac.compare_digest(calculated_hmac, nonce_hmac):
raise RequiresConfirmationError(
- "Nonce signature mismatch; expected %s but got %s"
- % (calculated_hmac, nonce_hmac)
+ "Nonce signature mismatch; expected %s but got %s" % (calculated_hmac, nonce_hmac)
)
return [code, pkce, requested_uri]
@@ -140,8 +138,7 @@ def handler(event, _context):
body = {
"grant_type": "authorization_code",
"client_id": CONFIG["client_id"],
- "redirect_uri": "https://%s%s"
- % (domain_name, CONFIG.get("redirect_path_sign_in")),
+ "redirect_uri": "https://%s%s" % (domain_name, CONFIG.get("redirect_path_sign_in")),
"code": code[0],
"code_verifier": pkce,
}
@@ -183,7 +180,7 @@ def handler(event, _context):
},
}
return response
- except Exception as err: # pylint: disable=broad-except
+ except Exception as err:
if id_token:
# ID token found; checking if it is valid
try:
@@ -203,7 +200,7 @@ def handler(event, _context):
**CONFIG.get("cloud_front_headers", {}),
},
}
- except Exception as err2: # pylint: disable=broad-except
+ except Exception as err2:
LOGGER.debug("Id token not valid")
LOGGER.debug(err2)
@@ -235,8 +232,6 @@ def handler(event, _context):
"status": "200",
"headers": {
**CONFIG.get("cloud_front_headers", {}),
- "content-type": [
- {"key": "Content-Type", "value": "text/html; charset=UTF-8"}
- ],
+ "content-type": [{"key": "Content-Type", "value": "text/html; charset=UTF-8"}],
},
}
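The nonce validation retained above depends on `hmac.compare_digest` for a constant-time comparison. A standalone sketch, with a simplified `sign` standing in for the template's helper:

```python
import hashlib
import hmac

def sign(value: str, secret: str) -> str:
    # Simplified stand-in for the shared signing helper.
    return hmac.new(secret.encode(), value.encode(), hashlib.sha256).hexdigest()

expected = sign("nonce-123", "signing-secret")
provided = sign("nonce-123", "signing-secret")
# compare_digest does not short-circuit on the first differing byte,
# so timing reveals nothing about where a mismatch occurs.
assert hmac.compare_digest(expected, provided)
```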
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/refresh_auth/__init__.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/refresh_auth/__init__.py
index 41c9efd88..79ddf7631 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/refresh_auth/__init__.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/refresh_auth/__init__.py
@@ -4,7 +4,7 @@
import traceback
from urllib.parse import parse_qs
-from shared import ( # noqa pylint: disable=import-error
+from shared import (
create_error_html,
extract_and_parse_cookies,
generate_cookie_headers,
@@ -62,7 +62,7 @@ def handler(event, _context):
tokens["id_token"] = res.get("id_token")
tokens["access_token"] = res.get("access_token")
cookie_headers_event_type = "new_tokens"
- except Exception as err: # pylint: disable=broad-except
+ except Exception as err:
LOGGER.debug(err)
cookie_headers_event_type = "refresh_failed"
@@ -88,7 +88,7 @@ def handler(event, _context):
# Send a basic html error response and inform the user
# why refresh was unsuccessful
- except Exception as err: # pylint: disable=broad-except
+ except Exception as err:
LOGGER.info(err)
LOGGER.info(traceback.print_exc())
@@ -101,9 +101,7 @@ def handler(event, _context):
),
"status": "400",
"headers": {
- "content-type": [
- {"key": "Content-Type", "value": "text/html; charset=UTF-8"}
- ],
+ "content-type": [{"key": "Content-Type", "value": "text/html; charset=UTF-8"}],
**CONFIG.get("cloud_front_headers", {}),
},
}
@@ -117,7 +115,7 @@ def validate_refresh_request(current_nonce, original_nonce, tokens):
Args:
current_nonce (str): The current nonce code.
original_nonce (str): The original nonce code.
- tokens (Dict[str, str]): A dictionary of all the token_types
+ tokens (dict[str, str]): A dictionary of all the token_types
and their corresponding token values (id, auth, refresh).
"""
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared.py
index 69cb6c86a..c0284273e 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared.py
@@ -1,6 +1,5 @@
"""Shared functionality for the Auth@Edge Lambda suite."""
-# pylint: disable=consider-using-f-string, inconsistent-return-statements
import base64
import hmac
import json
@@ -57,7 +56,7 @@ def as_cloud_front_headers(headers):
"""Convert a series of headers to CloudFront compliant ones.
Args:
- headers (Dict[str, str]): The request/response headers in
+ headers (dict[str, str]): The request/response headers in
dictionary format.
"""
@@ -71,7 +70,7 @@ def extract_and_parse_cookies(headers, client_id, cookie_compatibility="amplify"
"""Extract and parse the Cognito cookies from the headers.
Args:
- headers (Dict[str, str]): The request/response headers in
+ headers (dict[str, str]): The request/response headers in
dictionary format.
client_id (str): The Cognito UserPool Client ID.
cookie_compatibility (str): "amplify" or "elasticsearch".
@@ -88,18 +87,12 @@ def extract_and_parse_cookies(headers, client_id, cookie_compatibility="amplify"
return {
"token_user_name": (
- cookies.get(cookie_names["last_user_key"])
- if "last_user_key" in cookie_names
- else None
+ cookies.get(cookie_names["last_user_key"]) if "last_user_key" in cookie_names else None
),
"id_token": cookies.get(cookie_names["id_token_key"]),
"access_token": cookies.get(cookie_names["access_token_key"]),
"refresh_token": cookies.get(cookie_names["refresh_token_key"]),
- "scopes": (
- cookies.get(cookie_names["scope_key"])
- if "scope_key" in cookie_names
- else None
- ),
+ "scopes": (cookies.get(cookie_names["scope_key"]) if "scope_key" in cookie_names else None),
"nonce": cookies.get("spa-auth-edge-nonce"),
"nonce_hmac": cookies.get("spa-auth-edge-nonce-hmac"),
"pkce": cookies.get("spa-auth-edge-pkce"),
@@ -110,7 +103,7 @@ def extract_cookies_from_headers(headers):
"""Extract all cookies from the response headers.
Args:
- headers (Dict[str, Dict[str, str]]): The request/response headers in
+ headers (dict[str, dict[str, str]]): The request/response headers in
dictionary format.
"""
@@ -204,11 +197,11 @@ def generate_cookie_headers(
event (str): "new_tokens" | "sign_out" | "refresh_failed".
client_id (str): The Cognito UserPool Client ID.
oauth_scopes (List): The scopes for oauth validation.
- tokens (Dict[str, str]): The tokens received from
+ tokens (dict[str, str]): The tokens received from
the Cognito Request (id, access, refresh).
domain_name (str): The Domain name the cookies are
to be associated with.
- cookie_settings (Dict[str, str]): The various settings
+ cookie_settings (dict[str, str]): The various settings
that we would like for the various tokens.
cookie_compatibility (str): "amplify" | "elasticsearch".
@@ -249,9 +242,7 @@ def generate_cookie_headers(
cookie_names = get_elasticsearch_cookie_names()
cookies = {
cookie_names["cognito_enabled_key"]: "True; "
- + str(
- with_cookie_domain(domain_name, cookie_settings.get("cognitoEnabled"))
- ),
+ + str(with_cookie_domain(domain_name, cookie_settings.get("cognitoEnabled"))),
}
cookies[cookie_names["id_token_key"]] = f"{tokens.get('id_token')}; " + str(
with_cookie_domain(domain_name, cookie_settings.get("idToken")),
@@ -259,9 +250,8 @@ def generate_cookie_headers(
cookies[cookie_names["access_token_key"]] = f"{tokens.get('access_token')}; " + str(
with_cookie_domain(domain_name, cookie_settings.get("accessToken")),
)
- cookies[cookie_names["refresh_token_key"]] = (
- f"{tokens.get('refresh_token')}; "
- + str(with_cookie_domain(domain_name, cookie_settings.get("refreshToken")))
+ cookies[cookie_names["refresh_token_key"]] = f"{tokens.get('refresh_token')}; " + str(
+ with_cookie_domain(domain_name, cookie_settings.get("refreshToken"))
)
cookies_iter = cookies # type: ignore
if event == "sign_out":
@@ -278,9 +268,7 @@ def generate_cookie_headers(
cookies[i] = expire_cookie(cookies[i])
# Return cookies in the form of CF headers
- return [
- {"key": "set-cookie", "value": f"{key}={val}"} for key, val in cookies.items()
- ]
+ return [{"key": "set-cookie", "value": f"{key}={val}"} for key, val in cookies.items()]
def expire_cookie_filter(cookie):
@@ -319,9 +307,9 @@ def http_post_with_retry(url, data, headers):
Args:
url (str): The URL to make the POST request to.
- data (Dict[str, str]): The dictionary of data elements to
+ data (dict[str, str]): The dictionary of data elements to
send with the request (urlencoded internally).
- headers (Dict[str, str]): Any headers to send with
+ headers (dict[str, str]): Any headers to send with
the POST request.
"""
@@ -335,7 +323,6 @@ def http_post_with_retry(url, data, headers):
read = res.decode("utf-8")
json_data = json.loads(read)
return json_data
- # pylint: disable=broad-except
except Exception as err:
LOGGER.error("HTTP POST to %s failed (attempt %s)", url, attempts)
LOGGER.error(err)
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared_jose.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared_jose.py
index 78cb9ae1d..b6b1c2567 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared_jose.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared_jose.py
@@ -7,7 +7,7 @@
import re
from urllib import request
-from jose import jwt # noqa pylint: disable=import-error
+from jose import jwt
LOGGER = logging.getLogger(__name__)
@@ -92,7 +92,7 @@ def __init__(self, options=None):
"""Initialize.
Args:
- options (Optional[Dict[str, str]]): Options for the client.
+ options (Optional[dict[str, str]]): Options for the client.
"""
self.options = options
@@ -102,17 +102,14 @@ def get_keys(self):
LOGGER.info("Fetching keys from %s", self.options.get("jwks_uri"))
try:
- # pylint: disable=consider-using-with
request_res = request.urlopen(self.options.get("jwks_uri"))
data = json.loads(
- request_res.read().decode(
- request_res.info().get_param("charset") or "utf-8"
- )
+ request_res.read().decode(request_res.info().get_param("charset") or "utf-8")
)
keys = data["keys"]
LOGGER.info("Keys: %s", keys)
return keys
- except Exception as err: # pylint: disable=broad-except
+ except Exception as err:
LOGGER.info("Failure: ConnectionError")
LOGGER.info(err)
return {}
@@ -167,7 +164,6 @@ def create_jwk(key):
else:
try:
jwk["rsaPublicKey"] = rsa_public_key_to_pem(key.get("n"), key.get("e"))
- # pylint: disable=broad-except
except Exception as err:
LOGGER.error(err)
jwk["rsaPublicKey"] = None
@@ -178,7 +174,7 @@ def is_signing_key(key):
"""Filter to determine if this is a signing key.
Args:
- key (Dict[str, str]): The key.
+ key (dict[str, str]): The key.
"""
if key.get("kty", "") != "RSA":
@@ -240,9 +236,7 @@ def validate_jwt(jwt_token, jwks_uri, issuer, audience):
)
-def validate_and_check_id_token(
- id_token, jwks_uri, issuer, audience, required_group=None
-):
+def validate_and_check_id_token(id_token, jwks_uri, issuer, audience, required_group=None):
"""Validate JWT and (optionally) check group membership."""
id_token_payload = validate_jwt(id_token, jwks_uri, issuer, audience)
if required_group:
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/sign_out/__init__.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/sign_out/__init__.py
index ddb22583b..02e45c4b9 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/sign_out/__init__.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/sign_out/__init__.py
@@ -1,10 +1,9 @@
"""Sign user out of Cognito and remove all Cookie Headers."""
-# pylint: disable=consider-using-f-string
import logging
from urllib.parse import urlencode
-from shared import ( # noqa pylint: disable=import-error
+from shared import (
create_error_html,
extract_and_parse_cookies,
generate_cookie_headers,
@@ -31,9 +30,7 @@ def handler(event, _context):
),
"status": "200",
"headers": {
- "content-type": [
- {"key": "Content-Type", "value": "text/html; charset=UTF-8"}
- ],
+ "content-type": [{"key": "Content-Type", "value": "text/html; charset=UTF-8"}],
**CONFIG.get("cloud_front_headers", {}),
},
}
diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/user_pool_id_retriever.py b/runway/cfngin/hooks/staticsite/auth_at_edge/user_pool_id_retriever.py
index f44a4489c..5594861fa 100644
--- a/runway/cfngin/hooks/staticsite/auth_at_edge/user_pool_id_retriever.py
+++ b/runway/cfngin/hooks/staticsite/auth_at_edge/user_pool_id_retriever.py
@@ -1,7 +1,7 @@
"""Retrieve the ID of the Cognito User Pool."""
import logging
-from typing import Any, Dict, Optional
+from typing import Any, Optional
from ...base import HookArgsBaseModel
@@ -18,7 +18,7 @@ class HookArgs(HookArgsBaseModel):
"""The ARN of the supplied User pool."""
-def get(*__args: Any, **kwargs: Any) -> Dict[str, Any]:
+def get(*__args: Any, **kwargs: Any) -> dict[str, Any]:
"""Retrieve the ID of the Cognito User Pool.
The User Pool can either be supplied via an ARN or by being generated.
diff --git a/runway/cfngin/hooks/staticsite/build_staticsite.py b/runway/cfngin/hooks/staticsite/build_staticsite.py
index 22311bf30..597995266 100644
--- a/runway/cfngin/hooks/staticsite/build_staticsite.py
+++ b/runway/cfngin/hooks/staticsite/build_staticsite.py
@@ -7,7 +7,7 @@
import tempfile
import zipfile
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union # noqa: UP035
import boto3
from boto3.s3.transfer import S3Transfer # type: ignore
@@ -33,7 +33,7 @@ class HookArgsOptions(HookArgsBaseModel):
build_output: Optional[str] = None
"""Path were the build static site will be stored locally before upload."""
- build_steps: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]] = []
+ build_steps: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]] = [] # noqa: UP006
"""Steps to execute to build the static site."""
name: str = "undefined"
@@ -45,7 +45,9 @@ class HookArgsOptions(HookArgsBaseModel):
path: str
"""Working directory/path to the static site's source code."""
- pre_build_steps: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]] = []
+ pre_build_steps: List[ # noqa: UP006
+ Union[str, List[str], Dict[str, Union[str, List[str]]]] # noqa: UP006
+ ] = []
"""Steps to run before building the static site."""
source_hashing: RunwayStaticSiteSourceHashingDataModel = (
@@ -74,26 +76,25 @@ def zip_and_upload(
filedes, temp_file = tempfile.mkstemp()
os.close(filedes)
LOGGER.info("archiving %s to s3://%s/%s", app_dir, bucket, key)
- with zipfile.ZipFile(temp_file, "w", zipfile.ZIP_DEFLATED) as filehandle:
- with change_dir(app_dir):
- for dirname, _subdirs, files in os.walk("./"):
- if dirname != "./":
- filehandle.write(dirname)
- for filename in files:
- filehandle.write(os.path.join(dirname, filename))
+ with zipfile.ZipFile(temp_file, "w", zipfile.ZIP_DEFLATED) as filehandle, change_dir(app_dir):
+ for dirname, _subdirs, files in os.walk("./"):
+ if dirname != "./":
+ filehandle.write(dirname)
+ for filename in files:
+ filehandle.write(os.path.join(dirname, filename)) # noqa: PTH118
transfer.upload_file(temp_file, bucket, key)
- os.remove(temp_file)
+ os.remove(temp_file) # noqa: PTH107
class OptionsArgTypeDef(TypedDict, total=False):
"""Options argument type definition."""
build_output: str
- build_steps: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]]
+ build_steps: list[Union[str, list[str], dict[str, Union[str, list[str]]]]]
name: str
namespace: str
path: str
- pre_build_steps: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]]
+ pre_build_steps: list[Union[str, list[str], dict[str, Union[str, list[str]]]]]
def build(
@@ -102,7 +103,7 @@ def build(
*,
options: Optional[OptionsArgTypeDef] = None,
**kwargs: Any,
-) -> Dict[str, Any]:
+) -> dict[str, Any]:
"""Build static site.
Arguments parsed by :class:`~runway.cfngin.hooks.staticsite.build_staticsite.HookArgs`.
@@ -114,12 +115,12 @@ def build(
args = HookArgs.parse_obj({"options": options, **kwargs})
session = context.get_session()
- context_dict: Dict[str, Any] = {
+ context_dict: dict[str, Any] = {
"artifact_key_prefix": f"{args.options.namespace}-{args.options.name}-"
}
if args.options.build_output:
- build_output = os.path.join(args.options.path, args.options.build_output)
+ build_output = os.path.join(args.options.path, args.options.build_output) # noqa: PTH118
else:
build_output = args.options.path
@@ -132,17 +133,14 @@ def build(
context_dict["hash"] = get_hash_of_files(
root_path=Path(args.options.path),
- directories=options.get("source_hashing", {"directories": None}).get(
- "directories"
- ),
+ directories=options.get("source_hashing", {"directories": None}).get("directories"),
)
LOGGER.debug("application hash: %s", context_dict["hash"])
# Now determine if the current staticsite has already been deployed
if args.options.source_hashing.enabled:
context_dict["hash_tracking_parameter"] = (
- args.options.source_hashing.parameter
- or f"{context_dict['artifact_key_prefix']}hash"
+ args.options.source_hashing.parameter or f"{context_dict['artifact_key_prefix']}hash"
)
ssm_client = session.client("ssm")
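The `zip_and_upload` hunk merges two nested `with` blocks into one statement; semantics are unchanged (managers enter left to right and exit in reverse). In isolation:

```python
from io import StringIO

# Nested (pre-change) form.
with StringIO() as outer:
    with StringIO() as inner:
        inner.write("x")

# Combined (post-change) form; identical enter/exit ordering.
with StringIO() as outer, StringIO() as inner:
    inner.write("x")
```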
diff --git a/runway/cfngin/hooks/staticsite/cleanup.py b/runway/cfngin/hooks/staticsite/cleanup.py
index 75380947b..4692e2513 100644
--- a/runway/cfngin/hooks/staticsite/cleanup.py
+++ b/runway/cfngin/hooks/staticsite/cleanup.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, List
+from typing import TYPE_CHECKING, Any
from ..base import HookArgsBaseModel
@@ -41,16 +41,12 @@ class HookArgs(HookArgsBaseModel):
"""Name of the CloudFormation Stack as defined in the config file (no namespace)."""
-def get_replicated_function_names(outputs: List[OutputTypeDef]) -> List[str]:
+def get_replicated_function_names(outputs: list[OutputTypeDef]) -> list[str]:
"""Extract replicated function names from CFN outputs."""
- function_names: List[str] = []
+ function_names: list[str] = []
for i in REPLICATED_FUNCTION_OUTPUTS:
function_arn = next(
- (
- output.get("OutputValue")
- for output in outputs
- if output.get("OutputKey") == i
- ),
+ (output.get("OutputValue") for output in outputs if output.get("OutputKey") == i),
None,
)
if function_arn:
@@ -58,30 +54,26 @@ def get_replicated_function_names(outputs: List[OutputTypeDef]) -> List[str]:
return function_names
-def warn(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool:
+def warn(context: CfnginContext, *_args: Any, **kwargs: Any) -> bool:
"""Notify the user of Lambda functions to delete.
Arguments parsed by :class:`~runway.cfngin.hooks.staticsite.cleanup.HookArgs`.
Args:
context: The context instance.
+ **kwargs: Arbitrary keyword arguments.
"""
args = HookArgs.parse_obj(kwargs)
cfn_client = context.get_session().client("cloudformation")
try:
describe_response = cfn_client.describe_stacks(
- StackName=context.namespace
- + context.namespace_delimiter
- + args.stack_relative_name
+ StackName=context.namespace + context.namespace_delimiter + args.stack_relative_name
)
stack = next(
x
for x in describe_response.get("Stacks", [])
- if (
- x.get("StackStatus")
- and x.get("StackStatus") not in STACK_STATUSES_TO_IGNORE
- )
+ if (x.get("StackStatus") and x.get("StackStatus") not in STACK_STATUSES_TO_IGNORE)
)
functions = get_replicated_function_names(stack["Outputs"])
if functions:
@@ -101,7 +93,7 @@ def warn(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool:
LOGGER.warning("for x in %s; do %s; done", (" ").join(functions), cmd)
LOGGER.warning("On Windows:")
LOGGER.warning('Foreach ($x in "%s") { %s }', ('","').join(functions), cmd)
- except Exception: # pylint: disable=broad-except
+ except Exception: # noqa: S110, BLE001
# There's no harm in continuing on in the event of an error
# Orphaned functions have no cost
pass
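`get_replicated_function_names` above relies on two-argument `next(...)`: the `None` default turns a missing stack output into a soft miss instead of a `StopIteration`. With a hypothetical output key:

```python
outputs = [{"OutputKey": "Other", "OutputValue": "arn:aws:lambda:..."}]

function_arn = next(
    (o.get("OutputValue") for o in outputs if o.get("OutputKey") == "ExampleFunctionArn"),
    None,
)
assert function_arn is None  # no matching output, no exception
```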
diff --git a/runway/cfngin/hooks/staticsite/upload_staticsite.py b/runway/cfngin/hooks/staticsite/upload_staticsite.py
index 5e9d173dd..b5183dc53 100644
--- a/runway/cfngin/hooks/staticsite/upload_staticsite.py
+++ b/runway/cfngin/hooks/staticsite/upload_staticsite.py
@@ -8,7 +8,7 @@
import os
import time
from operator import itemgetter
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast
+from typing import TYPE_CHECKING, Any, List, Optional, cast # noqa: UP035
import yaml
@@ -43,16 +43,14 @@ class HookArgs(HookArgsBaseModel):
distribution_path: str = "/*"
"""Path in the CloudFront distribution to invalidate."""
- extra_files: List[RunwayStaticSiteExtraFileDataModel] = []
+ extra_files: List[RunwayStaticSiteExtraFileDataModel] = [] # noqa: UP006
"""Extra files to sync to the S3 bucket."""
website_url: Optional[str] = None
"""S3 bucket website URL."""
-def get_archives_to_prune(
- archives: List[Dict[str, Any]], hook_data: Dict[str, Any]
-) -> List[str]:
+def get_archives_to_prune(archives: list[dict[str, Any]], hook_data: dict[str, Any]) -> list[str]:
"""Return list of keys to delete.
Args:
@@ -66,9 +64,7 @@ def get_archives_to_prune(
if hook_data.get(i)
]
- archives.sort( # sort from oldest to newest
- key=itemgetter("LastModified"), reverse=False
- )
+ archives.sort(key=itemgetter("LastModified"), reverse=False) # sort from oldest to newest
# Drop all but last 15 files
return [i["Key"] for i in archives[:-15] if i["Key"] not in files_to_skip]
@@ -81,6 +77,7 @@ def sync(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool:
Args:
context: The context instance.
+ **kwargs: Arbitrary keyword arguments.
"""
args = HookArgs.parse_obj(kwargs)
@@ -195,7 +192,7 @@ def prune_archives(context: CfnginContext, session: Session) -> bool:
"""
LOGGER.info("cleaning up old site archives...")
- archives: List[Dict[str, Any]] = []
+ archives: list[dict[str, Any]] = []
s3_client = session.client("s3")
list_objects_v2_paginator = s3_client.get_paginator("list_objects_v2")
response_iterator = list_objects_v2_paginator.paginate(
@@ -231,7 +228,7 @@ def auto_detect_content_type(filename: Optional[str]) -> Optional[str]:
if not filename:
return None
- _, ext = os.path.splitext(filename)
+ _, ext = os.path.splitext(filename) # noqa: PTH122
if ext == ".json":
return "application/json"
@@ -274,9 +271,7 @@ def get_content(extra_file: RunwayStaticSiteExtraFileDataModel) -> Optional[str]
if extra_file.content_type == "text/yaml":
return yaml.safe_dump(extra_file.content)
- raise ValueError(
- '"content_type" must be json or yaml if "content" is not a string'
- )
+ raise ValueError('"content_type" must be json or yaml if "content" is not a string')
if not isinstance(extra_file.content, str):
raise TypeError(f"unsupported content: {type(extra_file.content)}")
@@ -285,7 +280,7 @@ def get_content(extra_file: RunwayStaticSiteExtraFileDataModel) -> Optional[str]
def calculate_hash_of_extra_files(
- extra_files: List[RunwayStaticSiteExtraFileDataModel],
+ extra_files: list[RunwayStaticSiteExtraFileDataModel],
) -> str:
"""Return a hash of all of the given extra files.
@@ -299,7 +294,7 @@ def calculate_hash_of_extra_files(
The hash of all the files.
"""
- file_hash = hashlib.md5()
+ file_hash = hashlib.md5() # noqa: S324
for extra_file in sorted(extra_files, key=lambda x: x.name):
file_hash.update((extra_file.name + "\0").encode())
@@ -312,15 +307,13 @@ def calculate_hash_of_extra_files(
file_hash.update((cast(str, extra_file.content) + "\0").encode())
if extra_file.file:
- with open(extra_file.file, "rb") as f:
+ with open(extra_file.file, "rb") as f: # noqa: PTH123
LOGGER.debug("hashing file: %s", extra_file.file)
- for chunk in iter(
- lambda: f.read(4096), "" # pylint: disable=cell-var-from-loop
- ):
+ for chunk in iter(lambda: f.read(4096), b""):
if not chunk:
break
file_hash.update(chunk)
- file_hash.update("\0".encode())
+ file_hash.update(b"\0")
return file_hash.hexdigest()
@@ -344,9 +337,7 @@ def get_ssm_value(session: Session, name: str) -> Optional[str]:
return None
-def set_ssm_value(
- session: Session, name: str, value: Any, description: str = ""
-) -> None:
+def set_ssm_value(session: Session, name: str, value: Any, description: str = "") -> None:
"""Set the ssm parameter.
Args:
@@ -363,18 +354,19 @@ def set_ssm_value(
)
-def sync_extra_files(
+def sync_extra_files( # noqa: C901
context: CfnginContext,
bucket: str,
- extra_files: List[RunwayStaticSiteExtraFileDataModel],
+ extra_files: list[RunwayStaticSiteExtraFileDataModel],
**kwargs: Any,
-) -> List[str]:
+) -> list[str]:
"""Sync static website extra files to S3 bucket.
Args:
context: The context instance.
bucket: The static site bucket name.
extra_files: List of files and file content that should be uploaded.
+ **kwargs: Arbitrary keyword arguments.
"""
LOGGER.debug("extra_files to sync: %s", json.dumps(extra_files, cls=JsonEncoder))
@@ -384,7 +376,7 @@ def sync_extra_files(
session = context.get_session()
s3_client = session.client("s3")
- uploaded: List[str] = []
+ uploaded: list[str] = []
hash_param = cast(str, kwargs.get("hash_tracking_parameter", ""))
hash_new = None
@@ -404,9 +396,7 @@ def sync_extra_files(
hash_new = calculate_hash_of_extra_files(extra_files)
if hash_new == hash_old:
- LOGGER.info(
- "skipped upload of extra files; latest version already deployed"
- )
+ LOGGER.info("skipped upload of extra files; latest version already deployed")
return []
for extra_file in extra_files:
@@ -423,9 +413,7 @@ def sync_extra_files(
uploaded.append(extra_file.name)
if extra_file.file:
- LOGGER.info(
- "uploading extra file: %s as %s ", extra_file.file, extra_file.name
- )
+ LOGGER.info("uploading extra file: %s as %s ", extra_file.file, extra_file.name)
extra_args = ""
@@ -449,9 +437,7 @@ def sync_extra_files(
uploaded.append(extra_file.name)
if hash_new:
- LOGGER.info(
- "updating extra files SSM parameter %s with hash %s", hash_param, hash_new
- )
+ LOGGER.info("updating extra files SSM parameter %s with hash %s", hash_param, hash_new)
set_ssm_value(session, hash_param, hash_new)
return uploaded
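On the chunked hashing above: the file is opened in binary mode, so the two-argument `iter` only terminates on its own when the sentinel is `bytes` (`f.read` returns `b""` at EOF, which never equals `""`). The idiom in isolation, with a placeholder filename:

```python
import hashlib

file_hash = hashlib.md5()  # noqa: S324 - fingerprinting, not security
with open("site-archive.zip", "rb") as f:
    for chunk in iter(lambda: f.read(4096), b""):  # b"" matches EOF
        file_hash.update(chunk)
digest = file_hash.hexdigest()
```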
diff --git a/runway/cfngin/hooks/staticsite/utils.py b/runway/cfngin/hooks/staticsite/utils.py
index 1f8ea2e22..6450fa193 100644
--- a/runway/cfngin/hooks/staticsite/utils.py
+++ b/runway/cfngin/hooks/staticsite/utils.py
@@ -6,13 +6,15 @@
import logging
import os
from pathlib import Path
-from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Union, cast
+from typing import TYPE_CHECKING, Optional, Union, cast
import igittigitt
from ....utils import FileHash, change_dir
if TYPE_CHECKING:
+ from collections.abc import Iterable
+
from _typeshed import StrPath
LOGGER = logging.getLogger(__name__)
@@ -29,14 +31,14 @@ def calculate_hash_of_files(files: Iterable[StrPath], root: Path) -> str:
A hash of the hashes of the given files.
"""
- file_hash = FileHash(hashlib.md5())
+ file_hash = FileHash(hashlib.md5()) # noqa: S324
file_hash.add_files(sorted(str(f) for f in files), relative_to=root)
return file_hash.hexdigest
def get_hash_of_files(
root_path: Path,
- directories: Optional[List[Dict[str, Union[List[str], str]]]] = None,
+ directories: Optional[list[dict[str, Union[list[str], str]]]] = None,
) -> str:
"""Generate md5 hash of files.
@@ -49,11 +51,11 @@ def get_hash_of_files(
"""
directories = directories or [{"path": "./"}]
- files_to_hash: List[StrPath] = []
+ files_to_hash: list[StrPath] = []
for i in directories:
gitignore = get_ignorer(
root_path / cast(str, i["path"]),
- cast(Optional[List[str]], i.get("exclusions")),
+ cast("list[str] | None", i.get("exclusions")),
)
with change_dir(root_path):
@@ -72,7 +74,7 @@ def get_hash_of_files(
def get_ignorer(
- path: Path, additional_exclusions: Optional[List[str]] = None
+ path: Path, additional_exclusions: list[str] | None = None
) -> igittigitt.IgnoreParser:
"""Create gitignore filter from directory ``.gitignore`` file.
diff --git a/runway/cfngin/hooks/utils.py b/runway/cfngin/hooks/utils.py
index 565c99fc5..87f653ab7 100644
--- a/runway/cfngin/hooks/utils.py
+++ b/runway/cfngin/hooks/utils.py
@@ -4,9 +4,9 @@
import collections.abc
import logging
-import os
import sys
-from typing import TYPE_CHECKING, Any, Dict, List, cast
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, cast
import pydantic
@@ -30,7 +30,7 @@ def create_template(self) -> None:
"""Create template without raising NotImplementedError."""
-# TODO BREAKING find a better place for this
+# TODO (kyle): BREAKING move to runway.providers.aws.models.TagModel
class TagDataModel(BaseModel):
"""AWS Resource Tag data model."""
@@ -50,16 +50,16 @@ class Config:
def full_path(path: str) -> str:
"""Return full path."""
- return os.path.abspath(os.path.expanduser(path))
+ return str(Path(path).expanduser().absolute())
-# TODO split up to reduce number of statements
-def handle_hooks( # pylint: disable=too-many-statements
+# TODO (kyle): split up to reduce number of statements
+def handle_hooks( # noqa: C901, PLR0912, PLR0915
stage: str,
- hooks: List[CfnginHookDefinitionModel],
+ hooks: list[CfnginHookDefinitionModel],
provider: Provider,
context: CfnginContext,
-):
+) -> None:
"""Handle pre/post_deploy hooks.
These are pieces of code that we want to run before/after deploying
@@ -76,7 +76,7 @@ def handle_hooks( # pylint: disable=too-many-statements
LOGGER.debug("no %s hooks defined", stage)
return
- hook_paths: List[str] = []
+ hook_paths: list[str] = []
for i, hook in enumerate(hooks):
try:
hook_paths.append(hook.path)
@@ -111,18 +111,16 @@ def handle_hooks( # pylint: disable=too-many-statements
"does not exist yet"
)
raise
- kwargs: Dict[str, Any] = {v.name: v.value for v in args}
+ kwargs: dict[str, Any] = {v.name: v.value for v in args}
else:
kwargs = {}
try:
if isinstance(method, type):
- result: Any = getattr(
- method(context=context, provider=provider, **kwargs), stage
- )()
+ result: Any = getattr(method(context=context, provider=provider, **kwargs), stage)()
else:
result = cast(Any, method(context=context, provider=provider, **kwargs))
- except Exception: # pylint: disable=broad-except
+ except Exception:
LOGGER.exception("hook %s threw an exception", hook.path)
if hook.required:
raise
@@ -130,24 +128,19 @@ def handle_hooks( # pylint: disable=too-many-statements
if not result:
if hook.required:
- LOGGER.error(
- "required hook %s failed; return value: %s", hook.path, result
- )
+ LOGGER.error("required hook %s failed; return value: %s", hook.path, result)
sys.exit(1)
- LOGGER.warning(
- "non-required hook %s failed; return value: %s", hook.path, result
- )
- else:
- if isinstance(result, (collections.abc.Mapping, pydantic.BaseModel)):
- if hook.data_key:
- LOGGER.debug(
- "adding result for hook %s to context in data_key %s",
- hook.path,
- hook.data_key,
- )
- context.set_hook_data(hook.data_key, result)
- else:
- LOGGER.debug(
- "hook %s returned result data but no data key set; ignoring",
- hook.path,
- )
+ LOGGER.warning("non-required hook %s failed; return value: %s", hook.path, result)
+ elif isinstance(result, (collections.abc.Mapping, pydantic.BaseModel)):
+ if hook.data_key:
+ LOGGER.debug(
+ "adding result for hook %s to context in data_key %s",
+ hook.path,
+ hook.data_key,
+ )
+ context.set_hook_data(hook.data_key, result)
+ else:
+ LOGGER.debug(
+ "hook %s returned result data but no data key set; ignoring",
+ hook.path,
+ )
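# The contract handle_hooks enforces above, as a hypothetical hook: a hook is
# a callable (or a class whose pre/post-stage method is invoked) that receives
# context/provider plus its configured args as kwargs. A falsy return fails the
# hook (fatal when required); a Mapping or pydantic model result is stored in
# the context under the hook's data_key. Names below are illustrative only.
from typing import Any

def my_hook(*, context: Any, provider: Any, **kwargs: Any) -> dict[str, Any]:
    """Return a Mapping so the result can land in hook_data."""
    return {"bucket_name": kwargs.get("bucket_name", "example-bucket")}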
diff --git a/runway/cfngin/logger/__init__.py b/runway/cfngin/logger/__init__.py
index da3e36b3f..ebe0e5447 100644
--- a/runway/cfngin/logger/__init__.py
+++ b/runway/cfngin/logger/__init__.py
@@ -1,12 +1,13 @@
"""CFNgin logger."""
+from __future__ import annotations
+
import logging
import sys
-from typing import Any, Dict, Optional
+from typing import Any
DEBUG_FORMAT = (
- "[%(asctime)s] %(levelname)s %(threadName)s "
- "%(name)s:%(lineno)d(%(funcName)s): %(message)s"
+ "[%(asctime)s] %(levelname)s %(threadName)s %(name)s:%(lineno)d(%(funcName)s): %(message)s"
)
INFO_FORMAT = "[%(asctime)s] %(message)s"
COLOR_FORMAT = "[%(asctime)s] \033[%(color)sm%(message)s\033[39m"
@@ -24,7 +25,7 @@ def format(self, record: logging.LogRecord) -> str:
return super().format(record)
-def setup_logging(verbosity: int, formats: Optional[Dict[str, Any]] = None):
+def setup_logging(verbosity: int, formats: dict[str, Any] | None = None) -> None:
"""Configure a proper logger based on verbosity and optional log formats.
Args:
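# Hypothetical call to setup_logging above (assuming runway is installed).
# The formats mapping is assumed to override the module-level DEBUG/INFO/COLOR
# format strings by lowercase name; verbosity selects the info- or debug-style
# output.
from runway.cfngin.logger import setup_logging

setup_logging(verbosity=2, formats={"info": "[%(asctime)s] %(message)s"})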
diff --git a/runway/cfngin/lookups/handlers/ami.py b/runway/cfngin/lookups/handlers/ami.py
index 6ecde4dd8..01b498b3d 100644
--- a/runway/cfngin/lookups/handlers/ami.py
+++ b/runway/cfngin/lookups/handlers/ami.py
@@ -1,21 +1,21 @@
"""AMI lookup."""
-# pylint: disable=no-self-argument
# pyright: reportIncompatibleMethodOverride=none
from __future__ import annotations
import operator
import re
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Final, List, Optional, Union # noqa: UP035
from pydantic import validator
-from typing_extensions import Final, Literal
from ....lookups.handlers.base import LookupHandler
from ....utils import BaseModel
from ...utils import read_value_from_path
if TYPE_CHECKING:
+ from typing_extensions import Literal
+
from ....context import CfnginContext
@@ -27,10 +27,10 @@ class ArgsDataModel(BaseModel):
"""
- executable_users: Optional[List[str]] = None
+ executable_users: Optional[List[str]] = None # noqa: UP006
"""List of executable users."""
- owners: List[str]
+ owners: List[str] # noqa: UP006
"""At least one owner is required.
Should be ``amazon``, ``self``, or an AWS account ID.
@@ -41,7 +41,7 @@ class ArgsDataModel(BaseModel):
"""AWS region."""
@validator("executable_users", "owners", allow_reuse=True, pre=True)
- def _convert_str_to_list(cls, v: Union[List[str], str]) -> List[str]:
+ def _convert_str_to_list(cls, v: Union[list[str], str]) -> list[str]: # noqa: N805
"""Convert str to list."""
if isinstance(v, str):
return v.split(",")
@@ -56,9 +56,7 @@ class ImageNotFound(Exception):
def __init__(self, search_string: str) -> None:
"""Instantiate class."""
self.search_string = search_string
- super().__init__(
- f"Unable to find ec2 image with search string: {search_string}"
- )
+ super().__init__(f"Unable to find ec2 image with search string: {search_string}")
class AmiLookup(LookupHandler):
@@ -68,7 +66,7 @@ class AmiLookup(LookupHandler):
"""Name that the Lookup is registered as."""
@classmethod
- def parse(cls, value: str) -> Tuple[str, Dict[str, str]]:
+ def parse(cls, value: str) -> tuple[str, dict[str, str]]:
"""Parse the value passed to the lookup.
This overrides the default parsing to account for special requirements.
@@ -81,7 +79,7 @@ def parse(cls, value: str) -> Tuple[str, Dict[str, str]]:
"""
raw_value = read_value_from_path(value)
- args: Dict[str, str] = {}
+ args: dict[str, str] = {}
if "@" in raw_value:
args["region"], raw_value = raw_value.split("@", 1)
@@ -95,9 +93,7 @@ def parse(cls, value: str) -> Tuple[str, Dict[str, str]]:
return args.pop("name_regex"), args
@classmethod
- def handle( # pylint: disable=arguments-differ
- cls, value: str, context: CfnginContext, *__args: Any, **__kwargs: Any
- ) -> str:
+ def handle(cls, value: str, context: CfnginContext, *__args: Any, **__kwargs: Any) -> str:
"""Fetch the most recent AMI Id using a filter.
Args:
@@ -116,18 +112,16 @@ def handle( # pylint: disable=arguments-differ
You can also optionally specify the region in which to perform the
AMI lookup.
- """ # noqa
+ """
query, raw_args = cls.parse(value)
args = ArgsDataModel.parse_obj(raw_args)
ec2 = context.get_session(region=args.region).client("ec2")
- describe_args: Dict[str, Any] = {
+ describe_args: dict[str, Any] = {
"Filters": [
{"Name": key, "Values": val.split(",") if val else val}
for key, val in {
- k: v
- for k, v in raw_args.items()
- if k not in ArgsDataModel.__fields__
+ k: v for k, v in raw_args.items() if k not in ArgsDataModel.__fields__
}.items()
],
"Owners": args.owners,
diff --git a/runway/cfngin/lookups/handlers/awslambda.py b/runway/cfngin/lookups/handlers/awslambda.py
index 99cf925f1..682783422 100644
--- a/runway/cfngin/lookups/handlers/awslambda.py
+++ b/runway/cfngin/lookups/handlers/awslambda.py
@@ -8,22 +8,23 @@
The :attr:`~cfngin.hook.data_key` is then passed to the lookup as its input/query.
This allows the lookup to function during a ``runway plan``.
-""" # noqa
+"""
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, List, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, Final, Optional, Union, cast
from pydantic import ValidationError
from troposphere.awslambda import Code, Content
-from typing_extensions import Final, Literal
from ....lookups.handlers.base import LookupHandler
from ....utils import load_object_from_string
from ...exceptions import CfnginOnlyLookupError
if TYPE_CHECKING:
+ from typing_extensions import Literal
+
from ....config import CfnginConfig
from ....config.models.cfngin import CfnginHookDefinitionModel
from ....context import CfnginContext, RunwayContext
@@ -62,7 +63,6 @@ def get_deployment_package_data(
"""
# needs to be imported here to avoid cyclic imports for conditional code
# caused by import of runway.cfngin.actions.deploy in runway.cfngin.hooks.base
- # pylint: disable=import-outside-toplevel
from ...hooks.awslambda.models.responses import (
AwsLambdaHookDeployResponse as _AwsLambdaHookDeployResponse,
)
@@ -108,13 +108,11 @@ def get_required_hook_definition(
if not hooks_with_data_key:
raise ValueError(f"no hook definition found with data_key {data_key}")
if len(hooks_with_data_key) > 1:
- raise ValueError(
- f"more than one hook definition found with data_key {data_key}"
- )
+ raise ValueError(f"more than one hook definition found with data_key {data_key}")
return hooks_with_data_key.pop()
@classmethod
- def handle( # pylint: disable=arguments-differ
+ def handle(
cls,
value: str,
context: Union[CfnginContext, RunwayContext],
@@ -156,7 +154,6 @@ def init_hook_class(
"""
# needs to be imported here to avoid cyclic imports for conditional code
# caused by import of runway.cfngin.actions.deploy in runway.cfngin.hooks.base
- # pylint: disable=import-outside-toplevel
from ...hooks.awslambda.base_classes import AwsLambdaHook as _AwsLambdaHook
kls = load_object_from_string(hook_def.path)
@@ -177,7 +174,7 @@ class Code(LookupHandler):
TYPE_NAME: Final[Literal["awslambda.Code"]] = "awslambda.Code"
@classmethod
- def handle( # pylint: disable=arguments-differ
+ def handle(
cls,
value: str,
context: Union[CfnginContext, RunwayContext],
@@ -189,6 +186,8 @@ def handle( # pylint: disable=arguments-differ
Args:
value: Value to resolve.
context: The current context object.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Value that can be passed into CloudFormation property
@@ -209,7 +208,7 @@ class CodeSha256(LookupHandler):
TYPE_NAME: Final[Literal["awslambda.CodeSha256"]] = "awslambda.CodeSha256"
@classmethod
- def handle( # pylint: disable=arguments-differ
+ def handle(
cls,
value: str,
context: Union[CfnginContext, RunwayContext],
@@ -221,6 +220,8 @@ def handle( # pylint: disable=arguments-differ
Args:
value: Value to resolve.
context: The current context object.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Value that can be passed into CloudFormation property
@@ -237,18 +238,20 @@ class CompatibleArchitectures(LookupHandler):
)
@classmethod
- def handle( # pylint: disable=arguments-differ
+ def handle(
cls,
value: str,
context: Union[CfnginContext, RunwayContext],
*args: Any,
**kwargs: Any,
- ) -> Optional[List[str]]:
+ ) -> Optional[list[str]]:
"""Retrieve metadata for an AWS Lambda deployment package.
Args:
value: Value to resolve.
context: The current context object.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Value that can be passed into CloudFormation property
@@ -257,21 +260,17 @@ def handle( # pylint: disable=arguments-differ
"""
_query, lookup_args = cls.parse(value)
return cls.format_results(
- AwsLambdaLookup.handle(
- value, context, *args, **kwargs
- ).compatible_architectures,
+ AwsLambdaLookup.handle(value, context, *args, **kwargs).compatible_architectures,
**lookup_args,
)
class CompatibleRuntimes(LookupHandler):
"""Lookup for AwsLambdaHook responses."""
- TYPE_NAME: Final[Literal["awslambda.CompatibleRuntimes"]] = (
- "awslambda.CompatibleRuntimes"
- )
+ TYPE_NAME: Final[Literal["awslambda.CompatibleRuntimes"]] = "awslambda.CompatibleRuntimes"
@classmethod
- def handle( # pylint: disable=arguments-differ
+ def handle(
cls,
value: str,
context: Union[CfnginContext, RunwayContext],
@@ -283,6 +282,8 @@ def handle( # pylint: disable=arguments-differ
Args:
value: Value to resolve.
context: The current context object.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Value that can be passed into CloudFormation property
@@ -291,9 +292,7 @@ def handle( # pylint: disable=arguments-differ
"""
_query, lookup_args = cls.parse(value)
return cls.format_results(
- AwsLambdaLookup.handle(
- value, context, *args, **kwargs
- ).compatible_runtimes,
+ AwsLambdaLookup.handle(value, context, *args, **kwargs).compatible_runtimes,
**lookup_args,
)
@@ -303,7 +302,7 @@ class Content(LookupHandler):
TYPE_NAME: Final[Literal["awslambda.Content"]] = "awslambda.Content"
@classmethod
- def handle( # pylint: disable=arguments-differ
+ def handle(
cls,
value: str,
context: Union[CfnginContext, RunwayContext],
@@ -315,6 +314,8 @@ def handle( # pylint: disable=arguments-differ
Args:
value: Value to resolve.
context: The current context object.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Value that can be passed into CloudFormation property
@@ -335,7 +336,7 @@ class LicenseInfo(LookupHandler):
TYPE_NAME: Final[Literal["awslambda.LicenseInfo"]] = "awslambda.LicenseInfo"
@classmethod
- def handle( # pylint: disable=arguments-differ
+ def handle(
cls,
value: str,
context: Union[CfnginContext, RunwayContext],
@@ -347,6 +348,8 @@ def handle( # pylint: disable=arguments-differ
Args:
value: Value to resolve.
context: The current context object.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Value that can be passed into CloudFormation property
@@ -365,7 +368,7 @@ class Runtime(LookupHandler):
TYPE_NAME: Final[Literal["awslambda.Runtime"]] = "awslambda.Runtime"
@classmethod
- def handle( # pylint: disable=arguments-differ
+ def handle(
cls,
value: str,
context: Union[CfnginContext, RunwayContext],
@@ -377,6 +380,8 @@ def handle( # pylint: disable=arguments-differ
Args:
value: Value to resolve.
context: The current context object.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Value that can be passed into CloudFormation property
@@ -391,7 +396,7 @@ class S3Bucket(LookupHandler):
TYPE_NAME: Final[Literal["awslambda.S3Bucket"]] = "awslambda.S3Bucket"
@classmethod
- def handle( # pylint: disable=arguments-differ
+ def handle(
cls,
value: str,
context: Union[CfnginContext, RunwayContext],
@@ -403,6 +408,8 @@ def handle( # pylint: disable=arguments-differ
Args:
value: Value to resolve.
context: The current context object.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Value that can be passed into CloudFormation property
@@ -418,7 +425,7 @@ class S3Key(LookupHandler):
TYPE_NAME: Final[Literal["awslambda.S3Key"]] = "awslambda.S3Key"
@classmethod
- def handle( # pylint: disable=arguments-differ
+ def handle(
cls,
value: str,
context: Union[CfnginContext, RunwayContext],
@@ -430,6 +437,8 @@ def handle( # pylint: disable=arguments-differ
Args:
value: Value to resolve.
context: The current context object.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Value that can be passed into CloudFormation property
@@ -442,12 +451,10 @@ def handle( # pylint: disable=arguments-differ
class S3ObjectVersion(LookupHandler):
"""Lookup for AwsLambdaHook responses."""
- TYPE_NAME: Final[Literal["awslambda.S3ObjectVersion"]] = (
- "awslambda.S3ObjectVersion"
- )
+ TYPE_NAME: Final[Literal["awslambda.S3ObjectVersion"]] = "awslambda.S3ObjectVersion"
@classmethod
- def handle( # pylint: disable=arguments-differ
+ def handle(
cls,
value: str,
context: Union[CfnginContext, RunwayContext],
@@ -459,6 +466,8 @@ def handle( # pylint: disable=arguments-differ
Args:
value: Value to resolve.
context: The current context object.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
Returns:
Value that can be passed into CloudFormation property
@@ -466,6 +475,4 @@ def handle( # pylint: disable=arguments-differ
``AWS::Lambda::LayerVersion.Content.S3ObjectVersion``.
"""
- return AwsLambdaLookup.handle(
- value, context, *args, **kwargs
- ).object_version_id
+ return AwsLambdaLookup.handle(value, context, *args, **kwargs).object_version_id
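# Every awslambda.* lookup above reduces to the same delegation: resolve the
# AwsLambdaHook deploy response once via AwsLambdaLookup.handle(), then return
# a single field of it. A standalone stand-in (FakeResponse is illustrative;
# the field names mirror attributes referenced in this diff):
class FakeResponse:
    compatible_runtimes = ["python3.9"]
    object_version_id = "abc123"

def pick(response: FakeResponse, field: str) -> object:
    return getattr(response, field)

assert pick(FakeResponse(), "object_version_id") == "abc123"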
diff --git a/runway/cfngin/lookups/handlers/default.py b/runway/cfngin/lookups/handlers/default.py
index 5fed55eb1..0d55df1d5 100644
--- a/runway/cfngin/lookups/handlers/default.py
+++ b/runway/cfngin/lookups/handlers/default.py
@@ -3,13 +3,13 @@
# pyright: reportIncompatibleMethodOverride=none
from __future__ import annotations
-from typing import TYPE_CHECKING, Any, Optional
-
-from typing_extensions import Final, Literal
+from typing import TYPE_CHECKING, Any, Final, Optional
from ....lookups.handlers.base import LookupHandler
if TYPE_CHECKING:
+ from typing_extensions import Literal
+
from ....context import CfnginContext
@@ -20,9 +20,7 @@ class DefaultLookup(LookupHandler):
"""Name that the Lookup is registered as."""
@classmethod
- def handle( # pylint: disable=arguments-differ
- cls, value: str, context: Optional[CfnginContext] = None, **_: Any
- ) -> Any:
+ def handle(cls, value: str, context: Optional[CfnginContext] = None, **_: Any) -> Any:
"""Use a value from the environment or fall back to a default value.
Allows defaults to be set at the config file level.
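# A minimal re-implementation of the behavior DefaultLookup.handle documents
# above (illustrative; the real handler reads values from the CFNgin
# environment on the context object rather than a plain dict):
def default_lookup(value: str, environment: dict[str, str]) -> str:
    env_key, _, fallback = value.partition("::")
    return environment.get(env_key, fallback)

assert default_lookup("NAMESPACE::dev", {}) == "dev"
assert default_lookup("NAMESPACE::dev", {"NAMESPACE": "prod"}) == "prod"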
diff --git a/runway/cfngin/lookups/handlers/dynamodb.py b/runway/cfngin/lookups/handlers/dynamodb.py
index 61b6d4d57..3701b822b 100644
--- a/runway/cfngin/lookups/handlers/dynamodb.py
+++ b/runway/cfngin/lookups/handlers/dynamodb.py
@@ -4,10 +4,10 @@
from __future__ import annotations
import re
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, cast
+from typing import TYPE_CHECKING, Any, Final, Optional, cast
from botocore.exceptions import ClientError
-from typing_extensions import Final, Literal, TypedDict
+from typing_extensions import Literal, TypedDict
from ....lookups.handlers.base import LookupHandler
from ....utils import BaseModel
@@ -61,7 +61,7 @@ class QueryDataModel(BaseModel):
"""Name of the DynamoDB Table to query."""
@property
- def item_key(self) -> Dict[str, Dict[Literal["B", "N", "S"], Any]]:
+ def item_key(self) -> dict[str, dict[Literal["B", "N", "S"], Any]]:
"""Value to pass to boto3 ``.get_item()`` call as the ``Key`` argument.
Raises:
@@ -78,9 +78,9 @@ def item_key(self) -> Dict[str, Dict[Literal["B", "N", "S"], Any]]:
)
return {
self.partition_key: {
- cast(
- Literal["B", "N", "S"], match.groupdict("S")["data_type"]
- ): match.group("value")
+ cast(Literal["B", "N", "S"], match.groupdict("S")["data_type"]): match.group(
+ "value"
+ )
}
}
@@ -92,7 +92,7 @@ class DynamodbLookup(LookupHandler):
"""Name that the Lookup is registered as."""
@classmethod
- def parse(cls, value: str) -> Tuple[str, Dict[str, str]]:
+ def parse(cls, value: str) -> tuple[str, dict[str, str]]:
"""Parse the value passed to the lookup.
This overrides the default parsing to account for special requirements.
@@ -109,7 +109,7 @@ def parse(cls, value: str) -> Tuple[str, Dict[str, str]]:
"""
raw_value = read_value_from_path(value)
- args: Dict[str, str] = {}
+ args: dict[str, str] = {}
if "@" not in raw_value:
raise ValueError(
@@ -120,7 +120,7 @@ def parse(cls, value: str) -> Tuple[str, Dict[str, str]]:
if ":" in table_info:
args["region"], table_info = table_info.split(":", 1)
- return "@".join([table_info, table_keys]), args
+ return f"{table_info}@{table_keys}", args
@classmethod
def parse_query(cls, value: str) -> QueryDataModel:
@@ -139,9 +139,7 @@ def parse_query(cls, value: str) -> QueryDataModel:
return QueryDataModel.parse_obj(match.groupdict())
@classmethod
- def handle( # pylint: disable=arguments-differ
- cls, value: str, context: CfnginContext, *__args: Any, **__kwargs: Any
- ) -> Any:
+ def handle(cls, value: str, context: CfnginContext, *__args: Any, **__kwargs: Any) -> Any:
"""Get a value from a DynamoDB table.
Args:
@@ -169,22 +167,16 @@ def handle( # pylint: disable=arguments-differ
response = dynamodb.get_item(
TableName=query.table_name,
Key=query.item_key,
- ProjectionExpression=",".join(
- [query.partition_key, *key_dict["clean_table_keys"]]
- ),
+ ProjectionExpression=",".join([query.partition_key, *key_dict["clean_table_keys"]]),
)
except dynamodb.exceptions.ResourceNotFoundException as exc:
- raise ValueError(
- f"Can't find the DynamoDB table: {query.table_name}"
- ) from exc
+ raise ValueError(f"Can't find the DynamoDB table: {query.table_name}") from exc
except ClientError as exc:
if exc.response["Error"]["Code"] == "ValidationException":
raise ValueError(
f"No DynamoDB record matched the partition key: {query.partition_key}"
) from exc
- raise ValueError(
- f"The DynamoDB lookup '{value}' encountered an error: {exc}"
- ) from exc
+ raise ValueError(f"The DynamoDB lookup '{value}' encountered an error: {exc}") from exc
# find and return the key from the dynamo data returned
if "Item" in response:
return _get_val_from_ddb_data(response["Item"], key_dict["new_keys"])
@@ -196,11 +188,11 @@ def handle( # pylint: disable=arguments-differ
class ParsedLookupKey(TypedDict):
"""Return value of _lookup_key_parse."""
- clean_table_keys: List[str]
- new_keys: List[Dict[Literal["L", "M", "N", "S"], str]]
+ clean_table_keys: list[str]
+ new_keys: list[dict[Literal["L", "M", "N", "S"], str]]
-def _lookup_key_parse(table_keys: List[str]) -> ParsedLookupKey:
+def _lookup_key_parse(table_keys: list[str]) -> ParsedLookupKey:
"""Return the order in which the stacks should be executed.
Args:
@@ -217,8 +209,8 @@ def _lookup_key_parse(table_keys: List[str]) -> ParsedLookupKey:
# we need to parse the key lookup passed in
regex_matcher = r"\[([^\]]+)]"
valid_dynamodb_datatypes = ["L", "M", "N", "S"]
- clean_table_keys: List[str] = []
- new_keys: List[Dict[Literal["L", "M", "N", "S"], str]] = []
+ clean_table_keys: list[str] = []
+ new_keys: list[dict[Literal["L", "M", "N", "S"], str]] = []
for key in table_keys:
match = re.search(regex_matcher, key)
@@ -229,7 +221,7 @@ def _lookup_key_parse(table_keys: List[str]) -> ParsedLookupKey:
f"CFNgin does not support looking up the data type: {match.group(1)}"
)
match_val = cast(Literal["L", "M", "N", "S"], match.group(1))
- key = key.replace(match.group(0), "")
+ key = key.replace(match.group(0), "") # noqa: PLW2901
new_keys.append({match_val: key})
else:
new_keys.append({"S": key})
@@ -237,7 +229,7 @@ def _lookup_key_parse(table_keys: List[str]) -> ParsedLookupKey:
return {"new_keys": new_keys, "clean_table_keys": clean_table_keys}
-def _get_val_from_ddb_data(data: Dict[str, Any], keylist: List[Dict[str, str]]) -> Any:
+def _get_val_from_ddb_data(data: dict[str, Any], keylist: list[dict[str, str]]) -> Any:
"""Return the value of the lookup.
Args:
@@ -263,14 +255,14 @@ def _get_val_from_ddb_data(data: Dict[str, Any], keylist: List[Dict[str, str]])
# if type is list, convert it to a list and return
return _convert_ddb_list_to_list(data[cast(str, next_type)])
if next_type == "N":
- # TODO: handle various types of 'number' datatypes, (e.g. int, double)
+ # TODO (troyready): handle various types of 'number' datatypes (e.g. int, double)
# if a number, convert to an int and return
return int(data[cast(str, next_type)])
# else, just assume its a string and return
return str(data[cast(str, next_type)])
-def _convert_ddb_list_to_list(conversion_list: List[Dict[str, Any]]) -> List[Any]:
+def _convert_ddb_list_to_list(conversion_list: list[dict[str, Any]]) -> list[Any]:
"""Return a python list without the DynamoDB datatypes.
Args:
@@ -280,8 +272,4 @@ def _convert_ddb_list_to_list(conversion_list: List[Dict[str, Any]]) -> List[Any
Returns:
    A sanitized list without the datatypes.
"""
- ret_list: List[Any] = []
- for val in conversion_list:
- for v in val:
- ret_list.append(val[v])
- return ret_list
+ return [val[v] for val in conversion_list for v in val]
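# A worked example of the comprehension that replaced the loop above;
# _convert_ddb_list_to_list simply strips the DynamoDB type wrappers:
conversion_list = [{"S": "a"}, {"S": "b"}, {"N": "3"}]
assert [val[v] for val in conversion_list for v in val] == ["a", "b", "3"]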
diff --git a/runway/cfngin/lookups/handlers/envvar.py b/runway/cfngin/lookups/handlers/envvar.py
index a38d3251a..3c93ef447 100644
--- a/runway/cfngin/lookups/handlers/envvar.py
+++ b/runway/cfngin/lookups/handlers/envvar.py
@@ -3,9 +3,9 @@
# pyright: reportIncompatibleMethodOverride=none
import logging
import os
-from typing import Any
+from typing import Any, Final
-from typing_extensions import Final, Literal
+from typing_extensions import Literal
from ....lookups.handlers.base import LookupHandler
from ...utils import read_value_from_path
@@ -21,7 +21,7 @@ class EnvvarLookup(LookupHandler):
"""Name that the Lookup is registered as."""
@classmethod
- def handle(cls, value: str, **_: Any) -> str: # pylint: disable=arguments-differ
+ def handle(cls, value: str, **_: Any) -> str:
"""Retrieve an environment variable.
Args:
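# Minimal usage of the envvar lookup above (assuming runway is installed);
# the lookup value is simply the environment variable's name:
import os

from runway.cfngin.lookups.handlers.envvar import EnvvarLookup

os.environ["MY_VAR"] = "example"
assert EnvvarLookup.handle("MY_VAR") == "example"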
diff --git a/runway/cfngin/lookups/handlers/file.py b/runway/cfngin/lookups/handlers/file.py
index dcd73dfe0..b81b9b487 100644
--- a/runway/cfngin/lookups/handlers/file.py
+++ b/runway/cfngin/lookups/handlers/file.py
@@ -1,6 +1,5 @@
"""File lookup."""
-# pylint: disable=arguments-differ,no-self-argument
# pyright: reportIncompatibleMethodOverride=none
from __future__ import annotations
@@ -8,24 +7,27 @@
import collections.abc
import json
import re
-from typing import Any, Callable, Dict, List, Mapping, Sequence, Tuple, Union, overload
+from collections.abc import Mapping, Sequence
+from typing import TYPE_CHECKING, Any, Callable, Final, Union, overload
import yaml
from pydantic import validator
from troposphere import Base64, GenericHelperFn
-from typing_extensions import Final, Literal
from ....lookups.handlers.base import LookupHandler
from ....utils import BaseModel
from ...utils import read_value_from_path
+if TYPE_CHECKING:
+ from typing_extensions import Literal
+
_PARAMETER_PATTERN = re.compile(r"{{([::|\w]+)}}")
ParameterizedObjectTypeDef = Union[str, Mapping[str, Any], Sequence[Any], Any]
ParameterizedObjectReturnTypeDef = Union[
- Dict[str, "ParameterizedObjectReturnTypeDef"],
+ dict[str, "ParameterizedObjectReturnTypeDef"],
GenericHelperFn,
- List["ParameterizedObjectReturnTypeDef"],
+ list["ParameterizedObjectReturnTypeDef"],
]
@@ -36,7 +38,7 @@ class ArgsDataModel(BaseModel):
"""Codec that will be used to parse and/or manipulate the data."""
@validator("codec", allow_reuse=True)
- def _validate_supported_codec(cls, v: str) -> str:
+ def _validate_supported_codec(cls, v: str) -> str: # noqa: N805
"""Validate that the selected codec is supported."""
if v in CODECS:
return v
@@ -50,7 +52,7 @@ class FileLookup(LookupHandler):
"""Name that the Lookup is registered as."""
@classmethod
- def parse(cls, value: str) -> Tuple[str, Dict[str, str]]:
+ def parse(cls, value: str) -> tuple[str, dict[str, str]]:
"""Parse the value passed to the lookup.
This overrides the default parsing to account for special requirements.
@@ -65,13 +67,12 @@ def parse(cls, value: str) -> Tuple[str, Dict[str, str]]:
ValueError: The value provided does not match the expected regex.
"""
- args: Dict[str, str] = {}
+ args: dict[str, str] = {}
try:
args["codec"], data_or_path = value.split(":", 1)
except ValueError:
raise ValueError(
- f"Query '{value}' doesn't match regex: "
- rf"^(?P[{'|'.join(CODECS)}]:.+$)"
+ rf"Query '{value}' doesn't match regex: ^(?P[{'|'.join(CODECS)}]:.+$)"
) from None
return read_value_from_path(data_or_path), args
@@ -97,7 +98,7 @@ def _parameterize_string(raw: str) -> GenericHelperFn:
are found, and a composition of CloudFormation calls otherwise.
"""
- parts: List[Any] = []
+ parts: list[Any] = []
s_index = 0
for match in _PARAMETER_PATTERN.finditer(raw):
@@ -148,7 +149,7 @@ def _parameterize_obj(obj: Mapping[str, Any]) -> ParameterizedObjectReturnTypeDe
@overload
-def _parameterize_obj(obj: List[Any]) -> ParameterizedObjectReturnTypeDef: ...
+def _parameterize_obj(obj: list[Any]) -> ParameterizedObjectReturnTypeDef: ...
def _parameterize_obj(
@@ -189,7 +190,7 @@ def json_codec(raw: str, parameterized: bool = False) -> Any:
return _parameterize_obj(data) if parameterized else data
-CODECS: Dict[str, Callable[[str], Any]] = {
+CODECS: dict[str, Callable[[str], Any]] = {
"base64": lambda x: base64.b64encode(x.encode("utf8")).decode("utf-8"),
"json": lambda x: json_codec(x, parameterized=False),
"json-parameterized": lambda x: json_codec(x, parameterized=True),
diff --git a/runway/cfngin/lookups/handlers/hook_data.py b/runway/cfngin/lookups/handlers/hook_data.py
index 433573c5d..1736cc677 100644
--- a/runway/cfngin/lookups/handlers/hook_data.py
+++ b/runway/cfngin/lookups/handlers/hook_data.py
@@ -4,15 +4,16 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Final
from troposphere import BaseAWSObject
-from typing_extensions import Final, Literal
from ....lookups.handlers.base import LookupHandler
from ....utils import MutableMap
if TYPE_CHECKING:
+ from typing_extensions import Literal
+
from ....context import CfnginContext
LOGGER = logging.getLogger(__name__)
@@ -25,9 +26,7 @@ class HookDataLookup(LookupHandler):
"""Name that the Lookup is registered as."""
@classmethod
- def handle( # pylint: disable=arguments-differ
- cls, value: str, context: CfnginContext, **_: Any
- ) -> Any:
+ def handle(cls, value: str, context: CfnginContext, **_: Any) -> Any:
"""Return the data from ``hook_data``.
Args:
@@ -41,11 +40,7 @@ def handle( # pylint: disable=arguments-differ
result = hook_data.find(query, args.get("default"))
- if (
- isinstance(result, BaseAWSObject)
- and args.get("get")
- and not args.get("load")
- ):
+ if isinstance(result, BaseAWSObject) and args.get("get") and not args.get("load"):
args["load"] = "troposphere"
if not result:
diff --git a/runway/cfngin/lookups/handlers/kms.py b/runway/cfngin/lookups/handlers/kms.py
index e31699ffe..c9aef5e1c 100644
--- a/runway/cfngin/lookups/handlers/kms.py
+++ b/runway/cfngin/lookups/handlers/kms.py
@@ -5,15 +5,15 @@
import codecs
import logging
-from typing import TYPE_CHECKING, Any, BinaryIO, Dict, Tuple, Union, cast
-
-from typing_extensions import Final, Literal
+from typing import TYPE_CHECKING, Any, BinaryIO, Final, Union, cast
from ....lookups.handlers.base import LookupHandler
from ....utils import DOC_SITE
from ...utils import read_value_from_path
if TYPE_CHECKING:
+ from typing_extensions import Literal
+
from ....context import CfnginContext
LOGGER = logging.getLogger(__name__)
@@ -31,7 +31,7 @@ class KmsLookup(LookupHandler):
"""Name that the Lookup is registered as."""
@classmethod
- def legacy_parse(cls, value: str) -> Tuple[str, Dict[str, str]]:
+ def legacy_parse(cls, value: str) -> tuple[str, dict[str, str]]:
"""Retain support for legacy lookup syntax.
Format of value::
@@ -44,9 +44,7 @@ def legacy_parse(cls, value: str) -> Tuple[str, Dict[str, str]]:
return value, {"region": region}
@classmethod
- def handle( # pylint: disable=arguments-differ
- cls, value: str, context: CfnginContext, **_: Any
- ) -> str:
+ def handle(cls, value: str, context: CfnginContext, **_: Any) -> str:
r"""Decrypt the specified value with a master key in KMS.
Args:
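# Illustrative use of KmsLookup.legacy_parse above; the legacy
# "<region>@<ciphertext>" form splits the region prefix off into a lookup
# argument (the ciphertext here is a placeholder, and the exact split logic
# is assumed from the return statement shown in this hunk):
from runway.cfngin.lookups.handlers.kms import KmsLookup

value, args = KmsLookup.legacy_parse("us-east-1@AQECAHg")
# value == "AQECAHg"; args == {"region": "us-east-1"}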
diff --git a/runway/cfngin/lookups/handlers/output.py b/runway/cfngin/lookups/handlers/output.py
index 32d2ebf8b..655b054bd 100644
--- a/runway/cfngin/lookups/handlers/output.py
+++ b/runway/cfngin/lookups/handlers/output.py
@@ -5,9 +5,7 @@
import logging
import re
-from typing import TYPE_CHECKING, Any, Dict, NamedTuple, Set, Tuple
-
-from typing_extensions import Final, Literal
+from typing import TYPE_CHECKING, Any, Final, NamedTuple
from ....exceptions import OutputDoesNotExist
from ....lookups.handlers.base import LookupHandler
@@ -15,6 +13,8 @@
from ...exceptions import StackDoesNotExist
if TYPE_CHECKING:
+ from typing_extensions import Literal
+
from ....context import CfnginContext
from ....variables import VariableValue
@@ -40,7 +40,7 @@ class OutputLookup(LookupHandler):
"""Name that the Lookup is registered as."""
@classmethod
- def legacy_parse(cls, value: str) -> Tuple[OutputQuery, Dict[str, str]]:
+ def legacy_parse(cls, value: str) -> tuple[OutputQuery, dict[str, str]]:
"""Retain support for legacy lookup syntax.
Format of value:
@@ -51,9 +51,7 @@ def legacy_parse(cls, value: str) -> Tuple[OutputQuery, Dict[str, str]]:
return deconstruct(value), {}
@classmethod
- def handle( # pylint: disable=arguments-differ
- cls, value: str, context: CfnginContext, **_: Any
- ) -> str:
+ def handle(cls, value: str, context: CfnginContext, **_: Any) -> str:
"""Fetch an output from the designated stack.
Args:
@@ -82,9 +80,7 @@ def handle( # pylint: disable=arguments-differ
raise StackDoesNotExist(context.get_fqn(query.stack_name))
if "default" in args: # handle falsy default
- return cls.format_results(
- stack.outputs.get(query.output_name, args["default"]), **args
- )
+ return cls.format_results(stack.outputs.get(query.output_name, args["default"]), **args)
try:
return cls.format_results(stack.outputs[query.output_name], **args)
@@ -94,7 +90,7 @@ def handle( # pylint: disable=arguments-differ
) from None
@classmethod
- def dependencies(cls, lookup_query: VariableValue) -> Set[str]:
+ def dependencies(cls, lookup_query: VariableValue) -> set[str]:
"""Calculate any dependencies required to perform this lookup.
Note that lookup_query may not be (completely) resolved at this time.
@@ -127,7 +123,7 @@ def dependencies(cls, lookup_query: VariableValue) -> Set[str]:
return set()
-def deconstruct(value: str) -> OutputQuery: # TODO remove in next major release
+def deconstruct(value: str) -> OutputQuery: # TODO (kyle): remove in next major release
"""Deconstruct the value."""
try:
stack_name, output_name = value.split("::")
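# A worked example of deconstruct above, retained only for the legacy
# "<stack>::<output>" syntax per the TODO to remove it next major release:
from runway.cfngin.lookups.handlers.output import OutputQuery, deconstruct

assert deconstruct("my-stack::MyOutput") == OutputQuery("my-stack", "MyOutput")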
diff --git a/runway/cfngin/lookups/handlers/rxref.py b/runway/cfngin/lookups/handlers/rxref.py
index ab8285586..4eab30edd 100644
--- a/runway/cfngin/lookups/handlers/rxref.py
+++ b/runway/cfngin/lookups/handlers/rxref.py
@@ -4,9 +4,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Dict, Tuple
-
-from typing_extensions import Final, Literal
+from typing import TYPE_CHECKING, Any, Final
from ....lookups.handlers.base import LookupHandler
from ....lookups.handlers.cfn import CfnLookup
@@ -14,6 +12,8 @@
from .output import OutputQuery, deconstruct
if TYPE_CHECKING:
+ from typing_extensions import Literal
+
from ....context import CfnginContext
from ...providers.aws.default import Provider
@@ -32,7 +32,7 @@ class RxrefLookup(LookupHandler):
"""Name that the Lookup is registered as."""
@classmethod
- def legacy_parse(cls, value: str) -> Tuple[OutputQuery, Dict[str, str]]:
+ def legacy_parse(cls, value: str) -> tuple[OutputQuery, dict[str, str]]:
"""Retain support for legacy lookup syntax.
Format of value:
@@ -43,9 +43,7 @@ def legacy_parse(cls, value: str) -> Tuple[OutputQuery, Dict[str, str]]:
return deconstruct(value), {}
@classmethod
- def handle( # pylint: disable=arguments-differ
- cls, value: str, context: CfnginContext, provider: Provider, **_: Any
- ) -> Any:
+ def handle(cls, value: str, context: CfnginContext, provider: Provider, **_: Any) -> Any:
"""Fetch an output from the designated stack in the current namespace.
The ``output`` lookup supports fetching outputs from stacks created
diff --git a/runway/cfngin/lookups/handlers/split.py b/runway/cfngin/lookups/handlers/split.py
index 0b4b0e5c8..0fd0b9d1a 100644
--- a/runway/cfngin/lookups/handlers/split.py
+++ b/runway/cfngin/lookups/handlers/split.py
@@ -1,9 +1,9 @@
"""Split lookup."""
# pyright: reportIncompatibleMethodOverride=none
-from typing import Any, List
+from typing import Any, Final
-from typing_extensions import Final, Literal
+from typing_extensions import Literal
from ....lookups.handlers.base import LookupHandler
@@ -15,9 +15,7 @@ class SplitLookup(LookupHandler):
"""Name that the Lookup is registered as."""
@classmethod
- def handle( # pylint: disable=arguments-differ
- cls, value: str, **_: Any
- ) -> List[str]:
+ def handle(cls, value: str, **_: Any) -> list[str]:
"""Split the supplied string on the given delimiter, providing a list.
Args:
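# Illustrative use of the split lookup above; the "<delimiter>::<string>"
# value format is assumed from runway's documented ${split ...} syntax:
from runway.cfngin.lookups.handlers.split import SplitLookup

assert SplitLookup.handle(",::a,b,c") == ["a", "b", "c"]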
diff --git a/runway/cfngin/lookups/handlers/xref.py b/runway/cfngin/lookups/handlers/xref.py
index a5a7a1747..a057e68de 100644
--- a/runway/cfngin/lookups/handlers/xref.py
+++ b/runway/cfngin/lookups/handlers/xref.py
@@ -4,14 +4,14 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
-
-from typing_extensions import Final, Literal
+from typing import TYPE_CHECKING, Any, Final
from ....lookups.handlers.base import LookupHandler
from .output import deconstruct
if TYPE_CHECKING:
+ from typing_extensions import Literal
+
from ...providers.aws.default import Provider
LOGGER = logging.getLogger(__name__)
@@ -27,9 +27,7 @@ class XrefLookup(LookupHandler):
"""Name that the Lookup is registered as."""
@classmethod
- def handle( # pylint: disable=arguments-differ,arguments-renamed
- cls, value: str, provider: Provider, **_: Any
- ) -> str:
+ def handle(cls, value: str, provider: Provider, **_: Any) -> str:
"""Fetch an output from the designated, fully qualified stack.
The `output` handler supports fetching outputs from stacks created
diff --git a/runway/cfngin/lookups/registry.py b/runway/cfngin/lookups/registry.py
index eac3e2e34..bf72124a0 100644
--- a/runway/cfngin/lookups/registry.py
+++ b/runway/cfngin/lookups/registry.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import Dict, Type, Union, cast
+from typing import cast
from ...lookups.handlers.base import LookupHandler
from ...lookups.handlers.cfn import CfnLookup
@@ -25,13 +25,11 @@
from .handlers.split import SplitLookup
from .handlers.xref import XrefLookup
-CFNGIN_LOOKUP_HANDLERS: Dict[str, Type[LookupHandler]] = {}
+CFNGIN_LOOKUP_HANDLERS: dict[str, type[LookupHandler]] = {}
LOGGER = logging.getLogger(__name__)
-def register_lookup_handler(
- lookup_type: str, handler_or_path: Union[str, Type[LookupHandler]]
-) -> None:
+def register_lookup_handler(lookup_type: str, handler_or_path: str | type[LookupHandler]) -> None:
"""Register a lookup handler.
Args:
@@ -52,7 +50,7 @@ def register_lookup_handler(
CFNGIN_LOOKUP_HANDLERS[lookup_type] = handler
return
# Handler is a not a new-style handler
- except Exception: # pylint: disable=broad-except
+ except Exception: # noqa: BLE001
LOGGER.debug("failed to validate lookup handler", exc_info=True)
LOGGER.error(
'lookup "%s" uses an unsupported format; to learn how to write '
@@ -82,9 +80,7 @@ def unregister_lookup_handler(lookup_type: str) -> None:
register_lookup_handler(AmiLookup.TYPE_NAME, AmiLookup)
register_lookup_handler(AwsLambdaLookup.TYPE_NAME, AwsLambdaLookup)
register_lookup_handler(AwsLambdaLookup.Code.TYPE_NAME, AwsLambdaLookup.Code)
-register_lookup_handler(
- AwsLambdaLookup.CodeSha256.TYPE_NAME, AwsLambdaLookup.CodeSha256
-)
+register_lookup_handler(AwsLambdaLookup.CodeSha256.TYPE_NAME, AwsLambdaLookup.CodeSha256)
register_lookup_handler(
AwsLambdaLookup.CompatibleArchitectures.TYPE_NAME,
AwsLambdaLookup.CompatibleArchitectures,
@@ -93,15 +89,11 @@ def unregister_lookup_handler(lookup_type: str) -> None:
AwsLambdaLookup.CompatibleRuntimes.TYPE_NAME, AwsLambdaLookup.CompatibleRuntimes
)
register_lookup_handler(AwsLambdaLookup.Content.TYPE_NAME, AwsLambdaLookup.Content)
-register_lookup_handler(
- AwsLambdaLookup.LicenseInfo.TYPE_NAME, AwsLambdaLookup.LicenseInfo
-)
+register_lookup_handler(AwsLambdaLookup.LicenseInfo.TYPE_NAME, AwsLambdaLookup.LicenseInfo)
register_lookup_handler(AwsLambdaLookup.Runtime.TYPE_NAME, AwsLambdaLookup.Runtime)
register_lookup_handler(AwsLambdaLookup.S3Bucket.TYPE_NAME, AwsLambdaLookup.S3Bucket)
register_lookup_handler(AwsLambdaLookup.S3Key.TYPE_NAME, AwsLambdaLookup.S3Key)
-register_lookup_handler(
- AwsLambdaLookup.S3ObjectVersion.TYPE_NAME, AwsLambdaLookup.S3ObjectVersion
-)
+register_lookup_handler(AwsLambdaLookup.S3ObjectVersion.TYPE_NAME, AwsLambdaLookup.S3ObjectVersion)
register_lookup_handler(CfnLookup.TYPE_NAME, CfnLookup)
register_lookup_handler(DefaultLookup.TYPE_NAME, DefaultLookup)
register_lookup_handler(DynamodbLookup.TYPE_NAME, DynamodbLookup)
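# Hypothetical registration of an extra lookup alias via the registry above;
# handlers may be a class or an import-path string, and anything that is not
# a LookupHandler subclass is rejected with the error logged in this hunk:
from runway.cfngin.lookups.handlers.split import SplitLookup
from runway.cfngin.lookups.registry import register_lookup_handler

register_lookup_handler("alias.split", SplitLookup)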
diff --git a/runway/cfngin/plan.py b/runway/cfngin/plan.py
index c5f7c059e..af503563e 100644
--- a/runway/cfngin/plan.py
+++ b/runway/cfngin/plan.py
@@ -4,22 +4,16 @@
import json
import logging
-import os
import threading
import time
import uuid
+from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
Callable,
- Dict,
- List,
NoReturn,
- Optional,
- OrderedDict,
- Set,
TypeVar,
- Union,
overload,
)
@@ -41,6 +35,8 @@
from .utils import stack_template_key_name
if TYPE_CHECKING:
+ from collections import OrderedDict
+
from ..context import CfnginContext
from .providers.aws.default import Provider
from .status import Status
@@ -51,14 +47,14 @@
@overload
-def json_serial(obj: Set[_T]) -> List[_T]: ...
+def json_serial(obj: set[_T]) -> list[_T]: ...
@overload
-def json_serial(obj: Union[Dict[Any, Any], int, List[Any], str]) -> NoReturn: ...
+def json_serial(obj: dict[Any, Any] | int | list[Any] | str) -> NoReturn: ...
-def json_serial(obj: Union[Set[Any], Any]) -> Any:
+def json_serial(obj: set[Any] | Any) -> Any:
"""Serialize json.
Args:
@@ -82,10 +78,7 @@ def merge_graphs(graph1: Graph, graph2: Graph) -> Graph:
"""
merged_graph_dict = merge_dicts(graph1.to_dict().copy(), graph2.to_dict())
- steps = [
- graph1.steps.get(name, graph2.steps.get(name))
- for name in merged_graph_dict.keys()
- ]
+ steps = [graph1.steps.get(name, graph2.steps.get(name)) for name in merged_graph_dict]
return Graph.from_steps([step for step in steps if step])
@@ -103,19 +96,19 @@ class Step:
"""
- fn: Optional[Callable[..., Any]]
+ fn: Callable[..., Any] | None
last_updated: float
logger: PrefixAdaptor
stack: Stack
status: Status
- watch_func: Optional[Callable[..., Any]]
+ watch_func: Callable[..., Any] | None
def __init__(
self,
stack: Stack,
*,
- fn: Optional[Callable[..., Any]] = None,
- watch_func: Optional[Callable[..., Any]] = None,
+ fn: Callable[..., Any] | None = None,
+ watch_func: Callable[..., Any] | None = None,
) -> None:
"""Instantiate class.
@@ -139,9 +132,7 @@ def run(self) -> bool:
stop_watcher = threading.Event()
watcher = None
if self.watch_func:
- watcher = threading.Thread(
- target=self.watch_func, args=(self.stack, stop_watcher)
- )
+ watcher = threading.Thread(target=self.watch_func, args=(self.stack, stop_watcher))
watcher.start()
try:
@@ -161,7 +152,7 @@ def _run_once(self) -> Status:
status = self.fn(self.stack, status=self.status)
except CancelExecution:
status = SkippedStatus("canceled execution")
- except Exception as err: # pylint: disable=broad-except
+ except Exception as err:
LOGGER.exception(err)
status = FailedStatus(reason=str(err))
self.set_status(status)
@@ -177,12 +168,12 @@ def name(self) -> str:
return self.stack.name
@property
- def requires(self) -> Set[str]:
+ def requires(self) -> set[str]:
"""Return a list of step names this step depends on."""
return self.stack.requires
@property
- def required_by(self) -> Set[str]:
+ def required_by(self) -> set[str]:
"""Return a list of step names that depend on this step."""
return self.stack.required_by
@@ -265,9 +256,9 @@ def from_stack_name(
cls,
stack_name: str,
context: CfnginContext,
- requires: Optional[Union[List[str], Set[str]]] = None,
- fn: Optional[Callable[..., Status]] = None,
- watch_func: Optional[Callable[..., Any]] = None,
+ requires: list[str] | set[str] | None = None,
+ fn: Callable[..., Status] | None = None,
+ watch_func: Callable[..., Any] | None = None,
) -> Step:
"""Create a step using only a stack name.
@@ -282,25 +273,20 @@ def from_stack_name(
step action.
"""
- # pylint: disable=import-outside-toplevel
from runway.config.models.cfngin import CfnginStackDefinitionModel
- stack_def = CfnginStackDefinitionModel.construct(
- name=stack_name, requires=requires or []
- )
+ stack_def = CfnginStackDefinitionModel.construct(name=stack_name, requires=requires or [])
stack = Stack(stack_def, context)
return cls(stack, fn=fn, watch_func=watch_func)
@classmethod
def from_persistent_graph(
cls,
- graph_dict: Union[
- Dict[str, List[str]], Dict[str, Set[str]], OrderedDict[str, Set[str]]
- ],
+ graph_dict: dict[str, list[str]] | dict[str, set[str]] | OrderedDict[str, set[str]],
context: CfnginContext,
- fn: Optional[Callable[..., Status]] = None,
- watch_func: Optional[Callable[..., Any]] = None,
- ) -> List[Step]:
+ fn: Callable[..., Status] | None = None,
+ watch_func: Callable[..., Any] | None = None,
+ ) -> list[Step]:
"""Create a steps for a persistent graph dict.
Args:
@@ -347,11 +333,9 @@ class Graph:
"""
dag: DAG
- steps: Dict[str, Step]
+ steps: dict[str, Step]
- def __init__(
- self, steps: Optional[Dict[str, Step]] = None, dag: Optional[DAG] = None
- ) -> None:
+ def __init__(self, steps: dict[str, Step] | None = None, dag: DAG | None = None) -> None:
"""Instantiate class.
Args:
@@ -423,7 +407,7 @@ def add_step_if_not_exists(
except GraphError:
continue
- def add_steps(self, steps: List[Step]) -> None:
+ def add_steps(self, steps: list[Step]) -> None:
"""Add a list of steps.
Args:
@@ -501,7 +485,7 @@ def fn(step_name: str) -> Any:
return walker(self.dag, fn)
- def downstream(self, step_name: str) -> List[Step]:
+ def downstream(self, step_name: str) -> list[Step]:
"""Return the direct dependencies of the given step."""
return [self.steps[dep] for dep in self.dag.downstream(step_name)]
@@ -513,7 +497,7 @@ def transposed(self) -> Graph:
"""
return Graph(steps=self.steps, dag=self.dag.transpose())
- def filtered(self, step_names: List[str]) -> Graph:
+ def filtered(self, step_names: list[str]) -> Graph:
"""Return a "filtered" version of this graph.
Args:
@@ -522,16 +506,16 @@ def filtered(self, step_names: List[str]) -> Graph:
"""
return Graph(steps=self.steps, dag=self.dag.filter(step_names))
- def topological_sort(self) -> List[Step]:
+ def topological_sort(self) -> list[Step]:
"""Perform a topological sort of the underlying DAG."""
nodes = self.dag.topological_sort()
return [self.steps[step_name] for step_name in nodes]
- def to_dict(self) -> OrderedDict[str, Set[str]]:
+ def to_dict(self) -> OrderedDict[str, set[str]]:
"""Return the underlying DAG as a dictionary."""
return self.dag.graph
- def dumps(self, indent: Optional[int] = None) -> str:
+ def dumps(self, indent: int | None = None) -> str:
"""Output the graph as a json serialized string for storage.
Args:
@@ -543,9 +527,7 @@ def dumps(self, indent: Optional[int] = None) -> str:
@classmethod
def from_dict(
cls,
- graph_dict: Union[
- Dict[str, List[str]], Dict[str, Set[str]], OrderedDict[str, Set[str]]
- ],
+ graph_dict: dict[str, list[str]] | dict[str, set[str]] | OrderedDict[str, set[str]],
context: CfnginContext,
) -> Graph:
"""Create a Graph from a graph dict.
@@ -558,7 +540,7 @@ def from_dict(
return cls.from_steps(Step.from_persistent_graph(graph_dict, context))
@classmethod
- def from_steps(cls, steps: List[Step]) -> Graph:
+ def from_steps(cls, steps: list[Step]) -> Graph:
"""Create a Graph from Steps.
Args:
@@ -588,7 +570,7 @@ class Plan:
"""
- context: Optional[CfnginContext]
+ context: CfnginContext | None
description: str
graph: Graph
id: uuid.UUID
@@ -599,7 +581,7 @@ def __init__(
self,
description: str,
graph: Graph,
- context: Optional[CfnginContext] = None,
+ context: CfnginContext | None = None,
reverse: bool = False,
require_unlocked: bool = True,
) -> None:
@@ -627,11 +609,7 @@ def __init__(
self.locked = self.context.persistent_graph_locked
if self.context.stack_names:
- nodes = [
- target
- for target in self.context.stack_names
- if graph.steps.get(target)
- ]
+ nodes = [target for target in self.context.stack_names if graph.steps.get(target)]
graph = graph.filtered(nodes)
else:
@@ -639,7 +617,7 @@ def __init__(
self.graph = graph
- def outline(self, level: int = logging.INFO, message: str = ""):
+ def outline(self, level: int = logging.INFO, message: str = "") -> None:
"""Print an outline of the actions the plan is going to take.
The outline will represent the rough ordering of the steps that will be
@@ -669,7 +647,7 @@ def dump(
*,
directory: str,
context: CfnginContext,
- provider: Optional[Provider] = None,
+ provider: Provider | None = None,
) -> Any:
"""Output the rendered blueprint for all stacks in the plan.
@@ -680,30 +658,26 @@ def dump(
"""
LOGGER.info('dumping "%s"...', self.description)
- directory = os.path.expanduser(directory)
- if not os.path.exists(directory):
- os.makedirs(directory)
+ dir_path = Path(directory).expanduser()
+ dir_path.mkdir(exist_ok=True, parents=True)
def walk_func(step: Step) -> bool:
"""Walk function."""
step.stack.resolve(context=context, provider=provider)
blueprint = step.stack.blueprint
filename = stack_template_key_name(blueprint)
- path = os.path.join(directory, filename)
-
- blueprint_dir = os.path.dirname(path)
- if not os.path.exists(blueprint_dir):
- os.makedirs(blueprint_dir)
+ path = dir_path / filename
+ path.parent.mkdir(exist_ok=True, parents=True)
LOGGER.info('writing stack "%s" -> %s', step.name, path)
- with open(path, "w", encoding="utf-8") as _file:
+ with path.open("w", encoding="utf-8") as _file:
_file.write(blueprint.rendered)
return True
return self.graph.walk(walk, walk_func)
- def execute(self, *args: Any, **kwargs: Any):
+ def execute(self, *args: Any, **kwargs: Any) -> None:
"""Walk each step in the underlying graph.
Raises:
@@ -752,15 +726,12 @@ def walk_func(step: Step) -> bool:
return result
if step.completed or (
- step.skipped
- and step.status.reason == ("does not exist in cloudformation")
+ step.skipped and step.status.reason == ("does not exist in cloudformation")
):
fn_name = step.fn.__name__ if callable(step.fn) else step.fn
if fn_name == "_destroy_stack":
self.context.persistent_graph.pop(step)
- LOGGER.debug(
- "removed step '%s' from the persistent graph", step.name
- )
+ LOGGER.debug("removed step '%s' from the persistent graph", step.name)
elif fn_name == "_launch_stack":
self.context.persistent_graph.add_step_if_not_exists(
step, add_dependencies=True, add_dependents=True
@@ -779,17 +750,17 @@ def lock_code(self) -> str:
return str(self.id)
@property
- def steps(self) -> List[Step]:
+ def steps(self) -> list[Step]:
"""Return a list of all steps in the plan."""
steps = self.graph.topological_sort()
steps.reverse()
return steps
@property
- def step_names(self) -> List[str]:
+ def step_names(self) -> list[str]:
"""Return a list of all step names."""
return [step.name for step in self.steps]
- def keys(self) -> List[str]:
+ def keys(self) -> list[str]:
"""Return a list of all step names."""
return self.step_names
diff --git a/runway/cfngin/providers/aws/default.py b/runway/cfngin/providers/aws/default.py
index b91e707e5..763f71cf4 100644
--- a/runway/cfngin/providers/aws/default.py
+++ b/runway/cfngin/providers/aws/default.py
@@ -1,23 +1,19 @@
"""Default AWS Provider."""
-# pylint: disable=too-many-lines,too-many-public-methods
from __future__ import annotations
+import functools
import json
import logging
+import operator
import sys
import threading
import time
+from collections.abc import Iterable
from typing import (
TYPE_CHECKING,
Any,
Callable,
- Dict,
- Iterable,
- List,
- Optional,
- Set,
- Tuple,
Union,
cast,
)
@@ -88,7 +84,7 @@ def get_cloudformation_client(session: boto3.Session) -> CloudFormationClient:
return session.client("cloudformation", config=config)
-def get_output_dict(stack: StackTypeDef) -> Dict[str, str]:
+def get_output_dict(stack: StackTypeDef) -> dict[str, str]:
"""Return a dict of key/values for the outputs for a given CF stack.
Args:
@@ -112,11 +108,11 @@ def get_output_dict(stack: StackTypeDef) -> Dict[str, str]:
def s3_fallback(
fqn: str,
template: Template,
- parameters: List[ParameterTypeDef],
- tags: List[TagTypeDef],
+ parameters: list[ParameterTypeDef],
+ tags: list[TagTypeDef],
method: Callable[..., Any],
- change_set_name: Optional[str] = None,
- service_role: Optional[str] = None,
+ change_set_name: str | None = None,
+ service_role: str | None = None,
) -> Any:
"""Falling back to legacy CFNgin S3 bucket region for templates."""
LOGGER.warning(
@@ -132,7 +128,9 @@ def s3_fallback(
template_url = template.url
if template_url:
template_url_parsed = urlparse(template_url)
- template_url_parsed = template_url_parsed._replace(netloc="s3.amazonaws.com")
+ template_url_parsed = template_url_parsed._replace( # noqa: SLF001
+ netloc="s3.amazonaws.com"
+ )
template_url = urlunparse(template_url_parsed)
LOGGER.debug("using template_url: %s", template_url)
args = generate_cloudformation_args(
@@ -160,25 +158,21 @@ def get_change_set_name() -> str:
return f"change-set-{int(time.time())}"
-def requires_replacement(changeset: List[ChangeTypeDef]) -> List[ChangeTypeDef]:
+def requires_replacement(changeset: list[ChangeTypeDef]) -> list[ChangeTypeDef]:
"""Return the changes within the changeset that require replacement.
Args:
changeset: List of changes
"""
- return [
- r
- for r in changeset
- if r.get("ResourceChange", {}).get("Replacement", False) == "True"
- ]
+ return [r for r in changeset if r.get("ResourceChange", {}).get("Replacement", False) == "True"]
def output_full_changeset(
- full_changeset: Optional[List[ChangeTypeDef]] = None,
- params_diff: Optional[List[DictValue[Any, Any]]] = None,
- answer: Optional[str] = None,
- fqn: Optional[str] = None,
+ full_changeset: list[ChangeTypeDef] | None = None,
+ params_diff: list[DictValue[Any, Any]] | None = None,
+ answer: str | None = None,
+ fqn: str | None = None,
) -> None:
"""Optionally output full changeset.
@@ -211,10 +205,10 @@ def output_full_changeset(
def ask_for_approval(
- full_changeset: Optional[List[ChangeTypeDef]] = None,
- params_diff: Optional[List[DictValue[Any, Any]]] = None,
+ full_changeset: list[ChangeTypeDef] | None = None,
+ params_diff: list[DictValue[Any, Any]] | None = None,
include_verbose: bool = False,
- fqn: Optional[str] = None,
+ fqn: str | None = None,
) -> None:
"""Prompt the user for approval to execute a change set.
@@ -234,9 +228,7 @@ def ask_for_approval(
if include_verbose:
approval_options.append("v")
- approve = ui.ask(
- f"Execute the above changes? [{'/'.join(approval_options)}] "
- ).lower()
+ approve = ui.ask(f"Execute the above changes? [{'/'.join(approval_options)}] ").lower()
if include_verbose and approve == "v":
output_full_changeset(
@@ -254,8 +246,8 @@ def ask_for_approval(
def output_summary(
fqn: str,
action: str,
- changeset: List[ChangeTypeDef],
- params_diff: List[DictValue[Any, Any]],
+ changeset: list[ChangeTypeDef],
+ params_diff: list[DictValue[Any, Any]],
replacements_only: bool = False,
) -> None:
"""Log a summary of the changeset.
@@ -271,8 +263,8 @@ def output_summary(
replacements.
"""
- replacements: List[Any] = []
- changes: List[Any] = []
+ replacements: list[Any] = []
+ changes: list[Any] = []
for change in changeset:
resource = change.get("ResourceChange", {})
replacement = resource.get("Replacement", "") == "True"
@@ -299,12 +291,12 @@ def output_summary(
LOGGER.info("%s %s:\n%s", fqn, action, summary)
-def format_params_diff(params_diff: List[DictValue[Any, Any]]) -> str:
+def format_params_diff(params_diff: list[DictValue[Any, Any]]) -> str:
"""Wrap :func:`runway.cfngin.actions.diff.format_params_diff` for testing."""
return format_diff(params_diff)
-def summarize_params_diff(params_diff: List[DictValue[Any, Any]]) -> str:
+def summarize_params_diff(params_diff: list[DictValue[Any, Any]]) -> str:
"""Summarize parameter diff."""
summary = ""
@@ -366,15 +358,13 @@ def create_change_set(
cfn_client: CloudFormationClient,
fqn: str,
template: Template,
- parameters: List[ParameterTypeDef],
- tags: List[TagTypeDef],
+ parameters: list[ParameterTypeDef],
+ tags: list[TagTypeDef],
change_set_type: str = "UPDATE",
- service_role: Optional[str] = None,
-) -> Tuple[List[ChangeTypeDef], str]:
+ service_role: str | None = None,
+) -> tuple[list[ChangeTypeDef], str]:
"""Create CloudFormation change set."""
- LOGGER.debug(
- "attempting to create change set of type %s for stack: %s", change_set_type, fqn
- )
+ LOGGER.debug("attempting to create change set of type %s for stack: %s", change_set_type, fqn)
args = generate_cloudformation_args(
fqn,
parameters,
@@ -410,20 +400,16 @@ def create_change_set(
"didn't contain changes" in status_reason
or "No updates are to be performed" in status_reason
):
- LOGGER.debug(
- "%s:stack did not change; not updating and removing changeset", fqn
- )
+ LOGGER.debug("%s:stack did not change; not updating and removing changeset", fqn)
cfn_client.delete_change_set(ChangeSetName=change_set_id)
- raise exceptions.StackDidNotChange()
+ raise exceptions.StackDidNotChange
LOGGER.warning(
"got strange status, '%s' for changeset '%s'; not deleting for "
"further investigation - you will need to delete the changeset manually",
status,
change_set_id,
)
- raise exceptions.UnhandledChangeSetStatus(
- fqn, change_set_id, status, status_reason
- )
+ raise exceptions.UnhandledChangeSetStatus(fqn, change_set_id, status, status_reason)
execution_status = response["ExecutionStatus"]
if execution_status != "AVAILABLE":
@@ -433,7 +419,7 @@ def create_change_set(
return changes, change_set_id
-def check_tags_contain(actual: List[TagTypeDef], expected: List[TagTypeDef]) -> bool:
+def check_tags_contain(actual: list[TagTypeDef], expected: list[TagTypeDef]) -> bool:
"""Check if a set of AWS resource tags is contained in another.
Every tag key in ``expected`` must be present in ``actual``, and have the
@@ -455,15 +441,15 @@ def check_tags_contain(actual: List[TagTypeDef], expected: List[TagTypeDef]) ->
def generate_cloudformation_args(
stack_name: str,
- parameters: List[ParameterTypeDef],
- tags: List[TagTypeDef],
+ parameters: list[ParameterTypeDef],
+ tags: list[TagTypeDef],
template: Template,
- capabilities: Optional[List[str]] = None,
- change_set_type: Optional[str] = None,
- service_role: Optional[str] = None,
- stack_policy: Optional[Template] = None,
- change_set_name: Optional[str] = None,
-) -> Dict[str, Any]:
+ capabilities: list[str] | None = None,
+ change_set_type: str | None = None,
+ service_role: str | None = None,
+ stack_policy: Template | None = None,
+ change_set_name: str | None = None,
+) -> dict[str, Any]:
"""Generate the args for common CloudFormation API interactions.
This is used for ``create_stack``/``update_stack``/``create_change_set``
@@ -505,9 +491,7 @@ def generate_cloudformation_args(
elif template.body:
args["TemplateBody"] = template.body
else:
- raise ValueError(
- "either template.body or template.url is required; neither were provided"
- )
+ raise ValueError("either template.body or template.url is required; neither were provided")
# When creating args for CreateChangeSet, don't include the stack policy,
# since ChangeSets don't support it.
@@ -518,15 +502,15 @@ def generate_cloudformation_args(
def generate_stack_policy_args(
- stack_policy: Optional[Template] = None,
-) -> Dict[str, str]:
+ stack_policy: Template | None = None,
+) -> dict[str, str]:
"""Convert a stack policy object into keyword args.
Args:
stack_policy: A template object representing a stack policy.
"""
- args: Dict[str, str] = {}
+ args: dict[str, str] = {}
if stack_policy:
LOGGER.debug("stack has a stack policy")
if stack_policy.url:
@@ -544,21 +528,19 @@ def generate_stack_policy_args(
class ProviderBuilder:
"""Implements a Memorized ProviderBuilder for the AWS provider."""
- kwargs: Dict[str, Any]
+ kwargs: dict[str, Any]
lock: threading.Lock
- providers: Dict[str, Provider]
- region: Optional[str]
+ providers: dict[str, Provider]
+ region: str | None
- def __init__(self, *, region: Optional[str] = None, **kwargs: Any) -> None:
+ def __init__(self, *, region: str | None = None, **kwargs: Any) -> None:
"""Instantiate class."""
self.region = region
self.kwargs = kwargs
self.providers = {}
self.lock = threading.Lock()
- def build(
- self, *, profile: Optional[str] = None, region: Optional[str] = None
- ) -> Provider:
+ def build(self, *, profile: str | None = None, region: str | None = None) -> Provider:
"""Get or create the provider for the given region and profile."""
with self.lock:
# memoization lookup key derived from region + profile.
@@ -567,9 +549,7 @@ def build(
# assume provider is in provider dictionary.
provider = self.providers[key]
except KeyError:
- LOGGER.debug(
- "missed memorized lookup (%s); creating new AWS provider", key
- )
+ LOGGER.debug("missed memoized lookup (%s); creating new AWS provider", key)
if not region:
region = self.region
# memoize the result for later.
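[Editor's note] The build() hunk above is the memoization path the class docstring promises: one Provider per region/profile pair, created under a lock. A standalone sketch of the pattern, with illustrative names only:

from __future__ import annotations

import threading


class MemoizedBuilder:
    def __init__(self) -> None:
        self._lock = threading.Lock()
        self._cache: dict[str, object] = {}

    def build(self, profile: str | None = None, region: str | None = None) -> object:
        key = f"{region}-{profile}"
        with self._lock:
            if key not in self._cache:  # miss: construct and memoize
                self._cache[key] = object()  # stand-in for Provider(...)
            return self._cache[key]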
@@ -627,9 +607,9 @@ class Provider(BaseProvider):
cloudformation: CloudFormationClient
interactive: bool
recreate_failed: bool
- region: Optional[str]
+ region: str | None
replacements_only: bool
- service_role: Optional[str]
+ service_role: str | None
def __init__(
self,
@@ -637,12 +617,12 @@ def __init__(
*,
interactive: bool = False,
recreate_failed: bool = False,
- region: Optional[str] = None,
+ region: str | None = None,
replacements_only: bool = False,
- service_role: Optional[str] = None,
- ):
+ service_role: str | None = None,
+ ) -> None:
"""Instantiate class."""
- self._outputs: Dict[str, Dict[str, str]] = {}
+ self._outputs: dict[str, dict[str, str]] = {}
self.cloudformation = get_cloudformation_client(session)
self.interactive = interactive
self.recreate_failed = interactive or recreate_failed
@@ -654,13 +634,11 @@ def __init__(
def get_stack(self, stack_name: str, *_args: Any, **_kwargs: Any) -> StackTypeDef:
"""Get stack."""
try:
- return self.cloudformation.describe_stacks(StackName=stack_name)["Stacks"][
- 0
- ]
+ return self.cloudformation.describe_stacks(StackName=stack_name)["Stacks"][0]
except botocore.exceptions.ClientError as err:
if "does not exist" not in str(err):
raise
- raise exceptions.StackDoesNotExist(stack_name)
+ raise exceptions.StackDoesNotExist(stack_name) from None
@staticmethod
def get_stack_status(stack: StackTypeDef, *_args: Any, **_kwargs: Any) -> str:
@@ -668,7 +646,7 @@ def get_stack_status(stack: StackTypeDef, *_args: Any, **_kwargs: Any) -> str:
return stack["StackStatus"]
@staticmethod
- def get_stack_status_reason(stack: StackTypeDef) -> Optional[str]:
+ def get_stack_status_reason(stack: StackTypeDef) -> str | None:
"""Get stack status reason."""
return stack.get("StackStatusReason")
@@ -712,9 +690,9 @@ def tail_stack(
self,
stack: Stack,
cancel: threading.Event,
- action: Optional[str] = None,
- log_func: Optional[Callable[[StackEventTypeDef], None]] = None,
- retries: Optional[int] = None,
+ action: str | None = None,
+ log_func: Callable[[StackEventTypeDef], None] | None = None,
+ retries: int | None = None,
) -> None:
"""Tail the events of a stack."""
@@ -740,19 +718,13 @@ def _log_func(event: StackEventTypeDef) -> None:
while True:
attempts += 1
try:
- self.tail(
- stack.fqn, cancel=cancel, log_func=log_func, include_initial=False
- )
+ self.tail(stack.fqn, cancel=cancel, log_func=log_func, include_initial=False)
break
except botocore.exceptions.ClientError as err:
if "does not exist" in str(err):
- LOGGER.debug(
- "%s:unable to tail stack; it does not exist", stack.fqn
- )
+ LOGGER.debug("%s:unable to tail stack; it does not exist", stack.fqn)
if action == "destroy":
- LOGGER.debug(
- "%s:stack was deleted before it could be tailed", stack.fqn
- )
+ LOGGER.debug("%s:stack was deleted before it could be tailed", stack.fqn)
return
if attempts < retries:
# stack might be in the process of launching, wait for a
@@ -768,7 +740,7 @@ def _tail_print(event: StackEventTypeDef) -> None:
f'{event.get("ResourceStatus")} {event.get("ResourceType")} {event.get("EventId")}'
)
- def get_delete_failed_status_reason(self, stack_name: str) -> Optional[str]:
+ def get_delete_failed_status_reason(self, stack_name: str) -> str | None:
"""Process events and return latest delete failed reason.
Args:
@@ -778,17 +750,14 @@ def get_delete_failed_status_reason(self, stack_name: str) -> Optional[str]:
Reason for the Stack's DELETE_FAILED status if one can be found.
"""
- event: Union[Dict[str, str], StackEventTypeDef] = (
- self.get_event_by_resource_status(
- stack_name, "DELETE_FAILED", chronological=True
- )
- or {}
+ event: Union[dict[str, str], StackEventTypeDef] = (
+ self.get_event_by_resource_status(stack_name, "DELETE_FAILED", chronological=True) or {}
)
return event.get("ResourceStatusReason")
def get_event_by_resource_status(
self, stack_name: str, status: str, *, chronological: bool = True
- ) -> Optional[StackEventTypeDef]:
+ ) -> StackEventTypeDef | None:
"""Get Stack Event of a given set of resource status.
Args:
@@ -815,7 +784,7 @@ def get_events(
) -> Iterable[StackEventTypeDef]:
"""Get the events in batches and return in chronological order."""
next_token = None
- event_list: List[List[StackEventTypeDef]] = []
+ event_list: list[list[StackEventTypeDef]] = []
while True:
if next_token is not None:
events = self.cloudformation.describe_stack_events(
@@ -831,13 +800,11 @@ def get_events(
if chronological:
return cast(
Iterable["StackEventTypeDef"],
- reversed(
- cast(List["StackEventTypeDef"], sum(event_list, [])) # type: ignore
- ),
+ reversed(cast("list[StackEventTypeDef]", functools.reduce(operator.iadd, event_list, []))), # type: ignore
)
- return cast(Iterable["StackEventTypeDef"], sum(event_list, [])) # type: ignore
+ return cast(Iterable["StackEventTypeDef"], functools.reduce(operator.iadd, event_list, [])) # type: ignore
- def get_rollback_status_reason(self, stack_name: str) -> Optional[str]:
+ def get_rollback_status_reason(self, stack_name: str) -> str | None:
"""Process events and returns latest roll back reason.
Args:
@@ -847,7 +814,7 @@ def get_rollback_status_reason(self, stack_name: str) -> Optional[str]:
Reason for the Stack's rollback status if one can be found.
"""
- event: Union[Dict[str, str], StackEventTypeDef] = (
+ event: Union[dict[str, str], StackEventTypeDef] = (
self.get_event_by_resource_status(
stack_name, "UPDATE_ROLLBACK_IN_PROGRESS", chronological=False
)
@@ -869,7 +836,7 @@ def tail(
"""Show and then tail the event log."""
# First dump the full list of events in chronological order and keep
# track of the events we've seen already
- seen: Set[str] = set()
+ seen: set[str] = set()
initial_events = self.get_events(stack_name)
for event in initial_events:
if include_initial:
@@ -891,7 +858,7 @@ def destroy_stack(
stack: StackTypeDef,
*,
action: str = "destroy",
- approval: Optional[str] = None,
+ approval: str | None = None,
force_interactive: bool = False,
**kwargs: Any,
) -> None:
@@ -902,15 +869,14 @@ def destroy_stack(
action: Name of the action being executed. This impacts the log message used.
approval: Response to approval prompt.
force_interactive: Always ask for approval.
+ **kwargs: Arbitrary keyword arguments.
"""
fqn = self.get_stack_name(stack)
LOGGER.debug("%s:attempting to delete stack", fqn)
if action == "deploy":
- LOGGER.info(
- "%s:removed from the CFNgin config file; it is being destroyed", fqn
- )
+ LOGGER.info("%s:removed from the CFNgin config file; it is being destroyed", fqn)
destroy_method = self.select_destroy_method(force_interactive)
return destroy_method(fqn=fqn, action=action, approval=approval, **kwargs)
@@ -919,13 +885,13 @@ def create_stack(
self,
fqn: str,
template: Template,
- parameters: List[ParameterTypeDef],
- tags: List[TagTypeDef],
+ parameters: list[ParameterTypeDef],
+ tags: list[TagTypeDef],
*,
force_change_set: bool = False,
- stack_policy: Optional[Template] = None,
+ stack_policy: Template | None = None,
termination_protection: bool = False,
- timeout: Optional[int] = None,
+ timeout: int | None = None,
**kwargs: Any,
) -> None:
"""Create a new Cloudformation stack.
@@ -943,14 +909,13 @@ def create_stack(
protection.
timeout: The amount of time that can pass before the stack status becomes
``CREATE_FAILED``.
+ **kwargs: Arbitrary keyword arguments.
"""
LOGGER.debug(
"attempting to create stack %s: %s",
fqn,
- json.dumps(
- {"parameters": parameters, "tags": tags, "template_url": template.url}
- ),
+ json.dumps({"parameters": parameters, "tags": tags, "template_url": template.url}),
)
if not template.url:
LOGGER.debug("no template url; uploading template directly")
@@ -988,8 +953,7 @@ def create_stack(
self.cloudformation.create_stack(**args)
except botocore.exceptions.ClientError as err:
if err.response["Error"]["Message"] == (
- "TemplateURL must reference a valid S3 object to which you "
- "have access."
+ "TemplateURL must reference a valid S3 object to which you have access."
):
s3_fallback(
fqn,
@@ -1022,9 +986,7 @@ def select_update_method(
return self.noninteractive_changeset_update
return self.default_update_stack
- def prepare_stack_for_update(
- self, stack: StackTypeDef, tags: List[TagTypeDef]
- ) -> bool:
+ def prepare_stack_for_update(self, stack: StackTypeDef, tags: list[TagTypeDef]) -> bool:
"""Prepare a stack for updating.
It may involve deleting the stack if it has failed its initial
@@ -1076,8 +1038,7 @@ def prepare_stack_for_update(
raise exceptions.StackUpdateBadStatus(
stack_name,
stack_status,
- "Tags differ from current configuration, possibly not created "
- "with CFNgin",
+ "Tags differ from current configuration, possibly not created with CFNgin",
)
if self.interactive:
@@ -1100,12 +1061,12 @@ def update_stack(
self,
fqn: str,
template: Template,
- old_parameters: List[ParameterTypeDef],
- parameters: List[ParameterTypeDef],
- tags: List[TagTypeDef],
+ old_parameters: list[ParameterTypeDef],
+ parameters: list[ParameterTypeDef],
+ tags: list[TagTypeDef],
force_interactive: bool = False,
force_change_set: bool = False,
- stack_policy: Optional[Template] = None,
+ stack_policy: Template | None = None,
termination_protection: bool = False,
**kwargs: Any,
) -> None:
@@ -1128,14 +1089,13 @@ def update_stack(
executed with a change set.
stack_policy: A template object representing a stack policy.
termination_protection: End state of the stack's termination protection.
+ **kwargs: Arbitrary keyword arguments.
"""
LOGGER.debug(
"attempting to update stack %s: %s",
fqn,
- json.dumps(
- {"parameters": parameters, "tags": tags, "template_url": template.url}
- ),
+ json.dumps({"parameters": parameters, "tags": tags, "template_url": template.url}),
)
if not template.url:
LOGGER.debug("no template url; uploading template directly")
@@ -1152,9 +1112,7 @@ def update_stack(
**kwargs,
)
- def update_termination_protection(
- self, fqn: str, termination_protection: bool
- ) -> None:
+ def update_termination_protection(self, fqn: str, termination_protection: bool) -> None:
"""Update a Stack's termination protection if needed.
Runs before the normal stack update process.
@@ -1177,7 +1135,7 @@ def update_termination_protection(
)
def deal_with_changeset_stack_policy(
- self, fqn: str, stack_policy: Optional[Template] = None
+ self, fqn: str, stack_policy: Template | None = None
) -> None:
"""Set a stack policy when using changesets.
@@ -1197,13 +1155,14 @@ def deal_with_changeset_stack_policy(
self.cloudformation.set_stack_policy(**kwargs)
def interactive_destroy_stack(
- self, fqn: str, approval: Optional[str] = None, **kwargs: Any
+ self, fqn: str, approval: str | None = None, **kwargs: Any
) -> None:
"""Delete a CloudFormation stack in interactive mode.
Args:
fqn: A fully qualified stack name.
approval: Response to approval prompt.
+ **kwargs: Arbitrary keyword arguments.
"""
LOGGER.debug("%s:using interactive provider mode", fqn)
@@ -1242,10 +1201,10 @@ def interactive_update_stack(
self,
fqn: str,
template: Template,
- old_parameters: List[ParameterTypeDef],
- parameters: List[ParameterTypeDef],
+ old_parameters: list[ParameterTypeDef],
+ parameters: list[ParameterTypeDef],
stack_policy: Template,
- tags: List[TagTypeDef],
+ tags: list[TagTypeDef],
) -> None:
"""Update a Cloudformation stack in interactive mode.
@@ -1279,9 +1238,7 @@ def interactive_update_stack(
if "ParameterValue" in x
else {
"ParameterKey": x["ParameterKey"], # type: ignore
- "ParameterValue": old_parameters_as_dict[
- x["ParameterKey"] # type: ignore
- ],
+ "ParameterValue": old_parameters_as_dict[x["ParameterKey"]], # type: ignore
}
)
for x in parameters
@@ -1328,14 +1285,14 @@ def noninteractive_destroy_stack(self, fqn: str, **_kwargs: Any) -> None:
self.cloudformation.delete_stack(**args)
- def noninteractive_changeset_update( # pylint: disable=unused-argument
+ def noninteractive_changeset_update(
self,
fqn: str,
template: Template,
- old_parameters: List[ParameterTypeDef],
- parameters: List[ParameterTypeDef],
- stack_policy: Optional[Template],
- tags: List[TagTypeDef],
+ old_parameters: list[ParameterTypeDef], # noqa: ARG002
+ parameters: list[ParameterTypeDef],
+ stack_policy: Template | None,
+ tags: list[TagTypeDef],
) -> None:
"""Update a Cloudformation stack using a change set.
@@ -1383,14 +1340,14 @@ def select_destroy_method(self, force_interactive: bool) -> Callable[..., None]:
return self.interactive_destroy_stack
return self.noninteractive_destroy_stack
- def default_update_stack( # pylint: disable=unused-argument
+ def default_update_stack(
self,
fqn: str,
template: Template,
- old_parameters: List[ParameterTypeDef],
- parameters: List[ParameterTypeDef],
- tags: List[TagTypeDef],
- stack_policy: Optional[Template] = None,
+ old_parameters: list[ParameterTypeDef], # noqa: ARG002
+ parameters: list[ParameterTypeDef],
+ tags: list[TagTypeDef],
+ stack_policy: Template | None = None,
) -> None:
"""Update a Cloudformation stack in default mode.
@@ -1421,7 +1378,7 @@ def default_update_stack( # pylint: disable=unused-argument
except botocore.exceptions.ClientError as err:
if "No updates are to be performed." in str(err):
LOGGER.debug("%s:stack did not change; not updating", fqn)
- raise exceptions.StackDidNotChange
+ raise exceptions.StackDidNotChange from None
if err.response["Error"]["Message"] == (
"TemplateURL must reference a valid S3 object to which you have access."
):
@@ -1441,13 +1398,11 @@ def get_stack_name(stack: StackTypeDef) -> str:
return stack["StackName"]
@staticmethod
- def get_stack_tags(stack: StackTypeDef) -> List[TagTypeDef]:
+ def get_stack_tags(stack: StackTypeDef) -> list[TagTypeDef]:
"""Get stack tags."""
return stack.get("Tags", [])
- def get_outputs(
- self, stack_name: str, *_args: Any, **_kwargs: Any
- ) -> Dict[str, str]:
+ def get_outputs(self, stack_name: str, *_args: Any, **_kwargs: Any) -> dict[str, str]:
"""Get stack outputs."""
if not self._outputs.get(stack_name):
stack = self.get_stack(stack_name)
@@ -1455,24 +1410,20 @@ def get_outputs(
return self._outputs[stack_name]
@staticmethod
- def get_output_dict(stack: StackTypeDef) -> Dict[str, str]:
+ def get_output_dict(stack: StackTypeDef) -> dict[str, str]:
"""Get stack outputs dict."""
return get_output_dict(stack)
- def get_stack_info(
- self, stack: StackTypeDef
- ) -> Tuple[str, Dict[str, Union[List[str], str]]]:
+ def get_stack_info(self, stack: StackTypeDef) -> tuple[str, dict[str, Union[list[str], str]]]:
"""Get the template and parameters of the stack currently in AWS."""
stack_name = stack.get("StackId", "None")
try:
- template = self.cloudformation.get_template(StackName=stack_name)[
- "TemplateBody"
- ]
+ template = self.cloudformation.get_template(StackName=stack_name)["TemplateBody"]
except botocore.exceptions.ClientError as err:
if "does not exist" not in str(err):
raise
- raise exceptions.StackDoesNotExist(stack_name)
+ raise exceptions.StackDoesNotExist(stack_name) from None
parameters = self.params_as_dict(stack.get("Parameters", []))
@@ -1482,13 +1433,13 @@ def get_stack_info(
return json.dumps(template, cls=JsonEncoder), parameters
- def get_stack_changes(
+ def get_stack_changes( # noqa: C901, PLR0912
self,
stack: Stack,
template: Template,
- parameters: List[ParameterTypeDef],
- tags: List[TagTypeDef],
- ) -> Dict[str, str]:
+ parameters: list[ParameterTypeDef],
+ tags: list[TagTypeDef],
+ ) -> dict[str, str]:
"""Get the changes from a ChangeSet.
Args:
@@ -1509,12 +1460,10 @@ def get_stack_changes(
if self.get_stack_status(stack_details) == self.REVIEW_STATUS:
raise exceptions.StackDoesNotExist(stack.fqn)
old_template_raw, old_params = self.get_stack_info(stack_details)
- old_template: Dict[str, Any] = parse_cloudformation_template(
- old_template_raw
- )
+ old_template: dict[str, Any] = parse_cloudformation_template(old_template_raw)
change_type = "UPDATE"
except exceptions.StackDoesNotExist:
- old_params: Dict[str, Union[List[str], str]] = {}
+ old_params: dict[str, Union[list[str], str]] = {}
old_template = {}
change_type = "CREATE"
@@ -1569,7 +1518,7 @@ def get_stack_changes(
self.get_outputs(stack.fqn)
# infer which outputs may have changed
- refs_to_invalidate: List[str] = []
+ refs_to_invalidate: list[str] = []
for change in changes:
resc_change = change.get("ResourceChange", {})
if resc_change.get("Type") == "Add":
@@ -1629,8 +1578,8 @@ def get_stack_changes(
@staticmethod
def params_as_dict(
- parameters_list: List[ParameterTypeDef],
- ) -> Dict[str, Union[List[str], str]]:
+ parameters_list: list[ParameterTypeDef],
+ ) -> dict[str, Union[list[str], str]]:
"""Parameters as dict."""
return {
param["ParameterKey"]: param["ParameterValue"] # type: ignore
diff --git a/runway/cfngin/providers/base.py b/runway/cfngin/providers/base.py
index 24b4d07eb..7007365bc 100644
--- a/runway/cfngin/providers/base.py
+++ b/runway/cfngin/providers/base.py
@@ -1,7 +1,8 @@
"""Provider base class."""
-# pylint: disable=unused-argument
-from typing import Any, Optional
+from __future__ import annotations
+
+from typing import Any
def not_implemented(method: str) -> None:
@@ -12,7 +13,7 @@ def not_implemented(method: str) -> None:
class BaseProviderBuilder:
"""ProviderBuilder base class."""
- def build(self, region: Optional[str] = None) -> Any:
+ def build(self, region: str | None = None) -> Any: # noqa: ARG002
"""Abstract method."""
not_implemented("build")
@@ -20,11 +21,11 @@ def build(self, region: Optional[str] = None) -> Any:
class BaseProvider:
"""Provider base class."""
- def get_stack(self, stack_name: str, *args: Any, **kwargs: Any) -> Any:
+ def get_stack(self, stack_name: str, *_args: Any, **_kwargs: Any) -> Any: # noqa: ARG002
"""Abstract method."""
not_implemented("get_stack")
- def get_outputs(self, stack_name: str, *args: Any, **kwargs: Any) -> Any:
+ def get_outputs(self, stack_name: str, *_args: Any, **_kwargs: Any) -> Any: # noqa: ARG002
"""Abstract method."""
not_implemented("get_outputs")
@@ -42,7 +43,7 @@ class Template:
"""
- def __init__(self, url: Optional[str] = None, body: Optional[str] = None) -> None:
+ def __init__(self, url: str | None = None, body: str | None = None) -> None:
"""Instantiate class."""
self.url = url
self.body = body
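[Editor's note] base.py can use the ``str | None`` spelling on interpreters older than 3.10 only because the hunk also adds ``from __future__ import annotations``, which keeps every annotation an unevaluated string (PEP 563). A hedged sketch of the mechanism:

from __future__ import annotations


def build(region: str | None = None) -> str:
    # the `str | None` above never executes at runtime, so this parses on 3.7+
    return region or "us-east-1"


print(build())  # -> us-east-1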
diff --git a/runway/cfngin/session_cache.py b/runway/cfngin/session_cache.py
index 48c9ef5f1..cba1b09d2 100644
--- a/runway/cfngin/session_cache.py
+++ b/runway/cfngin/session_cache.py
@@ -58,5 +58,5 @@ def get_session(
cred_provider = session._session.get_component("credential_provider") # type: ignore
provider = cred_provider.get_provider("assume-role") # type: ignore
provider.cache = BOTO3_CREDENTIAL_CACHE
- provider._prompter = ui.getpass
+ provider._prompter = ui.getpass # noqa: SLF001
return session
diff --git a/runway/cfngin/stack.py b/runway/cfngin/stack.py
index a1b51381e..8e5dd6fc9 100644
--- a/runway/cfngin/stack.py
+++ b/runway/cfngin/stack.py
@@ -3,9 +3,7 @@
from __future__ import annotations
from copy import deepcopy
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, cast
-
-from typing_extensions import Literal
+from typing import TYPE_CHECKING, Any, cast
from runway.utils import load_object_from_string
from runway.variables import Variable, resolve_variables
@@ -13,6 +11,8 @@
from .blueprints.raw import RawTemplateBlueprint
if TYPE_CHECKING:
+ from typing_extensions import Literal
+
from ..config.models.cfngin import CfnginStackDefinitionModel
from ..context import CfnginContext
from .blueprints.base import Blueprint
@@ -20,8 +20,8 @@
def _initialize_variables(
- stack_def: CfnginStackDefinitionModel, variables: Optional[Dict[str, Any]] = None
-) -> List[Variable]:
+ stack_def: CfnginStackDefinitionModel, variables: dict[str, Any] | None = None
+) -> list[Variable]:
"""Convert defined variables into a list of ``Variable`` for consumption.
Args:
@@ -65,36 +65,36 @@ class Stack:
"""
- _blueprint: Optional[Blueprint]
- _stack_policy: Optional[str]
+ _blueprint: Blueprint | None
+ _stack_policy: str | None
context: CfnginContext
definition: CfnginStackDefinitionModel
enabled: bool
force: bool
fqn: str
- in_progress_behavior: Optional[Literal["wait"]]
+ in_progress_behavior: Literal["wait"] | None
locked: bool
logging: bool
- mappings: Dict[str, Dict[str, Dict[str, Any]]]
+ mappings: dict[str, dict[str, dict[str, Any]]]
name: str
- outputs: Dict[str, Any]
+ outputs: dict[str, Any]
protected: bool
termination_protection: bool
- variables: List[Variable]
+ variables: list[Variable]
def __init__(
self,
definition: CfnginStackDefinitionModel,
context: CfnginContext,
*,
- variables: Optional[Dict[str, Any]] = None,
- mappings: Dict[str, Dict[str, Dict[str, Any]]] = None,
+ variables: dict[str, Any] | None = None,
+ mappings: dict[str, dict[str, dict[str, Any]]] | None = None,
locked: bool = False,
force: bool = False,
enabled: bool = True,
protected: bool = False,
- ):
+ ) -> None:
"""Instantiate class.
Args:
@@ -127,12 +127,12 @@ def __init__(
self.variables = _initialize_variables(definition, variables)
@property
- def required_by(self) -> Set[str]:
+ def required_by(self) -> set[str]:
"""Return a list of stack names that depend on this stack."""
return set(self.definition.required_by)
@property
- def requires(self) -> Set[str]:
+ def requires(self) -> set[str]:
"""Return a list of stack names this stack depends on."""
requires = set(self.definition.requires or [])
@@ -147,21 +147,17 @@ def requires(self) -> Set[str]:
return requires
@property
- def stack_policy(self) -> Optional[str]:
+ def stack_policy(self) -> str | None:
"""Return the Stack Policy to use for this stack."""
- if not self._stack_policy:
- self._stack_policy = None
- if self.definition.stack_policy_path:
- with open(self.definition.stack_policy_path, encoding="utf-8") as file_:
- self._stack_policy = file_.read()
-
- return self._stack_policy
+ if self.definition.stack_policy_path:
+ return self.definition.stack_policy_path.read_text() or None
+ return None
@property
def blueprint(self) -> Blueprint:
"""Return the blueprint associated with this stack."""
if not self._blueprint:
- kwargs: Dict[str, Any] = {}
+ kwargs: dict[str, Any] = {}
if self.definition.class_path:
class_path = self.definition.class_path
blueprint_class = load_object_from_string(class_path)
@@ -173,9 +169,7 @@ def blueprint(self) -> Blueprint:
blueprint_class = RawTemplateBlueprint
kwargs["raw_template_path"] = self.definition.template_path
else:
- raise AttributeError(
- "Stack does not have a defined class or template path."
- )
+ raise AttributeError("Stack does not have a defined class or template path.")
self._blueprint = cast(
"Blueprint",
@@ -190,7 +184,7 @@ def blueprint(self) -> Blueprint:
return self._blueprint
@property
- def tags(self) -> Dict[str, Any]:
+ def tags(self) -> dict[str, Any]:
"""Return the tags that should be set on this stack.
Includes both the global tags and any stack-specific tags
@@ -201,7 +195,7 @@ def tags(self) -> Dict[str, Any]:
return dict(self.context.tags, **tags)
@property
- def parameter_values(self) -> Dict[str, Any]:
+ def parameter_values(self) -> dict[str, Any]:
"""Return all CloudFormation Parameters for the stack.
CloudFormation Parameters can be specified via Blueprint Variables
@@ -215,18 +209,16 @@ def parameter_values(self) -> Dict[str, Any]:
return self.blueprint.parameter_values
@property
- def all_parameter_definitions(self) -> Dict[str, Any]:
+ def all_parameter_definitions(self) -> dict[str, Any]:
"""Return all parameters in the blueprint/template."""
return self.blueprint.parameter_definitions
@property
- def required_parameter_definitions(self) -> Dict[str, Any]:
+ def required_parameter_definitions(self) -> dict[str, Any]:
"""Return all CloudFormation Parameters without a default value."""
return self.blueprint.required_parameter_definitions
- def resolve(
- self, context: CfnginContext, provider: Optional[Provider] = None
- ) -> None:
+ def resolve(self, context: CfnginContext, provider: Provider | None = None) -> None:
"""Resolve the Stack variables.
This resolves the Stack variables and then prepares the Blueprint for
@@ -240,7 +232,7 @@ def resolve(
resolve_variables(self.variables, context, provider)
self.blueprint.resolve_variables(self.variables)
- def set_outputs(self, outputs: Dict[str, Any]) -> None:
+ def set_outputs(self, outputs: dict[str, Any]) -> None:
"""Set stack outputs to the provided value.
Args:
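[Editor's note] The stack_policy rewrite above leans on ``read_text() or None`` so an empty policy file collapses to None instead of an empty string (and the property no longer caches the file contents). A small standalone sketch of the idiom:

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    policy = Path(tmp) / "policy.json"
    policy.write_text("")  # empty file
    print(policy.read_text() or None)  # -> None
    policy.write_text('{"Statement": []}')
    print(policy.read_text() or None)  # -> {"Statement": []}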
diff --git a/runway/cfngin/status.py b/runway/cfngin/status.py
index 456a6c153..c7f00dc9b 100644
--- a/runway/cfngin/status.py
+++ b/runway/cfngin/status.py
@@ -46,11 +46,11 @@ def _comparison(self, operator_: Callable[[Any, Any], bool], other: Any) -> bool
return operator_(self.code, other.code)
return NotImplemented
- def __eq__(self, other: Any) -> bool:
+ def __eq__(self, other: object) -> bool:
"""Compare if self is equal to another object."""
return self._comparison(operator.eq, other)
- def __ne__(self, other: Any) -> bool:
+ def __ne__(self, other: object) -> bool:
"""Compare if self is not equal to another object."""
return self._comparison(operator.ne, other)
diff --git a/runway/cfngin/tokenize_userdata.py b/runway/cfngin/tokenize_userdata.py
index cd5b48ed0..d0432f1ac 100644
--- a/runway/cfngin/tokenize_userdata.py
+++ b/runway/cfngin/tokenize_userdata.py
@@ -1,7 +1,6 @@
"""Resources to tokenize userdata."""
import re
-from typing import List
from troposphere import GetAtt, Ref
@@ -14,7 +13,7 @@
REPLACE_RE = re.compile(REPLACE_STRING)
-def cf_tokenize(raw_userdata: str) -> List[str]:
+def cf_tokenize(raw_userdata: str) -> list[str]:
"""Parse UserData for Cloudformation helper functions.
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/user-data.html
@@ -39,7 +38,7 @@ def cf_tokenize(raw_userdata: str) -> List[str]:
Base64(Join('', cf_tokenize(userdata_string)))
"""
- result: List[str] = []
+ result: list[str] = []
parts = SPLIT_RE.split(raw_userdata)
for part in parts:
cf_func = REPLACE_RE.search(part)
diff --git a/runway/cfngin/ui.py b/runway/cfngin/ui.py
index 861d0158a..dadf76869 100644
--- a/runway/cfngin/ui.py
+++ b/runway/cfngin/ui.py
@@ -4,12 +4,15 @@
import logging
import threading
+from contextlib import AbstractContextManager
from getpass import getpass
-from typing import TYPE_CHECKING, Any, ContextManager, Optional, TextIO, Type, Union
+from typing import TYPE_CHECKING, Any, TextIO
if TYPE_CHECKING:
from types import TracebackType
+ from typing_extensions import Self
+
LOGGER = logging.getLogger(__name__)
@@ -18,7 +21,7 @@ def get_raw_input(message: str) -> str:
return input(message)
-class UI(ContextManager["UI"]):
+class UI(AbstractContextManager["UI"]):
"""Used internally from terminal output in a multithreaded environment.
Ensures that two threads don't write over each other while asking a user
@@ -33,9 +36,9 @@ def __init__(self) -> None:
def log(
self,
lvl: int,
- msg: Union[Exception, str],
+ msg: Exception | str,
*args: Any,
- logger: Union[logging.Logger, logging.LoggerAdapter[Any]] = LOGGER,
+ logger: logging.Logger | logging.LoggerAdapter[Any] = LOGGER,
**kwargs: Any,
) -> None:
"""Log the message if the current thread owns the underlying lock.
@@ -44,8 +47,11 @@ def log(
lvl: Log level.
msg: String template or exception to use for the log record.
logger: Specific logger to log to.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
+ kwargs["stacklevel"] = kwargs.get("stacklevel", 1) + 1
with self:
return logger.log(lvl, msg, *args, **kwargs)
@@ -53,7 +59,7 @@ def info(
self,
msg: str,
*args: Any,
- logger: Union[logging.Logger, logging.LoggerAdapter[Any]] = LOGGER,
+ logger: logging.Logger | logging.LoggerAdapter[Any] = LOGGER,
**kwargs: Any,
) -> None:
"""Log the line if the current thread owns the underlying lock.
@@ -62,6 +68,8 @@ def info(
msg: String template or exception to use
for the log record.
logger: Specific logger to log to.
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
"""
kwargs["logger"] = logger
@@ -79,21 +87,21 @@ def ask(self, message: str) -> str:
with self:
return get_raw_input(message)
- def getpass(self, prompt: str, stream: Optional[TextIO] = None) -> str:
+ def getpass(self, prompt: str, stream: TextIO | None = None) -> str:
"""Wrap getpass to lock the UI."""
with self:
return getpass(prompt, stream)
- def __enter__(self) -> UI:
+ def __enter__(self) -> Self:
"""Enter the context manager."""
self._lock.__enter__()
return self
def __exit__(
self,
- exc_type: Optional[Type[BaseException]],
- exc_value: Optional[BaseException],
- traceback: Optional[TracebackType],
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
) -> None:
"""Exit the context manager."""
self._lock.__exit__(exc_type, exc_value, traceback)
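[Editor's note] The new ``kwargs["stacklevel"] = kwargs.get("stacklevel", 1) + 1`` line in UI.log() exists so log records name the caller of the wrapper rather than the wrapper itself (``stacklevel`` is honored by logging on Python 3.8+). A standalone sketch, names illustrative:

import logging

logging.basicConfig(format="%(filename)s:%(lineno)d %(message)s", level=logging.INFO)
LOGGER = logging.getLogger("demo")


def log_wrapper(msg: str, **kwargs) -> None:
    # without the bump, every record would point at the LOGGER.info line below
    kwargs["stacklevel"] = kwargs.get("stacklevel", 1) + 1
    LOGGER.info(msg, **kwargs)


log_wrapper("hello")  # record cites this call site, not log_wrapper's body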
diff --git a/runway/cfngin/utils.py b/runway/cfngin/utils.py
index 1f44abda6..4fa7fd846 100644
--- a/runway/cfngin/utils.py
+++ b/runway/cfngin/utils.py
@@ -14,17 +14,13 @@
import tempfile
import uuid
import zipfile
+from collections import OrderedDict
from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
- Dict,
- Iterator,
- List,
Optional,
- OrderedDict,
- Type,
Union,
cast,
)
@@ -39,6 +35,8 @@
from .session_cache import get_session
if TYPE_CHECKING:
+ from collections.abc import Iterator
+
from mypy_boto3_route53.client import Route53Client
from mypy_boto3_route53.type_defs import ResourceRecordSetTypeDef
from mypy_boto3_s3.client import S3Client
@@ -147,16 +145,9 @@ def __init__(self, record_text: str) -> None:
def __str__(self) -> str:
"""Convert an instance of this class to a string."""
- return " ".join(
- [
- self.nameserver,
- self.contact,
- self.serial,
- self.refresh,
- self.retry,
- self.expire,
- self.min_ttl,
- ]
+ return (
+ f"{self.nameserver} {self.contact} {self.serial} {self.refresh} "
+ f"{self.retry} {self.expire} {self.min_ttl}"
)
@@ -166,9 +157,7 @@ class SOARecord:
def __init__(self, record: ResourceRecordSetTypeDef) -> None:
"""Instantiate class."""
self.name = record["Name"]
- self.text = SOARecordText(
- record.get("ResourceRecords", [{"Value": ""}])[0]["Value"]
- )
+ self.text = SOARecordText(record.get("ResourceRecords", [{"Value": ""}])[0]["Value"])
self.ttl = record.get("TTL", 0)
@@ -238,9 +227,9 @@ def create_route53_zone(client: Route53Client, zone_name: str) -> str:
return zone_id
-def yaml_to_ordered_dict(
+def yaml_to_ordered_dict( # noqa: C901
stream: str,
- loader: Union[Type[yaml.Loader], Type[yaml.SafeLoader]] = yaml.SafeLoader,
+ loader: type[yaml.Loader | yaml.SafeLoader] = yaml.SafeLoader,
) -> OrderedDict[str, Any]:
"""yaml.load alternative with preserved dictionary order.
@@ -267,12 +256,12 @@ class OrderedUniqueLoader(loader): # type: ignore
@staticmethod
def _error_mapping_on_dupe(
- node: Union[yaml.MappingNode, yaml.ScalarNode, yaml.SequenceNode],
+ node: yaml.MappingNode | yaml.ScalarNode | yaml.SequenceNode,
node_name: str,
) -> None:
"""Check mapping node for dupe children keys."""
if isinstance(node, yaml.MappingNode):
- mapping: Dict[str, Any] = {}
+ mapping: dict[str, Any] = {}
for val in node.value:
a = val[0]
b = mapping.get(a.value, None)
@@ -285,7 +274,7 @@ def _error_mapping_on_dupe(
def _validate_mapping(
self,
- node: Union[yaml.MappingNode, yaml.ScalarNode, yaml.SequenceNode],
+ node: yaml.MappingNode | yaml.ScalarNode | yaml.SequenceNode,
deep: bool = False,
) -> OrderedDict[Any, Any]:
if not isinstance(node, yaml.MappingNode):
@@ -322,7 +311,7 @@ def _validate_mapping(
def construct_mapping(
self,
- node: Union[yaml.MappingNode, yaml.ScalarNode, yaml.SequenceNode],
+ node: yaml.MappingNode | yaml.ScalarNode | yaml.SequenceNode,
deep: bool = False,
) -> OrderedDict[Any, Any]:
"""Override parent method to use OrderedDict."""
@@ -331,7 +320,7 @@ def construct_mapping(
return self._validate_mapping(node, deep=deep)
def construct_yaml_map(
- self, node: Union[yaml.MappingNode, yaml.ScalarNode, yaml.SequenceNode]
+ self, node: yaml.MappingNode | yaml.ScalarNode | yaml.SequenceNode
) -> Iterator[OrderedDict[Any, Any]]:
data: OrderedDict[Any, Any] = OrderedDict()
yield data
@@ -341,7 +330,7 @@ def construct_yaml_map(
OrderedUniqueLoader.add_constructor(
"tag:yaml.org,2002:map", OrderedUniqueLoader.construct_yaml_map
)
- return yaml.load(stream, OrderedUniqueLoader)
+ return yaml.load(stream, OrderedUniqueLoader) # noqa: S506
def uppercase_first_letter(string_: str) -> str:
@@ -361,7 +350,7 @@ def cf_safe_name(name: str) -> str:
return "".join(uppercase_first_letter(part) for part in parts)
-def read_value_from_path(value: str, *, root_path: Optional[Path] = None) -> str:
+def read_value_from_path(value: str, *, root_path: Path | None = None) -> str:
"""Enable translators to read values from files.
The value can be referred to with the `file://` prefix.
@@ -373,23 +362,16 @@ def read_value_from_path(value: str, *, root_path: Optional[Path] = None) -> str
"""
if value.startswith("file://"):
- path = value.split("file://", 1)[1]
- if os.path.isabs(path):
+ path = Path(value.split("file://", 1)[1])
+ if path.is_absolute():
read_path = Path(path)
else:
root_path = root_path or Path.cwd()
- if root_path.is_dir():
- read_path = root_path / path
- else:
- read_path = root_path.parent / path
+ read_path = root_path / path if root_path.is_dir() else root_path.parent / path
if read_path.is_file():
- return read_path.read_text(
- encoding=locale.getpreferredencoding(do_setlocale=False)
- )
+ return read_path.read_text(encoding=locale.getpreferredencoding(do_setlocale=False))
if read_path.is_dir():
- raise ValueError(
- f"path must lead to a file not directory: {read_path.absolute()}"
- )
+ raise ValueError(f"path must lead to a file not directory: {read_path.absolute()}")
raise ValueError(f"path does not exist: {read_path.absolute()}")
return value
@@ -404,7 +386,7 @@ def get_client_region(client: Any) -> str:
AWS region string.
"""
- return client._client_config.region_name # type: ignore
+ return client._client_config.region_name # type: ignore # noqa: SLF001
def get_s3_endpoint(client: Any) -> str:
@@ -417,7 +399,7 @@ def get_s3_endpoint(client: Any) -> str:
The AWS endpoint for the client.
"""
- return client._endpoint.host # type: ignore
+ return client._endpoint.host # type: ignore # noqa: SLF001
def s3_bucket_location_constraint(region: Optional[str]) -> Optional[str]:
@@ -484,7 +466,7 @@ def ensure_s3_bucket(
# can't use s3_client.exceptions.NoSuchBucket here.
# it does not work if the bucket was recently deleted.
LOGGER.debug("creating bucket %s", bucket_name)
- create_args: Dict[str, Any] = {"Bucket": bucket_name}
+ create_args: dict[str, Any] = {"Bucket": bucket_name}
location_constraint = s3_bucket_location_constraint(bucket_region)
if location_constraint:
create_args["CreateBucketConfiguration"] = {
@@ -498,8 +480,7 @@ def ensure_s3_bucket(
return
if err.response["Error"]["Message"] == "Forbidden":
LOGGER.exception(
- "Access denied for bucket %s. Did you remember "
- "to use a globally unique name?",
+ "Access denied for bucket %s. Did you remember to use a globally unique name?",
bucket_name,
)
elif err.response["Error"]["Message"] != "Not Found":
@@ -507,7 +488,7 @@ def ensure_s3_bucket(
raise
-def parse_cloudformation_template(template: str) -> Dict[str, Any]:
+def parse_cloudformation_template(template: str) -> dict[str, Any]:
"""Parse CFN template string.
Leverages the vendored aws-cli yamlhelper to handle JSON or YAML templates.
@@ -532,8 +513,8 @@ def is_within_directory(directory: Path | str, target: str) -> bool:
bool: True if the target is in the directory or subdirectories, False otherwise.
"""
- abs_directory = os.path.abspath(directory)
- abs_target = os.path.abspath(target)
+ abs_directory = os.path.abspath(directory) # noqa: PTH100
+ abs_target = os.path.abspath(target) # noqa: PTH100
prefix = os.path.commonprefix([abs_directory, abs_target])
return prefix == abs_directory
@@ -544,27 +525,27 @@ def safe_tar_extract(
members: list[tarfile.TarInfo] | None = None,
*,
numeric_owner: bool = False,
-):
+) -> None:
"""Safely extract the contents of a tar file to a specified directory.
This code is modified from a PR provided to the Runway project
to address CVE-2007-4559.
Args:
- tar (TarFile): The tar file object that will be extracted.
- path (Union[Path, str], optional): The directory to extract the tar into.
- members (List[TarInfo] | None, optional): List of TarInfo objects to extract.
- numeric_owner (bool, optional): Enable usage of owner and group IDs when extracting.
+ tar: The tar file object that will be extracted.
+ path: The directory to extract the tar into.
+ members: List of TarInfo objects to extract.
+ numeric_owner: Enable usage of owner and group IDs when extracting.
Raises:
Exception: If any tar file tries to go outside the specified area.
"""
for member in tar.getmembers():
- member_path = os.path.join(path, member.name)
+ member_path = os.path.join(path, member.name) # noqa: PTH118
if not is_within_directory(path, member_path):
raise Exception("Attempted Path Traversal in Tar File")
- tar.extractall(path, members, numeric_owner=numeric_owner)
+ tar.extractall(path, members, numeric_owner=numeric_owner) # noqa: S202
class Extractor:
@@ -622,7 +603,7 @@ def extract(self, destination: Path) -> None:
"""Extract the archive."""
if self.archive:
with zipfile.ZipFile(self.archive, "r") as zip_ref:
- zip_ref.extractall(destination)
+ zip_ref.extractall(destination) # noqa: S202
class SourceProcessor:
@@ -645,7 +626,7 @@ def __init__(
self.cache_dir = cache_dir
self.package_cache_dir = cache_dir / "packages"
self.sources = sources
- self.configs_to_merge: List[Path] = []
+ self.configs_to_merge: list[Path] = []
self.create_cache_directories()
def create_cache_directories(self) -> None:
@@ -664,9 +645,7 @@ def get_package_sources(self) -> None:
for config in self.sources.git:
self.fetch_git_package(config=config)
- def fetch_local_package(
- self, config: LocalCfnginPackageSourceDefinitionModel
- ) -> None:
+ def fetch_local_package(self, config: LocalCfnginPackageSourceDefinitionModel) -> None:
"""Make a local path available to current CFNgin config.
Args:
@@ -713,7 +692,7 @@ def fetch_s3_package(self, config: S3CfnginPackageSourceDefinitionModel) -> None
)
session = get_session(region=None)
- extra_s3_args: Dict[str, Any] = {}
+ extra_s3_args: dict[str, Any] = {}
if config.requester_pays:
extra_s3_args["RequestPayer"] = "requester"
@@ -749,13 +728,12 @@ def fetch_s3_package(self, config: S3CfnginPackageSourceDefinitionModel) -> None
cached_dir_path,
)
tmp_dir = tempfile.mkdtemp(prefix="cfngin")
- tmp_package_path = os.path.join(tmp_dir, dir_name)
+ tmp_package_path = os.path.join(tmp_dir, dir_name) # noqa: PTH118
with tempfile.TemporaryDirectory(prefix="runway-cfngin") as tmp_dir:
tmp_package_path = Path(tmp_dir) / dir_name
extractor.set_archive(tmp_package_path)
LOGGER.debug(
- "starting remote package download from S3 to %s "
- 'with extra S3 options "%s"',
+ 'starting remote package download from S3 to %s with extra S3 options "%s"',
extractor.archive,
str(extra_s3_args),
)
@@ -770,8 +748,7 @@ def fetch_s3_package(self, config: S3CfnginPackageSourceDefinitionModel) -> None
)
extractor.extract(tmp_package_path)
LOGGER.debug(
- "moving extracted package directory %s to the "
- "CFNgin cache at %s",
+ "moving extracted package directory %s to the CFNgin cache at %s",
dir_name,
self.package_cache_dir,
)
@@ -797,7 +774,7 @@ def fetch_git_package(self, config: GitCfnginPackageSourceDefinitionModel) -> No
"""
# only loading git here when needed to avoid load errors on systems
# without git installed
- from git.repo import Repo # pylint: disable=import-outside-toplevel
+ from git.repo import Repo
ref = self.determine_git_ref(config)
dir_name = self.sanitize_git_path(uri=config.uri, ref=ref)
@@ -813,7 +790,7 @@ def fetch_git_package(self, config: GitCfnginPackageSourceDefinitionModel) -> No
)
tmp_dir = tempfile.mkdtemp(prefix="cfngin")
try:
- tmp_repo_path = os.path.join(tmp_dir, dir_name)
+ tmp_repo_path = os.path.join(tmp_dir, dir_name) # noqa: PTH118
with Repo.clone_from(config.uri, tmp_repo_path) as repo:
repo.head.set_reference(ref)
repo.head.reset(index=True, working_tree=True)
@@ -822,8 +799,7 @@ def fetch_git_package(self, config: GitCfnginPackageSourceDefinitionModel) -> No
shutil.rmtree(tmp_dir)
else:
LOGGER.debug(
- "remote repo %s appears to have been previously "
- "cloned to %s; download skipped",
+ "remote repo %s appears to have been previously cloned to %s; download skipped",
config.uri,
cached_dir_path,
)
@@ -933,7 +909,7 @@ def sanitize_uri_path(uri: str) -> str:
uri = uri.replace(i, "_")
return uri
- def sanitize_git_path(self, uri: str, ref: Optional[str] = None) -> str:
+ def sanitize_git_path(self, uri: str, ref: str | None = None) -> str:
"""Take a git URI and ref and converts it to a directory safe path.
Args:
@@ -944,10 +920,7 @@ def sanitize_git_path(self, uri: str, ref: Optional[str] = None) -> str:
Directory name for the supplied uri
"""
- if uri.endswith(".git"):
- dir_name = uri[:-4] # drop .git
- else:
- dir_name = uri
+ dir_name = uri[:-4] if uri.endswith(".git") else uri # drop .git
dir_name = self.sanitize_uri_path(dir_name)
if ref is not None:
dir_name += f"-{ref}"
diff --git a/runway/compat.py b/runway/compat.py
index 038e76ed2..360443eff 100644
--- a/runway/compat.py
+++ b/runway/compat.py
@@ -1,26 +1,19 @@
"""Python dependency compatibility handling."""
import sys
-from typing import Iterable
-
-if sys.version_info < (3, 8): # 3.7
- import shlex
-
- from backports.cached_property import cached_property
- from importlib_metadata import PackageNotFoundError, version
-
- def shlex_join(split_command: Iterable[str]) -> str:
- """Backport of :meth:`shlex.join`."""
- return " ".join(shlex.quote(arg) for arg in split_command)
+from functools import cached_property
+from importlib.metadata import PackageNotFoundError, version
+from shlex import join as shlex_join
+if sys.version_info < (3, 11):
+ from typing_extensions import Self
else:
- from functools import cached_property
- from importlib.metadata import PackageNotFoundError, version
- from shlex import join as shlex_join
+ from typing import Self
__all__ = [
- "PackageNotFoundError",
- "cached_property",
- "shlex_join",
- "version",
+ "PackageNotFoundError", # TODO (kyle): remove in next major release
+ "Self",
+ "cached_property", # TODO (kyle): remove in next major release
+ "shlex_join", # TODO (kyle): remove in next major release
+ "version", # TODO (kyle): remove in next major release
]
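[Editor's note] compat.py now funnels the typing.Self backport through one place. Typical consumption looks roughly like the sketch below (class name is invented; typing_extensions is required on Python < 3.11):

from __future__ import annotations

import sys

if sys.version_info < (3, 11):
    from typing_extensions import Self
else:
    from typing import Self


class Builder:
    name: str = ""

    def with_name(self, name: str) -> Self:  # subclasses keep their own type
        self.name = name
        return self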
diff --git a/runway/config/__init__.py b/runway/config/__init__.py
index c6a63ca8e..61e508fdb 100644
--- a/runway/config/__init__.py
+++ b/runway/config/__init__.py
@@ -7,18 +7,7 @@
import sys
from pathlib import Path
from string import Template
-from typing import (
- TYPE_CHECKING,
- AbstractSet,
- Any,
- Dict,
- List,
- Mapping,
- MutableMapping,
- Optional,
- Union,
- cast,
-)
+from typing import TYPE_CHECKING, Any, Union, cast
import yaml
@@ -41,6 +30,9 @@
from .models.runway import RunwayConfigDefinitionModel, RunwayFutureDefinitionModel
if TYPE_CHECKING:
+ from collections.abc import Mapping, MutableMapping
+ from collections.abc import Set as AbstractSet
+
from packaging.specifiers import SpecifierSet
from pydantic import BaseModel
@@ -53,7 +45,7 @@ class BaseConfig:
file_path: Path
_data: BaseModel
- def __init__(self, data: BaseModel, *, path: Optional[Path] = None) -> None:
+ def __init__(self, data: BaseModel, *, path: Path | None = None) -> None:
"""Instantiate class.
Args:
@@ -68,15 +60,11 @@ def dump(
self,
*,
by_alias: bool = False,
- exclude: Optional[
- Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any]]
- ] = None,
+ exclude: Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any]] | None = None,
exclude_defaults: bool = False,
exclude_none: bool = False,
exclude_unset: bool = True,
- include: Optional[
- Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any]]
- ] = None,
+ include: Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any]] | None = None,
) -> str:
"""Dump model to a YAML string.
@@ -108,7 +96,7 @@ def dump(
)
@classmethod
- def find_config_file(cls, path: Path) -> Optional[Path]:
+ def find_config_file(cls, path: Path) -> Path | None:
"""Find a config file in the provided path.
Args:
@@ -147,24 +135,24 @@ class CfnginConfig(BaseConfig):
EXCLUDE_LIST = ["bitbucket-pipelines.yml", "buildspec.yml", "docker-compose.yml"]
"""Explicit files names to ignore when looking for config files."""
- cfngin_bucket: Optional[str]
+ cfngin_bucket: str | None
"""Bucket to use for CFNgin resources. (e.g. CloudFormation templates).
May be an empty string.
"""
- cfngin_bucket_region: Optional[str]
+ cfngin_bucket_region: str | None
"""Explicit region to use for :attr:`CfnginConfig.cfngin_bucket`"""
cfngin_cache_dir: Path
"""Local directory to use for caching."""
- log_formats: Dict[str, str]
+ log_formats: dict[str, str]
"""Custom formatting for log messages."""
- lookups: Dict[str, str]
+ lookups: dict[str, str]
"""Register custom lookups."""
- mappings: Dict[str, Dict[str, Dict[str, Any]]]
+ mappings: dict[str, dict[str, dict[str, Any]]]
"""Mappings that will be added to all stacks."""
namespace: str
@@ -176,31 +164,31 @@ class CfnginConfig(BaseConfig):
package_sources: CfnginPackageSourcesDefinitionModel
"""Remote source locations."""
- persistent_graph_key: Optional[str] = None
+ persistent_graph_key: str | None = None
"""S3 object key were the persistent graph is stored."""
- post_deploy: List[CfnginHookDefinitionModel]
+ post_deploy: list[CfnginHookDefinitionModel]
"""Hooks to run after a deploy action."""
- post_destroy: List[CfnginHookDefinitionModel]
+ post_destroy: list[CfnginHookDefinitionModel]
"""Hooks to run after a destroy action."""
- pre_deploy: List[CfnginHookDefinitionModel]
+ pre_deploy: list[CfnginHookDefinitionModel]
"""Hooks to run before a deploy action."""
- pre_destroy: List[CfnginHookDefinitionModel]
+ pre_destroy: list[CfnginHookDefinitionModel]
"""Hooks to run before a destroy action."""
- service_role: Optional[str]
+ service_role: str | None
"""IAM role for CloudFormation to use."""
- stacks: List[CfnginStackDefinitionModel]
+ stacks: list[CfnginStackDefinitionModel]
"""Stacks to be processed."""
- sys_path: Optional[Path]
+ sys_path: Path | None
"""Relative or absolute path to use as the work directory."""
- tags: Optional[Dict[str, str]]
+ tags: dict[str, str] | None
"""Tags to apply to all resources."""
template_indent: int
@@ -212,8 +200,8 @@ def __init__(
self,
data: CfnginConfigDefinitionModel,
*,
- path: Optional[Path] = None,
- work_dir: Optional[Path] = None,
+ path: Path | None = None,
+ work_dir: Path | None = None,
) -> None:
"""Instantiate class.
@@ -242,14 +230,12 @@ def __init__(
self.namespace_delimiter = self._data.namespace_delimiter
self.package_sources = self._data.package_sources
self.persistent_graph_key = self._data.persistent_graph_key
- self.post_deploy = cast(List[CfnginHookDefinitionModel], self._data.post_deploy)
- self.post_destroy = cast(
- List[CfnginHookDefinitionModel], self._data.post_destroy
- )
- self.pre_deploy = cast(List[CfnginHookDefinitionModel], self._data.pre_deploy)
- self.pre_destroy = cast(List[CfnginHookDefinitionModel], self._data.pre_destroy)
+ self.post_deploy = cast("list[CfnginHookDefinitionModel]", self._data.post_deploy)
+ self.post_destroy = cast("list[CfnginHookDefinitionModel]", self._data.post_destroy)
+ self.pre_deploy = cast("list[CfnginHookDefinitionModel]", self._data.pre_deploy)
+ self.pre_destroy = cast("list[CfnginHookDefinitionModel]", self._data.pre_destroy)
self.service_role = self._data.service_role
- self.stacks = cast(List[CfnginStackDefinitionModel], self._data.stacks)
+ self.stacks = cast("list[CfnginStackDefinitionModel]", self._data.stacks)
self.sys_path = self._data.sys_path
self.tags = self._data.tags
self.template_indent = self._data.template_indent
@@ -265,9 +251,9 @@ def load(self) -> None:
register_lookup_handler(key, handler)
@classmethod
- def find_config_file( # type: ignore pylint: disable=arguments-differ
- cls, path: Optional[Path] = None, *, exclude: Optional[List[str]] = None
- ) -> List[Path]:
+ def find_config_file( # type: ignore
+ cls, path: Path | None = None, *, exclude: list[str] | None = None
+ ) -> list[Path]:
"""Find a config file in the provided path.
Args:
@@ -286,18 +272,14 @@ def find_config_file( # type: ignore pylint: disable=arguments-differ
return [path]
exclude = exclude or []
- result: List[Path] = []
+ result: list[Path] = []
exclude.extend(cls.EXCLUDE_LIST)
yml_files = list(path.glob("*.yml"))
yml_files.extend(list(path.glob("*.yaml")))
for f in yml_files:
- if (
- re.match(cls.EXCLUDE_REGEX, f.name)
- or f.name in exclude
- or f.name.startswith(".")
- ):
+ if re.match(cls.EXCLUDE_REGEX, f.name) or f.name in exclude or f.name.startswith("."):
continue # cov: ignore
result.append(f)
result.sort()
@@ -307,10 +289,10 @@ def find_config_file( # type: ignore pylint: disable=arguments-differ
def parse_file(
cls,
*,
- path: Optional[Path] = None,
- file_path: Optional[Path] = None,
- parameters: Optional[MutableMapping[str, Any]] = None,
- work_dir: Optional[Path] = None,
+ path: Path | None = None,
+ file_path: Path | None = None,
+ parameters: MutableMapping[str, Any] | None = None,
+ work_dir: Path | None = None,
**kwargs: Any,
) -> CfnginConfig:
"""Parse a YAML file to create a config object.
@@ -320,6 +302,7 @@ def parse_file(
file_path: Exact path to a file to parse.
parameters: Values to use when resolving a raw config.
work_dir: Explicit working directory.
+ **kwargs: Arbitrary keyword arguments.
Raises:
ConfigNotFound: Provided config file was not found.
@@ -349,7 +332,7 @@ def parse_file(
@classmethod
def parse_obj(
- cls, obj: Any, *, path: Optional[Path] = None, work_dir: Optional[Path] = None
+ cls, obj: Any, *, path: Path | None = None, work_dir: Path | None = None
) -> CfnginConfig:
"""Parse a python object.
@@ -359,19 +342,17 @@ def parse_obj(
work_dir: Working directory.
"""
- return cls(
- CfnginConfigDefinitionModel.parse_obj(obj), path=path, work_dir=work_dir
- )
+ return cls(CfnginConfigDefinitionModel.parse_obj(obj), path=path, work_dir=work_dir)
@classmethod
def parse_raw(
cls,
data: str,
*,
- parameters: Optional[MutableMapping[str, Any]] = None,
- path: Optional[Path] = None,
+ parameters: MutableMapping[str, Any] | None = None,
+ path: Path | None = None,
skip_package_sources: bool = False,
- work_dir: Optional[Path] = None,
+ work_dir: Path | None = None,
) -> CfnginConfig:
"""Parse raw data.
@@ -389,9 +370,7 @@ def parse_raw(
if skip_package_sources:
return cls.parse_obj(yaml.safe_load(pre_rendered))
config_dict = yaml.safe_load(
- cls.process_package_sources(
- pre_rendered, parameters=parameters, work_dir=work_dir
- )
+ cls.process_package_sources(pre_rendered, parameters=parameters, work_dir=work_dir)
)
return cls.parse_obj(config_dict, path=path)
@@ -400,8 +379,8 @@ def process_package_sources(
cls,
raw_data: str,
*,
- parameters: Optional[MutableMapping[str, Any]] = None,
- work_dir: Optional[Path] = None,
+ parameters: MutableMapping[str, Any] | None = None,
+ work_dir: Path | None = None,
) -> str:
"""Process the package sources defined in a rendered config.
@@ -412,29 +391,27 @@ def process_package_sources(
work_dir: Explicit working directory.
"""
- config = yaml.safe_load(raw_data) or {}
+ config: dict[str, Any] = yaml.safe_load(raw_data) or {}
processor = SourceProcessor(
sources=CfnginPackageSourcesDefinitionModel.parse_obj(
- config.get("package_sources", {}) # type: ignore
+ config.get("package_sources", {})
),
cache_dir=Path(
- config.get(
- "cfngin_cache_dir", (work_dir or Path().cwd() / ".runway") / "cache"
- )
+ config.get("cfngin_cache_dir", (work_dir or Path().cwd() / ".runway") / "cache")
),
)
processor.get_package_sources()
if processor.configs_to_merge:
for i in processor.configs_to_merge:
LOGGER.debug("merging in remote config: %s", i)
- with open(i, "rb") as opened_file:
+ with i.open("rb") as opened_file:
config = merge_dicts(yaml.safe_load(opened_file), config)
return cls.resolve_raw_data(yaml.dump(config), parameters=parameters or {})
return raw_data
@staticmethod
def resolve_raw_data(
- raw_data: str, *, parameters: Optional[MutableMapping[str, Any]] = None
+ raw_data: str, *, parameters: MutableMapping[str, Any] | None = None
) -> str:
"""Resolve raw data.
@@ -464,19 +441,17 @@ class RunwayConfig(BaseConfig):
ACCEPTED_NAMES = ["runway.yml", "runway.yaml"]
- deployments: List[RunwayDeploymentDefinition]
+ deployments: list[RunwayDeploymentDefinition]
file_path: Path
future: RunwayFutureDefinitionModel
ignore_git_branch: bool
- runway_version: Optional[SpecifierSet]
- tests: List[RunwayTestDefinition[Any]]
+ runway_version: SpecifierSet | None
+ tests: list[RunwayTestDefinition[Any]]
variables: RunwayVariablesDefinition
_data: RunwayConfigDefinitionModel
- def __init__(
- self, data: RunwayConfigDefinitionModel, *, path: Optional[Path] = None
- ) -> None:
+ def __init__(self, data: RunwayConfigDefinitionModel, *, path: Path | None = None) -> None:
"""Instantiate class.
Args:
@@ -485,9 +460,7 @@ def __init__(
"""
super().__init__(data, path=path)
- self.deployments = [
- RunwayDeploymentDefinition(d) for d in self._data.deployments
- ]
+ self.deployments = [RunwayDeploymentDefinition(d) for d in self._data.deployments]
self.future = self._data.future
self.ignore_git_branch = self._data.ignore_git_branch
self.runway_version = self._data.runway_version
@@ -520,8 +493,8 @@ def find_config_file(cls, path: Path) -> Path:
def parse_file(
cls,
*,
- path: Optional[Path] = None,
- file_path: Optional[Path] = None,
+ path: Path | None = None,
+ file_path: Path | None = None,
**kwargs: Any,
) -> RunwayConfig:
"""Parse a YAML file to create a config object.
@@ -529,6 +502,7 @@ def parse_file(
Args:
path: The path to search for a config file.
file_path: Exact path to a file to parse.
+ **kwargs: Arbitrary keyword arguments.
Raises:
ConfigNotFound: Provided config file was not found.
@@ -538,15 +512,13 @@ def parse_file(
if file_path:
if not file_path.is_file():
raise ConfigNotFound(path=file_path)
- return cls.parse_obj(
- yaml.safe_load(file_path.read_text()), path=file_path, **kwargs
- )
+ return cls.parse_obj(yaml.safe_load(file_path.read_text()), path=file_path, **kwargs)
if path:
return cls.parse_file(file_path=cls.find_config_file(path), **kwargs)
raise ValueError("must provide path or file_path")
@classmethod
- def parse_obj(cls, obj: Any, *, path: Optional[Path] = None) -> RunwayConfig:
+ def parse_obj(cls, obj: Any, *, path: Path | None = None) -> RunwayConfig:
"""Parse a python object into a config object.
Args:
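[Editor's note] The casts in this file move to string form, e.g. cast("list[CfnginHookDefinitionModel]", ...), so the subscripted generic is never evaluated at runtime. A minimal sketch of cast's no-op nature:

from typing import cast

raw: object = [1, 2, 3]
nums = cast("list[int]", raw)  # purely for the type checker; no runtime check
print(sum(nums))  # -> 6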
diff --git a/runway/config/components/runway/_deployment_def.py b/runway/config/components/runway/_deployment_def.py
index f0ffd84e2..d2ce0bba8 100644
--- a/runway/config/components/runway/_deployment_def.py
+++ b/runway/config/components/runway/_deployment_def.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, Union, overload
+from typing import TYPE_CHECKING, Any, overload
from ....exceptions import UnresolvedVariable
from ....variables import Variable
@@ -28,26 +28,26 @@
class RunwayDeploymentDefinition(ConfigComponentDefinition):
"""Runway deployment definition."""
- account_alias: Optional[str]
- account_id: Optional[str]
+ account_alias: str | None
+ account_id: str | None
assume_role: RunwayAssumeRoleDefinitionModel
environments: RunwayEnvironmentsType
env_vars: RunwayEnvVarsType
- module_options: Dict[str, Any]
+ module_options: dict[str, Any]
name: str
- parallel_regions: List[str]
- parameters: Dict[str, Any]
- regions: List[str]
+ parallel_regions: list[str]
+ parameters: dict[str, Any]
+ regions: list[str]
_data: RunwayDeploymentDefinitionModel
- _pre_process_vars: Tuple[str, ...] = (
+ _pre_process_vars: tuple[str, ...] = (
"account_alias",
"account_id",
"assume_role",
"env_vars",
"regions",
)
- _supports_vars: Tuple[str, ...] = (
+ _supports_vars: tuple[str, ...] = (
"account_alias",
"account_id",
"assume_role",
@@ -81,12 +81,12 @@ def menu_entry(self) -> str:
)
@property
- def modules(self) -> List[RunwayModuleDefinition]:
+ def modules(self) -> list[RunwayModuleDefinition]:
"""List of Runway modules."""
return [RunwayModuleDefinition(module) for module in self._data.modules]
@modules.setter
- def modules(self, modules: List[RunwayModuleDefinition]) -> None:
+ def modules(self, modules: list[RunwayModuleDefinition]) -> None:
"""Set the value of the property.
Args:
@@ -97,12 +97,10 @@ def modules(self, modules: List[RunwayModuleDefinition]) -> None:
"""
if not all(isinstance(i, RunwayModuleDefinition) for i in modules): # type: ignore
- raise TypeError("modules must be type List[RunwayModuleDefinition]")
- self._data.modules = [
- RunwayModuleDefinitionModel.parse_obj(mod.data) for mod in modules
- ]
+ raise TypeError("modules must be type list[RunwayModuleDefinition]")
+ self._data.modules = [RunwayModuleDefinitionModel.parse_obj(mod.data) for mod in modules]
- def reverse(self):
+ def reverse(self) -> None:
"""Reverse the order of modules and regions."""
self._data.modules.reverse()
for mod in self._data.modules:
@@ -112,7 +110,7 @@ def reverse(self):
prop.reverse()
def set_modules(
- self, modules: List[Union[RunwayModuleDefinition, RunwayModuleDefinitionModel]]
+ self, modules: list[RunwayModuleDefinition | RunwayModuleDefinitionModel]
) -> None:
"""Set the value of modules.
@@ -124,10 +122,8 @@ def set_modules(
"""
if not isinstance(modules, list): # type: ignore
- raise TypeError(
- f"expected List[RunwayModuleDefinition]; got {type(modules)}"
- )
- sanitized: List[RunwayModuleDefinitionModel] = []
+ raise TypeError(f"expected list[RunwayModuleDefinition]; got {type(modules)}")
+ sanitized: list[RunwayModuleDefinitionModel] = []
for i, mod in enumerate(modules):
if isinstance(mod, RunwayModuleDefinition):
sanitized.append(RunwayModuleDefinitionModel.parse_obj(mod.data))
@@ -156,29 +152,23 @@ def _register_variable(self, var_name: str, var_value: Any) -> None:
@overload
@classmethod
- def parse_obj(
- cls, obj: List[Dict[str, Any]]
- ) -> List[RunwayDeploymentDefinition]: ...
+ def parse_obj(cls, obj: list[dict[str, Any]]) -> list[RunwayDeploymentDefinition]: ...
@overload
@classmethod
def parse_obj(
cls,
- obj: Union[
- List[ConfigProperty], Set[ConfigProperty], Tuple[ConfigProperty, ...]
- ],
- ) -> List[RunwayDeploymentDefinition]: ...
+ obj: list[ConfigProperty] | set[ConfigProperty] | tuple[ConfigProperty, ...],
+ ) -> list[RunwayDeploymentDefinition]: ...
@overload
@classmethod
- def parse_obj(
- cls, obj: Union[Dict[str, Any], ConfigProperty]
- ) -> RunwayDeploymentDefinition: ...
+ def parse_obj(cls, obj: dict[str, Any] | ConfigProperty) -> RunwayDeploymentDefinition: ...
@classmethod
def parse_obj( # type: ignore
cls, obj: Any
- ) -> Union[RunwayDeploymentDefinition, List[RunwayDeploymentDefinition]]:
+ ) -> RunwayDeploymentDefinition | list[RunwayDeploymentDefinition]:
"""Parse a python object into this class.
Args:
@@ -186,7 +176,5 @@ def parse_obj( # type: ignore
"""
if isinstance(obj, (list, set, tuple)):
- return [
- cls(RunwayDeploymentDefinitionModel.parse_obj(o)) for o in obj # type: ignore
- ]
+ return [cls(RunwayDeploymentDefinitionModel.parse_obj(o)) for o in obj] # type: ignore
return cls(RunwayDeploymentDefinitionModel.parse_obj(obj))
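# Sketch of the overloaded parse_obj dispatch above: list/set/tuple input
# yields a list of definitions, while a single mapping yields one definition.
# The import path and field values here are assumptions for illustration.
from runway.config.components.runway import RunwayDeploymentDefinition

many = RunwayDeploymentDefinition.parse_obj(
    [{"name": "example", "regions": ["us-east-1"], "modules": ["sampleapp.cfn"]}]
)  # -> list[RunwayDeploymentDefinition]
one = RunwayDeploymentDefinition.parse_obj(
    {"name": "example", "regions": ["us-east-1"], "modules": ["sampleapp.cfn"]}
)  # -> RunwayDeploymentDefinition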
diff --git a/runway/config/components/runway/_module_def.py b/runway/config/components/runway/_module_def.py
index cfb31c5ae..b604f4849 100644
--- a/runway/config/components/runway/_module_def.py
+++ b/runway/config/components/runway/_module_def.py
@@ -2,14 +2,15 @@
from __future__ import annotations
-from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any
from ....variables import Variable
from ...models.runway import RunwayModuleDefinitionModel
from .base import ConfigComponentDefinition
if TYPE_CHECKING:
+ from pathlib import Path
+
from ...models.runway import (
RunwayEnvironmentsType,
RunwayEnvVarsType,
@@ -20,18 +21,18 @@
class RunwayModuleDefinition(ConfigComponentDefinition):
"""Runway module definition."""
- class_path: Optional[str]
+ class_path: str | None
environments: RunwayEnvironmentsType
env_vars: RunwayEnvVarsType
name: str
- options: Dict[str, Any]
- parameters: Dict[str, Any]
- path: Optional[Union[str, Path]]
- tags: List[str]
- type: Optional[RunwayModuleTypeTypeDef]
+ options: dict[str, Any]
+ parameters: dict[str, Any]
+ path: str | Path | None
+ tags: list[str]
+ type: RunwayModuleTypeTypeDef | None
_data: RunwayModuleDefinitionModel
- _supports_vars: Tuple[str, ...] = (
+ _supports_vars: tuple[str, ...] = (
"class_path",
"env_vars",
"environments",
@@ -45,14 +46,14 @@ def __init__(self, data: RunwayModuleDefinitionModel) -> None:
super().__init__(data)
@property
- def child_modules(self) -> List[RunwayModuleDefinition]:
+ def child_modules(self) -> list[RunwayModuleDefinition]:
"""List of child modules."""
return [RunwayModuleDefinition(child) for child in self._data.parallel]
@child_modules.setter
def child_modules(
self,
- modules: List[Union[RunwayModuleDefinition, RunwayModuleDefinitionModel]], # type: ignore
+ modules: list[RunwayModuleDefinition | RunwayModuleDefinitionModel], # type: ignore
) -> None:
"""Set the value of the property.
@@ -64,10 +65,8 @@ def child_modules(
"""
if not isinstance(modules, list): # type: ignore
- raise TypeError(
- f"expected List[RunwayModuleDefinition]; got {type(modules)}"
- )
- sanitized: List[RunwayModuleDefinitionModel] = []
+ raise TypeError(f"expected list[RunwayModuleDefinition]; got {type(modules)}")
+ sanitized: list[RunwayModuleDefinitionModel] = []
for i, mod in enumerate(modules):
if isinstance(mod, RunwayModuleDefinition):
sanitized.append(RunwayModuleDefinitionModel.parse_obj(mod.data))
@@ -89,12 +88,10 @@ def is_parent(self) -> bool:
def menu_entry(self) -> str:
"""Return menu entry representation of this module."""
if self.is_parent:
- return (
- f"{self.name} [{', '.join([c.menu_entry for c in self.child_modules])}]"
- )
+ return f"{self.name} [{', '.join([c.menu_entry for c in self.child_modules])}]"
return self.name
- def reverse(self):
+ def reverse(self) -> None:
"""Reverse the order of child/parallel modules."""
self._data.parallel.reverse()
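# Sketch of the parent/child module behavior above: a module with parallel
# entries is a "parent" and its menu_entry nests its children. Field values
# and the printed name are illustrative assumptions.
from runway.config.components.runway import RunwayModuleDefinition
from runway.config.models.runway import RunwayModuleDefinitionModel

parent = RunwayModuleDefinition(
    RunwayModuleDefinitionModel(parallel=["child-a.cfn", "child-b.cfn"])
)
assert parent.is_parent
print(parent.menu_entry)  # e.g. "parallel_parent [child-a.cfn, child-b.cfn]"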
diff --git a/runway/config/components/runway/_test_def.py b/runway/config/components/runway/_test_def.py
index 1b537d871..3ac48aa42 100644
--- a/runway/config/components/runway/_test_def.py
+++ b/runway/config/components/runway/_test_def.py
@@ -1,5 +1,6 @@
"""Runway config test definition."""
+# ruff: noqa: UP006, UP035
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Dict, Generic, Tuple, TypeVar, Union
@@ -46,8 +47,7 @@ def __init__(self, data: _DataModel) -> None:
"""Instantiate class."""
super().__init__(data)
- # error present on python3.7
- def __new__( # pylint: disable=arguments-differ
+ def __new__(
cls,
data: _DataModel,
) -> RunwayTestDefinition[_DataModel]:
@@ -60,11 +60,11 @@ def __new__( # pylint: disable=arguments-differ
if cls is not RunwayTestDefinition:
return super().__new__(cls)
if isinstance(data, CfnLintRunwayTestDefinitionModel):
- return super().__new__(CfnLintRunwayTestDefinition)
+ return super().__new__(CfnLintRunwayTestDefinition) # type: ignore
if isinstance(data, ScriptRunwayTestDefinitionModel):
- return super().__new__(ScriptRunwayTestDefinition)
+ return super().__new__(ScriptRunwayTestDefinition) # type: ignore
if isinstance(data, YamlLintRunwayTestDefinitionModel):
- return super().__new__(YamlLintRunwayTestDefinition)
+ return super().__new__(YamlLintRunwayTestDefinition) # type: ignore
raise TypeError(
"expected data of type CfnLintRunwayTestDefinitionModel, "
"ScriptRunwayTestDefinitionModel, or YamlLintRunwayTestDefinitionModel; "
@@ -96,9 +96,7 @@ def parse_obj(cls, obj: Any) -> RunwayTestDefinition[_DataModel]:
return cls(RunwayTestDefinitionModel.parse_obj(obj))
-class CfnLintRunwayTestDefinition(
- RunwayTestDefinition[CfnLintRunwayTestDefinitionModel]
-):
+class CfnLintRunwayTestDefinition(RunwayTestDefinition[CfnLintRunwayTestDefinitionModel]):
"""Runway cfn-lint test definition."""
args: CfnLintRunwayTestArgs
@@ -140,9 +138,7 @@ def parse_obj(cls, obj: Any) -> ScriptRunwayTestDefinition:
return cls(ScriptRunwayTestDefinitionModel.parse_obj(obj))
-class YamlLintRunwayTestDefinition(
- RunwayTestDefinition[YamlLintRunwayTestDefinitionModel]
-):
+class YamlLintRunwayTestDefinition(RunwayTestDefinition[YamlLintRunwayTestDefinitionModel]):
"""Runway yamllint test definition."""
type: Literal["yamllint"] = "yamllint"
diff --git a/runway/config/components/runway/_variables_def.py b/runway/config/components/runway/_variables_def.py
index 8d02f2630..a8b14d218 100644
--- a/runway/config/components/runway/_variables_def.py
+++ b/runway/config/components/runway/_variables_def.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, cast
+from typing import TYPE_CHECKING, Any, ClassVar, cast
import yaml
@@ -32,9 +32,8 @@ def __init__(self, data: RunwayVariablesDefinitionModel) -> None:
data = RunwayVariablesDefinitionModel(**{**data.dict(), **self.__load_file()})
super().__init__(**data.dict(exclude={"file_path", "sys_path"}))
- def __load_file(self) -> Dict[str, Any]:
+ def __load_file(self) -> dict[str, Any]:
"""Load a variables file."""
- # pylint: disable=protected-access
if self._file_path:
if self._file_path.is_file():
return yaml.safe_load(self._file_path.read_text())
@@ -52,7 +51,7 @@ def __load_file(self) -> Dict[str, Any]:
"could not find %s in the current directory; continuing without a variables file",
" or ".join(self.default_names),
)
- self.__class__._has_notified_missing_file = True
+ self.__class__._has_notified_missing_file = True # noqa: SLF001
return {}
@classmethod
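# Standalone sketch of the notify-once class flag above: the missing-file
# notice is logged a single time per process by tracking state on the class
# itself rather than on instances (illustrative names).
from __future__ import annotations

import logging
from typing import Any, ClassVar

LOGGER = logging.getLogger(__name__)


class VariablesLoader:
    _has_notified_missing_file: ClassVar[bool] = False

    def load(self) -> dict[str, Any]:
        if not self._has_notified_missing_file:
            LOGGER.info("could not find a variables file; continuing without one")
            type(self)._has_notified_missing_file = True
        return {}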
diff --git a/runway/config/components/runway/base.py b/runway/config/components/runway/base.py
index d0edec99a..68973c99f 100644
--- a/runway/config/components/runway/base.py
+++ b/runway/config/components/runway/base.py
@@ -4,7 +4,7 @@
import logging
from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, cast
+from typing import TYPE_CHECKING, Any, Optional, cast
from ...._logging import PrefixAdaptor
from ....exceptions import UnresolvedVariable
@@ -23,9 +23,9 @@ class ConfigComponentDefinition(ABC):
"""Base class for Runway config components."""
_data: ConfigProperty
- _pre_process_vars: Tuple[str, ...] = ()
- _supports_vars: Tuple[str, ...] = ()
- _vars: Dict[str, Variable] = {}
+ _pre_process_vars: tuple[str, ...] = ()
+ _supports_vars: tuple[str, ...] = ()
+ _vars: dict[str, Variable] = {}
def __init__(self, data: ConfigProperty) -> None:
"""Instantiate class."""
@@ -37,7 +37,7 @@ def __init__(self, data: ConfigProperty) -> None:
self._register_variable(var, self._data[var])
@property
- def data(self) -> Dict[str, Any]:
+ def data(self) -> dict[str, Any]:
"""Return the underlying data as a dict."""
return self._data.dict()
@@ -96,9 +96,7 @@ def _register_variable(self, var_name: str, var_value: Any) -> None:
as a variable if it contains a lookup.
"""
- self._vars[var_name] = Variable(
- name=var_name, value=var_value, variable_type="runway"
- )
+ self._vars[var_name] = Variable(name=var_name, value=var_value, variable_type="runway")
@classmethod
@abstractmethod
@@ -117,7 +115,7 @@ def __contains__(self, name: str) -> bool:
return name in self.__dict__
return self._data.__contains__(name)
- def __getattr__(self, name: str):
+ def __getattr__(self, name: str) -> Any:
"""Implement evaluation of self.name.
Args:
@@ -134,11 +132,9 @@ def __getattr__(self, name: str):
raise UnresolvedVariable(self._vars[name])
if name in super().__getattribute__("_data"):
return super().__getattribute__("_data").__getattribute__(name)
- raise AttributeError(
- f"{self.__class__.__name__} object has not attribute {name}"
- )
+ raise AttributeError(f"{self.__class__.__name__} object has not attribute {name}")
- def __getitem__(self, name: str):
+ def __getitem__(self, name: str) -> Any:
"""Implement evaluation of self[name].
Args:
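# Standalone sketch of the __getattr__ delegation above: lookups that miss
# on the wrapper fall through to the wrapped data object, and anything still
# missing raises AttributeError with the corrected message (illustrative).
from typing import Any


class Wrapper:
    def __init__(self, data: Any) -> None:
        self._data = data

    def __getattr__(self, name: str) -> Any:
        # only called when normal attribute lookup has already failed
        data = super().__getattribute__("_data")
        if hasattr(data, name):
            return getattr(data, name)
        raise AttributeError(f"{self.__class__.__name__} object has no attribute {name}")


class Data:
    value = 42


assert Wrapper(Data()).value == 42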
diff --git a/runway/config/models/cfngin/__init__.py b/runway/config/models/cfngin/__init__.py
index e227ab563..1d37c938f 100644
--- a/runway/config/models/cfngin/__init__.py
+++ b/runway/config/models/cfngin/__init__.py
@@ -1,6 +1,6 @@
"""CFNgin config models."""
-# pylint: disable=no-self-argument
+# ruff: noqa: UP006, UP035
from __future__ import annotations
import copy
@@ -9,11 +9,9 @@
from typing import (
TYPE_CHECKING,
Any,
- Callable,
Dict,
List,
Optional,
- Type,
TypeVar,
Union,
cast,
@@ -91,18 +89,14 @@ class CfnginStackDefinitionModel(ConfigProperty):
title="Stack Description",
description="A description that will be applied to the stack in CloudFormation.",
)
- enabled: bool = Field(
- default=True, description="Whether the stack will be deployed."
- )
+ enabled: bool = Field(default=True, description="Whether the stack will be deployed.")
in_progress_behavior: Optional[Literal["wait"]] = Field(
default=None,
title="Stack In Progress Behavior",
description="The action to take when a stack's status is "
"CREATE_IN_PROGRESS or UPDATE_IN_PROGRESS when trying to update it.",
)
- locked: bool = Field(
- default=False, description="Whether to limit updating of the stack."
- )
+ locked: bool = Field(default=False, description="Whether to limit updating of the stack.")
name: str = Field(..., title="Stack Name", description="Name of the stack.")
protected: bool = Field(
default=False,
@@ -153,7 +147,7 @@ class Config(ConfigProperty.Config):
title = "CFNgin Stack Definition"
@staticmethod
- def schema_extra(schema: Dict[str, Any]) -> None: # type: ignore
+ def schema_extra(schema: dict[str, Any]) -> None: # type: ignore
"""Process the schema after it has been generated.
Schema is modified in place. Return value is ignored.
@@ -161,9 +155,7 @@ def schema_extra(schema: Dict[str, Any]) -> None: # type: ignore
https://pydantic-docs.helpmanual.io/usage/schema/#schema-customization
"""
- schema["description"] = (
- "Define CloudFormation stacks using a Blueprint or Template."
- )
+ schema["description"] = "Define CloudFormation stacks using a Blueprint or Template."
# prevents a false error when defining stacks as a dict
schema.get("required", ["name"]).remove("name")
@@ -175,30 +167,23 @@ def schema_extra(schema: Dict[str, Any]) -> None: # type: ignore
{"type": "string", "pattern": utils.CFNGIN_LOOKUP_STRING_REGEX},
]
- _resolve_path_fields = cast(
- "classmethod[Callable[..., Any]]",
- validator("stack_policy_path", "template_path", allow_reuse=True)(
- utils.resolve_path_field
- ),
- )
+ _resolve_path_fields = validator( # pyright: ignore[reportUnknownVariableType]
+ "stack_policy_path", "template_path", allow_reuse=True
+ )(utils.resolve_path_field)
@root_validator(pre=True)
- def _validate_class_and_template(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ def _validate_class_and_template(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
"""Validate class_path and template_path are not both provided."""
if values.get("class_path") and values.get("template_path"):
raise ValueError("only one of class_path or template_path can be defined")
return values
@root_validator(pre=True)
- def _validate_class_or_template(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ def _validate_class_or_template(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
"""Ensure that either class_path or template_path is defined."""
# if the stack is disabled or locked, it is ok that these are missing
required = values.get("enabled", True) and not values.get("locked", False)
- if (
- not values.get("class_path")
- and not values.get("template_path")
- and required
- ):
+ if not values.get("class_path") and not values.get("template_path") and required:
raise ValueError("either class_path or template_path must be defined")
return values
@@ -223,7 +208,7 @@ class CfnginConfigDefinitionModel(ConfigProperty):
title="CFNgin Cache Directory",
description="Path to a local directory that CFNgin will use for local caching.",
)
- log_formats: Dict[str, str] = Field( # TODO create model
+ log_formats: Dict[str, str] = Field( # TODO (kyle): create model
default={}, description="Customize log message formatting by log level."
)
lookups: Dict[str, str] = Field(
@@ -246,9 +231,7 @@ class CfnginConfigDefinitionModel(ConfigProperty):
)
package_sources: CfnginPackageSourcesDefinitionModel = Field(
default=CfnginPackageSourcesDefinitionModel(),
- description=CfnginPackageSourcesDefinitionModel.Config.schema_extra[
- "description"
- ],
+ description=CfnginPackageSourcesDefinitionModel.Config.schema_extra["description"],
)
persistent_graph_key: Optional[str] = Field(
default=None,
@@ -304,17 +287,14 @@ class Config(ConfigProperty.Config):
schema_extra = {"description": "Configuration file for Runway's CFNgin."}
title = "CFNgin Config File"
- _resolve_path_fields = cast(
- "classmethod[Callable[..., Any]]",
- validator("cfngin_cache_dir", "sys_path", allow_reuse=True)(
- utils.resolve_path_field
- ),
- )
+ _resolve_path_fields = validator( # pyright: ignore[reportUnknownVariableType]
+ "cfngin_cache_dir", "sys_path", allow_reuse=True
+ )(utils.resolve_path_field)
@validator("post_deploy", "post_destroy", "pre_deploy", "pre_destroy", pre=True)
def _convert_hook_definitions(
- cls, v: Union[Dict[str, Any], List[Dict[str, Any]]]
- ) -> List[Dict[str, Any]]:
+ cls, v: Union[dict[str, Any], list[dict[str, Any]]] # noqa: N805
+ ) -> list[dict[str, Any]]:
"""Convert hooks defined as a dict to a list."""
if isinstance(v, list):
return v
@@ -322,12 +302,12 @@ def _convert_hook_definitions(
@validator("stacks", pre=True)
def _convert_stack_definitions(
- cls, v: Union[Dict[str, Any], List[Dict[str, Any]]]
- ) -> List[Dict[str, Any]]:
+ cls, v: Union[dict[str, Any], list[dict[str, Any]]] # noqa: N805
+ ) -> list[dict[str, Any]]:
"""Convert stacks defined as a dict to a list."""
if isinstance(v, list):
return v
- result: List[Dict[str, Any]] = []
+ result: list[dict[str, Any]] = []
for name, stack in copy.deepcopy(v).items():
stack["name"] = name
result.append(stack)
@@ -335,8 +315,8 @@ def _convert_stack_definitions(
@validator("stacks")
def _validate_unique_stack_names(
- cls, stacks: List[CfnginStackDefinitionModel]
- ) -> List[CfnginStackDefinitionModel]:
+ cls, stacks: list[CfnginStackDefinitionModel] # noqa: N805
+ ) -> list[CfnginStackDefinitionModel]:
"""Validate that each stack has a unique name."""
stack_names = [stack.name for stack in stacks]
if len(set(stack_names)) != len(stack_names):
@@ -347,21 +327,19 @@ def _validate_unique_stack_names(
@classmethod
def parse_file(
- cls: Type[Model],
+ cls: type[Model],
path: Union[str, Path],
*,
- content_type: Optional[str] = None,
+ content_type: str | None = None,
encoding: str = "utf8",
- proto: Optional[Protocol] = None,
+ proto: Protocol | None = None,
allow_pickle: bool = False,
) -> Model:
"""Parse a file."""
return cast(
"Model",
cls.parse_raw(
- Path(path).read_text(
- encoding=locale.getpreferredencoding(do_setlocale=False)
- ),
+ Path(path).read_text(encoding=locale.getpreferredencoding(do_setlocale=False)),
content_type=content_type, # type: ignore
encoding=encoding,
proto=proto, # type: ignore
@@ -371,13 +349,13 @@ def parse_file(
@classmethod
def parse_raw(
- cls: Type[Model],
+ cls: type[Model],
b: Union[bytes, str],
*,
- content_type: Optional[str] = None, # pylint: disable=unused-argument
- encoding: str = "utf8", # pylint: disable=unused-argument
- proto: Optional[Protocol] = None, # pylint: disable=unused-argument
- allow_pickle: bool = False, # pylint: disable=unused-argument
+ content_type: str | None = None, # noqa: ARG003
+ encoding: str = "utf8", # noqa: ARG003
+ proto: Protocol | None = None, # noqa: ARG003
+ allow_pickle: bool = False, # noqa: ARG003
) -> Model:
"""Parse raw data."""
return cast("Model", cls.parse_obj(yaml.safe_load(b)))
diff --git a/runway/config/models/cfngin/_package_sources.py b/runway/config/models/cfngin/_package_sources.py
index b50309c8e..4221d0bee 100644
--- a/runway/config/models/cfngin/_package_sources.py
+++ b/runway/config/models/cfngin/_package_sources.py
@@ -1,6 +1,6 @@
"""CFNgin package source models."""
-# pylint: disable=no-self-argument
+# ruff: noqa: UP006, UP035
from __future__ import annotations
from typing import Any, Dict, List, Optional
@@ -38,9 +38,7 @@ class GitCfnginPackageSourceDefinitionModel(ConfigProperty):
default=[],
description="Array of paths relative to the root of the package source to add to $PATH.",
)
- tag: Optional[str] = Field(
- default=None, title="Git Tag", examples=["1.0.0", "v1.0.0"]
- )
+ tag: Optional[str] = Field(default=None, title="Git Tag", examples=["1.0.0", "v1.0.0"])
uri: str = Field(
...,
title="Git Repository URI",
@@ -58,7 +56,7 @@ class Config(ConfigProperty.Config):
title = "CFNgin Git Repository Package Source Definition"
@root_validator
- def _validate_one_ref(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ def _validate_one_ref(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
"""Ensure that only one ref is defined."""
ref_keys = ["branch", "commit", "tag"]
count_ref_defs = sum(bool(values.get(i)) for i in ref_keys)
@@ -166,23 +164,17 @@ class CfnginPackageSourcesDefinitionModel(ConfigProperty):
git: List[GitCfnginPackageSourceDefinitionModel] = Field(
default=[],
title="CFNgin Git Repository Package Source Definitions",
- description=GitCfnginPackageSourceDefinitionModel.Config.schema_extra[
- "description"
- ],
+ description=GitCfnginPackageSourceDefinitionModel.Config.schema_extra["description"],
)
local: List[LocalCfnginPackageSourceDefinitionModel] = Field(
default=[],
title="CFNgin Local Package Source Definitions",
- description=LocalCfnginPackageSourceDefinitionModel.Config.schema_extra[
- "description"
- ],
+ description=LocalCfnginPackageSourceDefinitionModel.Config.schema_extra["description"],
)
s3: List[S3CfnginPackageSourceDefinitionModel] = Field(
default=[],
title="CFNgin S3 Package Source Definitions",
- description=S3CfnginPackageSourceDefinitionModel.Config.schema_extra[
- "description"
- ],
+ description=S3CfnginPackageSourceDefinitionModel.Config.schema_extra["description"],
)
class Config(ConfigProperty.Config):
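# Standalone sketch of the _validate_one_ref rule above: a git package
# source may pin at most one of branch, commit, or tag (pydantic v1;
# illustrative model).
from typing import Any, Dict, Optional

from pydantic import BaseModel, root_validator


class GitSource(BaseModel):
    branch: Optional[str] = None
    commit: Optional[str] = None
    tag: Optional[str] = None

    @root_validator
    def _validate_one_ref(cls, values: Dict[str, Any]) -> Dict[str, Any]:  # noqa: N805
        ref_keys = ["branch", "commit", "tag"]
        if sum(bool(values.get(i)) for i in ref_keys) > 1:
            raise ValueError(f"only one of {ref_keys} can be defined")
        return values


GitSource(tag="v1.0.0")  # ok; also passing branch= would raise ValueError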
diff --git a/runway/config/models/runway/__init__.py b/runway/config/models/runway/__init__.py
index 3d919d723..daef9a546 100644
--- a/runway/config/models/runway/__init__.py
+++ b/runway/config/models/runway/__init__.py
@@ -1,8 +1,8 @@
"""Runway config models."""
-# pylint: disable=no-self-argument
from __future__ import annotations
+# ruff: noqa: UP006, UP035
import locale
import logging
from pathlib import Path
@@ -11,10 +11,8 @@
Any,
Callable,
Dict,
- Generator,
List,
Optional,
- Type,
TypeVar,
Union,
cast,
@@ -39,6 +37,8 @@
)
if TYPE_CHECKING:
+ from collections.abc import Generator
+
from pydantic import BaseModel
Model = TypeVar("Model", bound=BaseModel)
@@ -110,7 +110,7 @@ class Config(ConfigProperty.Config):
"""Model configuration."""
extra = Extra.forbid
- schema_extra: Dict[str, Any] = {
+ schema_extra: dict[str, Any] = {
"description": "Used to defined a role to assume while Runway is "
"processing each module.",
"examples": [
@@ -126,13 +126,13 @@ class Config(ConfigProperty.Config):
title = "Runway Deployment.assume_role Definition"
@validator("arn")
- def _convert_arn_null_value(cls, v: Optional[str]) -> Optional[str]:
+ def _convert_arn_null_value(cls, v: Optional[str]) -> Optional[str]: # noqa: N805
"""Convert a "nul" string into type(None)."""
null_strings = ["null", "none", "undefined"]
return None if isinstance(v, str) and v.lower() in null_strings else v
@validator("duration", pre=True)
- def _validate_duration(cls, v: Union[int, str]) -> Union[int, str]:
+ def _validate_duration(cls, v: Union[int, str]) -> Union[int, str]: # noqa: N805
"""Validate duration is within the range allowed by AWS."""
if isinstance(v, str):
return v
@@ -142,12 +142,9 @@ def _validate_duration(cls, v: Union[int, str]) -> Union[int, str]:
raise ValueError("duration must be less than or equal to 43,200")
return v
- _validate_string_is_lookup = cast(
- "classmethod[Callable[..., Any]]",
- validator("duration", allow_reuse=True, pre=True)(
- utils.validate_string_is_lookup
- ),
- )
+ _validate_string_is_lookup = validator( # pyright: ignore[reportUnknownVariableType]
+ "duration", allow_reuse=True, pre=True
+ )(utils.validate_string_is_lookup)
class RunwayDeploymentRegionDefinitionModel(ConfigProperty):
@@ -172,12 +169,9 @@ class Config(ConfigProperty.Config):
}
title = "Runway Deployment.regions Definition"
- _validate_string_is_lookup = cast(
- "classmethod[Callable[..., Any]]",
- validator("parallel", allow_reuse=True, pre=True)(
- utils.validate_string_is_lookup
- ),
- )
+ _validate_string_is_lookup = validator( # pyright: ignore[reportUnknownVariableType]
+ "parallel", allow_reuse=True, pre=True
+ )(utils.validate_string_is_lookup)
class RunwayDeploymentDefinitionModel(ConfigProperty):
@@ -198,10 +192,10 @@ class RunwayDeploymentDefinitionModel(ConfigProperty):
assume_role: Union[str, RunwayAssumeRoleDefinitionModel] = Field(
default={},
description="Assume a role when processing the deployment. (supports lookups)",
- examples=["arn:aws:iam::123456789012:role/name"]
- + cast(
- List[Any], RunwayAssumeRoleDefinitionModel.Config.schema_extra["examples"]
- ),
+ examples=[
+ "arn:aws:iam::123456789012:role/name",
+ *cast("list[Any]", RunwayAssumeRoleDefinitionModel.Config.schema_extra["examples"]),
+ ],
)
env_vars: RunwayEnvVarsUnresolvedType = Field(
default={},
@@ -283,7 +277,7 @@ class Config(ConfigProperty.Config):
title = "Runway Deployment Definition"
@staticmethod
- def schema_extra(schema: Dict[str, Any]) -> None: # type: ignore
+ def schema_extra(schema: dict[str, Any]) -> None: # type: ignore
"""Process the schema after it has been generated.
Schema is modified in place. Return value is ignored.
@@ -302,10 +296,10 @@ def schema_extra(schema: Dict[str, Any]) -> None: # type: ignore
]
@root_validator(pre=True)
- def _convert_simple_module(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ def _convert_simple_module(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
"""Convert simple modules to dicts."""
modules = values.get("modules", [])
- result: List[Dict[str, Any]] = []
+ result: List[dict[str, Any]] = []
for module in modules:
if isinstance(module, str):
result.append({"path": module})
@@ -315,7 +309,7 @@ def _convert_simple_module(cls, values: Dict[str, Any]) -> Dict[str, Any]:
return values
@root_validator(pre=True)
- def _validate_regions(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ def _validate_regions(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
"""Validate & simplify regions."""
raw_regions = values.get("regions", [])
parallel_regions = values.get("parallel_regions", [])
@@ -340,19 +334,16 @@ def _validate_regions(cls, values: Dict[str, Any]) -> Dict[str, Any]:
values["parallel_regions"] = regions.parallel
return values
- _validate_string_is_lookup = cast(
- "classmethod[Callable[..., Any]]",
- validator(
- "env_vars",
- "environments",
- "module_options",
- "parallel_regions",
- "parameters",
- "regions",
- allow_reuse=True,
- pre=True,
- )(utils.validate_string_is_lookup),
- )
+ _validate_string_is_lookup = validator( # pyright: ignore[reportUnknownVariableType]
+ "env_vars",
+ "environments",
+ "module_options",
+ "parallel_regions",
+ "parameters",
+ "regions",
+ allow_reuse=True,
+ pre=True,
+ )(utils.validate_string_is_lookup)
class RunwayFutureDefinitionModel(ConfigProperty):
@@ -457,7 +448,7 @@ class Config(ConfigProperty.Config):
use_enum_values = True
@root_validator(pre=True)
- def _validate_name(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ def _validate_name(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
"""Validate module name."""
if "name" in values:
return values
@@ -470,7 +461,7 @@ def _validate_name(cls, values: Dict[str, Any]) -> Dict[str, Any]:
return values
@root_validator(pre=True)
- def _validate_path(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ def _validate_path(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
"""Validate path and sets a default value if needed."""
if not values.get("path") and not values.get("parallel"):
values["path"] = Path.cwd()
@@ -478,12 +469,12 @@ def _validate_path(cls, values: Dict[str, Any]) -> Dict[str, Any]:
@validator("parallel", pre=True)
def _validate_parallel(
- cls, v: List[Union[Dict[str, Any], str]], values: Dict[str, Any]
- ) -> List[Dict[str, Any]]:
+ cls, v: List[Union[dict[str, Any], str]], values: dict[str, Any] # noqa: N805
+ ) -> List[dict[str, Any]]:
"""Validate parallel."""
if v and values.get("path"):
raise ValueError("only one of parallel or path can be defined")
- result: List[Dict[str, Any]] = []
+ result: List[dict[str, Any]] = []
for mod in v:
if isinstance(mod, str):
result.append({"path": mod})
@@ -491,18 +482,15 @@ def _validate_parallel(
result.append(mod)
return result
- # TODO add regex to schema
- _validate_string_is_lookup = cast(
- "classmethod[Callable[..., Any]]",
- validator(
- "env_vars",
- "environments",
- "options",
- "parameters",
- allow_reuse=True,
- pre=True,
- )(utils.validate_string_is_lookup),
- )
+ # TODO (kyle): add regex to schema
+ _validate_string_is_lookup = validator( # pyright: ignore[reportUnknownVariableType]
+ "env_vars",
+ "environments",
+ "options",
+ "parameters",
+ allow_reuse=True,
+ pre=True,
+ )(utils.validate_string_is_lookup)
# https://pydantic-docs.helpmanual.io/usage/postponed_annotations/#self-referencing-models
@@ -534,10 +522,9 @@ class Config(ConfigProperty.Config):
}
title = "Runway Variables Definition"
- _convert_null_values = cast(
- "classmethod[Callable[..., Any]]",
- validator("*", allow_reuse=True)(utils.convert_null_values),
- )
+ _convert_null_values = validator( # pyright: ignore[reportUnknownVariableType]
+ "*", allow_reuse=True
+ )(utils.convert_null_values)
class RunwayVersionField(SpecifierSet):
@@ -553,7 +540,7 @@ def __get_validators__(cls) -> Generator[Callable[..., Any], None, None]:
yield cls._convert_value
@classmethod
- def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+ def __modify_schema__(cls, field_schema: dict[str, Any]) -> None:
"""Mutate the field schema in place.
This is only called when output JSON schema from a model.
@@ -622,7 +609,7 @@ class Config(ConfigProperty.Config):
validate_assignment = True
@root_validator(pre=True)
- def _add_deployment_names(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ def _add_deployment_names(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
"""Add names to deployments that are missing them."""
deployments = values.get("deployments", [])
for i, deployment in enumerate(deployments):
@@ -633,21 +620,19 @@ def _add_deployment_names(cls, values: Dict[str, Any]) -> Dict[str, Any]:
@classmethod
def parse_file(
- cls: Type[Model],
+ cls: type[Model],
path: Union[str, Path],
*,
- content_type: Optional[str] = None,
+ content_type: str | None = None,
encoding: str = "utf8",
- proto: Optional[Protocol] = None,
+ proto: Protocol | None = None,
allow_pickle: bool = False,
) -> Model:
"""Parse a file."""
return cast(
"Model",
cls.parse_raw(
- Path(path).read_text(
- encoding=locale.getpreferredencoding(do_setlocale=False)
- ),
+ Path(path).read_text(encoding=locale.getpreferredencoding(do_setlocale=False)),
content_type=content_type, # type: ignore
encoding=encoding,
proto=proto, # type: ignore
@@ -657,13 +642,13 @@ def parse_file(
@classmethod
def parse_raw(
- cls: Type[Model],
+ cls: type[Model],
b: Union[bytes, str],
*,
- content_type: Optional[str] = None, # pylint: disable=unused-argument
- encoding: str = "utf8", # pylint: disable=unused-argument
- proto: Optional[Protocol] = None, # pylint: disable=unused-argument
- allow_pickle: bool = False, # pylint: disable=unused-argument
+ content_type: str | None = None, # noqa: ARG003
+ encoding: str = "utf8", # noqa: ARG003
+ proto: Protocol | None = None, # noqa: ARG003
+ allow_pickle: bool = False, # noqa: ARG003
) -> Model:
"""Parse raw data."""
return cast("Model", cls.parse_obj(yaml.safe_load(b)))
diff --git a/runway/config/models/runway/_builtin_tests.py b/runway/config/models/runway/_builtin_tests.py
index db66e3160..7bbd9d124 100644
--- a/runway/config/models/runway/_builtin_tests.py
+++ b/runway/config/models/runway/_builtin_tests.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+# ruff: noqa: UP006, UP035
from typing import TYPE_CHECKING, Any, Dict, List, Union, cast
from pydantic import Extra, Field, validator
@@ -40,7 +41,7 @@ class Config(ConfigProperty.Config):
title = "Runway Test Definition"
use_enum_values = True
- def __new__(cls, **kwargs: Any) -> RunwayTestDefinitionModel:
+ def __new__(cls, **kwargs: Any) -> RunwayTestDefinitionModel: # noqa: PYI034
"""Create a new instance of a class.
Returns:
@@ -50,20 +51,17 @@ def __new__(cls, **kwargs: Any) -> RunwayTestDefinitionModel:
test_type = kwargs.get("type")
if cls is RunwayTestDefinitionModel:
if test_type == "cfn-lint":
- return super().__new__(CfnLintRunwayTestDefinitionModel)
+ return super().__new__(CfnLintRunwayTestDefinitionModel) # type: ignore
if test_type == "script":
- return super().__new__(ScriptRunwayTestDefinitionModel)
+ return super().__new__(ScriptRunwayTestDefinitionModel) # type: ignore
if test_type == "yamllint":
- return super().__new__(YamlLintRunwayTestDefinitionModel)
- return super().__new__(cls)
+ return super().__new__(YamlLintRunwayTestDefinitionModel) # type: ignore
+ return super().__new__(cls) # type: ignore
- # TODO add regex to schema
- _validate_string_is_lookup = cast(
- "classmethod[Callable[..., Any]]",
- validator("args", "required", allow_reuse=True, pre=True)(
- utils.validate_string_is_lookup
- ),
- )
+ # TODO (kyle): add regex to schema
+ _validate_string_is_lookup = validator( # pyright: ignore[reportUnknownVariableType]
+ "args", "required", allow_reuse=True, pre=True
+ )(utils.validate_string_is_lookup)
class CfnLintRunwayTestArgs(ConfigProperty):
@@ -84,13 +82,10 @@ class Config(ConfigProperty.Config):
}
title = "cfn-lint Runway Test Arguments"
- # TODO add regex to schema
- _validate_string_is_lookup = cast(
- "classmethod[Callable[..., Any]]",
- validator("cli_args", allow_reuse=True, pre=True)(
- utils.validate_string_is_lookup
- ),
- )
+ # TODO (kyle): add regex to schema
+ _validate_string_is_lookup = validator( # pyright: ignore[reportUnknownVariableType]
+ "cli_args", allow_reuse=True, pre=True
+ )(utils.validate_string_is_lookup)
class CfnLintRunwayTestDefinitionModel(RunwayTestDefinitionModel):
@@ -106,9 +101,7 @@ class CfnLintRunwayTestDefinitionModel(RunwayTestDefinitionModel):
default=False,
description="Whether the test must pass for subsequent tests to be run.",
)
- type: Literal["cfn-lint"] = Field(
- default="cfn-lint", description="The type of test to run."
- )
+ type: Literal["cfn-lint"] = Field(default="cfn-lint", description="The type of test to run.")
class Config(RunwayTestDefinitionModel.Config):
"""Model configuration."""
@@ -135,12 +128,10 @@ class Config(ConfigProperty.Config):
}
title = "Script Runway Test Arguments"
- # TODO add regex to schema
+ # TODO (kyle): add regex to schema
_validate_string_is_lookup = cast(
"classmethod[Callable[..., Any]]",
- validator("commands", allow_reuse=True, pre=True)(
- utils.validate_string_is_lookup
- ),
+ validator("commands", allow_reuse=True, pre=True)(utils.validate_string_is_lookup),
)
@@ -157,9 +148,7 @@ class ScriptRunwayTestDefinitionModel(RunwayTestDefinitionModel):
default=False,
description="Whether the test must pass for subsequent tests to be run.",
)
- type: Literal["script"] = Field(
- default="script", description="The type of test to run."
- )
+ type: Literal["script"] = Field(default="script", description="The type of test to run.")
class Config(RunwayTestDefinitionModel.Config):
"""Model configuration."""
@@ -178,9 +167,7 @@ class YamlLintRunwayTestDefinitionModel(RunwayTestDefinitionModel):
default=False,
description="Whether the test must pass for subsequent tests to be run.",
)
- type: Literal["yamllint"] = Field(
- default="yamllint", description="The type of test to run."
- )
+ type: Literal["yamllint"] = Field(default="yamllint", description="The type of test to run.")
class Config(RunwayTestDefinitionModel.Config):
"""Model configuration."""
diff --git a/runway/config/models/runway/options/cdk.py b/runway/config/models/runway/options/cdk.py
index 182310460..b27db65e6 100644
--- a/runway/config/models/runway/options/cdk.py
+++ b/runway/config/models/runway/options/cdk.py
@@ -1,5 +1,6 @@
"""Runway AWS Cloud Development Kit Module options."""
+# ruff: noqa: UP006, UP035
from __future__ import annotations
from typing import List
diff --git a/runway/config/models/runway/options/serverless.py b/runway/config/models/runway/options/serverless.py
index 81f051916..02567a7db 100644
--- a/runway/config/models/runway/options/serverless.py
+++ b/runway/config/models/runway/options/serverless.py
@@ -1,5 +1,6 @@
"""Runway Serverless Framework Module options."""
+# ruff: noqa: UP006, UP035
from __future__ import annotations
from typing import Any, Dict, List, Optional
diff --git a/runway/config/models/runway/options/terraform.py b/runway/config/models/runway/options/terraform.py
index ade284365..0f049cb5a 100644
--- a/runway/config/models/runway/options/terraform.py
+++ b/runway/config/models/runway/options/terraform.py
@@ -1,9 +1,9 @@
"""Runway Terraform Module options."""
-# pylint: disable=no-self-argument
+# ruff: noqa: UP006, UP035
from __future__ import annotations
-from typing import Dict, List, Optional, Union
+from typing import List, Optional, Union
from pydantic import Extra, Field, validator
@@ -64,8 +64,8 @@ class Config(ConfigProperty.Config):
@validator("args", pre=True)
def _convert_args(
- cls, v: Union[List[str], Dict[str, List[str]]]
- ) -> Dict[str, List[str]]:
+ cls, v: Union[list[str], dict[str, list[str]]] # noqa: N805
+ ) -> dict[str, list[str]]:
"""Convert args from list to dict."""
if isinstance(v, list):
return {"apply": v}
diff --git a/runway/config/models/utils.py b/runway/config/models/utils.py
index 0a8f7fce8..fc774a178 100644
--- a/runway/config/models/utils.py
+++ b/runway/config/models/utils.py
@@ -3,8 +3,10 @@
from __future__ import annotations
import re
-from pathlib import Path
-from typing import Any, Optional
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+ from pathlib import Path
CFNGIN_LOOKUP_STRING_REGEX = r"^\${.*}$"
RUNWAY_LOOKUP_STRING_ERROR = ValueError("field can only be a string if it's a lookup")
@@ -17,7 +19,7 @@ def convert_null_values(v: Any) -> Any:
return None if isinstance(v, str) and v.lower() in null_strings else v
-def resolve_path_field(v: Optional[Path]) -> Optional[Path]:
+def resolve_path_field(v: Path | None) -> Path | None:
"""Resolve sys_path."""
return v.resolve() if v else v
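# Quick illustration of the two helpers above. Note the TYPE_CHECKING-only
# Path import still works because resolve_path_field only *annotates* Path;
# callers pass real Path objects at runtime.
from pathlib import Path

from runway.config.models.utils import convert_null_values, resolve_path_field

assert convert_null_values("null") is None
assert convert_null_values("keep") == "keep"
assert resolve_path_field(None) is None
assert resolve_path_field(Path(".")) == Path.cwd()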
diff --git a/runway/constants.py b/runway/constants.py
index 12c53db5d..a8a4c7693 100644
--- a/runway/constants.py
+++ b/runway/constants.py
@@ -1,8 +1,10 @@
"""Runway constants."""
-from typing import Any, Dict
+from __future__ import annotations
-BOTO3_CREDENTIAL_CACHE: Dict[str, Any] = {}
+from typing import Any
+
+BOTO3_CREDENTIAL_CACHE: dict[str, Any] = {}
"""A global credential cache that can be shared among boto3 sessions.
This is inherently threadsafe thanks to the GIL.
(https://docs.python.org/3/glossary.html#term-global-interpreter-lock)
diff --git a/runway/context/_base.py b/runway/context/_base.py
index 84d3ff2af..992706b33 100644
--- a/runway/context/_base.py
+++ b/runway/context/_base.py
@@ -4,7 +4,7 @@
import logging
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, cast
import boto3
import botocore.exceptions
@@ -30,7 +30,7 @@ class BaseContext(DelCachedPropMixin):
env: DeployEnvironment
"""Object containing information about the environment being deployed to."""
- logger: Union[PrefixAdaptor, RunwayLogger]
+ logger: PrefixAdaptor | RunwayLogger
"""Custom logger."""
sys_info: SystemInfo
@@ -43,8 +43,8 @@ def __init__(
self,
*,
deploy_environment: DeployEnvironment,
- logger: Union[PrefixAdaptor, RunwayLogger] = LOGGER,
- work_dir: Optional[Path] = None,
+ logger: PrefixAdaptor | RunwayLogger = LOGGER,
+ work_dir: Path | None = None,
**_: Any,
) -> None:
"""Instantiate class.
@@ -94,11 +94,11 @@ def is_noninteractive(self) -> bool:
def get_session(
self,
*,
- aws_access_key_id: Optional[str] = None,
- aws_secret_access_key: Optional[str] = None,
- aws_session_token: Optional[str] = None,
- profile: Optional[str] = None,
- region: Optional[str] = None,
+ aws_access_key_id: str | None = None,
+ aws_secret_access_key: str | None = None,
+ aws_session_token: str | None = None,
+ profile: str | None = None,
+ region: str | None = None,
) -> boto3.Session:
"""Create a thread-safe boto3 session.
@@ -125,15 +125,11 @@ def get_session(
region or "default",
)
else: # use explicit values or grab values from env vars
- aws_access_key_id = aws_access_key_id or self.env.vars.get(
- "AWS_ACCESS_KEY_ID"
- )
+ aws_access_key_id = aws_access_key_id or self.env.vars.get("AWS_ACCESS_KEY_ID")
aws_secret_access_key = aws_secret_access_key or self.env.vars.get(
"AWS_SECRET_ACCESS_KEY"
)
- aws_session_token = aws_session_token or self.env.vars.get(
- "AWS_SESSION_TOKEN"
- )
+ aws_session_token = aws_session_token or self.env.vars.get("AWS_SESSION_TOKEN")
if aws_access_key_id:
self.logger.debug(
'building session with Access Key "%s" in region "%s"',
@@ -151,10 +147,10 @@ def get_session(
cred_provider = session._session.get_component("credential_provider") # type: ignore
provider = cred_provider.get_provider("assume-role") # type: ignore
provider.cache = BOTO3_CREDENTIAL_CACHE
- provider._prompter = ui.getpass
+ provider._prompter = ui.getpass # noqa: SLF001
return session
- # TODO remove after IaC tools support AWS SSO
+ # TODO (kyle): remove after IaC tools support AWS SSO
def _inject_profile_credentials(self) -> None: # cov: ignore
"""Inject AWS credentials into self.env_vars if using an AWS profile.
diff --git a/runway/context/_cfngin.py b/runway/context/_cfngin.py
index c8f386f2b..f4f60a122 100644
--- a/runway/context/_cfngin.py
+++ b/runway/context/_cfngin.py
@@ -3,10 +3,11 @@
from __future__ import annotations
import collections.abc
+import contextlib
import json
import logging
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, MutableMapping, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, cast
from pydantic import BaseModel
@@ -27,6 +28,8 @@
from ._base import BaseContext
if TYPE_CHECKING:
+ from collections.abc import MutableMapping
+
from mypy_boto3_s3.client import S3Client
from .type_defs import PersistentGraphLocation
@@ -34,7 +37,7 @@
LOGGER = cast(RunwayLogger, logging.getLogger(__name__))
-def get_fqn(base_fqn: str, delimiter: str, name: Optional[str] = None) -> str:
+def get_fqn(base_fqn: str, delimiter: str, name: str | None = None) -> str:
"""Return the fully qualified name of an object within this context.
If the name passed already appears to be a fully qualified name, it
@@ -70,32 +73,32 @@ class CfnginContext(BaseContext):
"""
- _persistent_graph_lock_code: Optional[str]
+ _persistent_graph_lock_code: str | None
_persistent_graph_lock_tag: str = "cfngin_lock_code"
- _persistent_graph: Optional[Graph]
+ _persistent_graph: Graph | None
_s3_bucket_verified: bool
bucket_region: str
config: CfnginConfig
config_path: Path
env: DeployEnvironment
- force_stacks: List[str]
- hook_data: Dict[str, Any]
- logger: Union[PrefixAdaptor, RunwayLogger]
+ force_stacks: list[str]
+ hook_data: dict[str, Any]
+ logger: PrefixAdaptor | RunwayLogger
parameters: MutableMapping[str, Any]
- stack_names: List[str]
+ stack_names: list[str]
def __init__(
self,
*,
- config: Optional[CfnginConfig] = None,
- config_path: Optional[Path] = None,
- deploy_environment: Optional[DeployEnvironment] = None,
- force_stacks: Optional[List[str]] = None,
- logger: Union[PrefixAdaptor, RunwayLogger] = LOGGER,
- parameters: Optional[MutableMapping[str, Any]] = None,
- stack_names: Optional[List[str]] = None,
- work_dir: Optional[Path] = None,
+ config: CfnginConfig | None = None,
+ config_path: Path | None = None,
+ deploy_environment: DeployEnvironment | None = None,
+ force_stacks: list[str] | None = None,
+ logger: PrefixAdaptor | RunwayLogger = LOGGER,
+ parameters: MutableMapping[str, Any] | None = None,
+ stack_names: list[str] | None = None,
+ work_dir: Path | None = None,
**_: Any,
) -> None:
"""Instantiate class.
@@ -137,17 +140,14 @@ def base_fqn(self) -> str:
return self.config.namespace.replace(".", "-").lower()
@cached_property
- def bucket_name(self) -> Optional[str]:
+ def bucket_name(self) -> str | None:
"""Return ``cfngin_bucket`` from config, calculated name, or None."""
if not self.upload_to_s3:
return None
- return (
- self.config.cfngin_bucket
- or f"cfngin-{self.get_fqn()}-{self.env.aws_region}"
- )
+ return self.config.cfngin_bucket or f"cfngin-{self.get_fqn()}-{self.env.aws_region}"
@cached_property
- def mappings(self) -> Dict[str, Dict[str, Dict[str, Any]]]:
+ def mappings(self) -> dict[str, dict[str, dict[str, Any]]]:
"""Return ``mappings`` from config."""
return self.config.mappings or {}
@@ -185,7 +185,7 @@ def persistent_graph_locked(self) -> bool:
return bool(self.persistent_graph_lock_code)
@property
- def persistent_graph_lock_code(self) -> Optional[str]:
+ def persistent_graph_lock_code(self) -> str | None:
"""Code used to lock the persistent graph S3 object."""
if not self._persistent_graph_lock_code and self.persistent_graph_location:
self._persistent_graph_lock_code = self.persistent_graph_tags.get(
@@ -194,23 +194,21 @@ def persistent_graph_lock_code(self) -> Optional[str]:
return self._persistent_graph_lock_code
@property
- def persistent_graph_tags(self) -> Dict[str, str]:
+ def persistent_graph_tags(self) -> dict[str, str]:
"""Cache of tags on the persistent graph object."""
try:
return {
t["Key"]: t["Value"]
- for t in self.s3_client.get_object_tagging(
- **self.persistent_graph_location
- ).get("TagSet", [])
+ for t in self.s3_client.get_object_tagging(**self.persistent_graph_location).get(
+ "TagSet", []
+ )
}
except self.s3_client.exceptions.NoSuchKey:
- self.logger.debug(
- "persistent graph object does not exist in S3; could not get tags"
- )
+ self.logger.debug("persistent graph object does not exist in S3; could not get tags")
return {}
@property
- def persistent_graph(self) -> Optional[Graph]:
+ def persistent_graph(self) -> Graph | None:
"""Graph if a persistent graph is being used.
Will create an "empty" object in S3 if one is not found.
@@ -238,8 +236,7 @@ def persistent_graph(self) -> Optional[Graph]:
)
except self.s3_client.exceptions.NoSuchKey:
self.logger.info(
- "persistent graph object does not exist in s3; "
- "creating one now..."
+ "persistent graph object does not exist in s3; creating one now..."
)
self.s3_client.put_object(
Body=content.encode(),
@@ -253,7 +250,7 @@ def persistent_graph(self) -> Optional[Graph]:
return self._persistent_graph
@persistent_graph.setter
- def persistent_graph(self, graph: Optional[Graph]) -> None:
+ def persistent_graph(self, graph: Graph | None) -> None:
"""Load a persistent graph dict as a :class:`runway.cfngin.plan.Graph`."""
self._persistent_graph = graph
@@ -281,12 +278,12 @@ def s3_client(self) -> S3Client:
return self.get_session(region=self.bucket_region).client("s3")
@cached_property
- def stacks_dict(self) -> Dict[str, Stack]:
+ def stacks_dict(self) -> dict[str, Stack]:
"""Construct a dict of ``{stack.fqn: Stack}`` for easy access to stacks."""
return {stack.fqn: stack for stack in self.stacks}
@cached_property
- def stacks(self) -> List[Stack]:
+ def stacks(self) -> list[Stack]:
"""Stacks for the current action."""
return [
Stack(
@@ -302,16 +299,12 @@ def stacks(self) -> List[Stack]:
]
@cached_property
- def tags(self) -> Dict[str, str]:
+ def tags(self) -> dict[str, str]:
"""Return ``tags`` from config."""
return (
self.config.tags
if self.config.tags is not None
- else (
- {"cfngin_namespace": self.config.namespace}
- if self.config.namespace
- else {}
- )
+ else ({"cfngin_namespace": self.config.namespace} if self.config.namespace else {})
)
@cached_property
@@ -326,8 +319,7 @@ def upload_to_s3(self) -> bool:
# explicitly set to an empty string.
if self.config.cfngin_bucket == "":
self.logger.debug(
- "not uploading to s3; cfngin_bucket "
- "is explicitly set to an empty string"
+ "not uploading to s3; cfngin_bucket is explicitly set to an empty string"
)
return False
@@ -336,9 +328,7 @@ def upload_to_s3(self) -> bool:
# sense because we can't realistically auto generate a cfngin
# bucket name in this case.
if not self.config.namespace and not self.config.cfngin_bucket:
- self.logger.debug(
- "not uploading to s3; namespace & cfngin_bucket not provided"
- )
+ self.logger.debug("not uploading to s3; namespace & cfngin_bucket not provided")
return False
return True
@@ -356,7 +346,7 @@ def copy(self) -> CfnginContext:
work_dir=self.work_dir,
)
- def get_fqn(self, name: Optional[str] = None) -> str:
+ def get_fqn(self, name: str | None = None) -> str:
"""Return the fully qualified name of an object within this context.
If the name passed already appears to be a fully qualified name, it
@@ -365,7 +355,7 @@ def get_fqn(self, name: Optional[str] = None) -> str:
"""
return get_fqn(self.base_fqn, self.config.namespace_delimiter, name)
- def get_stack(self, name: str) -> Optional[Stack]:
+ def get_stack(self, name: str) -> Stack | None:
"""Get a stack by name.
Args:
@@ -400,11 +390,7 @@ def lock_persistent_graph(self, lock_code: str) -> None:
try:
self.s3_client.put_object_tagging(
- Tagging={
- "TagSet": [
- {"Key": self._persistent_graph_lock_tag, "Value": lock_code}
- ]
- },
+ Tagging={"TagSet": [{"Key": self._persistent_graph_lock_tag, "Value": lock_code}]},
**self.persistent_graph_location,
)
self.logger.info(
@@ -445,9 +431,7 @@ def put_persistent_graph(self, lock_code: str) -> None:
)
if self.persistent_graph_lock_code != lock_code:
- raise PersistentGraphLockCodeMismatch(
- lock_code, self.persistent_graph_lock_code
- )
+ raise PersistentGraphLockCodeMismatch(lock_code, self.persistent_graph_lock_code)
self.s3_client.put_object(
Body=self.persistent_graph.dumps(4).encode(),
@@ -457,9 +441,7 @@ def put_persistent_graph(self, lock_code: str) -> None:
Tagging=f"{self._persistent_graph_lock_tag}={lock_code}",
**self.persistent_graph_location,
)
- self.logger.debug(
- "persistent graph updated:\n%s", self.persistent_graph.dumps(indent=4)
- )
+ self.logger.debug("persistent graph updated:\n%s", self.persistent_graph.dumps(indent=4))
def set_hook_data(self, key: str, data: Any) -> None:
"""Set hook data for the given key.
@@ -477,8 +459,7 @@ def set_hook_data(self, key: str, data: Any) -> None:
if key in self.hook_data:
raise KeyError(
- f"Hook data for key {key} already exists, each hook "
- "must have a unique data_key."
+ f"Hook data for key {key} already exists, each hook must have a unique data_key."
)
self.hook_data[key] = data
@@ -503,14 +484,10 @@ def unlock_persistent_graph(self, lock_code: str) -> bool:
**self.persistent_graph_location,
)
except self.s3_client.exceptions.NoSuchKey:
- self.logger.info(
- "persistent graph deleted; does not need to be unlocked"
- )
+ self.logger.info("persistent graph deleted; does not need to be unlocked")
return True
- self.logger.verbose(
- 'unlocking persistent graph "%s"...', self.persistent_graph_location
- )
+ self.logger.verbose('unlocking persistent graph "%s"...', self.persistent_graph_location)
if not self.persistent_graph_locked:
raise PersistentGraphCannotUnlock(
@@ -520,10 +497,8 @@ def unlock_persistent_graph(self, lock_code: str) -> bool:
)
if self.persistent_graph_lock_code == lock_code:
- try:
+ with contextlib.suppress(self.s3_client.exceptions.NoSuchKey):
self.s3_client.delete_object_tagging(**self.persistent_graph_location)
- except self.s3_client.exceptions.NoSuchKey:
- pass
self._persistent_graph_lock_code = None
self.logger.info(
'unlocked persistent graph "%s/%s"',
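# Standalone sketch of the contextlib.suppress refactor above: the
# try/except/pass around delete_object_tagging collapses into a single
# context manager (illustrative stand-ins for the S3 client pieces).
import contextlib


class NoSuchKey(Exception):
    """Stand-in for s3_client.exceptions.NoSuchKey."""


def delete_object_tagging() -> None:
    raise NoSuchKey


with contextlib.suppress(NoSuchKey):
    delete_object_tagging()  # a missing object is fine; nothing to untag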
diff --git a/runway/context/_runway.py b/runway/context/_runway.py
index 426e4977b..8758527de 100644
--- a/runway/context/_runway.py
+++ b/runway/context/_runway.py
@@ -4,7 +4,7 @@
import logging
import sys
-from typing import TYPE_CHECKING, Any, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, cast
from ..compat import cached_property
from ..core.components import DeployEnvironment
@@ -19,7 +19,7 @@
LOGGER = cast("RunwayLogger", logging.getLogger(__name__))
-def str2bool(v: str):
+def str2bool(v: str) -> bool:
"""Return boolean value of string."""
return v.lower() in ("yes", "true", "t", "1", "on", "y")
@@ -27,16 +27,16 @@ def str2bool(v: str):
class RunwayContext(BaseContext):
"""Runway context object."""
- command: Optional[RunwayActionTypeDef]
+ command: RunwayActionTypeDef | None
"""Runway command/action being run."""
def __init__(
self,
*,
- command: Optional[RunwayActionTypeDef] = None,
- deploy_environment: Optional[DeployEnvironment] = None,
- logger: Union[PrefixAdaptor, RunwayLogger] = LOGGER,
- work_dir: Optional[Path] = None,
+ command: RunwayActionTypeDef | None = None,
+ deploy_environment: DeployEnvironment | None = None,
+ logger: PrefixAdaptor | RunwayLogger = LOGGER,
+ work_dir: Path | None = None,
**_: Any,
) -> None:
"""Instantiate class.
diff --git a/runway/context/sys_info.py b/runway/context/sys_info.py
index a0c12d2b5..52dcc7999 100644
--- a/runway/context/sys_info.py
+++ b/runway/context/sys_info.py
@@ -5,15 +5,16 @@
import os
import platform
import sys
-from typing import Any, ClassVar, Optional, cast
+from typing import Any, ClassVar, cast, final
from ..compat import cached_property
+@final
class OsInfo:
"""Information about the operating system running on the current system."""
- __instance: ClassVar[Optional[OsInfo]] = None
+ __instance: ClassVar[OsInfo | None] = None
def __new__(cls, *args: Any, **kwargs: Any) -> OsInfo:
"""Create a new instance of class.
@@ -69,10 +70,11 @@ def clear_singleton(cls) -> None:
cls.__instance = None
+@final
class SystemInfo:
"""Information about the system running Runway."""
- __instance: ClassVar[Optional[SystemInfo]] = None
+ __instance: ClassVar[SystemInfo | None] = None
def __new__(cls, *args: Any, **kwargs: Any) -> SystemInfo:
"""Create a new instance of class.
@@ -87,9 +89,7 @@ def __new__(cls, *args: Any, **kwargs: Any) -> SystemInfo:
@cached_property
def is_frozen(self) -> bool:
"""Whether or not Runway is running from a frozen package (Pyinstaller)."""
- if getattr(sys, "frozen", False):
- return True
- return False
+ return bool(getattr(sys, "frozen", False))
@cached_property
def os(self) -> OsInfo:
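# Standalone sketch of the __new__-based singleton used by OsInfo and
# SystemInfo above, including the clear_singleton escape hatch for tests
# (illustrative).
from __future__ import annotations

from typing import Any, ClassVar, final


@final
class Singleton:
    __instance: ClassVar[Singleton | None] = None

    def __new__(cls, *args: Any, **kwargs: Any) -> Singleton:
        if cls.__instance is None:
            cls.__instance = super().__new__(cls)
        return cls.__instance

    @classmethod
    def clear_singleton(cls) -> None:
        cls.__instance = None


assert Singleton() is Singleton()
Singleton.clear_singleton()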
diff --git a/runway/core/__init__.py b/runway/core/__init__.py
index 508b67976..e74eb7b99 100644
--- a/runway/core/__init__.py
+++ b/runway/core/__init__.py
@@ -5,7 +5,7 @@
import logging as _logging
import sys as _sys
import traceback as _traceback
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast
+from typing import TYPE_CHECKING, Any, cast
import yaml as _yaml
@@ -48,9 +48,7 @@ def __init__(self, config: RunwayConfig, context: RunwayContext) -> None:
self.__assert_config_version()
self.ctx.env.log_name()
- def deploy(
- self, deployments: Optional[List[RunwayDeploymentDefinition]] = None
- ) -> None:
+ def deploy(self, deployments: list[RunwayDeploymentDefinition] | None = None) -> None:
"""Deploy action.
Args:
@@ -58,13 +56,9 @@ def deploy(
all deployments in the config will be run.
"""
- self.__run_action(
- "deploy", deployments if deployments is not None else self.deployments
- )
+ self.__run_action("deploy", deployments if deployments is not None else self.deployments)
- def destroy(
- self, deployments: Optional[List[RunwayDeploymentDefinition]] = None
- ) -> None:
+ def destroy(self, deployments: list[RunwayDeploymentDefinition] | None = None) -> None:
"""Destroy action.
Args:
@@ -85,8 +79,8 @@ def destroy(
self.reverse_deployments(self.deployments)
def get_env_vars(
- self, deployments: Optional[List[RunwayDeploymentDefinition]] = None
- ) -> Dict[str, Any]:
+ self, deployments: list[RunwayDeploymentDefinition] | None = None
+ ) -> dict[str, Any]:
"""Get env_vars defined in the config.
Args:
@@ -97,7 +91,7 @@ def get_env_vars(
"""
deployments = deployments or self.deployments
- result: Dict[str, str] = {}
+ result: dict[str, str] = {}
for deployment in deployments:
obj = components.Deployment(
context=self.ctx, definition=deployment, variables=self.variables
@@ -105,9 +99,7 @@ def get_env_vars(
result.update(obj.env_vars_config)
return result
- def init(
- self, deployments: Optional[List[RunwayDeploymentDefinition]] = None
- ) -> None:
+ def init(self, deployments: list[RunwayDeploymentDefinition] | None = None) -> None:
"""Init action.
Args:
@@ -115,13 +107,9 @@ def init(
all deployments in the config will be run.
"""
- self.__run_action(
- "init", deployments if deployments is not None else self.deployments
- )
+ self.__run_action("init", deployments if deployments is not None else self.deployments)
- def plan(
- self, deployments: Optional[List[RunwayDeploymentDefinition]] = None
- ) -> None:
+ def plan(self, deployments: list[RunwayDeploymentDefinition] | None = None) -> None:
"""Plan action.
Args:
@@ -129,14 +117,12 @@ def plan(
all deployments in the config will be run.
"""
- self.__run_action(
- "plan", deployments if deployments is not None else self.deployments
- )
+ self.__run_action("plan", deployments if deployments is not None else self.deployments)
@staticmethod
def reverse_deployments(
- deployments: List[RunwayDeploymentDefinition],
- ) -> List[RunwayDeploymentDefinition]:
+ deployments: list[RunwayDeploymentDefinition],
+ ) -> list[RunwayDeploymentDefinition]:
"""Reverse deployments and the modules within them.
Args:
@@ -146,7 +132,7 @@ def reverse_deployments(
Deployments and modules in reverse order.
"""
- result: List[RunwayDeploymentDefinition] = []
+ result: list[RunwayDeploymentDefinition] = []
for deployment in deployments:
deployment.reverse()
result.insert(0, deployment)
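The reversal operates at two levels: `deployment.reverse()` flips each deployment's modules in place, while `insert(0, ...)` rebuilds the outer list back-to-front. A minimal sketch of the list mechanics, with plain ints standing in for deployment objects:

# insert(0, ...) while iterating forward yields the input in reverse order.
result: list[int] = []
for item in [1, 2, 3]:
    result.insert(0, item)
assert result == [3, 2, 1]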
@@ -180,7 +166,7 @@ def test(self) -> None:
_sys.exit(1)
self.ctx.command = "test"
- failed_tests: List[str] = []
+ failed_tests: list[str] = []
LOGGER.info("found %i test(s)", len(self.tests))
for tst in self.tests:
@@ -198,7 +184,7 @@ def test(self) -> None:
try:
handler.handle(tst.name, tst.args)
logger.success("running test (pass)")
- except (Exception, SystemExit) as err: # pylint: disable=broad-except
+ except (Exception, SystemExit) as err:
# for lack of an easy, better way to do this atm, assume
# SystemExits are due to a test failure and the failure reason
# has already been properly logged by the handler or the
@@ -217,7 +203,7 @@ def test(self) -> None:
_sys.exit(1)
LOGGER.success("all tests passed")
- def __assert_config_version(self):
+ def __assert_config_version(self) -> None:
"""Assert the config supports this version of Runway."""
if not self.required_version:
LOGGER.debug("required Runway version not specified")
@@ -245,7 +231,7 @@ def __assert_config_version(self):
def __run_action(
self,
action: type_defs.RunwayActionTypeDef,
- deployments: Optional[List[RunwayDeploymentDefinition]],
+ deployments: list[RunwayDeploymentDefinition] | None,
) -> None:
"""Run an action on a list of deployments.
diff --git a/runway/core/components/_deploy_environment.py b/runway/core/components/_deploy_environment.py
index 261d4a071..0097de7e7 100644
--- a/runway/core/components/_deploy_environment.py
+++ b/runway/core/components/_deploy_environment.py
@@ -7,7 +7,7 @@
import os
import sys
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, Optional, cast
+from typing import TYPE_CHECKING, Any, Optional, cast
import click
@@ -20,7 +20,7 @@
import git
from git.exc import InvalidGitRepositoryError
except ImportError: # cov: ignore
- git = object # pylint: disable=invalid-name
+ git = object
InvalidGitRepositoryError = AttributeError
if TYPE_CHECKING:
@@ -41,7 +41,7 @@ class DeployEnvironment(DelCachedPropMixin):
def __init__(
self,
*,
- environ: Optional[Dict[str, str]] = None,
+ environ: Optional[dict[str, str]] = None,
explicit_name: Optional[str] = None,
ignore_git_branch: bool = False,
root_dir: Optional[Path] = None,
@@ -82,9 +82,7 @@ def aws_profile(self, profile_name: str) -> None:
@property
def aws_region(self) -> str:
"""Get AWS region from environment variables."""
- return self.vars.get(
- "AWS_REGION", self.vars.get("AWS_DEFAULT_REGION", "us-east-1")
- )
+ return self.vars.get("AWS_REGION", self.vars.get("AWS_DEFAULT_REGION", "us-east-1"))
@aws_region.setter
def aws_region(self, region: str) -> None:
@@ -261,9 +259,7 @@ def name(self) -> str:
else:
self.name_derived_from = "directory"
if self.root_dir.name.startswith("ENV-"):
- LOGGER.verbose(
- 'stripped "ENV-" from the directory name "%s"', self.root_dir.name
- )
+ LOGGER.verbose('stripped "ENV-" from the directory name "%s"', self.root_dir.name)
name = self.root_dir.name[4:]
else:
name = self.root_dir.name
@@ -307,9 +303,7 @@ def log_name(self) -> None:
"""Output name to log."""
name = self.name # resolve if not already resolved
if self.name_derived_from == "explicit":
- LOGGER.info(
- 'deploy environment "%s" is explicitly defined in the environment', name
- )
+ LOGGER.info('deploy environment "%s" is explicitly defined in the environment', name)
LOGGER.info(
"if not correct, update the value or unset it to fall back "
"to the name of the current git branch or parent directory"
@@ -337,9 +331,7 @@ def _parse_branch_name(self) -> Optional[str]:
"""Parse branch name for use as deploy environment name."""
if self.branch_name:
if self.branch_name.startswith("ENV-"):
- LOGGER.verbose(
- 'stripped "ENV-" from the branch name "%s"', self.branch_name
- )
+ LOGGER.verbose('stripped "ENV-" from the branch name "%s"', self.branch_name)
return self.branch_name[4:]
if self.branch_name == "master":
LOGGER.verbose('translated branch name "master" to "common"')
@@ -354,11 +346,11 @@ def _parse_branch_name(self) -> Optional[str]:
return result
return self.branch_name
- def _update_vars(self, env_vars: Dict[str, str]) -> None:
+ def _update_vars(self, env_vars: dict[str, str]) -> None:
"""Update vars and log the change.
Args:
- env_vars (Dict[str, str]): Dict to update self.vars with.
+ env_vars: Dict to update self.vars with.
"""
self.vars.update(env_vars)
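Across this file, environment-name derivation strips a literal `ENV-` prefix from branch and directory names and translates the legacy `master` branch to `common`. A minimal sketch condensing those rules, assuming the same 4-character slice (the function name is hypothetical):

def derive_name(raw: str) -> str:
    """Hypothetical condensation of the branch/directory name rules above."""
    if raw.startswith("ENV-"):
        return raw[4:]   # "ENV-prod" -> "prod"
    if raw == "master":
        return "common"  # legacy default branch maps to "common"
    return raw

assert derive_name("ENV-prod") == "prod"
assert derive_name("master") == "common"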
diff --git a/runway/core/components/_deployment.py b/runway/core/components/_deployment.py
index 882e402a9..a073dff3e 100644
--- a/runway/core/components/_deployment.py
+++ b/runway/core/components/_deployment.py
@@ -6,7 +6,7 @@
import logging
import multiprocessing
import sys
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Any, Optional, Union
from ..._logging import PrefixAdaptor
from ...compat import cached_property
@@ -57,13 +57,11 @@ def __init__(
self.__merge_env_vars()
@property
- def assume_role_config(self) -> Dict[str, Union[bool, int, str]]:
+ def assume_role_config(self) -> dict[str, Union[bool, int, str]]:
"""Parse the definition to get assume role arguments."""
assume_role = self.definition.assume_role
if not assume_role:
- self.logger.debug(
- "assume_role not configured for deployment: %s", self.name
- )
+ self.logger.debug("assume_role not configured for deployment: %s", self.name)
return {}
if isinstance(assume_role, str): # type: ignore
self.logger.debug("role found: %s", assume_role)
@@ -71,9 +69,7 @@ def assume_role_config(self) -> Dict[str, Union[bool, int, str]]:
elif isinstance(assume_role, dict): # type: ignore
assume_role = RunwayAssumeRoleDefinitionModel.parse_obj(assume_role)
if not assume_role.arn:
- self.logger.debug(
- "assume_role not configured for deployment: %s", self.name
- )
+ self.logger.debug("assume_role not configured for deployment: %s", self.name)
return {}
return {
"duration_seconds": assume_role.duration,
@@ -83,23 +79,22 @@ def assume_role_config(self) -> Dict[str, Union[bool, int, str]]:
}
@property
- def env_vars_config(self) -> Dict[str, str]:
+ def env_vars_config(self) -> dict[str, str]:
"""Parse the definition to get the correct env_vars configuration."""
try:
if not self.definition.env_vars:
return {}
except UnresolvedVariable:
- # pylint: disable=protected-access
- if "env_vars" in self.definition._vars:
- var = self.definition._vars["env_vars"]
+ if "env_vars" in self.definition._vars: # noqa: SLF001
+ var = self.definition._vars["env_vars"] # noqa: SLF001
var.resolve(self.ctx, variables=self._variables)
- self.definition._data["env_vars"] = var.value
+ self.definition._data["env_vars"] = var.value # noqa: SLF001
else:
raise
return flatten_path_lists(self.definition.env_vars, str(self.ctx.env.root_dir))
@cached_property
- def regions(self) -> List[str]:
+ def regions(self) -> list[str]:
"""List of regions this deployment is associated with."""
return self.definition.parallel_regions or self.definition.regions
@@ -114,9 +109,7 @@ def deploy(self) -> None:
High level method for running a deployment.
"""
- self.logger.verbose(
- "attempting to deploy to region(s): %s", ", ".join(self.regions)
- )
+ self.logger.verbose("attempting to deploy to region(s): %s", ", ".join(self.regions))
if self.use_async:
return self.__async("deploy")
return self.__sync("deploy")
@@ -127,9 +120,7 @@ def destroy(self) -> None:
High level method for running a deployment.
"""
- self.logger.verbose(
- "attempting to destroy in region(s): %s", ", ".join(self.regions)
- )
+ self.logger.verbose("attempting to destroy in region(s): %s", ", ".join(self.regions))
if self.use_async:
return self.__async("destroy")
return self.__sync("destroy")
@@ -140,9 +131,7 @@ def init(self) -> None:
High level method for running a deployment.
"""
- self.logger.verbose(
- "attempting to initialize region(s): %s", ", ".join(self.regions)
- )
+ self.logger.verbose("attempting to initialize region(s): %s", ", ".join(self.regions))
if self.use_async:
return self.__async("init")
return self.__sync("init")
@@ -189,9 +178,7 @@ def run(self, action: RunwayActionTypeDef, region: str) -> None:
variables=self._variables,
)
- def validate_account_credentials(
- self, context: Optional[RunwayContext] = None
- ) -> None:
+ def validate_account_credentials(self, context: Optional[RunwayContext] = None) -> None:
"""Exit if requested deployment account doesn't match credentials.
Args:
@@ -213,7 +200,7 @@ def validate_account_credentials(
)
sys.exit(1)
self.logger.info(
- "verified current AWS account matches required " + 'account id "%s"',
+ 'verified current AWS account matches required account id "%s"',
self.definition.account_id,
)
if self.definition.account_alias:
@@ -236,9 +223,7 @@ def __merge_env_vars(self) -> None:
self.logger.verbose(
"environment variable overrides are being applied to this deployment"
)
- self.logger.debug(
- "environment variable overrides: %s", self.env_vars_config
- )
+ self.logger.debug("environment variable overrides: %s", self.env_vars_config)
self.ctx.env.vars = merge_dicts(self.ctx.env.vars, self.env_vars_config)
def __async(self, action: RunwayActionTypeDef) -> None:
@@ -248,16 +233,12 @@ def __async(self, action: RunwayActionTypeDef) -> None:
action: Name of action to run.
"""
- self.logger.info(
- "processing regions in parallel... (output will be interwoven)"
- )
+ self.logger.info("processing regions in parallel... (output will be interwoven)")
with concurrent.futures.ProcessPoolExecutor(
max_workers=self.ctx.env.max_concurrent_regions,
mp_context=multiprocessing.get_context("fork"),
) as executor:
- futures = [
- executor.submit(self.run, *[action, region]) for region in self.regions
- ]
+ futures = [executor.submit(self.run, *[action, region]) for region in self.regions]
for job in futures:
job.result() # raise exceptions / exit as needed
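Regions run in separate worker processes created with the `fork` start method, and calling `job.result()` re-raises anything a worker raised, so one failing region stops the run. A minimal standalone sketch under those assumptions (the worker is hypothetical, and `fork` is unavailable on Windows):

import concurrent.futures
import multiprocessing

def work(region: str) -> str:
    return f"done: {region}"  # stands in for Deployment.run(action, region)

if __name__ == "__main__":
    with concurrent.futures.ProcessPoolExecutor(
        max_workers=2,
        mp_context=multiprocessing.get_context("fork"),
    ) as executor:
        futures = [executor.submit(work, r) for r in ["us-east-1", "us-west-2"]]
        for job in futures:
            print(job.result())  # re-raises worker exceptions here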
@@ -278,7 +259,7 @@ def run_list(
cls,
action: RunwayActionTypeDef,
context: RunwayContext,
- deployments: List[RunwayDeploymentDefinition],
+ deployments: list[RunwayDeploymentDefinition],
future: RunwayFutureDefinitionModel,
variables: RunwayVariablesDefinition,
) -> None:
diff --git a/runway/core/components/_module.py b/runway/core/components/_module.py
index bc0e6cedc..73c2d8fe6 100644
--- a/runway/core/components/_module.py
+++ b/runway/core/components/_module.py
@@ -7,7 +7,7 @@
import logging
import multiprocessing
import sys
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, Optional, Union, cast
import yaml
@@ -66,9 +66,7 @@ def __init__(
"""
self.__deployment = deployment
self.__future = future or RunwayFutureDefinitionModel()
- self.__variables = variables or RunwayVariablesDefinition(
- RunwayVariablesDefinitionModel()
- )
+ self.__variables = variables or RunwayVariablesDefinition(RunwayVariablesDefinitionModel())
self.ctx = context.copy() # each module has its own instance of context
definition.resolve(self.ctx, variables=variables)
self.definition = definition
@@ -76,7 +74,7 @@ def __init__(
self.logger = PrefixAdaptor(self.fqn, LOGGER)
@cached_property
- def child_modules(self) -> List[Module]:
+ def child_modules(self) -> list[Module]:
"""Return child modules."""
return [
self.__class__(
@@ -109,14 +107,14 @@ def environments(self) -> RunwayEnvironmentsType:
return tmp
@cached_property
- def fqn(self):
+ def fqn(self) -> str:
"""Fully qualified name."""
if not self.__deployment:
return self.name
return f"{self.__deployment.name}.{self.name}"
@cached_property
- def opts_from_file(self) -> Dict[str, Any]:
+ def opts_from_file(self) -> dict[str, Any]:
"""Load module options from local file."""
opts_file = self.path.module_root / "runway.module.yml"
if opts_file.is_file():
@@ -134,9 +132,9 @@ def path(self) -> ModulePath: # lazy load the path
)
@cached_property
- def payload(self) -> Dict[str, Any]: # lazy load the payload
+ def payload(self) -> dict[str, Any]: # lazy load the payload
"""Return payload to be passed to module class handler class."""
- payload: Dict[str, Any] = {}
+ payload: dict[str, Any] = {}
if self.__deployment:
payload.update(
{
@@ -216,9 +214,7 @@ def plan(self) -> None:
if not self.child_modules:
return self.run("plan")
if self.use_async:
- self.logger.info(
- "processing of modules will be done in parallel during deploy/destroy"
- )
+ self.logger.info("processing of modules will be done in parallel during deploy/destroy")
return self.__sync("plan")
def run(self, action: RunwayActionTypeDef) -> None:
@@ -231,9 +227,7 @@ def run(self, action: RunwayActionTypeDef) -> None:
"""
LOGGER.info("")
- self.logger.notice(
- "processing module in %s (in progress)", self.ctx.env.aws_region
- )
+ self.logger.notice("processing module in %s (in progress)", self.ctx.env.aws_region)
self.logger.verbose("module payload: %s", json.dumps(self.payload))
if self.should_skip:
return
@@ -248,9 +242,7 @@ def run(self, action: RunwayActionTypeDef) -> None:
else:
self.logger.error('"%s" is missing method "%s"', inst, action)
sys.exit(1)
- self.logger.success(
- "processing module in %s (complete)", self.ctx.env.aws_region
- )
+ self.logger.success("processing module in %s (complete)", self.ctx.env.aws_region)
def __async(self, action: RunwayActionTypeDef) -> None:
"""Execute asynchronously.
@@ -259,9 +251,7 @@ def __async(self, action: RunwayActionTypeDef) -> None:
action: Name of action to run.
"""
- self.logger.info(
- "processing modules in parallel... (output will be interwoven)"
- )
+ self.logger.info("processing modules in parallel... (output will be interwoven)")
# Can't use threading or ThreadPoolExecutor here because
# we need to be able to do things like `cd` which is not
# thread safe.
@@ -269,9 +259,7 @@ def __async(self, action: RunwayActionTypeDef) -> None:
max_workers=self.ctx.env.max_concurrent_modules,
mp_context=multiprocessing.get_context("fork"),
) as executor:
- futures = [
- executor.submit(child.run, *[action]) for child in self.child_modules
- ]
+ futures = [executor.submit(child.run, *[action]) for child in self.child_modules]
for job in futures:
job.result() # raise exceptions / exit as needed
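The comment above is the key constraint: the working directory is per-process state, so `os.chdir` in one thread would redirect every other thread in the process, whereas forked processes each get their own cwd. A minimal sketch of why threads are unsafe here (POSIX paths assumed):

import os
import threading

start = os.getcwd()

def worker() -> None:
    os.chdir("/tmp")  # mutates process-wide state, not thread-local state

t = threading.Thread(target=worker)
t.start()
t.join()

# The main thread's cwd changed too -- hence processes instead of threads.
assert os.getcwd() == os.path.realpath("/tmp")
os.chdir(start)  # restore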
@@ -294,9 +282,7 @@ def __merge_env_vars(self, env_vars: RunwayEnvVarsType) -> None:
self.logger.verbose(
"environment variable overrides are being applied to this module"
)
- self.logger.debug(
- "environment variable overrides: %s", resolved_env_vars
- )
+ self.logger.debug("environment variable overrides: %s", resolved_env_vars)
self.ctx.env.vars = merge_dicts(self.ctx.env.vars, resolved_env_vars)
@classmethod
@@ -304,7 +290,7 @@ def run_list(
cls,
action: RunwayActionTypeDef,
context: RunwayContext,
- modules: List[RunwayModuleDefinition],
+ modules: list[RunwayModuleDefinition],
variables: RunwayVariablesDefinition,
deployment: RunwayDeploymentDefinition = None,
future: Optional[RunwayFutureDefinitionModel] = None,
@@ -341,7 +327,7 @@ def __getitem__(self, key: str) -> Any:
def validate_environment(
context: RunwayContext,
- env_def: Optional[Union[bool, Dict[str, Any], int, str, List[str]]],
+ env_def: Optional[Union[bool, dict[str, Any], int, str, list[str]]],
logger: Union[PrefixAdaptor, RunwayLogger] = LOGGER,
) -> Optional[bool]:
"""Check if an environment should be deployed to.
diff --git a/runway/core/components/_module_path.py b/runway/core/components/_module_path.py
index 29b8d16b2..4eff846a1 100644
--- a/runway/core/components/_module_path.py
+++ b/runway/core/components/_module_path.py
@@ -5,7 +5,7 @@
import logging
import re
from pathlib import Path
-from typing import TYPE_CHECKING, ClassVar, Dict, Optional, Type, Union
+from typing import TYPE_CHECKING, ClassVar
from urllib.parse import parse_qs
from typing_extensions import TypedDict
@@ -25,7 +25,7 @@
class ModulePathMetadataTypeDef(TypedDict):
"""Type definition for ModulePath.metadata."""
- arguments: Dict[str, str]
+ arguments: dict[str, str]
cache_dir: Path
location: str
source: str
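Since `ModulePathMetadataTypeDef` is a `TypedDict`, it is a plain `dict` at runtime; the keys and value types are enforced only by type checkers. A minimal sketch of constructing one, using a hypothetical stand-in class and assuming only the four keys visible in this hunk (the values are illustrative):

from __future__ import annotations

from pathlib import Path

from typing_extensions import TypedDict

class PathMetadata(TypedDict):
    """Hypothetical mirror of the fields shown in the hunk above."""

    arguments: dict[str, str]
    cache_dir: Path
    location: str
    source: str

meta: PathMetadata = {
    "arguments": {"branch": "main"},  # illustrative query-string arguments
    "cache_dir": Path(".runway_cache"),
    "location": "modules/app",
    "source": "git",
}
assert isinstance(meta, dict)  # TypedDicts are plain dicts at runtime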
@@ -36,16 +36,16 @@ class ModulePath:
"""Handler for the ``path`` field of a Runway module."""
ARGS_REGEX: ClassVar[str] = r"(\?)(?P<args>.*)$"
- REMOTE_SOURCE_HANDLERS: ClassVar[Dict[str, Type[Source]]] = {"git": Git}
+ REMOTE_SOURCE_HANDLERS: ClassVar[dict[str, type[Source]]] = {"git": Git}
SOURCE_REGEX: ClassVar[str] = r"(?P