diff --git a/.devcontainer/mysql/devcontainer.json b/.devcontainer/mysql/devcontainer.json index 5a25b6ad50625..011ff292d2718 100644 --- a/.devcontainer/mysql/devcontainer.json +++ b/.devcontainer/mysql/devcontainer.json @@ -22,5 +22,10 @@ "rogalmic.bash-debug" ], "service": "airflow", - "forwardPorts": [8080,5555,5432,6379] + "forwardPorts": [8080,5555,5432,6379], + "workspaceFolder": "/opt/airflow", + // for users who use non-standard git config patterns + // https://github.com/microsoft/vscode-remote-release/issues/2084#issuecomment-989756268 + "initializeCommand": "cd \"${localWorkspaceFolder}\" && git config --local user.email \"$(git config user.email)\" && git config --local user.name \"$(git config user.name)\"", + "overrideCommand": true } diff --git a/.devcontainer/postgres/devcontainer.json b/.devcontainer/postgres/devcontainer.json index 46ba305b58554..419dbedfa1d38 100644 --- a/.devcontainer/postgres/devcontainer.json +++ b/.devcontainer/postgres/devcontainer.json @@ -22,5 +22,10 @@ "rogalmic.bash-debug" ], "service": "airflow", - "forwardPorts": [8080,5555,5432,6379] + "forwardPorts": [8080,5555,5432,6379], + "workspaceFolder": "/opt/airflow", + // for users who use non-standard git config patterns + // https://github.com/microsoft/vscode-remote-release/issues/2084#issuecomment-989756268 + "initializeCommand": "cd \"${localWorkspaceFolder}\" && git config --local user.email \"$(git config user.email)\" && git config --local user.name \"$(git config user.name)\"", + "overrideCommand": true } diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index 0a3fc240fc8d9..71128dafa4d22 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -225,6 +225,11 @@ labelPRBasedOnFilePath: - providers/tests/docker/**/* - providers/tests/system/docker/**/* + provider:edge: + - providers/src/airflow/providers/edge/**/* + - docs/apache-airflow-providers-edge/**/* + - providers/tests/edge/**/* + provider:elasticsearch: - providers/src/airflow/providers/elasticsearch/**/* - docs/apache-airflow-providers-elasticsearch/**/* @@ -647,12 +652,12 @@ labelPRBasedOnFilePath: - airflow/cli/commands/triggerer_command.py - airflow/jobs/triggerer_job_runner.py - airflow/models/trigger.py - - airflow/triggers/**/* + - providers/src/airflow/providers/standard/triggers/**/* - tests/cli/commands/test_triggerer_command.py - tests/jobs/test_triggerer_job.py - tests/models/test_trigger.py - tests/jobs/test_triggerer_job_logging.py - - tests/triggers/**/* + - providers/tests/standard/triggers/**/* area:Serialization: - airflow/serialization/**/* diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index 14f0628e454bb..4276573b95fe2 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -153,108 +153,6 @@ jobs: env: FORCE_COLOR: 2 - test-openapi-client: - timeout-minutes: 10 - name: "Test OpenAPI client" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} - if: inputs.needs-api-codegen == 'true' - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - fetch-depth: 2 - persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - repository: "apache/airflow-client-python" - fetch-depth: 1 - persist-credentials: 
false - path: ./airflow-client-python - - name: "Install Breeze" - uses: ./.github/actions/breeze - - name: "Generate client with breeze" - run: > - breeze release-management prepare-python-client --package-format both - --version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python - - name: "Show diff" - run: git diff --color HEAD - working-directory: ./airflow-client-python - - name: Install hatch - run: | - python -m pip install --upgrade uv - uv tool install hatch - - name: Run tests - run: hatch run run-coverage - env: - HATCH_ENV: "test" - working-directory: ./clients/python - - name: "Install source version of required packages" - run: | - breeze release-management prepare-provider-packages \ - fab \ - standard \ - common.sql \ - sqlite \ - --package-format wheel \ - --skip-tag-check \ - --version-suffix-for-pypi dev0 - pip install . \ - dist/apache_airflow_providers_fab-*.whl \ - dist/apache_airflow_providers_standard-*.whl \ - dist/apache_airflow_providers_common_sql-*.whl \ - dist/apache_airflow_providers_sqlite-*.whl - breeze release-management prepare-task-sdk-package --package-format wheel - pip install ./dist/apache_airflow_task_sdk-*.whl - - name: "Install Python client" - run: pip install ./dist/apache_airflow_client-*.whl - - name: "Initialize Airflow DB and start webserver" - run: | - airflow db init - # Let scheduler runs a few loops and get all DAG files from example DAGs serialized to DB - airflow scheduler --num-runs 100 - airflow users create --username admin --password admin --firstname Admin --lastname Admin \ - --role Admin --email admin@example.org - killall python || true # just in case there is a webserver running in the background - nohup airflow webserver --port 8080 & - echo "Started webserver" - env: - AIRFLOW__API__AUTH_BACKENDS: >- - airflow.api.auth.backend.session,airflow.providers.fab.auth_manager.api.auth.backend.basic_auth - AIRFLOW__WEBSERVER__EXPOSE_CONFIG: "True" - AIRFLOW__CORE__LOAD_EXAMPLES: "True" - AIRFLOW_HOME: "${{ github.workspace }}/airflow_home" - - name: "Waiting for the webserver to be available" - run: | - timeout 30 bash -c 'until nc -z $0 $1; do echo "sleeping"; sleep 1; done' localhost 8080 - sleep 5 - - name: "Run test python client" - run: python ./clients/python/test_python_client.py - env: - FORCE_COLOR: "standard" - - name: "Stop running webserver" - run: killall python || true # just in case there is a webserver running in the background - if: always() - - name: "Upload python client packages" - uses: actions/upload-artifact@v4 - with: - name: python-client-packages - path: ./dist/apache_airflow_client-* - retention-days: 7 - if-no-files-found: error - - name: "Upload logs from failed tests" - uses: actions/upload-artifact@v4 - if: failure() - with: - name: python-client-failed-logs - path: "${{ github.workspace }}/airflow_home/logs" - retention-days: 7 - # Those checks are run if no image needs to be built for checks. 
This is for simple changes that # Do not touch any of the python code or any of the important files that might require building # The CI Docker image and they can be run entirely using the pre-commit virtual environments on host diff --git a/.github/workflows/static-checks-mypy-docs.yml b/.github/workflows/ci-image-checks.yml similarity index 83% rename from .github/workflows/static-checks-mypy-docs.yml rename to .github/workflows/ci-image-checks.yml index be2c4f8e28645..63598755c32d0 100644 --- a/.github/workflows/static-checks-mypy-docs.yml +++ b/.github/workflows/ci-image-checks.yml @@ -16,7 +16,7 @@ # under the License. # --- -name: Static checks, mypy, docs +name: CI Image Checks on: # yamllint disable-line rule:truthy workflow_call: inputs: @@ -96,6 +96,19 @@ on: # yamllint disable-line rule:truthy description: "Whether to build docs (true/false)" required: true type: string + needs-api-codegen: + description: "Whether to run API codegen (true/false)" + required: true + type: string + default-postgres-version: + description: "The default version of the postgres to use" + required: true + type: string + run-coverage: + description: "Whether to run coverage or not (true/false)" + required: true + type: string + jobs: static-checks: timeout-minutes: 45 @@ -304,3 +317,50 @@ jobs: - name: "Upload documentation to AWS S3" if: inputs.branch == 'main' run: aws s3 sync --delete ./docs/_build s3://apache-airflow-docs + + test-python-api-client: + timeout-minutes: 60 + name: "Test Python API client" + runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + if: inputs.needs-api-codegen == 'true' + env: + BACKEND: "postgres" + BACKEND_VERSION: "${{ inputs.default-postgres-version }}" + DEBUG_RESOURCES: "${{ inputs.debug-resources }}" + ENABLE_COVERAGE: "${{ inputs.run-coverage }}" + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + IMAGE_TAG: "${{ inputs.image-tag }}" + JOB_ID: "python-api-client-tests" + PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + VERBOSE: "true" + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v4 + with: + fetch-depth: 2 + persist-credentials: false + - name: "Cleanup docker" + run: ./scripts/ci/cleanup_docker.sh + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v4 + with: + repository: "apache/airflow-client-python" + fetch-depth: 1 + persist-credentials: false + path: ./airflow-client-python + - name: "Prepare breeze & CI image: ${{inputs.default-python-version}}:${{inputs.image-tag}}" + uses: ./.github/actions/prepare_breeze_and_image + - name: "Generate airflow python client" + run: > + breeze release-management prepare-python-client --package-format both + --version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python + - name: "Show diff" + run: git diff --color HEAD + working-directory: ./airflow-client-python + - name: "Python API client tests" + run: breeze testing python-api-client-tests diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a1f46d74024c2..e0e1ffd8bf20b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -109,7 +109,8 @@ jobs: postgres-exclude: ${{ steps.selective-checks.outputs.postgres-exclude }} postgres-versions: ${{ steps.selective-checks.outputs.postgres-versions }} prod-image-build: 
${{ steps.selective-checks.outputs.prod-image-build }} - providers-compatibility-checks: ${{ steps.selective-checks.outputs.providers-compatibility-checks }} + # yamllint disable rule:line-length + providers-compatibility-tests-matrix: ${{ steps.selective-checks.outputs.providers-compatibility-tests-matrix }} providers-test-types-list-as-string: >- ${{ steps.selective-checks.outputs.providers-test-types-list-as-string }} pull-request-labels: ${{ steps.source-run-info.outputs.pr-labels }} @@ -296,10 +297,10 @@ jobs: chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} - static-checks-mypy-docs: - name: "Static checks, mypy, docs" + ci-image-checks: + name: "CI image checks" needs: [build-info, wait-for-ci-images] - uses: ./.github/workflows/static-checks-mypy-docs.yml + uses: ./.github/workflows/ci-image-checks.yml secrets: inherit with: runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} @@ -321,10 +322,13 @@ jobs: include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} docs-build: ${{ needs.build-info.outputs.docs-build }} + needs-api-codegen: ${{ needs.build-info.outputs.needs-api-codegen }} + default-postgres-version: ${{ needs.build-info.outputs.default-postgres-version }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} providers: - name: "Provider checks" - uses: ./.github/workflows/check-providers.yml + name: "Provider packages tests" + uses: ./.github/workflows/test-provider-packages.yml needs: [build-info, wait-for-ci-images] permissions: contents: read @@ -340,7 +344,8 @@ jobs: default-python-version: ${{ needs.build-info.outputs.default-python-version }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} selected-providers-list-as-string: ${{ needs.build-info.outputs.selected-providers-list-as-string }} - providers-compatibility-checks: ${{ needs.build-info.outputs.providers-compatibility-checks }} + # yamllint disable rule:line-length + providers-compatibility-tests-matrix: ${{ needs.build-info.outputs.providers-compatibility-tests-matrix }} skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} python-versions: ${{ needs.build-info.outputs.python-versions }} providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} @@ -700,7 +705,7 @@ jobs: - generate-constraints - wait-for-ci-images - wait-for-prod-images - - static-checks-mypy-docs + - ci-image-checks - tests-sqlite - tests-mysql - tests-postgres diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index ec608192a7079..2c6bf6dc4448f 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -19,6 +19,8 @@ name: "CodeQL" on: # yamllint disable-line rule:truthy + pull_request: + branches: ['main', 'v[0-9]+-[0-9]+-test', 'v[0-9]+-[0-9]+-stable'] push: branches: [main] schedule: diff --git a/.github/workflows/news-fragment.yml b/.github/workflows/news-fragment.yml index bf1bd6ce27b2b..4bcf95e2bba88 100644 --- a/.github/workflows/news-fragment.yml +++ b/.github/workflows/news-fragment.yml @@ -36,7 +36,7 @@ jobs: # needs a non-shallow clone. 
fetch-depth: 0 - - name: Check news fragment + - name: Check news fragment existence run: > python -m pip install --upgrade uv && uv tool run towncrier check @@ -52,3 +52,26 @@ jobs: && false ; } + + - name: Check news fragment contains change types + run: > + change_types=( + 'DAG changes' + 'Config changes' + 'API changes' + 'CLI changes' + 'Behaviour changes' + 'Plugin changes' + 'Dependency change' + ) + news_fragment_content=`git diff origin/${{ github.base_ref }} newsfragments/*.significant.rst` + + for type in "${change_types[@]}"; do + if [[ $news_fragment_content != *"$type"* ]]; then + printf "\033[1;33mMissing change type '$type' in significant newsfragment for PR labeled with + 'airflow3.0:breaking'.\nCheck + https://github.com/apache/airflow/blob/main/contributing-docs/16_contribution_workflow.rst + for guidance.\033[m\n" + exit 1 + fi + done diff --git a/.github/workflows/special-tests.yml b/.github/workflows/special-tests.yml index 9ff5afeeaf121..1cf1df579a49a 100644 --- a/.github/workflows/special-tests.yml +++ b/.github/workflows/special-tests.yml @@ -76,6 +76,7 @@ on: # yamllint disable-line rule:truthy description: "Whether to debug resources or not (true/false)" required: true type: string + jobs: tests-min-sqlalchemy: name: "Min SQLAlchemy test" diff --git a/.github/workflows/check-providers.yml b/.github/workflows/test-provider-packages.yml similarity index 68% rename from .github/workflows/check-providers.yml rename to .github/workflows/test-provider-packages.yml index a0bf2d316f82a..08715af6b58ba 100644 --- a/.github/workflows/check-providers.yml +++ b/.github/workflows/test-provider-packages.yml @@ -44,9 +44,9 @@ on: # yamllint disable-line rule:truthy description: "List of affected providers as string" required: false type: string - providers-compatibility-checks: + providers-compatibility-tests-matrix: description: > - JSON-formatted array of providers compatibility checks in the form of array of dicts + JSON-formatted array of providers compatibility tests in the form of array of dicts (airflow-version, python-versions, remove-providers, run-tests) required: true type: string @@ -63,10 +63,14 @@ on: # yamllint disable-line rule:truthy required: true type: string jobs: - prepare-install-verify-provider-packages-wheel: + prepare-install-verify-provider-packages: timeout-minutes: 80 - name: "Provider packages wheel build and verify" + name: "Providers ${{ matrix.package-format }} tests" runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + strategy: + fail-fast: false + matrix: + package-format: ["wheel", "sdist"] env: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -94,120 +98,65 @@ jobs: run: > breeze release-management prepare-provider-documentation --include-not-ready-providers --non-interactive - - name: "Prepare provider packages: wheel" + if: matrix.package-format == 'wheel' + - name: "Prepare provider packages: ${{ matrix.package-format }}" run: > breeze release-management prepare-provider-packages --include-not-ready-providers - --version-suffix-for-pypi dev0 --package-format wheel - - name: "Prepare airflow package: wheel" - run: breeze release-management prepare-airflow-package --version-suffix-for-pypi dev0 - - name: "Verify wheel packages with twine" + --version-suffix-for-pypi dev0 --package-format ${{ matrix.package-format }} + - name: "Prepare airflow package: ${{ matrix.package-format }}" + run: > + breeze release-management prepare-airflow-package --version-suffix-for-pypi dev0 + --package-format ${{ 
matrix.package-format }} + - name: "Verify ${{ matrix.package-format }} packages with twine" run: | uv tool uninstall twine || true - uv tool install twine && twine check dist/*.whl + uv tool install twine && twine check dist/* - name: "Test providers issue generation automatically" run: > breeze release-management generate-issue-content-providers --only-available-in-dist --disable-progress + if: matrix.package-format == 'wheel' - name: Remove Python 3.9-incompatible provider packages run: | echo "Removing Python 3.9-incompatible provider: cloudant" - rm -vf dist/apache_airflow_providers_cloudant* + rm -vf dist/*cloudant* - name: "Generate source constraints from CI image" shell: bash run: > breeze release-management generate-constraints --airflow-constraints-mode constraints-source-providers --answer yes - - name: "Install and verify all provider packages and airflow via wheel files" + - name: "Install and verify wheel provider packages" run: > breeze release-management verify-provider-packages --use-packages-from-dist - --package-format wheel - --use-airflow-version wheel + --package-format ${{ matrix.package-format }} + --use-airflow-version ${{ matrix.package-format }} --airflow-constraints-reference default --providers-constraints-location /files/constraints-${{env.PYTHON_MAJOR_MINOR_VERSION}}/constraints-source-providers-${{env.PYTHON_MAJOR_MINOR_VERSION}}.txt env: AIRFLOW_SKIP_CONSTRAINTS: "${{ inputs.upgrade-to-newer-dependencies }}" - - name: "Prepare airflow package: wheel without suffix and skipping the tag check" - run: > - breeze release-management prepare-provider-packages --skip-tag-check --package-format wheel - - prepare-install-provider-packages-sdist: - timeout-minutes: 80 - name: "Provider packages sdist build and install" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} - env: - GITHUB_REPOSITORY: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" - INCLUDE_NOT_READY_PROVIDERS: "true" - PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" - VERBOSE: "true" - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: > - Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }} - uses: ./.github/actions/prepare_breeze_and_image - - name: "Cleanup dist files" - run: rm -fv ./dist/* - - name: "Prepare provider packages: sdist" - run: > - breeze release-management prepare-provider-packages --include-not-ready-providers - --version-suffix-for-pypi dev0 --package-format sdist - ${{ inputs.selected-providers-list-as-string }} - - name: "Prepare airflow package: sdist" - run: > - breeze release-management prepare-airflow-package - --version-suffix-for-pypi dev0 --package-format sdist - - name: "Verify sdist packages with twine" - run: | - uv tool uninstall twine || true - uv tool install twine && twine check dist/*.tar.gz - - name: "Generate source constraints from CI image" - shell: bash - run: > - breeze release-management generate-constraints - --airflow-constraints-mode constraints-source-providers --answer yes - - name: "Install all provider packages and airflow via sdist files" - run: > - breeze release-management install-provider-packages - 
--use-packages-from-dist - --package-format sdist - --use-airflow-version sdist - --airflow-constraints-reference default - --providers-constraints-location - /files/constraints-${{env.PYTHON_MAJOR_MINOR_VERSION}}/constraints-source-providers-${{env.PYTHON_MAJOR_MINOR_VERSION}}.txt - --run-in-parallel - if: inputs.selected-providers-list-as-string == '' - - name: "Install affected provider packages and airflow via sdist files" + if: matrix.package-format == 'wheel' + - name: "Install all sdist provider packages and airflow" run: > breeze release-management install-provider-packages --use-packages-from-dist - --package-format sdist - --use-airflow-version sdist + --package-format ${{ matrix.package-format }} + --use-airflow-version ${{ matrix.package-format }} --airflow-constraints-reference default --providers-constraints-location /files/constraints-${{env.PYTHON_MAJOR_MINOR_VERSION}}/constraints-source-providers-${{env.PYTHON_MAJOR_MINOR_VERSION}}.txt --run-in-parallel - if: inputs.selected-providers-list-as-string != '' + if: matrix.package-format == 'sdist' - providers-compatibility-checks: + providers-compatibility-tests-matrix: timeout-minutes: 80 - name: Compat ${{ matrix.airflow-version }}:P${{ matrix.python-version }} provider check + name: Compat ${{ matrix.airflow-version }}:P${{ matrix.python-version }} providers test runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} strategy: fail-fast: false matrix: - include: ${{fromJSON(inputs.providers-compatibility-checks)}} + include: ${{fromJSON(inputs.providers-compatibility-tests-matrix)}} env: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d072d21055cff..326ba2b1cb8f8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -360,7 +360,7 @@ repos: types_or: [python, pyi] args: [--fix] require_serial: true - additional_dependencies: ["ruff==0.7.3"] + additional_dependencies: ["ruff==0.8.0"] exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py|^performance/tests/test_.*.py - id: ruff-format name: Run 'ruff format' @@ -370,7 +370,7 @@ repos: types_or: [python, pyi] args: [] require_serial: true - additional_dependencies: ["ruff==0.7.3"] + additional_dependencies: ["ruff==0.8.0"] exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py$ - id: replace-bad-characters name: Replace bad characters @@ -699,7 +699,7 @@ repos: ^airflow/hooks/.*$| ^airflow/operators/.*$| ^providers/src/airflow/providers/.*$| - ^airflow/sensors/.*$| + ^providers/src/airflow/providers/standard/sensors/.*$| ^dev/provider_packages/.*$ - id: check-base-operator-usage language: pygrep @@ -714,7 +714,7 @@ repos: ^airflow/hooks/.*$| ^airflow/operators/.*$| ^providers/src/airflow/providers/.*$| - ^airflow/sensors/.*$| + ^providers/src/airflow/providers/standard/sensors/.*$| ^dev/provider_packages/.*$ - id: check-base-operator-usage language: pygrep @@ -725,7 +725,7 @@ repos: files: > (?x) ^providers/src/airflow/providers/.*\.py$ - exclude: ^.*/.*_vendor/|providers/src/airflow/providers/standard/operators/bash.py|providers/src/airflow/providers/standard/operators/python.py + exclude: ^.*/.*_vendor/|providers/src/airflow/providers/standard/operators/bash.py|providers/src/airflow/providers/standard/operators/python.py|providers/src/airflow/providers/standard/sensors/external_task.py - id: check-get-lineage-collector-providers language: python name: Check providers import hook lineage code from compat @@ -750,10 +750,9 @@ repos: pass_filenames: true - id: 
check-provide-create-sessions-imports language: pygrep - name: Check provide_session and create_session imports - description: provide_session and create_session should be imported from airflow.utils.session - to avoid import cycles. - entry: "from airflow\\.utils\\.db import.* (provide_session|create_session)" + name: Check session util imports + description: NEW_SESSION, provide_session, and create_session should be imported from airflow.utils.session to avoid import cycles. + entry: "from airflow\\.utils\\.db import.* (NEW_SESSION|provide_session|create_session)" files: \.py$ exclude: ^.*/.*_vendor/ pass_filenames: true @@ -798,7 +797,7 @@ repos: entry: ./scripts/ci/pre_commit/boring_cyborg.py pass_filenames: false require_serial: true - additional_dependencies: ['pyyaml', 'termcolor==1.1.0', 'wcmatch==8.2'] + additional_dependencies: ['pyyaml', 'termcolor==2.5.0', 'wcmatch==8.2'] - id: update-in-the-wild-to-be-sorted name: Sort INTHEWILD.md alphabetically entry: ./scripts/ci/pre_commit/sort_in_the_wild.py @@ -1189,7 +1188,13 @@ repos: files: \.py$ exclude: | (?x) + ^airflow/configuration.py$ | + ^airflow/metrics/validators.py$ | + ^airflow/models/dag.py$ | + ^airflow/serialization/serde.py$ | + ^airflow/utils/file.py$ | ^airflow/utils/helpers.py$ | + ^airflow/utils/log/secrets_masker.py$ | ^providers/src/airflow/providers/ | ^(providers/)?tests/ | task_sdk/src/airflow/sdk/definitions/dag.py$ | @@ -1268,13 +1273,14 @@ repos: # These migrations contain FAB related changes but existed before moving FAB auth manager # to its own provider exclude: > - (?ix) - ^airflow/migrations/versions/00.*\.py$| - ^airflow/migrations/versions/0106.*\.py$| - ^airflow/migrations/versions/0118.*\.py$| - ^airflow/migrations/versions/0119.*\.py$| - ^airflow/migrations/versions/0121.*\.py$| - ^airflow/migrations/versions/0124.*\.py$ + (?ix)^( + airflow/migrations/versions/00.*\.py| + airflow/migrations/versions/0106.*\.py| + airflow/migrations/versions/0118.*\.py| + airflow/migrations/versions/0119.*\.py| + airflow/migrations/versions/0121.*\.py| + airflow/migrations/versions/0124.*\.py + )$ ## ADD MOST PRE-COMMITS ABOVE THAT LINE # The below pre-commits are those requiring CI image to be built - id: mypy-dev @@ -1292,36 +1298,37 @@ repos: pass_filenames: false files: ^.*\.py$ require_serial: true - additional_dependencies: [ 'rich>=12.4.4' ] + additional_dependencies: ['rich>=12.4.4'] - id: mypy-airflow name: Run mypy for airflow language: python entry: ./scripts/ci/pre_commit/mypy.py files: \.py$ exclude: | - (?x) - ^.*/.*_vendor/ | - ^airflow/migrations | - ^providers/ | - ^task_sdk/ | - ^dev | - ^scripts | - ^docs | - ^provider_packages | - ^performance/ | - ^tests/dags/test_imports.py | - ^clients/python/test_.*\.py + (?x)^( + .*/.*_vendor/| + airflow/migrations| + clients/python/test_.*\.py| + dev| + docs| + performance/| + provider_packages| + providers/| + scripts| + task_sdk/| + tests/dags/test_imports\.py + ) require_serial: true additional_dependencies: ['rich>=12.4.4'] - id: mypy-airflow - stages: [ 'manual' ] + stages: ['manual'] name: Run mypy for airflow (manual) language: python entry: ./scripts/ci/pre_commit/mypy_folder.py airflow pass_filenames: false files: ^.*\.py$ require_serial: true - additional_dependencies: [ 'rich>=12.4.4' ] + additional_dependencies: ['rich>=12.4.4'] - id: mypy-providers name: Run mypy for providers language: python diff --git a/Dockerfile b/Dockerfile index 3d90a9d52703e..a2c48a6831a50 100644 --- a/Dockerfile +++ b/Dockerfile @@ -55,7 +55,7 @@ ARG 
PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" # Also use `force pip` label on your PR to swap all places we use `uv` to `pip` ARG AIRFLOW_PIP_VERSION=24.3.1 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" -ARG AIRFLOW_UV_VERSION=0.5.3 +ARG AIRFLOW_UV_VERSION=0.5.4 ARG AIRFLOW_USE_UV="false" ARG UV_HTTP_TIMEOUT="300" ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" diff --git a/Dockerfile.ci b/Dockerfile.ci index 05233f06419e4..568680d9e5ce1 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -822,6 +822,8 @@ mkdir "${AIRFLOW_HOME}/sqlite" -p || true ASSET_COMPILATION_WAIT_MULTIPLIER=${ASSET_COMPILATION_WAIT_MULTIPLIER:=1} +. "${IN_CONTAINER_DIR}/check_connectivity.sh" + function wait_for_asset_compilation() { if [[ -f "${AIRFLOW_SOURCES}/.build/www/.asset_compile.lock" ]]; then echo @@ -1080,7 +1082,7 @@ function check_run_tests() { if [[ ${REMOVE_ARM_PACKAGES:="false"} == "true" ]]; then # Test what happens if we do not have ARM packages installed. # This is useful to see if pytest collection works without ARM packages which is important - # for the MacOS M1 users running tests in their ARM machines with `breeze testing tests` command + # for the MacOS M1 users running tests in their ARM machines with `breeze testing *-tests` command python "${IN_CONTAINER_DIR}/remove_arm_packages.py" fi @@ -1146,12 +1148,61 @@ function check_force_lowest_dependencies() { set +x } +function check_airflow_python_client_installation() { + if [[ ${INSTALL_AIRFLOW_PYTHON_CLIENT=} != "true" ]]; then + return + fi + python "${IN_CONTAINER_DIR}/install_airflow_python_client.py" +} + +function start_webserver_with_examples(){ + if [[ ${START_WEBSERVER_WITH_EXAMPLES=} != "true" ]]; then + return + fi + export AIRFLOW__CORE__LOAD_EXAMPLES=True + export AIRFLOW__API__AUTH_BACKENDS=airflow.api.auth.backend.session,airflow.providers.fab.auth_manager.api.auth.backend.basic_auth + export AIRFLOW__WEBSERVER__EXPOSE_CONFIG=True + echo + echo "${COLOR_BLUE}Initializing database${COLOR_RESET}" + echo + airflow db migrate + echo + echo "${COLOR_BLUE}Database initialized${COLOR_RESET}" + echo + echo "${COLOR_BLUE}Parsing example dags${COLOR_RESET}" + echo + airflow scheduler --num-runs 100 + echo "Example dags parsing finished" + echo "Create admin user" + airflow users create -u admin -p admin -f Thor -l Administrator -r Admin -e admin@email.domain + echo "Admin user created" + echo + echo "${COLOR_BLUE}Starting airflow webserver${COLOR_RESET}" + echo + airflow webserver --port 8080 --daemon + echo + echo "${COLOR_BLUE}Waiting for webserver to start${COLOR_RESET}" + echo + check_service_connection "Airflow webserver" "run_nc localhost 8080" 100 + EXIT_CODE=$? 
+ if [[ ${EXIT_CODE} != 0 ]]; then + echo + echo "${COLOR_RED}Webserver did not start properly${COLOR_RESET}" + echo + exit ${EXIT_CODE} + fi + echo + echo "${COLOR_BLUE}Airflow webserver started${COLOR_RESET}" +} + determine_airflow_to_use environment_initialization check_boto_upgrade check_downgrade_sqlalchemy check_downgrade_pendulum check_force_lowest_dependencies +check_airflow_python_client_installation +start_webserver_with_examples check_run_tests "${@}" exec /bin/bash "${@}" @@ -1354,7 +1405,7 @@ RUN bash /scripts/docker/install_packaging_tools.sh; \ # Also use `force pip` label on your PR to swap all places we use `uv` to `pip` ARG AIRFLOW_PIP_VERSION=24.3.1 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" -ARG AIRFLOW_UV_VERSION=0.5.3 +ARG AIRFLOW_UV_VERSION=0.5.4 # TODO(potiuk): automate with upgrade check (possibly) ARG AIRFLOW_PRE_COMMIT_VERSION="4.0.1" ARG AIRFLOW_PRE_COMMIT_UV_VERSION="4.1.4" diff --git a/INTHEWILD.md b/INTHEWILD.md index bdcb4973d78cb..267a798fdcaa8 100644 --- a/INTHEWILD.md +++ b/INTHEWILD.md @@ -366,6 +366,7 @@ Currently, **officially** using Airflow: 1. [OK.ru](https://ok.ru/) [[@Mikhail-M](https://github.com/Mikhail-M)] 1. [OneFineStay](https://www.onefinestay.com) [[@slangwald](https://github.com/slangwald)] 1. [Open Knowledge International](https://okfn.org) [@vitorbaptista](https://github.com/vitorbaptista) +1. [OpenBlock Labs](https://www.openblocklabs.com/) [[@melotik](https://github.com/melotik), [@l-jhon](https://github.com/l-jhon) & [@nitish-91](https://github.com/nitish-91)] 1. [Opensignal](https://www.opensignal.com) [@harrisjoseph](https://github.com/harrisjoseph) 1. [OpenSlate](https://openslate.com) [@marcusianlevine](https://github.com/marcusianlevine) 1. [Openverse](https://wordpress.org/openverse) diff --git a/airflow/api/common/mark_tasks.py b/airflow/api/common/mark_tasks.py index b57d25498d267..7c55c3527d154 100644 --- a/airflow/api/common/mark_tasks.py +++ b/airflow/api/common/mark_tasks.py @@ -19,7 +19,8 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Collection, Iterable, Iterator, NamedTuple +from collections.abc import Collection, Iterable +from typing import TYPE_CHECKING from sqlalchemy import and_, or_, select from sqlalchemy.orm import lazyload @@ -29,7 +30,6 @@ from airflow.utils import timezone from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.state import DagRunState, State, TaskInstanceState -from airflow.utils.types import DagRunTriggeredByType, DagRunType if TYPE_CHECKING: from datetime import datetime @@ -38,47 +38,6 @@ from airflow.models.dag import DAG from airflow.models.operator import Operator - from airflow.utils.types import DagRunType - - -class _DagRunInfo(NamedTuple): - logical_date: datetime - data_interval: tuple[datetime, datetime] - - -def _create_dagruns( - dag: DAG, - infos: Iterable[_DagRunInfo], - state: DagRunState, - run_type: DagRunType, -) -> Iterable[DagRun]: - """ - Infers from data intervals which DAG runs need to be created and does so. - - :param dag: The DAG to create runs for. - :param infos: List of logical dates and data intervals to evaluate. - :param state: The state to set the dag run to - :param run_type: The prefix will be used to construct dag run id: ``{run_id_prefix}__{logical_date}``. - :return: Newly created and existing dag runs for the logical dates supplied. - """ - # Find out existing DAG runs that we don't need to create. 
- dag_runs = { - run.logical_date: run - for run in DagRun.find(dag_id=dag.dag_id, logical_date=[info.logical_date for info in infos]) - } - - for info in infos: - if info.logical_date not in dag_runs: - dag_runs[info.logical_date] = dag.create_dagrun( - logical_date=info.logical_date, - data_interval=info.data_interval, - start_date=timezone.utcnow(), - external_trigger=False, - state=state, - run_type=run_type, - triggered_by=DagRunTriggeredByType.TIMETABLE, - ) - return dag_runs.values() @provide_session @@ -131,7 +90,7 @@ def set_state( task_id_map_index_list = list(find_task_relatives(tasks, downstream, upstream)) # now look for the task instances that are affected - qry_dag = get_all_dag_task_query(dag, session, state, task_id_map_index_list, dag_run_ids) + qry_dag = get_all_dag_task_query(dag, state, task_id_map_index_list, dag_run_ids) if commit: tis_altered = session.scalars(qry_dag.with_for_update()).all() @@ -145,7 +104,6 @@ def set_state( def get_all_dag_task_query( dag: DAG, - session: SASession, state: TaskInstanceState, task_ids: list[str | tuple[str, int]], run_ids: Iterable[str], @@ -163,13 +121,6 @@ def get_all_dag_task_query( return qry_dag -def _iter_existing_dag_run_infos(dag: DAG, run_ids: list[str], session: SASession) -> Iterator[_DagRunInfo]: - for dag_run in DagRun.find(dag_id=dag.dag_id, run_id=run_ids, session=session): - dag_run.dag = dag - dag_run.verify_integrity(session=session) - yield _DagRunInfo(dag_run.logical_date, dag.get_run_data_interval(dag_run)) - - def find_task_relatives(tasks, downstream, upstream): """Yield task ids and optionally ancestor and descendant ids.""" for item in tasks: @@ -417,28 +368,6 @@ def __set_dag_run_state_to_running_or_queued( return res -@provide_session -def set_dag_run_state_to_running( - *, - dag: DAG, - run_id: str | None = None, - commit: bool = False, - session: SASession = NEW_SESSION, -) -> list[TaskInstance]: - """ - Set the dag run's state to running. - - Set for a specific logical date and its task instances to running. - """ - return __set_dag_run_state_to_running_or_queued( - new_state=DagRunState.RUNNING, - dag=dag, - run_id=run_id, - commit=commit, - session=session, - ) - - @provide_session def set_dag_run_state_to_queued( *, diff --git a/airflow/api_connexion/endpoints/dag_endpoint.py b/airflow/api_connexion/endpoints/dag_endpoint.py index 352bf9cfd4c11..bb5d419691592 100644 --- a/airflow/api_connexion/endpoints/dag_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_endpoint.py @@ -16,8 +16,9 @@ # under the License. from __future__ import annotations +from collections.abc import Collection from http import HTTPStatus -from typing import TYPE_CHECKING, Collection +from typing import TYPE_CHECKING from connexion import NoContent from flask import g, request diff --git a/airflow/api_connexion/endpoints/dag_parsing.py b/airflow/api_connexion/endpoints/dag_parsing.py index 8c48888629b2b..da0cd7a7e8776 100644 --- a/airflow/api_connexion/endpoints/dag_parsing.py +++ b/airflow/api_connexion/endpoints/dag_parsing.py @@ -16,8 +16,9 @@ # under the License. 
from __future__ import annotations +from collections.abc import Sequence from http import HTTPStatus -from typing import TYPE_CHECKING, Sequence +from typing import TYPE_CHECKING from flask import Response, current_app from itsdangerous import BadSignature, URLSafeSerializer diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py index 00dd8ca907193..985efc7fc898d 100644 --- a/airflow/api_connexion/endpoints/dag_run_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py @@ -16,8 +16,9 @@ # under the License. from __future__ import annotations +from collections.abc import Collection from http import HTTPStatus -from typing import TYPE_CHECKING, Collection +from typing import TYPE_CHECKING import pendulum from connexion import NoContent @@ -264,6 +265,7 @@ def get_dag_runs( raise BadRequest("DAGRunCollectionSchema error", detail=str(e)) +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.RUN) @provide_session def get_dag_runs_batch(*, session: Session = NEW_SESSION) -> APIResponse: @@ -303,6 +305,7 @@ def get_dag_runs_batch(*, session: Session = NEW_SESSION) -> APIResponse: return dagrun_collection_schema.dump(DAGRunCollection(dag_runs=dag_runs, total_entries=total_entries)) +@mark_fastapi_migration_done @security.requires_access_dag("POST", DagAccessEntity.RUN) @action_logging @provide_session diff --git a/airflow/api_connexion/endpoints/dag_source_endpoint.py b/airflow/api_connexion/endpoints/dag_source_endpoint.py index e53ffe89c73ea..1c6e34fc2b1a7 100644 --- a/airflow/api_connexion/endpoints/dag_source_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_source_endpoint.py @@ -16,8 +16,9 @@ # under the License. from __future__ import annotations +from collections.abc import Sequence from http import HTTPStatus -from typing import TYPE_CHECKING, Sequence +from typing import TYPE_CHECKING from flask import Response, request from sqlalchemy import select diff --git a/airflow/api_connexion/endpoints/extra_link_endpoint.py b/airflow/api_connexion/endpoints/extra_link_endpoint.py index ddf4b670285c8..e5e015ac72403 100644 --- a/airflow/api_connexion/endpoints/extra_link_endpoint.py +++ b/airflow/api_connexion/endpoints/extra_link_endpoint.py @@ -25,6 +25,7 @@ from airflow.auth.managers.models.resource_details import DagAccessEntity from airflow.exceptions import TaskNotFound from airflow.utils.airflow_flask_app import get_airflow_app +from airflow.utils.api_migration import mark_fastapi_migration_done from airflow.utils.session import NEW_SESSION, provide_session if TYPE_CHECKING: @@ -35,6 +36,7 @@ from airflow.models.dagbag import DagBag +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.TASK_INSTANCE) @provide_session def get_extra_links( diff --git a/airflow/api_connexion/endpoints/import_error_endpoint.py b/airflow/api_connexion/endpoints/import_error_endpoint.py index 633dd0bebde52..76fad6cb92d4e 100644 --- a/airflow/api_connexion/endpoints/import_error_endpoint.py +++ b/airflow/api_connexion/endpoints/import_error_endpoint.py @@ -16,7 +16,8 @@ # under the License. 
from __future__ import annotations -from typing import TYPE_CHECKING, Sequence +from collections.abc import Sequence +from typing import TYPE_CHECKING from sqlalchemy import func, select diff --git a/airflow/api_connexion/endpoints/pool_endpoint.py b/airflow/api_connexion/endpoints/pool_endpoint.py index a6ccd3a4aa9b9..3babe3370e3d3 100644 --- a/airflow/api_connexion/endpoints/pool_endpoint.py +++ b/airflow/api_connexion/endpoints/pool_endpoint.py @@ -119,8 +119,8 @@ def patch_pool( update_mask = [i.strip() for i in update_mask] _patch_body = {} try: - # MyPy infers a List[Optional[str]] type here but it should be a List[str] - # there is no way field is None here (UpdateMask is a List[str]) + # MyPy infers a list[Optional[str]] type here but it should be a list[str] + # there is no way field is None here (UpdateMask is a list[str]) # so if pool_schema.declared_fields[field].attribute is None file is returned update_mask = [ ( diff --git a/airflow/api_connexion/endpoints/request_dict.py b/airflow/api_connexion/endpoints/request_dict.py index b07e06c0b63f8..f0e78a41bd041 100644 --- a/airflow/api_connexion/endpoints/request_dict.py +++ b/airflow/api_connexion/endpoints/request_dict.py @@ -16,7 +16,8 @@ # under the License. from __future__ import annotations -from typing import Any, Mapping, cast +from collections.abc import Mapping +from typing import Any, cast def get_json_request_dict() -> Mapping[str, Any]: diff --git a/airflow/api_connexion/endpoints/task_instance_endpoint.py b/airflow/api_connexion/endpoints/task_instance_endpoint.py index 95c824656ff27..b599f24ab311c 100644 --- a/airflow/api_connexion/endpoints/task_instance_endpoint.py +++ b/airflow/api_connexion/endpoints/task_instance_endpoint.py @@ -16,7 +16,8 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Any, Iterable, Sequence, TypeVar +from collections.abc import Iterable, Sequence +from typing import TYPE_CHECKING, Any, TypeVar from flask import g from marshmallow import ValidationError @@ -557,6 +558,7 @@ def set_mapped_task_instance_note( return set_task_instance_note(dag_id=dag_id, dag_run_id=dag_run_id, task_id=task_id, map_index=map_index) +@mark_fastapi_migration_done @security.requires_access_dag("PUT", DagAccessEntity.TASK_INSTANCE) @action_logging @provide_session @@ -779,6 +781,7 @@ def get_mapped_task_instance_try_details( ) +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.TASK_INSTANCE) @provide_session def get_task_instance_tries( @@ -811,6 +814,7 @@ def _query(orm_object): ) +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.TASK_INSTANCE) @provide_session def get_mapped_task_instance_tries( diff --git a/airflow/api_connexion/endpoints/update_mask.py b/airflow/api_connexion/endpoints/update_mask.py index 38fd255f51b3a..d6ff005d8aea5 100644 --- a/airflow/api_connexion/endpoints/update_mask.py +++ b/airflow/api_connexion/endpoints/update_mask.py @@ -16,7 +16,8 @@ # under the License. 
from __future__ import annotations -from typing import Any, Mapping, Sequence +from collections.abc import Mapping, Sequence +from typing import Any from airflow.api_connexion.exceptions import BadRequest diff --git a/airflow/api_connexion/endpoints/xcom_endpoint.py b/airflow/api_connexion/endpoints/xcom_endpoint.py index c86617391ab12..cb3faf7379e0a 100644 --- a/airflow/api_connexion/endpoints/xcom_endpoint.py +++ b/airflow/api_connexion/endpoints/xcom_endpoint.py @@ -45,6 +45,7 @@ from airflow.api_connexion.types import APIResponse +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.XCOM) @format_parameters({"limit": check_limit}) @provide_session diff --git a/airflow/api_connexion/parameters.py b/airflow/api_connexion/parameters.py index a05ded37614d4..c6d10b8a29e81 100644 --- a/airflow/api_connexion/parameters.py +++ b/airflow/api_connexion/parameters.py @@ -17,8 +17,9 @@ from __future__ import annotations import logging +from collections.abc import Container from functools import wraps -from typing import TYPE_CHECKING, Any, Callable, Container, TypeVar, cast +from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast from pendulum.parsing import ParserError from sqlalchemy import text diff --git a/airflow/api_connexion/types.py b/airflow/api_connexion/types.py index 3a6f89d9bb52a..f17f2a0d2712b 100644 --- a/airflow/api_connexion/types.py +++ b/airflow/api_connexion/types.py @@ -16,13 +16,14 @@ # under the License. from __future__ import annotations -from typing import Any, Mapping, Optional, Sequence, Tuple, Union +from collections.abc import Mapping, Sequence +from typing import Any, Optional, Union from flask import Response APIResponse = Union[ Response, - Tuple[object, int], # For '(NoContent, 201)'. + tuple[object, int], # For '(NoContent, 201)'. Mapping[str, Any], # JSON. ] diff --git a/airflow/api_fastapi/common/db/common.py b/airflow/api_fastapi/common/db/common.py index 17da1eafacc93..fc7907e5bd25a 100644 --- a/airflow/api_fastapi/common/db/common.py +++ b/airflow/api_fastapi/common/db/common.py @@ -22,10 +22,13 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Literal, Sequence, overload +from collections.abc import Sequence +from typing import TYPE_CHECKING, Literal, overload -from airflow.utils.db import get_query_count -from airflow.utils.session import NEW_SESSION, create_session, provide_session +from sqlalchemy.ext.asyncio import AsyncSession + +from airflow.utils.db import get_query_count, get_query_count_async +from airflow.utils.session import NEW_SESSION, create_session, create_session_async, provide_session if TYPE_CHECKING: from sqlalchemy.orm import Session @@ -53,22 +56,96 @@ def your_route(session: Annotated[Session, Depends(get_session)]): def apply_filters_to_select( - *, base_select: Select, filters: Sequence[BaseParam | None] | None = None + *, statement: Select, filters: Sequence[BaseParam | None] | None = None ) -> Select: if filters is None: - return base_select + return statement for f in filters: if f is None: continue - base_select = f.to_orm(base_select) + statement = f.to_orm(statement) + + return statement + + +async def get_async_session() -> AsyncSession: + """ + Dependency for providing a session. + + Example usage: + + .. 
code:: python + + @router.get("/your_path") + def your_route(session: Annotated[AsyncSession, Depends(get_async_session)]): + pass + """ + async with create_session_async() as session: + yield session + + +@overload +async def paginated_select_async( + *, + statement: Select, + filters: Sequence[BaseParam] | None = None, + order_by: BaseParam | None = None, + offset: BaseParam | None = None, + limit: BaseParam | None = None, + session: AsyncSession, + return_total_entries: Literal[True] = True, +) -> tuple[Select, int]: ... + + +@overload +async def paginated_select_async( + *, + statement: Select, + filters: Sequence[BaseParam] | None = None, + order_by: BaseParam | None = None, + offset: BaseParam | None = None, + limit: BaseParam | None = None, + session: AsyncSession, + return_total_entries: Literal[False], +) -> tuple[Select, None]: ... + + +async def paginated_select_async( + *, + statement: Select, + filters: Sequence[BaseParam | None] | None = None, + order_by: BaseParam | None = None, + offset: BaseParam | None = None, + limit: BaseParam | None = None, + session: AsyncSession, + return_total_entries: bool = True, +) -> tuple[Select, int | None]: + statement = apply_filters_to_select( + statement=statement, + filters=filters, + ) + + total_entries = None + if return_total_entries: + total_entries = await get_query_count_async(statement, session=session) + + # TODO: Re-enable when permissions are handled. Readable / writable entities, + # for instance: + # readable_dags = get_auth_manager().get_permitted_dag_ids(user=g.user) + # dags_select = dags_select.where(DagModel.dag_id.in_(readable_dags)) + + statement = apply_filters_to_select( + statement=statement, + filters=[order_by, offset, limit], + ) - return base_select + return statement, total_entries @overload def paginated_select( *, - select: Select, + statement: Select, filters: Sequence[BaseParam] | None = None, order_by: BaseParam | None = None, offset: BaseParam | None = None, @@ -81,7 +158,7 @@ def paginated_select( @overload def paginated_select( *, - select: Select, + statement: Select, filters: Sequence[BaseParam] | None = None, order_by: BaseParam | None = None, offset: BaseParam | None = None, @@ -94,7 +171,7 @@ def paginated_select( @provide_session def paginated_select( *, - select: Select, + statement: Select, filters: Sequence[BaseParam] | None = None, order_by: BaseParam | None = None, offset: BaseParam | None = None, @@ -102,20 +179,20 @@ def paginated_select( session: Session = NEW_SESSION, return_total_entries: bool = True, ) -> tuple[Select, int | None]: - base_select = apply_filters_to_select( - base_select=select, + statement = apply_filters_to_select( + statement=statement, filters=filters, ) total_entries = None if return_total_entries: - total_entries = get_query_count(base_select, session=session) + total_entries = get_query_count(statement, session=session) # TODO: Re-enable when permissions are handled. 
Readable / writable entities, # for instance: # readable_dags = get_auth_manager().get_permitted_dag_ids(user=g.user) # dags_select = dags_select.where(DagModel.dag_id.in_(readable_dags)) - base_select = apply_filters_to_select(base_select=base_select, filters=[order_by, offset, limit]) + statement = apply_filters_to_select(statement=statement, filters=[order_by, offset, limit]) - return base_select, total_entries + return statement, total_entries diff --git a/airflow/api_fastapi/common/headers.py b/airflow/api_fastapi/common/headers.py index 7e0bd5aae3d29..7d1a0fa69613b 100644 --- a/airflow/api_fastapi/common/headers.py +++ b/airflow/api_fastapi/common/headers.py @@ -16,8 +16,9 @@ # under the License. from __future__ import annotations +from typing import Annotated + from fastapi import Depends, Header, HTTPException, status -from typing_extensions import Annotated from airflow.api_fastapi.common.types import Mimetype diff --git a/airflow/api_fastapi/common/parameters.py b/airflow/api_fastapi/common/parameters.py index 7554ee88450bb..3390a455cc76f 100644 --- a/airflow/api_fastapi/common/parameters.py +++ b/airflow/api_fastapi/common/parameters.py @@ -18,6 +18,7 @@ from __future__ import annotations from abc import ABC, abstractmethod +from collections.abc import Iterable from datetime import datetime from typing import ( TYPE_CHECKING, @@ -25,8 +26,6 @@ Any, Callable, Generic, - Iterable, - List, Optional, TypeVar, Union, @@ -40,12 +39,12 @@ from sqlalchemy.inspection import inspect from airflow.api_connexion.endpoints.task_instance_endpoint import _convert_ti_states -from airflow.models import Base, Connection +from airflow.jobs.job import Job +from airflow.models import Base from airflow.models.asset import AssetEvent, AssetModel, DagScheduleAssetReference, TaskOutletAssetReference from airflow.models.dag import DagModel, DagTag from airflow.models.dagrun import DagRun from airflow.models.dagwarning import DagWarning, DagWarningType -from airflow.models.errors import ParseImportError from airflow.models.taskinstance import TaskInstance from airflow.typing_compat import Self from airflow.utils import timezone @@ -218,16 +217,8 @@ def depends(self, dag_display_name_pattern: str | None = None) -> _DagDisplayNam class SortParam(BaseParam[str]): """Order result by the attribute.""" - attr_mapping = { - "last_run_state": DagRun.state, - "last_run_start_date": DagRun.start_date, - "connection_id": Connection.conn_id, - "import_error_id": ParseImportError.id, - "dag_run_id": DagRun.run_id, - } - def __init__( - self, allowed_attrs: list[str], model: Base, to_replace: dict[str, str] | None = None + self, allowed_attrs: list[str], model: Base, to_replace: dict[str, str | Column] | None = None ) -> None: super().__init__() self.allowed_attrs = allowed_attrs @@ -239,22 +230,25 @@ def to_orm(self, select: Select) -> Select: raise ValueError(f"Cannot set 'skip_none' to False on a {type(self)}") if self.value is None: - return select + self.value = self.get_primary_key_string() lstriped_orderby = self.value.lstrip("-") + column: Column | None = None if self.to_replace: - lstriped_orderby = self.to_replace.get(lstriped_orderby, lstriped_orderby) + replacement = self.to_replace.get(lstriped_orderby, lstriped_orderby) + if isinstance(replacement, str): + lstriped_orderby = replacement + else: + column = replacement - if self.allowed_attrs and lstriped_orderby not in self.allowed_attrs: + if (self.allowed_attrs and lstriped_orderby not in self.allowed_attrs) and column is None: raise HTTPException( 400, 
f"Ordering with '{lstriped_orderby}' is disallowed or " f"the attribute does not exist on the model", ) - - column: Column = self.attr_mapping.get(lstriped_orderby, None) or getattr( - self.model, lstriped_orderby - ) + if column is None: + column = getattr(self.model, lstriped_orderby) # MySQL does not support `nullslast`, and True/False ordering depends on the # database implementation. @@ -288,7 +282,7 @@ def inner(order_by: str = default or self.get_primary_key_string()) -> SortParam return inner -class _TagsFilter(BaseParam[List[str]]): +class _TagsFilter(BaseParam[list[str]]): """Filter on tags.""" def to_orm(self, select: Select) -> Select: @@ -305,7 +299,7 @@ def depends(self, tags: list[str] = Query(default_factory=list)) -> _TagsFilter: return self.set_value(tags) -class _OwnersFilter(BaseParam[List[str]]): +class _OwnersFilter(BaseParam[list[str]]): """Filter on owners.""" def to_orm(self, select: Select) -> Select: @@ -322,7 +316,7 @@ def depends(self, owners: list[str] = Query(default_factory=list)) -> _OwnersFil return self.set_value(owners) -class DagRunStateFilter(BaseParam[List[Optional[DagRunState]]]): +class DagRunStateFilter(BaseParam[list[Optional[DagRunState]]]): """Filter on Dag Run state.""" def to_orm(self, select: Select) -> Select: @@ -352,7 +346,7 @@ def depends(self, state: list[str] = Query(default_factory=list)) -> DagRunState return self.set_value(states) -class TIStateFilter(BaseParam[List[Optional[TaskInstanceState]]]): +class TIStateFilter(BaseParam[list[Optional[TaskInstanceState]]]): """Filter on task instance state.""" def to_orm(self, select: Select) -> Select: @@ -376,7 +370,7 @@ def depends(self, state: list[str] = Query(default_factory=list)) -> TIStateFilt return self.set_value(states) -class TIPoolFilter(BaseParam[List[str]]): +class TIPoolFilter(BaseParam[list[str]]): """Filter on task instance pool.""" def to_orm(self, select: Select) -> Select: @@ -393,7 +387,7 @@ def depends(self, pool: list[str] = Query(default_factory=list)) -> TIPoolFilter return self.set_value(pool) -class TIQueueFilter(BaseParam[List[str]]): +class TIQueueFilter(BaseParam[list[str]]): """Filter on task instance queue.""" def to_orm(self, select: Select) -> Select: @@ -410,7 +404,7 @@ def depends(self, queue: list[str] = Query(default_factory=list)) -> TIQueueFilt return self.set_value(queue) -class TIExecutorFilter(BaseParam[List[str]]): +class TIExecutorFilter(BaseParam[list[str]]): """Filter on task instance executor.""" def to_orm(self, select: Select) -> Select: @@ -450,6 +444,54 @@ def depends(self, tag_name_pattern: str | None = None) -> _DagTagNamePatternSear return self.set_value(tag_name_pattern) +class _JobTypeFilter(BaseParam[str]): + """Filter on job_type.""" + + def to_orm(self, select: Select) -> Select: + if self.value is None and self.skip_none: + return select + return select.where(Job.job_type == self.value) + + def depends(self, job_type: str | None = None) -> _JobTypeFilter: + return self.set_value(job_type) + + +class _JobStateFilter(BaseParam[str]): + """Filter on job_state.""" + + def to_orm(self, select: Select) -> Select: + if self.value is None and self.skip_none: + return select + return select.where(Job.state == self.value) + + def depends(self, job_state: str | None = None) -> _JobStateFilter: + return self.set_value(job_state) + + +class _JobHostnameFilter(BaseParam[str]): + """Filter on hostname.""" + + def to_orm(self, select: Select) -> Select: + if self.value is None and self.skip_none: + return select + return select.where(Job.hostname 
== self.value) + + def depends(self, hostname: str | None = None) -> _JobHostnameFilter: + return self.set_value(hostname) + + +class _JobExecutorClassFilter(BaseParam[str]): + """Filter on executor_class.""" + + def to_orm(self, select: Select) -> Select: + if self.value is None and self.skip_none: + return select + return select.where(Job.executor_class == self.value) + + def depends(self, executor_class: str | None = None) -> _JobExecutorClassFilter: + return self.set_value(executor_class) + + def _safe_parse_datetime(date_to_check: str) -> datetime: """ Parse datetime and raise error for invalid dates. @@ -719,6 +761,11 @@ def depends_float( QueryTIPoolFilter = Annotated[TIPoolFilter, Depends(TIPoolFilter().depends)] QueryTIQueueFilter = Annotated[TIQueueFilter, Depends(TIQueueFilter().depends)] QueryTIExecutorFilter = Annotated[TIExecutorFilter, Depends(TIExecutorFilter().depends)] +# Job +QueryJobTypeFilter = Annotated[_JobTypeFilter, Depends(_JobTypeFilter().depends)] +QueryJobStateFilter = Annotated[_JobStateFilter, Depends(_JobStateFilter().depends)] +QueryJobHostnameFilter = Annotated[_JobHostnameFilter, Depends(_JobHostnameFilter().depends)] +QueryJobExecutorClassFilter = Annotated[_JobExecutorClassFilter, Depends(_JobExecutorClassFilter().depends)] # Assets QueryUriPatternSearch = Annotated[_UriPatternSearch, Depends(_UriPatternSearch().depends)] diff --git a/airflow/api_fastapi/common/router.py b/airflow/api_fastapi/common/router.py index 5bf07e0fe834a..aeb1fb22452b5 100644 --- a/airflow/api_fastapi/common/router.py +++ b/airflow/api_fastapi/common/router.py @@ -17,8 +17,9 @@ from __future__ import annotations +from collections.abc import Sequence from enum import Enum -from typing import Any, Callable, Sequence +from typing import Any, Callable from fastapi import APIRouter, params from fastapi.datastructures import Default diff --git a/airflow/api_fastapi/core_api/datamodels/assets.py b/airflow/api_fastapi/core_api/datamodels/assets.py index adc32c2e4808f..638ee1cba6e29 100644 --- a/airflow/api_fastapi/core_api/datamodels/assets.py +++ b/airflow/api_fastapi/core_api/datamodels/assets.py @@ -53,7 +53,9 @@ class AssetResponse(BaseModel): """Asset serializer for responses.""" id: int + name: str uri: str + group: str extra: dict | None = None created_at: datetime updated_at: datetime diff --git a/airflow/api_fastapi/core_api/datamodels/dag_run.py b/airflow/api_fastapi/core_api/datamodels/dag_run.py index d211b0205b3b4..ab8126277873e 100644 --- a/airflow/api_fastapi/core_api/datamodels/dag_run.py +++ b/airflow/api_fastapi/core_api/datamodels/dag_run.py @@ -20,9 +20,11 @@ from datetime import datetime from enum import Enum -from pydantic import Field +from pydantic import AwareDatetime, Field, NonNegativeInt, computed_field, model_validator from airflow.api_fastapi.core_api.base import BaseModel +from airflow.models import DagRun +from airflow.utils import timezone from airflow.utils.state import DagRunState from airflow.utils.types import DagRunTriggeredByType, DagRunType @@ -51,7 +53,7 @@ class DAGRunClearBody(BaseModel): class DAGRunResponse(BaseModel): """DAG Run serializer for responses.""" - dag_run_id: str | None = Field(alias="run_id") + dag_run_id: str | None = Field(validation_alias="run_id") dag_id: str logical_date: datetime | None queued_at: datetime | None @@ -73,3 +75,50 @@ class DAGRunCollectionResponse(BaseModel): dag_runs: list[DAGRunResponse] total_entries: int + + +class TriggerDAGRunPostBody(BaseModel): + """Trigger DAG Run Serializer for POST body.""" + + 
dag_run_id: str | None = None + data_interval_start: AwareDatetime | None = None + data_interval_end: AwareDatetime | None = None + + conf: dict = Field(default_factory=dict) + note: str | None = None + + @model_validator(mode="after") + def check_data_intervals(cls, values): + if (values.data_interval_start is None) != (values.data_interval_end is None): + raise ValueError( + "Either both data_interval_start and data_interval_end must be provided or both must be None" + ) + return values + + @model_validator(mode="after") + def validate_dag_run_id(self): + if not self.dag_run_id: + self.dag_run_id = DagRun.generate_run_id(DagRunType.MANUAL, self.logical_date) + return self + + # Mypy issue https://github.com/python/mypy/issues/1362 + @computed_field # type: ignore[misc] + @property + def logical_date(self) -> datetime: + return timezone.utcnow() + + +class DAGRunsBatchBody(BaseModel): + """List DAG Runs body for batch endpoint.""" + + order_by: str | None = None + page_offset: NonNegativeInt = 0 + page_limit: NonNegativeInt = 100 + dag_ids: list[str] | None = None + states: list[DagRunState | None] | None = None + logical_date_gte: AwareDatetime | None = None + logical_date_lte: AwareDatetime | None = None + start_date_gte: AwareDatetime | None = None + start_date_lte: AwareDatetime | None = None + end_date_gte: AwareDatetime | None = None + end_date_lte: AwareDatetime | None = None diff --git a/airflow/api_fastapi/core_api/datamodels/dags.py b/airflow/api_fastapi/core_api/datamodels/dags.py index fc7bdcebe242b..f1dd7bd798044 100644 --- a/airflow/api_fastapi/core_api/datamodels/dags.py +++ b/airflow/api_fastapi/core_api/datamodels/dags.py @@ -18,8 +18,9 @@ from __future__ import annotations from collections import abc +from collections.abc import Iterable from datetime import datetime, timedelta -from typing import Any, Iterable +from typing import Any from itsdangerous import URLSafeSerializer from pendulum.tz.timezone import FixedTimezone, Timezone diff --git a/airflow/api_fastapi/core_api/datamodels/extra_links.py b/airflow/api_fastapi/core_api/datamodels/extra_links.py new file mode 100644 index 0000000000000..1e86c5a4bc25f --- /dev/null +++ b/airflow/api_fastapi/core_api/datamodels/extra_links.py @@ -0,0 +1,25 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from pydantic import RootModel + + +class ExtraLinksResponse(RootModel): + """Extra Links Response.""" + + root: dict[str, str | None] diff --git a/airflow/api_fastapi/core_api/datamodels/job.py b/airflow/api_fastapi/core_api/datamodels/job.py index 9fb4a61f9dd16..32074b5f7be83 100644 --- a/airflow/api_fastapi/core_api/datamodels/job.py +++ b/airflow/api_fastapi/core_api/datamodels/job.py @@ -34,3 +34,10 @@ class JobResponse(BaseModel): executor_class: datetime | None hostname: str | None unixname: str | None + + +class JobCollectionResponse(BaseModel): + """Job Collection Response.""" + + jobs: list[JobResponse] + total_entries: int diff --git a/airflow/api_fastapi/core_api/datamodels/task_instances.py b/airflow/api_fastapi/core_api/datamodels/task_instances.py index 6e2cc376dcd0d..8736f3522df7e 100644 --- a/airflow/api_fastapi/core_api/datamodels/task_instances.py +++ b/airflow/api_fastapi/core_api/datamodels/task_instances.py @@ -26,7 +26,9 @@ ConfigDict, Field, NonNegativeInt, + StringConstraints, ValidationError, + field_validator, model_validator, ) @@ -193,6 +195,33 @@ def validate_model(cls, data: Any) -> Any: return data +class PatchTaskInstanceBody(BaseModel): + """Request body for Clear Task Instances endpoint.""" + + dry_run: bool = True + new_state: str | None = None + note: Annotated[str, StringConstraints(max_length=1000)] | None = None + include_upstream: bool = False + include_downstream: bool = False + include_future: bool = False + include_past: bool = False + + @field_validator("new_state", mode="before") + @classmethod + def validate_new_state(cls, ns: str | None) -> str: + """Validate new_state.""" + valid_states = [ + vs.name.lower() + for vs in (TaskInstanceState.SUCCESS, TaskInstanceState.FAILED, TaskInstanceState.SKIPPED) + ] + if ns is None: + raise ValueError("'new_state' should not be empty") + ns = ns.lower() + if ns not in valid_states: + raise ValueError(f"'{ns}' is not one of {valid_states}") + return ns + + class TaskInstanceReferenceResponse(BaseModel): """Task Instance Reference serializer for responses.""" diff --git a/airflow/api_fastapi/core_api/datamodels/xcom.py b/airflow/api_fastapi/core_api/datamodels/xcom.py index 370aa651cb2c8..4e3c6f54a7a4f 100644 --- a/airflow/api_fastapi/core_api/datamodels/xcom.py +++ b/airflow/api_fastapi/core_api/datamodels/xcom.py @@ -49,3 +49,10 @@ class XComResponseString(XComResponse): @field_validator("value", mode="before") def value_to_string(cls, v): return str(v) if v is not None else None + + +class XComCollection(BaseModel): + """List of XCom items.""" + + xcom_entries: list[XComResponse] + total_entries: int diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index c8ecc8030aa3d..b55c7e273526a 100644 --- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -1828,6 +1828,119 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - DagRun + summary: Trigger Dag Run + description: Trigger a DAG. 
+ operationId: trigger_dag_run + parameters: + - name: dag_id + in: path + required: true + schema: + title: Dag Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TriggerDAGRunPostBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}/dagRuns/list: + post: + tags: + - DagRun + summary: Get List Dag Runs Batch + description: Get a list of DAG Runs. + operationId: get_list_dag_runs_batch + parameters: + - name: dag_id + in: path + required: true + schema: + const: '~' + type: string + title: Dag Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunsBatchBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/dagSources/{dag_id}: get: tags: @@ -2899,6 +3012,64 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links: + get: + tags: + - Extra Links + - Task Instance + summary: Get Extra Links + description: Get extra links for task instance. 
+ operationId: get_extra_links + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ExtraLinksResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/importErrors/{import_error_id}: get: tags: @@ -3000,13 +3171,58 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/plugins: + /public/jobs: get: tags: - - Plugin - summary: Get Plugins - operationId: get_plugins + - Job + summary: Get Jobs + description: Get all jobs. + operationId: get_jobs parameters: + - name: is_alive + in: query + required: false + schema: + anyOf: + - type: boolean + - type: 'null' + title: Is Alive + - name: start_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Gte + - name: start_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Lte + - name: end_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Gte + - name: end_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Lte - name: limit in: query required: false @@ -3023,13 +3239,52 @@ paths: minimum: 0 default: 0 title: Offset + - name: order_by + in: query + required: false + schema: + type: string + default: id + title: Order By + - name: job_state + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Job State + - name: job_type + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Job Type + - name: hostname + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Hostname + - name: executor_class + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Executor Class responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/PluginCollectionResponse' + $ref: '#/components/schemas/JobCollectionResponse' '401': content: application/json: @@ -3042,22 +3297,76 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request '422': description: Validation Error content: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/pools/{pool_name}: - delete: + /public/plugins: + get: tags: - - Pool - summary: Delete Pool - description: Delete a pool entry. 
- operationId: delete_pool + - Plugin + summary: Get Plugins + operationId: get_plugins parameters: - - name: pool_name - in: path + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/PluginCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/pools/{pool_name}: + delete: + tags: + - Pool + summary: Delete Pool + description: Delete a pool entry. + operationId: delete_pool + parameters: + - name: pool_name + in: path required: true schema: type: string @@ -3403,6 +3712,200 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}: + get: + tags: + - XCom + summary: Get Xcom Entry + description: Get an XCom entry. + operationId: get_xcom_entry + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: xcom_key + in: path + required: true + schema: + type: string + title: Xcom Key + - name: map_index + in: query + required: false + schema: + type: integer + minimum: -1 + default: -1 + title: Map Index + - name: deserialize + in: query + required: false + schema: + type: boolean + default: false + title: Deserialize + - name: stringify + in: query + required: false + schema: + type: boolean + default: true + title: Stringify + responses: + '200': + description: Successful Response + content: + application/json: + schema: + anyOf: + - $ref: '#/components/schemas/XComResponseNative' + - $ref: '#/components/schemas/XComResponseString' + title: Response Get Xcom Entry + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries: + get: + tags: + - XCom + summary: Get Xcom Entries + description: 'Get all XCom entries. + + + This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to + retrieve XCom entries for all DAGs.' 
+ operationId: get_xcom_entries + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: xcom_key + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Xcom Key + - name: map_index + in: query + required: false + schema: + anyOf: + - type: integer + minimum: -1 + - type: 'null' + title: Map Index + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/XComCollection' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}: get: tags: @@ -3460,6 +3963,91 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Task Instance + summary: Patch Task Instance + description: Update the state of a task instance. 
+ operationId: patch_task_instance + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: query + required: false + schema: + type: integer + default: -1 + title: Map Index + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PatchTaskInstanceBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/listMapped: get: tags: @@ -3594,48 +4182,175 @@ paths: in: query required: false schema: - type: array - items: - type: string - title: Queue - - name: executor - in: query - required: false + type: array + items: + type: string + title: Queue + - name: executor + in: query + required: false + schema: + type: array + items: + type: string + title: Executor + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: string + default: map_index + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies: + get: + tags: + - Task Instance + summary: Get Task Instance Dependencies + description: Get dependencies blocking task from getting scheduled. 
+ operationId: get_task_instance_dependencies + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskDependencyCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies: + get: + tags: + - Task Instance + summary: Get Task Instance Dependencies + description: Get dependencies blocking task from getting scheduled. + operationId: get_task_instance_dependencies + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true schema: - type: array - items: - type: string - title: Executor - - name: limit - in: query - required: false + type: string + title: Dag Run Id + - name: task_id + in: path + required: true schema: - type: integer - minimum: 0 - default: 100 - title: Limit - - name: offset + type: string + title: Task Id + - name: map_index in: query required: false schema: type: integer - minimum: 0 - default: 0 - title: Offset - - name: order_by - in: query - required: false - schema: - type: string - default: map_index - title: Order By + default: -1 + title: Map Index responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/TaskInstanceCollectionResponse' + $ref: '#/components/schemas/TaskDependencyCollectionResponse' '401': content: application/json: @@ -3660,13 +4375,13 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies: + /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries: get: tags: - Task Instance - summary: Get Task Instance Dependencies - description: Get dependencies blocking task from getting scheduled. - operationId: get_task_instance_dependencies + summary: Get Task Instance Tries + description: Get list of task instances history. 
+ operationId: get_task_instance_tries parameters: - name: dag_id in: path @@ -3687,10 +4402,11 @@ paths: type: string title: Task Id - name: map_index - in: path - required: true + in: query + required: false schema: type: integer + default: -1 title: Map Index responses: '200': @@ -3698,7 +4414,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/TaskDependencyCollectionResponse' + $ref: '#/components/schemas/TaskInstanceHistoryCollectionResponse' '401': content: application/json: @@ -3723,13 +4439,12 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies: + /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries: get: tags: - Task Instance - summary: Get Task Instance Dependencies - description: Get dependencies blocking task from getting scheduled. - operationId: get_task_instance_dependencies + summary: Get Mapped Task Instance Tries + operationId: get_mapped_task_instance_tries parameters: - name: dag_id in: path @@ -3750,11 +4465,10 @@ paths: type: string title: Task Id - name: map_index - in: query - required: false + in: path + required: true schema: type: integer - default: -1 title: Map Index responses: '200': @@ -3762,7 +4476,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/TaskDependencyCollectionResponse' + $ref: '#/components/schemas/TaskInstanceHistoryCollectionResponse' '401': content: application/json: @@ -3850,6 +4564,90 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Task Instance + summary: Patch Task Instance + description: Update the state of a task instance. + operationId: patch_task_instance + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PatchTaskInstanceBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances: get: tags: @@ -4579,147 +5377,28 @@ paths: minimum: 0 default: 100 title: Limit - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - - name: order_by - in: query - required: false - schema: 
- type: string - default: id - title: Order By - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/VariableCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - post: - tags: - - Variable - summary: Post Variable - description: Create a variable. - operationId: post_variable - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/VariableBody' - responses: - '201': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/VariableResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}: - get: - tags: - - XCom - summary: Get Xcom Entry - description: Get an XCom entry. - operationId: get_xcom_entry - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: task_id - in: path - required: true - schema: - type: string - title: Task Id - - name: dag_run_id - in: path - required: true - schema: - type: string - title: Dag Run Id - - name: xcom_key - in: path - required: true - schema: - type: string - title: Xcom Key - - name: map_index + - name: offset in: query required: false schema: type: integer - minimum: -1 - default: -1 - title: Map Index - - name: deserialize - in: query - required: false - schema: - type: boolean - default: false - title: Deserialize - - name: stringify + minimum: 0 + default: 0 + title: Offset + - name: order_by in: query required: false schema: - type: boolean - default: true - title: Stringify + type: string + default: id + title: Order By responses: '200': description: Successful Response content: application/json: schema: - anyOf: - - $ref: '#/components/schemas/XComResponseNative' - - $ref: '#/components/schemas/XComResponseString' - title: Response Get Xcom Entry + $ref: '#/components/schemas/VariableCollectionResponse' '401': content: application/json: @@ -4732,18 +5411,43 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Forbidden - '400': + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - Variable + summary: Post Variable + description: Create a variable. 
+ operationId: post_variable + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/VariableBody' + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableResponse' + '401': content: application/json: schema: $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': + description: Unauthorized + '403': content: application/json: schema: $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found + description: Forbidden '422': description: Validation Error content: @@ -5037,9 +5741,15 @@ components: id: type: integer title: Id + name: + type: string + title: Name uri: type: string title: Uri + group: + type: string + title: Group extra: anyOf: - type: object @@ -5072,7 +5782,9 @@ components: type: object required: - id + - name - uri + - group - created_at - updated_at - consuming_dags @@ -5983,11 +6695,11 @@ components: description: Enum for DAG Run states when updating a DAG Run. DAGRunResponse: properties: - run_id: + dag_run_id: anyOf: - type: string - type: 'null' - title: Run Id + title: Dag Run Id dag_id: type: string title: Dag Id @@ -6053,7 +6765,7 @@ components: additionalProperties: false type: object required: - - run_id + - dag_run_id - dag_id - logical_date - queued_at @@ -6116,6 +6828,78 @@ components: - asset_triggered title: DAGRunTypes description: DAG Run Types for responses. + DAGRunsBatchBody: + properties: + order_by: + anyOf: + - type: string + - type: 'null' + title: Order By + page_offset: + type: integer + minimum: 0.0 + title: Page Offset + default: 0 + page_limit: + type: integer + minimum: 0.0 + title: Page Limit + default: 100 + dag_ids: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Dag Ids + states: + anyOf: + - items: + anyOf: + - $ref: '#/components/schemas/DagRunState' + - type: 'null' + type: array + - type: 'null' + title: States + logical_date_gte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Gte + logical_date_lte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Lte + start_date_gte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Gte + start_date_lte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Lte + end_date_gte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Gte + end_date_lte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Lte + type: object + title: DAGRunsBatchBody + description: List DAG Runs body for batch endpoint. DAGSourceResponse: properties: content: @@ -6637,6 +7421,14 @@ components: - extra title: EventLogResponse description: Event Log Response. + ExtraLinksResponse: + additionalProperties: + anyOf: + - type: string + - type: 'null' + type: object + title: ExtraLinksResponse + description: Extra Links Response. FastAPIAppResponse: properties: app: @@ -6753,6 +7545,22 @@ components: - stack_trace title: ImportErrorResponse description: Import Error Response. + JobCollectionResponse: + properties: + jobs: + items: + $ref: '#/components/schemas/JobResponse' + type: array + title: Jobs + total_entries: + type: integer + title: Total Entries + type: object + required: + - jobs + - total_entries + title: JobCollectionResponse + description: Job Collection Response. 
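As a usage sketch (not part of this change): the JobCollectionResponse schema above pairs with the new GET /public/jobs endpoint introduced earlier in this diff. A client call might look like the following, assuming a local deployment at http://localhost:8080, the `requests` library, and with authentication omitted; the parameter values shown (job_type, order_by) are illustrative only.

    from __future__ import annotations

    import requests

    BASE_URL = "http://localhost:8080/public"  # assumed deployment URL

    def list_running_scheduler_jobs(hostname: str | None = None) -> dict:
        """Return the JobCollectionResponse payload for alive scheduler jobs."""
        params = {
            "job_type": "SchedulerJob",      # filter on Job.job_type
            "is_alive": "true",              # post-query liveness filter in the route
            "order_by": "latest_heartbeat",  # one of the allowed sort attributes
            "limit": 50,
        }
        if hostname:
            params["hostname"] = hostname
        resp = requests.get(f"{BASE_URL}/jobs", params=params)
        resp.raise_for_status()
        # Response body matches JobCollectionResponse: {"jobs": [...], "total_entries": N}
        return resp.json()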
JobResponse: properties: id: @@ -6822,6 +7630,42 @@ components: - unixname title: JobResponse description: Job serializer for responses. + PatchTaskInstanceBody: + properties: + dry_run: + type: boolean + title: Dry Run + default: true + new_state: + anyOf: + - type: string + - type: 'null' + title: New State + note: + anyOf: + - type: string + maxLength: 1000 + - type: 'null' + title: Note + include_upstream: + type: boolean + title: Include Upstream + default: false + include_downstream: + type: boolean + title: Include Downstream + default: false + include_future: + type: boolean + title: Include Future + default: false + include_past: + type: boolean + title: Include Past + default: false + type: object + title: PatchTaskInstanceBody + description: Request body for Clear Task Instances endpoint. PluginCollectionResponse: properties: plugins: @@ -7209,6 +8053,22 @@ components: - total_entries title: TaskInstanceCollectionResponse description: Task Instance Collection serializer for responses. + TaskInstanceHistoryCollectionResponse: + properties: + task_instances: + items: + $ref: '#/components/schemas/TaskInstanceHistoryResponse' + type: array + title: Task Instances + total_entries: + type: integer + title: Total Entries + type: object + required: + - task_instances + - total_entries + title: TaskInstanceHistoryCollectionResponse + description: TaskInstanceHistory Collection serializer for responses. TaskInstanceHistoryResponse: properties: task_id: @@ -7951,6 +8811,36 @@ components: - microseconds title: TimeDelta description: TimeDelta can be used to interact with datetime.timedelta objects. + TriggerDAGRunPostBody: + properties: + dag_run_id: + anyOf: + - type: string + - type: 'null' + title: Dag Run Id + data_interval_start: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Data Interval Start + data_interval_end: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Data Interval End + conf: + type: object + title: Conf + note: + anyOf: + - type: string + - type: 'null' + title: Note + type: object + title: TriggerDAGRunPostBody + description: Trigger DAG Run Serializer for POST body. TriggerResponse: properties: id: @@ -8100,6 +8990,54 @@ components: - git_version title: VersionInfo description: Version information serializer for responses. + XComCollection: + properties: + xcom_entries: + items: + $ref: '#/components/schemas/XComResponse' + type: array + title: Xcom Entries + total_entries: + type: integer + title: Total Entries + type: object + required: + - xcom_entries + - total_entries + title: XComCollection + description: List of XCom items. + XComResponse: + properties: + key: + type: string + title: Key + timestamp: + type: string + format: date-time + title: Timestamp + logical_date: + type: string + format: date-time + title: Logical Date + map_index: + type: integer + title: Map Index + task_id: + type: string + title: Task Id + dag_id: + type: string + title: Dag Id + type: object + required: + - key + - timestamp + - logical_date + - map_index + - task_id + - dag_id + title: XComResponse + description: Serializer for a xcom item. 
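As a usage sketch (not part of this change): the XComResponse and XComCollection schemas above back the new xcomEntries collection endpoint, whose description notes that `~` may be used for dag_id, dag_run_id and task_id to query across all DAGs. The snippet below assumes the same local base URL and `requests` library as the previous sketch, with authentication omitted.

    from __future__ import annotations

    import requests

    BASE_URL = "http://localhost:8080/public"  # assumed deployment URL

    def find_xcoms_by_key(key: str, limit: int = 100) -> list[dict]:
        """Return XComResponse items matching `key` across all DAGs, runs and tasks."""
        url = f"{BASE_URL}/dags/~/dagRuns/~/taskInstances/~/xcomEntries"
        resp = requests.get(url, params={"xcom_key": key, "limit": limit})
        resp.raise_for_status()
        # Response body matches XComCollection: {"xcom_entries": [...], "total_entries": N}
        return resp.json()["xcom_entries"]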
XComResponseNative: properties: key: diff --git a/airflow/api_fastapi/core_api/routes/public/__init__.py b/airflow/api_fastapi/core_api/routes/public/__init__.py index 2b194fafae595..3e05eb876802c 100644 --- a/airflow/api_fastapi/core_api/routes/public/__init__.py +++ b/airflow/api_fastapi/core_api/routes/public/__init__.py @@ -31,7 +31,9 @@ from airflow.api_fastapi.core_api.routes.public.dag_warning import dag_warning_router from airflow.api_fastapi.core_api.routes.public.dags import dags_router from airflow.api_fastapi.core_api.routes.public.event_logs import event_logs_router +from airflow.api_fastapi.core_api.routes.public.extra_links import extra_links_router from airflow.api_fastapi.core_api.routes.public.import_error import import_error_router +from airflow.api_fastapi.core_api.routes.public.job import job_router from airflow.api_fastapi.core_api.routes.public.log import task_instances_log_router from airflow.api_fastapi.core_api.routes.public.monitor import monitor_router from airflow.api_fastapi.core_api.routes.public.plugins import plugins_router @@ -60,16 +62,19 @@ authenticated_router.include_router(dag_warning_router) authenticated_router.include_router(dags_router) authenticated_router.include_router(event_logs_router) +authenticated_router.include_router(extra_links_router) authenticated_router.include_router(import_error_router) +authenticated_router.include_router(job_router) authenticated_router.include_router(plugins_router) authenticated_router.include_router(pools_router) authenticated_router.include_router(providers_router) +authenticated_router.include_router(xcom_router) authenticated_router.include_router(task_instances_router) authenticated_router.include_router(tasks_router) authenticated_router.include_router(variables_router) -authenticated_router.include_router(xcom_router) authenticated_router.include_router(task_instances_log_router) + # Include authenticated router in public router public_router.include_router(authenticated_router) diff --git a/airflow/api_fastapi/core_api/routes/public/assets.py b/airflow/api_fastapi/core_api/routes/public/assets.py index 5aa37c7a6f9f8..b7cc9140e973b 100644 --- a/airflow/api_fastapi/core_api/routes/public/assets.py +++ b/airflow/api_fastapi/core_api/routes/public/assets.py @@ -95,7 +95,7 @@ def get_assets( ) -> AssetCollectionResponse: """Get assets.""" assets_select, total_entries = paginated_select( - select=select(AssetModel), + statement=select(AssetModel), filters=[uri_pattern, dag_ids], order_by=order_by, offset=offset, @@ -145,7 +145,7 @@ def get_asset_events( ) -> AssetEventCollectionResponse: """Get asset events.""" assets_event_select, total_entries = paginated_select( - select=select(AssetEvent), + statement=select(AssetEvent), filters=[asset_id, source_dag_id, source_task_id, source_run_id, source_map_index], order_by=order_by, offset=offset, @@ -210,7 +210,7 @@ def get_asset_queued_events( .where(*where_clause) ) - dag_asset_queued_events_select, total_entries = paginated_select(select=query) + dag_asset_queued_events_select, total_entries = paginated_select(statement=query) adrqs = session.execute(dag_asset_queued_events_select).all() if not adrqs: @@ -269,7 +269,7 @@ def get_dag_asset_queued_events( .where(*where_clause) ) - dag_asset_queued_events_select, total_entries = paginated_select(select=query) + dag_asset_queued_events_select, total_entries = paginated_select(statement=query) adrqs = session.execute(dag_asset_queued_events_select).all() if not adrqs: raise HTTPException(status.HTTP_404_NOT_FOUND, 
f"Queue event with dag_id: `{dag_id}` was not found") diff --git a/airflow/api_fastapi/core_api/routes/public/backfills.py b/airflow/api_fastapi/core_api/routes/public/backfills.py index aa6f540d32791..94d0fd1ed48b8 100644 --- a/airflow/api_fastapi/core_api/routes/public/backfills.py +++ b/airflow/api_fastapi/core_api/routes/public/backfills.py @@ -20,9 +20,10 @@ from fastapi import Depends, HTTPException, status from sqlalchemy import select, update +from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Session -from airflow.api_fastapi.common.db.common import get_session, paginated_select +from airflow.api_fastapi.common.db.common import get_async_session, get_session, paginated_select_async from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset, SortParam from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.datamodels.backfills import ( @@ -49,7 +50,7 @@ @backfills_router.get( path="", ) -def list_backfills( +async def list_backfills( dag_id: str, limit: QueryLimit, offset: QueryOffset, @@ -57,18 +58,16 @@ def list_backfills( SortParam, Depends(SortParam(["id"], Backfill).dynamic_depends()), ], - session: Annotated[Session, Depends(get_session)], + session: Annotated[AsyncSession, Depends(get_async_session)], ) -> BackfillCollectionResponse: - select_stmt, total_entries = paginated_select( - select=select(Backfill).where(Backfill.dag_id == dag_id), + select_stmt, total_entries = await paginated_select_async( + statement=select(Backfill).where(Backfill.dag_id == dag_id), order_by=order_by, offset=offset, limit=limit, session=session, ) - - backfills = session.scalars(select_stmt) - + backfills = await session.scalars(select_stmt) return BackfillCollectionResponse( backfills=backfills, total_entries=total_entries, diff --git a/airflow/api_fastapi/core_api/routes/public/connections.py b/airflow/api_fastapi/core_api/routes/public/connections.py index 46ebcfcf98ca1..9eb14bb9c401f 100644 --- a/airflow/api_fastapi/core_api/routes/public/connections.py +++ b/airflow/api_fastapi/core_api/routes/public/connections.py @@ -92,7 +92,9 @@ def get_connections( SortParam, Depends( SortParam( - ["connection_id", "conn_type", "description", "host", "port", "id"], Connection + ["conn_id", "conn_type", "description", "host", "port", "id"], + Connection, + {"connection_id": "conn_id"}, ).dynamic_depends() ), ], @@ -100,7 +102,7 @@ def get_connections( ) -> ConnectionCollectionResponse: """Get all connection entries.""" connection_select, total_entries = paginated_select( - select=select(Connection), + statement=select(Connection), order_by=order_by, offset=offset, limit=limit, diff --git a/airflow/api_fastapi/core_api/routes/public/dag_run.py b/airflow/api_fastapi/core_api/routes/public/dag_run.py index c26650767c98a..fab34d80bde1e 100644 --- a/airflow/api_fastapi/core_api/routes/public/dag_run.py +++ b/airflow/api_fastapi/core_api/routes/public/dag_run.py @@ -17,8 +17,9 @@ from __future__ import annotations -from typing import Annotated, cast +from typing import Annotated, Literal, cast +import pendulum from fastapi import Depends, HTTPException, Query, Request, status from sqlalchemy import select from sqlalchemy.orm import Session @@ -30,9 +31,13 @@ ) from airflow.api_fastapi.common.db.common import get_session, paginated_select from airflow.api_fastapi.common.parameters import ( + DagIdsFilter, + LimitFilter, + OffsetFilter, QueryDagRunStateFilter, QueryLimit, QueryOffset, + Range, RangeFilter, SortParam, 
datetime_range_filter_factory, @@ -45,13 +50,20 @@ DAGRunPatchBody, DAGRunPatchStates, DAGRunResponse, + DAGRunsBatchBody, + TriggerDAGRunPostBody, ) from airflow.api_fastapi.core_api.datamodels.task_instances import ( TaskInstanceCollectionResponse, TaskInstanceResponse, ) from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc -from airflow.models import DAG, DagRun +from airflow.exceptions import ParamValidationError +from airflow.models import DAG, DagModel, DagRun +from airflow.models.dag_version import DagVersion +from airflow.timetables.base import DataInterval +from airflow.utils.state import DagRunState +from airflow.utils.types import DagRunTriggeredByType, DagRunType dag_run_router = AirflowRouter(tags=["DagRun"], prefix="/dags/{dag_id}/dagRuns") @@ -254,8 +266,8 @@ def get_dag_runs( "id", "state", "dag_id", + "run_id", "logical_date", - "dag_run_id", "start_date", "end_date", "updated_at", @@ -263,6 +275,7 @@ def get_dag_runs( "conf", ], DagRun, + {"dag_run_id": "run_id"}, ).dynamic_depends(default="id") ), ], @@ -274,17 +287,17 @@ def get_dag_runs( This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. """ - base_query = select(DagRun) + query = select(DagRun) if dag_id != "~": dag: DAG = request.app.state.dag_bag.get_dag(dag_id) if not dag: raise HTTPException(status.HTTP_404_NOT_FOUND, f"The DAG with dag_id: `{dag_id}` was not found") - base_query = base_query.filter(DagRun.dag_id == dag_id) + query = query.filter(DagRun.dag_id == dag_id) dag_run_select, total_entries = paginated_select( - select=base_query, + statement=query, filters=[logical_date, start_date_range, end_date_range, update_at_range, state], order_by=order_by, offset=offset, @@ -296,3 +309,123 @@ def get_dag_runs( dag_runs=dag_runs, total_entries=total_entries, ) + + +@dag_run_router.post( + "", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), +) +def trigger_dag_run( + dag_id, body: TriggerDAGRunPostBody, request: Request, session: Annotated[Session, Depends(get_session)] +) -> DAGRunResponse: + """Trigger a DAG.""" + dm = session.scalar(select(DagModel).where(DagModel.is_active, DagModel.dag_id == dag_id).limit(1)) + if not dm: + raise HTTPException(status.HTTP_404_NOT_FOUND, f"DAG with dag_id: '{dag_id}' not found") + + if dm.has_import_errors: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + f"DAG with dag_id: '{dag_id}' has import errors and cannot be triggered", + ) + + run_id = body.dag_run_id + logical_date = pendulum.instance(body.logical_date) + + try: + dag: DAG = request.app.state.dag_bag.get_dag(dag_id) + + if body.data_interval_start and body.data_interval_end: + data_interval = DataInterval( + start=pendulum.instance(body.data_interval_start), + end=pendulum.instance(body.data_interval_end), + ) + else: + data_interval = dag.timetable.infer_manual_data_interval(run_after=logical_date) + dag_version = DagVersion.get_latest_version(dag.dag_id) + dag_run = dag.create_dagrun( + run_type=DagRunType.MANUAL, + run_id=run_id, + logical_date=logical_date, + data_interval=data_interval, + state=DagRunState.QUEUED, + conf=body.conf, + external_trigger=True, + dag_version=dag_version, + session=session, + triggered_by=DagRunTriggeredByType.REST_API, + ) + dag_run_note = body.note + if dag_run_note: + current_user_id = None # refer to https://github.com/apache/airflow/issues/43534 + dag_run.note = (dag_run_note, current_user_id) + 
return dag_run + except ValueError as e: + raise HTTPException(status.HTTP_400_BAD_REQUEST, str(e)) + except ParamValidationError as e: + raise HTTPException(status.HTTP_400_BAD_REQUEST, str(e)) + + +@dag_run_router.post("/list", responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND])) +def get_list_dag_runs_batch( + dag_id: Literal["~"], body: DAGRunsBatchBody, session: Annotated[Session, Depends(get_session)] +) -> DAGRunCollectionResponse: + """Get a list of DAG Runs.""" + dag_ids = DagIdsFilter(DagRun, body.dag_ids) + logical_date = RangeFilter( + Range(lower_bound=body.logical_date_gte, upper_bound=body.logical_date_lte), + attribute=DagRun.logical_date, + ) + start_date = RangeFilter( + Range(lower_bound=body.start_date_gte, upper_bound=body.start_date_lte), + attribute=DagRun.start_date, + ) + end_date = RangeFilter( + Range(lower_bound=body.end_date_gte, upper_bound=body.end_date_lte), + attribute=DagRun.end_date, + ) + + state = QueryDagRunStateFilter(body.states) + + offset = OffsetFilter(body.page_offset) + limit = LimitFilter(body.page_limit) + + order_by = SortParam( + [ + "id", + "state", + "dag_id", + "logical_date", + "run_id", + "start_date", + "end_date", + "updated_at", + "external_trigger", + "conf", + ], + DagRun, + {"dag_run_id": "run_id"}, + ).set_value(body.order_by) + + base_query = select(DagRun) + dag_runs_select, total_entries = paginated_select( + statement=base_query, + filters=[dag_ids, logical_date, start_date, end_date, state], + order_by=order_by, + offset=offset, + limit=limit, + session=session, + ) + + dag_runs = session.scalars(dag_runs_select) + + return DAGRunCollectionResponse( + dag_runs=dag_runs, + total_entries=total_entries, + ) diff --git a/airflow/api_fastapi/core_api/routes/public/dag_stats.py b/airflow/api_fastapi/core_api/routes/public/dag_stats.py index 119961f8c5f36..89a22f7face6e 100644 --- a/airflow/api_fastapi/core_api/routes/public/dag_stats.py +++ b/airflow/api_fastapi/core_api/routes/public/dag_stats.py @@ -55,7 +55,7 @@ def get_dag_stats( ) -> DagStatsCollectionResponse: """Get Dag statistics.""" dagruns_select, _ = paginated_select( - select=dagruns_select_with_state_count, + statement=dagruns_select_with_state_count, filters=[dag_ids], session=session, return_total_entries=False, diff --git a/airflow/api_fastapi/core_api/routes/public/dag_warning.py b/airflow/api_fastapi/core_api/routes/public/dag_warning.py index e933710bc6903..df1e636faa50f 100644 --- a/airflow/api_fastapi/core_api/routes/public/dag_warning.py +++ b/airflow/api_fastapi/core_api/routes/public/dag_warning.py @@ -59,7 +59,7 @@ def list_dag_warnings( ) -> DAGWarningCollectionResponse: """Get a list of DAG warnings.""" dag_warnings_select, total_entries = paginated_select( - select=select(DagWarning), + statement=select(DagWarning), filters=[warning_type, dag_id], order_by=order_by, offset=offset, diff --git a/airflow/api_fastapi/core_api/routes/public/dags.py b/airflow/api_fastapi/core_api/routes/public/dags.py index 99a86508edad4..4cc2000a2e357 100644 --- a/airflow/api_fastapi/core_api/routes/public/dags.py +++ b/airflow/api_fastapi/core_api/routes/public/dags.py @@ -54,6 +54,7 @@ from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.exceptions import AirflowException, DagNotFound from airflow.models import DAG, DagModel, DagTag +from airflow.models.dagrun import DagRun dags_router = AirflowRouter(tags=["DAG"], prefix="/dags") @@ -73,8 +74,9 @@ def get_dags( SortParam, Depends( SortParam( - 
["dag_id", "dag_display_name", "next_dagrun", "last_run_state", "last_run_start_date"], + ["dag_id", "dag_display_name", "next_dagrun", "state", "start_date"], DagModel, + {"last_run_state": DagRun.state, "last_run_start_date": DagRun.start_date}, ).dynamic_depends() ), ], @@ -82,7 +84,7 @@ def get_dags( ) -> DAGCollectionResponse: """Get all DAGs.""" dags_select, total_entries = paginated_select( - select=dags_select_with_latest_dag_run, + statement=dags_select_with_latest_dag_run, filters=[ only_active, paused, @@ -125,9 +127,9 @@ def get_dag_tags( session: Annotated[Session, Depends(get_session)], ) -> DAGTagCollectionResponse: """Get all DAG tags.""" - base_select = select(DagTag.name).group_by(DagTag.name) + query = select(DagTag.name).group_by(DagTag.name) dag_tags_select, total_entries = paginated_select( - select=base_select, + statement=query, filters=[tag_name_pattern], order_by=order_by, offset=offset, @@ -263,7 +265,7 @@ def patch_dags( update_mask = ["is_paused"] dags_select, total_entries = paginated_select( - select=dags_select_with_latest_dag_run, + statement=dags_select_with_latest_dag_run, filters=[only_active, paused, dag_id_pattern, tags, owners, last_dag_run_state], order_by=None, offset=offset, diff --git a/airflow/api_fastapi/core_api/routes/public/event_logs.py b/airflow/api_fastapi/core_api/routes/public/event_logs.py index 51feb7e22cfb2..aa1504a51f391 100644 --- a/airflow/api_fastapi/core_api/routes/public/event_logs.py +++ b/airflow/api_fastapi/core_api/routes/public/event_logs.py @@ -97,32 +97,32 @@ def get_event_logs( after: datetime | None = None, ) -> EventLogCollectionResponse: """Get all Event Logs.""" - base_select = select(Log).group_by(Log.id) + query = select(Log).group_by(Log.id) # TODO: Refactor using the `FilterParam` class in commit `574b72e41cc5ed175a2bbf4356522589b836bb11` if dag_id is not None: - base_select = base_select.where(Log.dag_id == dag_id) + query = query.where(Log.dag_id == dag_id) if task_id is not None: - base_select = base_select.where(Log.task_id == task_id) + query = query.where(Log.task_id == task_id) if run_id is not None: - base_select = base_select.where(Log.run_id == run_id) + query = query.where(Log.run_id == run_id) if map_index is not None: - base_select = base_select.where(Log.map_index == map_index) + query = query.where(Log.map_index == map_index) if try_number is not None: - base_select = base_select.where(Log.try_number == try_number) + query = query.where(Log.try_number == try_number) if owner is not None: - base_select = base_select.where(Log.owner == owner) + query = query.where(Log.owner == owner) if event is not None: - base_select = base_select.where(Log.event == event) + query = query.where(Log.event == event) if excluded_events is not None: - base_select = base_select.where(Log.event.notin_(excluded_events)) + query = query.where(Log.event.notin_(excluded_events)) if included_events is not None: - base_select = base_select.where(Log.event.in_(included_events)) + query = query.where(Log.event.in_(included_events)) if before is not None: - base_select = base_select.where(Log.dttm < before) + query = query.where(Log.dttm < before) if after is not None: - base_select = base_select.where(Log.dttm > after) + query = query.where(Log.dttm > after) event_logs_select, total_entries = paginated_select( - select=base_select, + statement=query, order_by=order_by, offset=offset, limit=limit, diff --git a/airflow/api_fastapi/core_api/routes/public/extra_links.py b/airflow/api_fastapi/core_api/routes/public/extra_links.py new 
file mode 100644 index 0000000000000..756031917c5bb --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/extra_links.py @@ -0,0 +1,85 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated + +from fastapi import Depends, HTTPException, Request, status +from sqlalchemy.orm import Session +from sqlalchemy.sql import select + +from airflow.api_fastapi.common.db.common import get_session +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.datamodels.extra_links import ExtraLinksResponse +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.exceptions import TaskNotFound + +if TYPE_CHECKING: + from sqlalchemy.orm.session import Session + + from airflow.models import DAG + + +extra_links_router = AirflowRouter( + tags=["Extra Links"], prefix="/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links" +) + + +@extra_links_router.get( + "", + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + tags=["Task Instance"], +) +def get_extra_links( + dag_id: str, + dag_run_id: str, + task_id: str, + session: Annotated[Session, Depends(get_session)], + request: Request, +) -> ExtraLinksResponse: + """Get extra links for task instance.""" + from airflow.models.taskinstance import TaskInstance + + dag: DAG = request.app.state.dag_bag.get_dag(dag_id) + if not dag: + raise HTTPException(status.HTTP_404_NOT_FOUND, f"DAG with ID = {dag_id} not found") + + try: + task = dag.get_task(task_id) + except TaskNotFound: + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Task with ID = {task_id} not found") + + ti = session.scalar( + select(TaskInstance).where( + TaskInstance.dag_id == dag_id, + TaskInstance.run_id == dag_run_id, + TaskInstance.task_id == task_id, + ) + ) + + if not ti: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"DAG Run with ID = {dag_run_id} not found", + ) + + all_extra_link_pairs = ( + (link_name, task.get_extra_links(ti, link_name)) for link_name in task.extra_links + ) + all_extra_links = {link_name: link_url or None for link_name, link_url in sorted(all_extra_link_pairs)} + return ExtraLinksResponse.model_validate(all_extra_links) diff --git a/airflow/api_fastapi/core_api/routes/public/import_error.py b/airflow/api_fastapi/core_api/routes/public/import_error.py index 233f94df3102d..26e0d0858bf85 100644 --- a/airflow/api_fastapi/core_api/routes/public/import_error.py +++ b/airflow/api_fastapi/core_api/routes/public/import_error.py @@ -73,12 +73,12 @@ def get_import_errors( SortParam( [ "id", - "import_error_id", "timestamp", "filename", "stacktrace", ], ParseImportError, + {"import_error_id": "id"}, ).dynamic_depends() ), ], @@ -86,7 
+86,7 @@ def get_import_errors( ) -> ImportErrorCollectionResponse: """Get all import errors.""" import_errors_select, total_entries = paginated_select( - select=select(ParseImportError), + statement=select(ParseImportError), order_by=order_by, offset=offset, limit=limit, diff --git a/airflow/api_fastapi/core_api/routes/public/job.py b/airflow/api_fastapi/core_api/routes/public/job.py new file mode 100644 index 0000000000000..1f8808980cb89 --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/job.py @@ -0,0 +1,128 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from typing import Annotated + +from fastapi import Depends, status +from sqlalchemy import select +from sqlalchemy.orm import Session + +from airflow.api_fastapi.common.db.common import ( + get_session, + paginated_select, +) +from airflow.api_fastapi.common.parameters import ( + QueryJobExecutorClassFilter, + QueryJobHostnameFilter, + QueryJobStateFilter, + QueryJobTypeFilter, + QueryLimit, + QueryOffset, + RangeFilter, + SortParam, + datetime_range_filter_factory, +) +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.datamodels.job import ( + JobCollectionResponse, + JobResponse, +) +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.jobs.job import Job +from airflow.utils.state import JobState + +job_router = AirflowRouter(tags=["Job"], prefix="/jobs") + + +@job_router.get( + "", + responses=create_openapi_http_exception_doc([status.HTTP_400_BAD_REQUEST]), +) +def get_jobs( + start_date_range: Annotated[ + RangeFilter, + Depends(datetime_range_filter_factory("start_date", Job)), + ], + end_date_range: Annotated[ + RangeFilter, + Depends(datetime_range_filter_factory("end_date", Job)), + ], + limit: QueryLimit, + offset: QueryOffset, + order_by: Annotated[ + SortParam, + Depends( + SortParam( + [ + "id", + "dag_id", + "state", + "job_type", + "start_date", + "end_date", + "latest_heartbeat", + "executor_class", + "hostname", + "unixname", + ], + Job, + ).dynamic_depends(default="id") + ), + ], + session: Annotated[Session, Depends(get_session)], + state: QueryJobStateFilter, + job_type: QueryJobTypeFilter, + hostname: QueryJobHostnameFilter, + executor_class: QueryJobExecutorClassFilter, + is_alive: bool | None = None, +) -> JobCollectionResponse: + """Get all jobs.""" + base_select = select(Job).where(Job.state == JobState.RUNNING).order_by(Job.latest_heartbeat.desc()) + # TODO: Refactor using the `FilterParam` class in commit `574b72e41cc5ed175a2bbf4356522589b836bb11` + + jobs_select, total_entries = paginated_select( + statement=base_select, + filters=[ + start_date_range, + end_date_range, + state, + job_type, + hostname, + executor_class, + 
], + order_by=order_by, + limit=limit, + offset=offset, + session=session, + return_total_entries=True, + ) + jobs = session.scalars(jobs_select).all() + + if is_alive is not None: + jobs = [job for job in jobs if job.is_alive()] + + return JobCollectionResponse( + jobs=[ + JobResponse.model_validate( + job, + from_attributes=True, + ) + for job in jobs + ], + total_entries=total_entries, + ) diff --git a/airflow/api_fastapi/core_api/routes/public/pools.py b/airflow/api_fastapi/core_api/routes/public/pools.py index 6fe1cb3a312b3..d80bf75d9e8ab 100644 --- a/airflow/api_fastapi/core_api/routes/public/pools.py +++ b/airflow/api_fastapi/core_api/routes/public/pools.py @@ -96,7 +96,7 @@ def get_pools( ) -> PoolCollectionResponse: """Get all pools entries.""" pools_select, total_entries = paginated_select( - select=select(Pool), + statement=select(Pool), order_by=order_by, offset=offset, limit=limit, diff --git a/airflow/api_fastapi/core_api/routes/public/task_instances.py b/airflow/api_fastapi/core_api/routes/public/task_instances.py index 857b03ab00e6e..304634cefc68f 100644 --- a/airflow/api_fastapi/core_api/routes/public/task_instances.py +++ b/airflow/api_fastapi/core_api/routes/public/task_instances.py @@ -17,11 +17,13 @@ from __future__ import annotations -from typing import Annotated, Literal +from typing import Annotated, Literal, cast -from fastapi import Depends, HTTPException, Request, status +from fastapi import Depends, HTTPException, Query, Request, status +from sqlalchemy import or_, select +from sqlalchemy.exc import MultipleResultsFound from sqlalchemy.orm import Session, joinedload -from sqlalchemy.sql import select +from sqlalchemy.sql.selectable import Select from airflow.api_fastapi.common.db.common import get_session, paginated_select from airflow.api_fastapi.common.parameters import ( @@ -49,8 +51,10 @@ from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.datamodels.task_instances import ( ClearTaskInstancesBody, + PatchTaskInstanceBody, TaskDependencyCollectionResponse, TaskInstanceCollectionResponse, + TaskInstanceHistoryCollectionResponse, TaskInstanceHistoryResponse, TaskInstanceReferenceCollectionResponse, TaskInstanceReferenceResponse, @@ -133,13 +137,13 @@ def get_mapped_task_instances( session: Annotated[Session, Depends(get_session)], ) -> TaskInstanceCollectionResponse: """Get list of mapped task instances.""" - base_query = ( + query = ( select(TI) .where(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id, TI.map_index >= 0) .join(TI.dag_run) ) # 0 can mean a mapped TI that expanded to an empty list, so it is not an automatic 404 - unfiltered_total_count = get_query_count(base_query, session=session) + unfiltered_total_count = get_query_count(query, session=session) if unfiltered_total_count == 0: dag = request.app.state.dag_bag.get_dag(dag_id) if not dag: @@ -155,7 +159,7 @@ def get_mapped_task_instances( raise HTTPException(status.HTTP_404_NOT_FOUND, error_message) task_instance_select, total_entries = paginated_select( - select=base_query, + statement=query, filters=[ logical_date_range, start_date_range, @@ -234,6 +238,66 @@ def get_task_instance_dependencies( return TaskDependencyCollectionResponse.model_validate({"dependencies": deps}) +@task_instances_router.get( + task_instances_prefix + "/{task_id}/tries", + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), +) +def get_task_instance_tries( + dag_id: str, + dag_run_id: str, + task_id: str, + session: Annotated[Session, 
Depends(get_session)], + map_index: int = -1, +) -> TaskInstanceHistoryCollectionResponse: + """Get list of task instances history.""" + + def _query(orm_object: Base) -> Select: + query = select(orm_object).where( + orm_object.dag_id == dag_id, + orm_object.run_id == dag_run_id, + orm_object.task_id == task_id, + orm_object.map_index == map_index, + ) + return query + + # Exclude TaskInstance with state UP_FOR_RETRY since they have been recorded in TaskInstanceHistory + tis = session.scalars( + _query(TI).where(or_(TI.state != TaskInstanceState.UP_FOR_RETRY, TI.state.is_(None))) + ).all() + task_instances = session.scalars(_query(TIH)).all() + tis + + if not task_instances: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}` and map_index: `{map_index}` was not found", + ) + + return TaskInstanceHistoryCollectionResponse( + task_instances=cast(list[TaskInstanceHistoryResponse], task_instances), + total_entries=len(task_instances), + ) + + +@task_instances_router.get( + task_instances_prefix + "/{task_id}/{map_index}/tries", + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), +) +def get_mapped_task_instance_tries( + dag_id: str, + dag_run_id: str, + task_id: str, + session: Annotated[Session, Depends(get_session)], + map_index: int, +) -> TaskInstanceHistoryCollectionResponse: + return get_task_instance_tries( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + map_index=map_index, + session=session, + ) + + @task_instances_router.get( task_instances_prefix + "/{task_id}/{map_index}", responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), @@ -299,13 +363,13 @@ def get_task_instances( This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs and DAG runs. 
""" - base_query = select(TI).join(TI.dag_run) + query = select(TI).join(TI.dag_run) if dag_id != "~": dag = request.app.state.dag_bag.get_dag(dag_id) if not dag: raise HTTPException(status.HTTP_404_NOT_FOUND, f"DAG with dag_id: `{dag_id}` was not found") - base_query = base_query.where(TI.dag_id == dag_id) + query = query.where(TI.dag_id == dag_id) if dag_run_id != "~": dag_run = session.scalar(select(DagRun).filter_by(run_id=dag_run_id)) @@ -314,10 +378,10 @@ def get_task_instances( status.HTTP_404_NOT_FOUND, f"DagRun with run_id: `{dag_run_id}` was not found", ) - base_query = base_query.where(TI.run_id == dag_run_id) + query = query.where(TI.run_id == dag_run_id) task_instance_select, total_entries = paginated_select( - select=base_query, + statement=query, filters=[ logical_date, start_date_range, @@ -384,9 +448,9 @@ def get_task_instances_batch( TI, ).set_value(body.order_by) - base_query = select(TI).join(TI.dag_run) + query = select(TI).join(TI.dag_run) task_instance_select, total_entries = paginated_select( - select=base_query, + statement=query, filters=[ dag_ids, dag_run_ids, @@ -558,3 +622,88 @@ def post_clear_task_instances( ], total_entries=len(task_instances), ) + + +@task_instances_router.patch( + task_instances_prefix + "/{task_id}", + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND, status.HTTP_400_BAD_REQUEST]), +) +@task_instances_router.patch( + task_instances_prefix + "/{task_id}/{map_index}", + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND, status.HTTP_400_BAD_REQUEST]), +) +def patch_task_instance( + dag_id: str, + dag_run_id: str, + task_id: str, + request: Request, + body: PatchTaskInstanceBody, + session: Annotated[Session, Depends(get_session)], + map_index: int = -1, + update_mask: list[str] | None = Query(None), +) -> TaskInstanceResponse: + """Update the state of a task instance.""" + dag = request.app.state.dag_bag.get_dag(dag_id) + if not dag: + raise HTTPException(status.HTTP_404_NOT_FOUND, f"DAG {dag_id} not found") + + if not dag.has_task(task_id): + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Task '{task_id}' not found in DAG '{dag_id}'") + + query = ( + select(TI) + .where(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id) + .join(TI.dag_run) + .options(joinedload(TI.rendered_task_instance_fields)) + ) + if map_index == -1: + query = query.where(or_(TI.map_index == -1, TI.map_index is None)) + else: + query = query.where(TI.map_index == map_index) + + try: + ti = session.scalar(query) + except MultipleResultsFound: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + "Multiple task instances found. 
As the TI is mapped, add the map_index value to the URL", + ) + + err_msg_404 = f"Task Instance not found for dag_id={dag_id}, run_id={dag_run_id}, task_id={task_id}" + if ti is None: + raise HTTPException(status.HTTP_404_NOT_FOUND, err_msg_404) + + fields_to_update = body.model_fields_set + if update_mask: + fields_to_update = fields_to_update.intersection(update_mask) + + for field in fields_to_update: + if field == "new_state": + if not body.dry_run: + tis: list[TI] = dag.set_task_instance_state( + task_id=task_id, + run_id=dag_run_id, + map_indexes=[map_index], + state=body.new_state, + upstream=body.include_upstream, + downstream=body.include_downstream, + future=body.include_future, + past=body.include_past, + commit=True, + session=session, + ) + if not ti: + raise HTTPException(status.HTTP_404_NOT_FOUND, err_msg_404) + ti = tis[0] if isinstance(tis, list) else tis + elif field == "note": + if update_mask or body.note is not None: + # @TODO: replace None passed for user_id with actual user id when + # permissions and auth is in place. + if ti.task_instance_note is None: + ti.note = (body.note, None) + else: + ti.task_instance_note.content = body.note + ti.task_instance_note.user_id = None + session.commit() + + return TaskInstanceResponse.model_validate(ti, from_attributes=True) diff --git a/airflow/api_fastapi/core_api/routes/public/variables.py b/airflow/api_fastapi/core_api/routes/public/variables.py index a96aa51b5dd64..bd91e9403c152 100644 --- a/airflow/api_fastapi/core_api/routes/public/variables.py +++ b/airflow/api_fastapi/core_api/routes/public/variables.py @@ -90,7 +90,7 @@ def get_variables( ) -> VariableCollectionResponse: """Get all Variables entries.""" variable_select, total_entries = paginated_select( - select=select(Variable), + statement=select(Variable), order_by=order_by, offset=offset, limit=limit, diff --git a/airflow/api_fastapi/core_api/routes/public/xcom.py b/airflow/api_fastapi/core_api/routes/public/xcom.py index dff2933940c62..1d4b154fd87c1 100644 --- a/airflow/api_fastapi/core_api/routes/public/xcom.py +++ b/airflow/api_fastapi/core_api/routes/public/xcom.py @@ -17,15 +17,17 @@ from __future__ import annotations import copy +from typing import Annotated from fastapi import Depends, HTTPException, Query, status from sqlalchemy import and_, select from sqlalchemy.orm import Session -from typing_extensions import Annotated -from airflow.api_fastapi.common.db.common import get_session +from airflow.api_fastapi.common.db.common import get_session, paginated_select +from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.datamodels.xcom import ( + XComCollection, XComResponseNative, XComResponseString, ) @@ -90,5 +92,53 @@ def get_xcom_entry( if stringify: return XComResponseString.model_validate(item) - return XComResponseNative.model_validate(item) + + +@xcom_router.get( + "", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_404_NOT_FOUND, + ] + ), +) +def get_xcom_entries( + dag_id: str, + dag_run_id: str, + task_id: str, + limit: QueryLimit, + offset: QueryOffset, + session: Annotated[Session, Depends(get_session)], + xcom_key: Annotated[str | None, Query()] = None, + map_index: Annotated[int | None, Query(ge=-1)] = None, +) -> XComCollection: + """ + Get all XCom entries. + + This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. 
+ """ + query = select(XCom) + if dag_id != "~": + query = query.where(XCom.dag_id == dag_id) + query = query.join(DR, and_(XCom.dag_id == DR.dag_id, XCom.run_id == DR.run_id)) + + if task_id != "~": + query = query.where(XCom.task_id == task_id) + if dag_run_id != "~": + query = query.where(DR.run_id == dag_run_id) + if map_index is not None: + query = query.where(XCom.map_index == map_index) + if xcom_key is not None: + query = query.where(XCom.key == xcom_key) + + query, total_entries = paginated_select( + statement=query, + offset=offset, + limit=limit, + session=session, + ) + query = query.order_by(XCom.dag_id, XCom.task_id, XCom.run_id, XCom.map_index, XCom.key) + xcoms = session.scalars(query) + return XComCollection(xcom_entries=xcoms, total_entries=total_entries) diff --git a/airflow/api_fastapi/core_api/routes/ui/dags.py b/airflow/api_fastapi/core_api/routes/ui/dags.py index 017ef3c165701..002d11e488943 100644 --- a/airflow/api_fastapi/core_api/routes/ui/dags.py +++ b/airflow/api_fastapi/core_api/routes/ui/dags.py @@ -103,7 +103,7 @@ def recent_dag_runs( .order_by(recent_runs_subquery.c.logical_date.desc()) ) dags_with_recent_dag_runs_select_filter, _ = paginated_select( - select=dags_with_recent_dag_runs_select, + statement=dags_with_recent_dag_runs_select, filters=[ only_active, paused, diff --git a/airflow/api_fastapi/core_api/security.py b/airflow/api_fastapi/core_api/security.py index ede628e04aa70..30470e9b5da55 100644 --- a/airflow/api_fastapi/core_api/security.py +++ b/airflow/api_fastapi/core_api/security.py @@ -17,12 +17,11 @@ from __future__ import annotations from functools import cache -from typing import Any, Callable +from typing import Annotated, Any, Callable from fastapi import Depends, HTTPException from fastapi.security import OAuth2PasswordBearer from jwt import InvalidTokenError -from typing_extensions import Annotated from airflow.api_fastapi.app import get_auth_manager from airflow.auth.managers.base_auth_manager import ResourceMethod diff --git a/airflow/api_fastapi/execution_api/datamodels/taskinstance.py b/airflow/api_fastapi/execution_api/datamodels/taskinstance.py index a2be682cd60d9..ae05cc140c435 100644 --- a/airflow/api_fastapi/execution_api/datamodels/taskinstance.py +++ b/airflow/api_fastapi/execution_api/datamodels/taskinstance.py @@ -18,9 +18,10 @@ from __future__ import annotations import uuid -from typing import Annotated, Literal, Union +from datetime import timedelta +from typing import Annotated, Any, Literal, Union -from pydantic import Discriminator, Tag, WithJsonSchema +from pydantic import Discriminator, Field, Tag, WithJsonSchema from airflow.api_fastapi.common.types import UtcDateTime from airflow.api_fastapi.core_api.base import BaseModel @@ -60,6 +61,26 @@ class TITargetStatePayload(BaseModel): state: IntermediateTIState +class TIDeferredStatePayload(BaseModel): + """Schema for updating TaskInstance to a deferred state.""" + + state: Annotated[ + Literal[IntermediateTIState.DEFERRED], + # Specify a default in the schema, but not in code, so Pydantic marks it as required. + WithJsonSchema( + { + "type": "string", + "enum": [IntermediateTIState.DEFERRED], + "default": IntermediateTIState.DEFERRED, + } + ), + ] + classpath: str + trigger_kwargs: Annotated[dict[str, Any], Field(default_factory=dict)] + next_method: str + trigger_timeout: timedelta | None = None + + def ti_state_discriminator(v: dict[str, str] | BaseModel) -> str: """ Determine the discriminator key for TaskInstance state transitions. 
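The `TIDeferredStatePayload` and `ti_state_discriminator` changes above lean on Pydantic v2's callable-discriminator feature: the discriminator function maps an incoming payload (raw dict or already-built model) to a `Tag`, and validation then targets only the matching union member. Below is a minimal, self-contained sketch of that same pattern, assuming Pydantic >= 2.5; the model names and fields are illustrative only and are not Airflow's actual execution-API models.

```python
from __future__ import annotations

from typing import Annotated, Literal, Union

from pydantic import BaseModel, Discriminator, Tag, TypeAdapter


class RunningPayload(BaseModel):
    state: Literal["running"]
    hostname: str


class DeferredPayload(BaseModel):
    state: Literal["deferred"]
    classpath: str
    next_method: str


def discriminate(v: dict | BaseModel) -> str:
    # Works for both raw dicts (API input) and already-built models.
    state = v.get("state") if isinstance(v, dict) else getattr(v, "state", None)
    return "deferred" if state == "deferred" else "running"


StateUpdate = Annotated[
    Union[
        Annotated[RunningPayload, Tag("running")],
        Annotated[DeferredPayload, Tag("deferred")],
    ],
    Discriminator(discriminate),
]

adapter = TypeAdapter(StateUpdate)
payload = adapter.validate_python(
    {"state": "deferred", "classpath": "my.trigger.Class", "next_method": "execute_complete"}
)
assert isinstance(payload, DeferredPayload)
```

Validating a `{"state": "deferred", ...}` body through the adapter yields the deferred model directly, which mirrors how a deferral request would be routed to `TIDeferredStatePayload` by the discriminator added in this changeset.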
@@ -77,6 +98,8 @@ def ti_state_discriminator(v: dict[str, str] | BaseModel) -> str: return str(state) elif state in set(TerminalTIState): return "_terminal_" + elif state == TIState.DEFERRED: + return "deferred" return "_other_" @@ -87,6 +110,7 @@ def ti_state_discriminator(v: dict[str, str] | BaseModel) -> str: Annotated[TIEnterRunningPayload, Tag("running")], Annotated[TITerminalStatePayload, Tag("_terminal_")], Annotated[TITargetStatePayload, Tag("_other_")], + Annotated[TIDeferredStatePayload, Tag("deferred")], ], Discriminator(ti_state_discriminator), ] diff --git a/airflow/api_fastapi/execution_api/routes/task_instances.py b/airflow/api_fastapi/execution_api/routes/task_instances.py index 3adbd51ff2aae..0927e92a1f84b 100644 --- a/airflow/api_fastapi/execution_api/routes/task_instances.py +++ b/airflow/api_fastapi/execution_api/routes/task_instances.py @@ -30,12 +30,14 @@ from airflow.api_fastapi.common.db.common import get_session from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.execution_api.datamodels.taskinstance import ( + TIDeferredStatePayload, TIEnterRunningPayload, TIHeartbeatInfo, TIStateUpdate, TITerminalStatePayload, ) from airflow.models.taskinstance import TaskInstance as TI +from airflow.models.trigger import Trigger from airflow.utils import timezone from airflow.utils.state import State @@ -122,6 +124,29 @@ def ti_update_state( ) elif isinstance(ti_patch_payload, TITerminalStatePayload): query = TI.duration_expression_update(ti_patch_payload.end_date, query, session.bind) + elif isinstance(ti_patch_payload, TIDeferredStatePayload): + # Calculate timeout if it was passed + timeout = None + if ti_patch_payload.trigger_timeout is not None: + timeout = timezone.utcnow() + ti_patch_payload.trigger_timeout + + trigger_row = Trigger( + classpath=ti_patch_payload.classpath, + kwargs=ti_patch_payload.trigger_kwargs, + ) + session.add(trigger_row) + + # TODO: HANDLE execution timeout later as it requires a call to the DB + # either get it from the serialised DAG or get it from the API + + query = update(TI).where(TI.id == ti_id_str) + query = query.values( + state=State.DEFERRED, + trigger_id=trigger_row.id, + next_method=ti_patch_payload.next_method, + next_kwargs=ti_patch_payload.trigger_kwargs, + trigger_timeout=timeout, + ) # TODO: Replace this with FastAPI's Custom Exception handling: # https://fastapi.tiangolo.com/tutorial/handling-errors/#install-custom-exception-handlers diff --git a/airflow/auth/managers/base_auth_manager.py b/airflow/auth/managers/base_auth_manager.py index 028d4dadb1326..ea0c921c98c00 100644 --- a/airflow/auth/managers/base_auth_manager.py +++ b/airflow/auth/managers/base_auth_manager.py @@ -18,8 +18,9 @@ from __future__ import annotations from abc import abstractmethod +from collections.abc import Container, Sequence from functools import cached_property -from typing import TYPE_CHECKING, Any, Container, Generic, Literal, Sequence, TypeVar +from typing import TYPE_CHECKING, Any, Generic, Literal, TypeVar from flask_appbuilder.menu import MenuItem from sqlalchemy import select diff --git a/airflow/cli/cli_config.py b/airflow/cli/cli_config.py index 00dd68041cbd2..d03ebd312600e 100644 --- a/airflow/cli/cli_config.py +++ b/airflow/cli/cli_config.py @@ -24,7 +24,8 @@ import json import os import textwrap -from typing import Callable, Iterable, NamedTuple, Union +from collections.abc import Iterable +from typing import Callable, NamedTuple, Union import lazy_object_proxy @@ -966,13 +967,6 @@ def string_lower_type(val): 
help="The maximum number of triggers that a Triggerer will run at one time.", ) -# reserialize -ARG_CLEAR_ONLY = Arg( - ("--clear-only",), - action="store_true", - help="If passed, serialized DAGs will be cleared but not reserialized.", -) - ARG_DAG_LIST_COLUMNS = Arg( ("--columns",), type=string_list_type, @@ -980,6 +974,13 @@ def string_lower_type(val): default=("dag_id", "fileloc", "owners", "is_paused"), ) +ARG_ASSET_LIST_COLUMNS = Arg( + ("--columns",), + type=string_list_type, + help="List of columns to render. (default: ['name', 'uri', 'group', 'extra'])", + default=("name", "uri", "group", "extra"), +) + ALTERNATIVE_CONN_SPECS_ARGS = [ ARG_CONN_TYPE, ARG_CONN_DESCRIPTION, @@ -1015,6 +1016,14 @@ class GroupCommand(NamedTuple): CLICommand = Union[ActionCommand, GroupCommand] +ASSETS_COMMANDS = ( + ActionCommand( + name="list", + help="List assets", + func=lazy_load_command("airflow.cli.commands.asset_command.asset_list"), + args=(ARG_OUTPUT, ARG_VERBOSE, ARG_ASSET_LIST_COLUMNS), + ), +) BACKFILL_COMMANDS = ( ActionCommand( name="create", @@ -1242,15 +1251,14 @@ class GroupCommand(NamedTuple): ), ActionCommand( name="reserialize", - help="Reserialize all DAGs by parsing the DagBag files", + help="Reserialize DAGs by parsing the DagBag files", description=( - "Drop all serialized dags from the metadata DB. This will cause all DAGs to be reserialized " - "from the DagBag folder. This can be helpful if your serialized DAGs get out of sync with the " - "version of Airflow that you are running." + "Reserialize DAGs in the metadata DB. This can be " + "particularly useful if your serialized DAGs become out of sync with the Airflow " + "version you are using." ), func=lazy_load_command("airflow.cli.commands.dag_command.dag_reserialize"), args=( - ARG_CLEAR_ONLY, ARG_SUBDIR, ARG_VERBOSE, ), @@ -1863,6 +1871,11 @@ class GroupCommand(NamedTuple): help="Manage tasks", subcommands=TASKS_COMMANDS, ), + GroupCommand( + name="assets", + help="Manage assets", + subcommands=ASSETS_COMMANDS, + ), GroupCommand( name="pools", help="Manage pools", diff --git a/airflow/cli/cli_parser.py b/airflow/cli/cli_parser.py index 9380692687b3c..a48e6b438d390 100644 --- a/airflow/cli/cli_parser.py +++ b/airflow/cli/cli_parser.py @@ -29,8 +29,9 @@ import sys from argparse import Action from collections import Counter -from functools import lru_cache -from typing import TYPE_CHECKING, Iterable +from collections.abc import Iterable +from functools import cache +from typing import TYPE_CHECKING import lazy_object_proxy from rich_argparse import RawTextRichHelpFormatter, RichHelpFormatter @@ -138,7 +139,7 @@ def add_argument(self, action: Action) -> None: return super().add_argument(action) -@lru_cache(maxsize=None) +@cache def get_parser(dag_parser: bool = False) -> argparse.ArgumentParser: """Create and returns command line argument parser.""" parser = DefaultHelpParser(prog="airflow", formatter_class=AirflowHelpFormatter) diff --git a/airflow/cli/commands/asset_command.py b/airflow/cli/commands/asset_command.py new file mode 100644 index 0000000000000..a43fe409021ba --- /dev/null +++ b/airflow/cli/commands/asset_command.py @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +import logging +import typing + +from sqlalchemy import select + +from airflow.api_fastapi.core_api.datamodels.assets import AssetResponse +from airflow.cli.simple_table import AirflowConsole +from airflow.models.asset import AssetModel +from airflow.utils import cli as cli_utils +from airflow.utils.session import NEW_SESSION, provide_session + +if typing.TYPE_CHECKING: + from typing import Any + + from sqlalchemy.orm import Session + +log = logging.getLogger(__name__) + + +@cli_utils.action_cli +@provide_session +def asset_list(args, *, session: Session = NEW_SESSION) -> None: + """Display assets in the command line.""" + assets = session.scalars(select(AssetModel).order_by(AssetModel.name)) + + def detail_mapper(asset: AssetModel) -> dict[str, Any]: + model = AssetResponse.model_validate(asset) + return model.model_dump(include=args.columns) + + AirflowConsole().print_as( + data=assets, + output=args.output, + mapper=detail_mapper, + ) diff --git a/airflow/cli/commands/cheat_sheet_command.py b/airflow/cli/commands/cheat_sheet_command.py index 9b8b78718bbc5..f8889d139e15d 100644 --- a/airflow/cli/commands/cheat_sheet_command.py +++ b/airflow/cli/commands/cheat_sheet_command.py @@ -16,7 +16,8 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Iterable +from collections.abc import Iterable +from typing import TYPE_CHECKING from airflow.cli.cli_parser import GroupCommand, airflow_commands from airflow.cli.simple_table import AirflowConsole, SimpleTable diff --git a/airflow/cli/commands/dag_command.py b/airflow/cli/commands/dag_command.py index 0b4d3192d6eda..669a075a6db2c 100644 --- a/airflow/cli/commands/dag_command.py +++ b/airflow/cli/commands/dag_command.py @@ -28,7 +28,7 @@ from typing import TYPE_CHECKING import re2 -from sqlalchemy import delete, select +from sqlalchemy import select from airflow.api.client import get_current_api_client from airflow.api_connexion.schemas.dag_schema import dag_schema @@ -537,8 +537,5 @@ def dag_test(args, dag: DAG | None = None, session: Session = NEW_SESSION) -> No @provide_session def dag_reserialize(args, session: Session = NEW_SESSION) -> None: """Serialize a DAG instance.""" - session.execute(delete(SerializedDagModel).execution_options(synchronize_session=False)) - - if not args.clear_only: - dagbag = DagBag(process_subdir(args.subdir)) - dagbag.sync_to_db(session=session) + dagbag = DagBag(process_subdir(args.subdir)) + dagbag.sync_to_db(session=session) diff --git a/airflow/cli/commands/standalone_command.py b/airflow/cli/commands/standalone_command.py index 0f8d45eb5f1b4..f248990a3ddc3 100644 --- a/airflow/cli/commands/standalone_command.py +++ b/airflow/cli/commands/standalone_command.py @@ -37,6 +37,8 @@ from airflow.utils.providers_configuration_loader import providers_configuration_loaded if TYPE_CHECKING: + from termcolor.termcolor import Color + from airflow.jobs.base_job_runner import BaseJobRunner @@ -141,14 +143,14 @@ def print_output(self, name: str, output): You can pass multiple lines to output if you wish; it will be split for you. 
""" - color = { + color: dict[str, Color] = { "fastapi-api": "magenta", "webserver": "green", "scheduler": "blue", "triggerer": "cyan", "standalone": "white", - }.get(name, "white") - colorised_name = colored(f"{name:10}", color) + } + colorised_name = colored(f"{name:10}", color.get(name, "white")) for line in output.splitlines(): print(f"{colorised_name} | {line.strip()}") diff --git a/airflow/cli/commands/task_command.py b/airflow/cli/commands/task_command.py index 2b5a6c18a8089..55c1662d35d24 100644 --- a/airflow/cli/commands/task_command.py +++ b/airflow/cli/commands/task_command.py @@ -26,8 +26,9 @@ import os import sys import textwrap +from collections.abc import Generator from contextlib import contextmanager, redirect_stderr, redirect_stdout, suppress -from typing import TYPE_CHECKING, Generator, Protocol, Union, cast +from typing import TYPE_CHECKING, Protocol, Union, cast import pendulum from pendulum.parsing.exceptions import ParserError diff --git a/airflow/cli/commands/triggerer_command.py b/airflow/cli/commands/triggerer_command.py index eba4328e7e702..7b935e160ca5e 100644 --- a/airflow/cli/commands/triggerer_command.py +++ b/airflow/cli/commands/triggerer_command.py @@ -18,10 +18,10 @@ from __future__ import annotations +from collections.abc import Generator from contextlib import contextmanager from functools import partial from multiprocessing import Process -from typing import Generator from airflow import settings from airflow.cli.commands.daemon_utils import run_command_with_daemon_option diff --git a/airflow/cli/simple_table.py b/airflow/cli/simple_table.py index 72750fbdc4415..b8e4c6a7a41a0 100644 --- a/airflow/cli/simple_table.py +++ b/airflow/cli/simple_table.py @@ -18,7 +18,8 @@ import inspect import json -from typing import TYPE_CHECKING, Any, Callable, Sequence +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any, Callable from rich.box import ASCII_DOUBLE_HEAD from rich.console import Console diff --git a/airflow/cli/utils.py b/airflow/cli/utils.py index eb37b502d4346..d132deeb373a9 100644 --- a/airflow/cli/utils.py +++ b/airflow/cli/utils.py @@ -19,7 +19,8 @@ import io import sys -from typing import TYPE_CHECKING, Collection +from collections.abc import Collection +from typing import TYPE_CHECKING if TYPE_CHECKING: from io import IOBase diff --git a/airflow/configuration.py b/airflow/configuration.py index 521af6cbe320d..c2f8455125065 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -31,12 +31,14 @@ import sys import warnings from base64 import b64encode +from collections.abc import Generator, Iterable from configparser import ConfigParser, NoOptionError, NoSectionError from contextlib import contextmanager from copy import deepcopy from io import StringIO from json.decoder import JSONDecodeError -from typing import IO, TYPE_CHECKING, Any, Dict, Generator, Iterable, Pattern, Set, Tuple, Union +from re import Pattern +from typing import IO, TYPE_CHECKING, Any, Union from urllib.parse import urlsplit import re2 @@ -65,9 +67,9 @@ _SQLITE3_VERSION_PATTERN = re2.compile(r"(?P^\d+(?:\.\d+)*)\D?.*$") ConfigType = Union[str, int, float, bool] -ConfigOptionsDictType = Dict[str, ConfigType] -ConfigSectionSourcesType = Dict[str, Union[str, Tuple[str, str]]] -ConfigSourcesType = Dict[str, ConfigSectionSourcesType] +ConfigOptionsDictType = dict[str, ConfigType] +ConfigSectionSourcesType = dict[str, Union[str, tuple[str, str]]] +ConfigSourcesType = dict[str, ConfigSectionSourcesType] ENV_VAR_PREFIX = "AIRFLOW__" @@ -300,7 
+302,7 @@ def get_default_pre_2_7_value(self, section: str, key: str, **kwargs) -> Any: # These configs can also be fetched from Secrets backend # following the "{section}__{name}__secret" pattern @functools.cached_property - def sensitive_config_values(self) -> Set[tuple[str, str]]: # noqa: UP006 + def sensitive_config_values(self) -> set[tuple[str, str]]: if self.configuration_description is None: return ( _get_empty_set_for_configuration() diff --git a/airflow/dag_processing/collection.py b/airflow/dag_processing/collection.py index 8d45aab17a304..716c920e12c16 100644 --- a/airflow/dag_processing/collection.py +++ b/airflow/dag_processing/collection.py @@ -44,7 +44,9 @@ ) from airflow.models.dag import DAG, DagModel, DagOwnerAttributes, DagTag from airflow.models.dagrun import DagRun +from airflow.models.trigger import Trigger from airflow.sdk.definitions.asset import Asset, AssetAlias +from airflow.triggers.base import BaseTrigger from airflow.utils.sqlalchemy import with_row_locks from airflow.utils.timezone import utcnow from airflow.utils.types import DagRunType @@ -425,3 +427,97 @@ def add_task_asset_references( for task_id, asset_id in referenced_outlets if (task_id, asset_id) not in orm_refs ) + + def add_asset_trigger_references( + self, assets: dict[tuple[str, str], AssetModel], *, session: Session + ) -> None: + # Update references from assets being used + refs_to_add: dict[tuple[str, str], set[str]] = {} + refs_to_remove: dict[tuple[str, str], set[str]] = {} + triggers: dict[str, BaseTrigger] = {} + for name_uri, asset in self.assets.items(): + asset_model = assets[name_uri] + trigger_repr_to_trigger_dict: dict[str, BaseTrigger] = { + repr(trigger): trigger for trigger in asset.watchers + } + triggers.update(trigger_repr_to_trigger_dict) + trigger_repr_from_asset: set[str] = set(trigger_repr_to_trigger_dict.keys()) + + trigger_repr_from_asset_model: set[str] = { + BaseTrigger.repr(trigger.classpath, trigger.kwargs) for trigger in asset_model.triggers + } + + # Optimization: no diff between the DB and DAG definitions, no update needed + if trigger_repr_from_asset == trigger_repr_from_asset_model: + continue + + diff_to_add = trigger_repr_from_asset - trigger_repr_from_asset_model + diff_to_remove = trigger_repr_from_asset_model - trigger_repr_from_asset + if diff_to_add: + refs_to_add[name_uri] = diff_to_add + if diff_to_remove: + refs_to_remove[name_uri] = diff_to_remove + + if refs_to_add: + all_trigger_reprs: set[str] = { + trigger_repr for trigger_reprs in refs_to_add.values() for trigger_repr in trigger_reprs + } + + all_trigger_keys: set[tuple[str, str]] = { + self._encrypt_trigger_kwargs(triggers[trigger_repr]) + for trigger_reprs in refs_to_add.values() + for trigger_repr in trigger_reprs + } + orm_triggers: dict[str, Trigger] = { + BaseTrigger.repr(trigger.classpath, trigger.kwargs): trigger + for trigger in session.scalars( + select(Trigger).where( + tuple_(Trigger.classpath, Trigger.encrypted_kwargs).in_(all_trigger_keys) + ) + ) + } + + # Create new triggers + new_trigger_models = [ + trigger + for trigger in [ + Trigger.from_object(triggers[trigger_repr]) + for trigger_repr in all_trigger_reprs + if trigger_repr not in orm_triggers + ] + ] + session.add_all(new_trigger_models) + orm_triggers.update( + (BaseTrigger.repr(trigger.classpath, trigger.kwargs), trigger) + for trigger in new_trigger_models + ) + + # Add new references + for name_uri, trigger_reprs in refs_to_add.items(): + asset_model = assets[name_uri] + asset_model.triggers.extend( + 
[orm_triggers.get(trigger_repr) for trigger_repr in trigger_reprs] + ) + + if refs_to_remove: + # Remove old references + for name_uri, trigger_reprs in refs_to_remove.items(): + asset_model = assets[name_uri] + asset_model.triggers = [ + trigger + for trigger in asset_model.triggers + if BaseTrigger.repr(trigger.classpath, trigger.kwargs) not in trigger_reprs + ] + + # Remove references from assets no longer used + orphan_assets = session.scalars( + select(AssetModel).filter(~AssetModel.consuming_dags.any()).filter(AssetModel.triggers.any()) + ) + for asset_model in orphan_assets: + if (asset_model.name, asset_model.uri) not in self.assets: + asset_model.triggers = [] + + @staticmethod + def _encrypt_trigger_kwargs(trigger: BaseTrigger) -> tuple[str, str]: + classpath, kwargs = trigger.serialize() + return classpath, Trigger.encrypt_kwargs(kwargs) diff --git a/airflow/dag_processing/manager.py b/airflow/dag_processing/manager.py index 3056a7fc1f4db..bf0c41cf3b68b 100644 --- a/airflow/dag_processing/manager.py +++ b/airflow/dag_processing/manager.py @@ -31,10 +31,11 @@ import time import zipfile from collections import defaultdict, deque +from collections.abc import Iterator from datetime import datetime, timedelta from importlib import import_module from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, Iterator, NamedTuple, cast +from typing import TYPE_CHECKING, Any, Callable, NamedTuple, cast from setproctitle import setproctitle from sqlalchemy import delete, select, update diff --git a/airflow/dag_processing/processor.py b/airflow/dag_processing/processor.py index 219c7aa9776af..c44da8c857083 100644 --- a/airflow/dag_processing/processor.py +++ b/airflow/dag_processing/processor.py @@ -23,9 +23,10 @@ import threading import time import zipfile +from collections.abc import Generator, Iterable from contextlib import contextmanager, redirect_stderr, redirect_stdout, suppress from dataclasses import dataclass -from typing import TYPE_CHECKING, Generator, Iterable +from typing import TYPE_CHECKING from setproctitle import setproctitle from sqlalchemy import delete, event, select @@ -189,9 +190,11 @@ def _handle_dag_file_processing(): # The following line ensures that stdout goes to the same destination as the logs. If stdout # gets sent to logs and logs are sent to stdout, this leads to an infinite loop. This # necessitates this conditional based on the value of DAG_PROCESSOR_LOG_TARGET. - with redirect_stdout(StreamLogWriter(log, logging.INFO)), redirect_stderr( - StreamLogWriter(log, logging.WARNING) - ), Stats.timer() as timer: + with ( + redirect_stdout(StreamLogWriter(log, logging.INFO)), + redirect_stderr(StreamLogWriter(log, logging.WARNING)), + Stats.timer() as timer, + ): _handle_dag_file_processing() log.info("Processing %s took %.3f seconds", file_path, timer.duration) except Exception: diff --git a/airflow/decorators/__init__.pyi b/airflow/decorators/__init__.pyi index 7dd887431c6f5..c483d010d32a7 100644 --- a/airflow/decorators/__init__.pyi +++ b/airflow/decorators/__init__.pyi @@ -20,8 +20,9 @@ # documentation for more details. 
from __future__ import annotations +from collections.abc import Collection, Container, Iterable, Mapping from datetime import timedelta -from typing import Any, Callable, Collection, Container, Iterable, Mapping, TypeVar, overload +from typing import Any, Callable, TypeVar, overload from docker.types import Mount from kubernetes.client import models as k8s diff --git a/airflow/decorators/base.py b/airflow/decorators/base.py index 4f40c6ad3b478..9a9498d49c307 100644 --- a/airflow/decorators/base.py +++ b/airflow/decorators/base.py @@ -20,17 +20,14 @@ import itertools import textwrap import warnings +from collections.abc import Collection, Iterator, Mapping, Sequence from functools import cached_property, update_wrapper from typing import ( TYPE_CHECKING, Any, Callable, ClassVar, - Collection, Generic, - Iterator, - Mapping, - Sequence, TypeVar, cast, overload, diff --git a/airflow/decorators/bash.py b/airflow/decorators/bash.py index e4dc19745e0ab..ae5b0a9e0c153 100644 --- a/airflow/decorators/bash.py +++ b/airflow/decorators/bash.py @@ -18,7 +18,8 @@ from __future__ import annotations import warnings -from typing import Any, Callable, ClassVar, Collection, Mapping, Sequence +from collections.abc import Collection, Mapping, Sequence +from typing import Any, Callable, ClassVar from airflow.decorators.base import DecoratedOperator, TaskDecorator, task_decorator_factory from airflow.providers.standard.operators.bash import BashOperator diff --git a/airflow/decorators/python.py b/airflow/decorators/python.py index b65a4a9667009..26172f7c45400 100644 --- a/airflow/decorators/python.py +++ b/airflow/decorators/python.py @@ -16,7 +16,8 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Callable, Sequence +from collections.abc import Sequence +from typing import TYPE_CHECKING, Callable from airflow.decorators.base import DecoratedOperator, task_decorator_factory from airflow.providers.standard.operators.python import PythonOperator diff --git a/airflow/decorators/sensor.py b/airflow/decorators/sensor.py index 5d409c2d599d8..1601f5d308ea8 100644 --- a/airflow/decorators/sensor.py +++ b/airflow/decorators/sensor.py @@ -17,7 +17,8 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Callable, ClassVar, Sequence +from collections.abc import Sequence +from typing import TYPE_CHECKING, Callable, ClassVar from airflow.decorators.base import get_unique_task_id, task_decorator_factory from airflow.providers.standard.sensors.python import PythonSensor diff --git a/airflow/decorators/task_group.py b/airflow/decorators/task_group.py index daaa81e1ce62a..591ba39018e1d 100644 --- a/airflow/decorators/task_group.py +++ b/airflow/decorators/task_group.py @@ -28,7 +28,8 @@ import functools import inspect import warnings -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, Mapping, Sequence, TypeVar, overload +from collections.abc import Mapping, Sequence +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, TypeVar, overload import attr diff --git a/airflow/example_dags/example_external_task_marker_dag.py b/airflow/example_dags/example_external_task_marker_dag.py index abaf21770523a..0a282a834195c 100644 --- a/airflow/example_dags/example_external_task_marker_dag.py +++ b/airflow/example_dags/example_external_task_marker_dag.py @@ -44,7 +44,7 @@ from airflow.models.dag import DAG from airflow.operators.empty import EmptyOperator -from airflow.sensors.external_task import ExternalTaskMarker, ExternalTaskSensor +from 
airflow.providers.standard.sensors.external_task import ExternalTaskMarker, ExternalTaskSensor start_date = pendulum.datetime(2021, 1, 1, tz="UTC") diff --git a/airflow/example_dags/example_params_ui_tutorial.py b/airflow/example_dags/example_params_ui_tutorial.py index 133df85d07b5c..5d21e6b774d84 100644 --- a/airflow/example_dags/example_params_ui_tutorial.py +++ b/airflow/example_dags/example_params_ui_tutorial.py @@ -31,222 +31,224 @@ from airflow.models.dag import DAG from airflow.models.param import Param, ParamsDict -with DAG( - dag_id=Path(__file__).stem, - dag_display_name="Params UI tutorial", - description=__doc__.partition(".")[0], - doc_md=__doc__, - schedule=None, - start_date=datetime.datetime(2022, 3, 4), - catchup=False, - tags=["example", "params", "ui"], - # [START section_1] - params={ - # Let's start simple: Standard dict values are detected from type and offered as entry form fields. - # Detected types are numbers, text, boolean, lists and dicts. - # Note that such auto-detected parameters are treated as optional (not required to contain a value) - "x": 3, - "text": "Hello World!", - "flag": False, - "a_simple_list": ["one", "two", "three", "actually one value is made per line"], - # But of course you might want to have it nicer! Let's add some description to parameters. - # Note if you can add any Markdown formatting to the description, you need to use the description_md - # attribute. - "most_loved_number": Param( - 42, - type="integer", - title="Your favorite number", - description_md="Everybody should have a **favorite** number. Not only _math teachers_. " - "If you can not think of any at the moment please think of the 42 which is very famous because" - "of the book [The Hitchhiker's Guide to the Galaxy]" - "(https://en.wikipedia.org/wiki/Phrases_from_The_Hitchhiker%27s_Guide_to_the_Galaxy#" - "The_Answer_to_the_Ultimate_Question_of_Life,_the_Universe,_and_Everything_is_42).", - ), - # If you want to have a selection list box then you can use the enum feature of JSON schema - "pick_one": Param( - "value 42", - type="string", - title="Select one Value", - description="You can use JSON schema enum's to generate drop down selection boxes.", - enum=[f"value {i}" for i in range(16, 64)], - ), - # [END section_1] - # You can also label the selected values via values_display attribute - "pick_with_label": Param( - 3, - type="number", - title="Select one Number", - description="With drop down selections you can also have nice display labels for the values.", - enum=[*range(1, 10)], - values_display={ - 1: "One", - 2: "Two", - 3: "Three", - 4: "Four - is like you take three and get one for free!", - 5: "Five", - 6: "Six", - 7: "Seven", - 8: "Eight", - 9: "Nine", - }, - ), - # If you want to have a list box with proposals but not enforcing a fixed list - # then you can use the examples feature of JSON schema - "proposals": Param( - "some value", - type="string", - title="Field with proposals", - description="You can use JSON schema examples's to generate drop down selection boxes " - "but allow also to enter custom values. 
Try typing an 'a' and see options.", - examples=( - "Alpha,Bravo,Charlie,Delta,Echo,Foxtrot,Golf,Hotel,India,Juliett,Kilo,Lima,Mike,November,Oscar,Papa," - "Quebec,Romeo,Sierra,Tango,Uniform,Victor,Whiskey,X-ray,Yankee,Zulu" - ).split(","), - ), - # If you want to select multiple items from a fixed list JSON schema does not allow to use enum - # In this case the type "array" is being used together with "examples" as pick list - "multi_select": Param( - ["two", "three"], - "Select from the list of options.", - type="array", - title="Multi Select", - examples=["one", "two", "three", "four", "five"], - ), - # A multiple options selection can also be combined with values_display - "multi_select_with_label": Param( - ["2", "3"], - "Select from the list of options. See that options can have nicer text and still technical values" - "are propagated as values during trigger to the DAG.", - type="array", - title="Multi Select with Labels", - examples=["1", "2", "3", "4", "5"], - values_display={ - "1": "One box of choccolate", - "2": "Two bananas", - "3": "Three apples", - # Note: Value display mapping does not need to be complete - }, - ), - # An array of numbers - "array_of_numbers": Param( - [1, 2, 3], - "Only integers are accepted in this array", - type="array", - title="Array of numbers", - items={"type": "number"}, - ), - # Boolean as proper parameter with description - "bool": Param( - True, - type="boolean", - title="Please confirm", - description="A On/Off selection with a proper description.", - ), - # Dates and Times are also supported - "date_time": Param( - f"{datetime.date.today()}T{datetime.time(hour=12, minute=17, second=00)}+00:00", - type="string", - format="date-time", - title="Date-Time Picker", - description="Please select a date and time, use the button on the left for a pop-up calendar.", - ), - "date": Param( - f"{datetime.date.today()}", - type="string", - format="date", - title="Date Picker", - description="Please select a date, use the button on the left for a pop-up calendar. " - "See that here are no times!", - ), - "time": Param( - f"{datetime.time(hour=12, minute=13, second=14)}", - type=["string", "null"], - format="time", - title="Time Picker", - description="Please select a time, use the button on the left for a pop-up tool.", - ), - "multiline_text": Param( - "A multiline text Param\nthat will keep the newline\ncharacters in its value.", - description="This field allows for multiline text input. The returned value will be a single with newline (\\n) characters kept intact.", - type=["string", "null"], - format="multiline", - ), - # Fields can be required or not. If the defined fields are typed they are getting required by default - # (else they would not pass JSON schema validation) - to make typed fields optional you must - # permit the optional "null" type. - # You can omit a default value if the DAG is triggered manually - # [START section_2] - "required_field": Param( - # In this example we have no default value - # Form will enforce a value supplied by users to be able to trigger - type="string", - title="Required text field", - description="This field is required. You can not submit without having text in here.", - ), - "optional_field": Param( - "optional text, you can trigger also w/o text", - type=["null", "string"], - title="Optional text field", - description_md="This field is optional. 
As field content is JSON schema validated you must " - "allow the `null` type.", - ), - # [END section_2] - # You can arrange the entry fields in sections so that you can have a better overview for the user - # Therefore you can add the "section" attribute. - # The benefit of the Params class definition is that the full scope of JSON schema validation - # can be leveraged for form fields and they will be validated before DAG submission. - "checked_text": Param( - "length-checked-field", - type="string", - title="Text field with length check", - description_md="""This field is required. And you need to provide something between 10 and 30 +with ( + DAG( + dag_id=Path(__file__).stem, + dag_display_name="Params UI tutorial", + description=__doc__.partition(".")[0], + doc_md=__doc__, + schedule=None, + start_date=datetime.datetime(2022, 3, 4), + catchup=False, + tags=["example", "params", "ui"], + # [START section_1] + params={ + # Let's start simple: Standard dict values are detected from type and offered as entry form fields. + # Detected types are numbers, text, boolean, lists and dicts. + # Note that such auto-detected parameters are treated as optional (not required to contain a value) + "x": 3, + "text": "Hello World!", + "flag": False, + "a_simple_list": ["one", "two", "three", "actually one value is made per line"], + # But of course you might want to have it nicer! Let's add some description to parameters. + # Note if you can add any Markdown formatting to the description, you need to use the description_md + # attribute. + "most_loved_number": Param( + 42, + type="integer", + title="Your favorite number", + description_md="Everybody should have a **favorite** number. Not only _math teachers_. " + "If you can not think of any at the moment please think of the 42 which is very famous because" + "of the book [The Hitchhiker's Guide to the Galaxy]" + "(https://en.wikipedia.org/wiki/Phrases_from_The_Hitchhiker%27s_Guide_to_the_Galaxy#" + "The_Answer_to_the_Ultimate_Question_of_Life,_the_Universe,_and_Everything_is_42).", + ), + # If you want to have a selection list box then you can use the enum feature of JSON schema + "pick_one": Param( + "value 42", + type="string", + title="Select one Value", + description="You can use JSON schema enum's to generate drop down selection boxes.", + enum=[f"value {i}" for i in range(16, 64)], + ), + # [END section_1] + # You can also label the selected values via values_display attribute + "pick_with_label": Param( + 3, + type="number", + title="Select one Number", + description="With drop down selections you can also have nice display labels for the values.", + enum=[*range(1, 10)], + values_display={ + 1: "One", + 2: "Two", + 3: "Three", + 4: "Four - is like you take three and get one for free!", + 5: "Five", + 6: "Six", + 7: "Seven", + 8: "Eight", + 9: "Nine", + }, + ), + # If you want to have a list box with proposals but not enforcing a fixed list + # then you can use the examples feature of JSON schema + "proposals": Param( + "some value", + type="string", + title="Field with proposals", + description="You can use JSON schema examples's to generate drop down selection boxes " + "but allow also to enter custom values. 
Try typing an 'a' and see options.", + examples=( + "Alpha,Bravo,Charlie,Delta,Echo,Foxtrot,Golf,Hotel,India,Juliett,Kilo,Lima,Mike,November,Oscar,Papa," + "Quebec,Romeo,Sierra,Tango,Uniform,Victor,Whiskey,X-ray,Yankee,Zulu" + ).split(","), + ), + # If you want to select multiple items from a fixed list JSON schema does not allow to use enum + # In this case the type "array" is being used together with "examples" as pick list + "multi_select": Param( + ["two", "three"], + "Select from the list of options.", + type="array", + title="Multi Select", + examples=["one", "two", "three", "four", "five"], + ), + # A multiple options selection can also be combined with values_display + "multi_select_with_label": Param( + ["2", "3"], + "Select from the list of options. See that options can have nicer text and still technical values" + "are propagated as values during trigger to the DAG.", + type="array", + title="Multi Select with Labels", + examples=["1", "2", "3", "4", "5"], + values_display={ + "1": "One box of choccolate", + "2": "Two bananas", + "3": "Three apples", + # Note: Value display mapping does not need to be complete + }, + ), + # An array of numbers + "array_of_numbers": Param( + [1, 2, 3], + "Only integers are accepted in this array", + type="array", + title="Array of numbers", + items={"type": "number"}, + ), + # Boolean as proper parameter with description + "bool": Param( + True, + type="boolean", + title="Please confirm", + description="A On/Off selection with a proper description.", + ), + # Dates and Times are also supported + "date_time": Param( + f"{datetime.date.today()}T{datetime.time(hour=12, minute=17, second=00)}+00:00", + type="string", + format="date-time", + title="Date-Time Picker", + description="Please select a date and time, use the button on the left for a pop-up calendar.", + ), + "date": Param( + f"{datetime.date.today()}", + type="string", + format="date", + title="Date Picker", + description="Please select a date, use the button on the left for a pop-up calendar. " + "See that here are no times!", + ), + "time": Param( + f"{datetime.time(hour=12, minute=13, second=14)}", + type=["string", "null"], + format="time", + title="Time Picker", + description="Please select a time, use the button on the left for a pop-up tool.", + ), + "multiline_text": Param( + "A multiline text Param\nthat will keep the newline\ncharacters in its value.", + description="This field allows for multiline text input. The returned value will be a single with newline (\\n) characters kept intact.", + type=["string", "null"], + format="multiline", + ), + # Fields can be required or not. If the defined fields are typed they are getting required by default + # (else they would not pass JSON schema validation) - to make typed fields optional you must + # permit the optional "null" type. + # You can omit a default value if the DAG is triggered manually + # [START section_2] + "required_field": Param( + # In this example we have no default value + # Form will enforce a value supplied by users to be able to trigger + type="string", + title="Required text field", + description="This field is required. You can not submit without having text in here.", + ), + "optional_field": Param( + "optional text, you can trigger also w/o text", + type=["null", "string"], + title="Optional text field", + description_md="This field is optional. 
As field content is JSON schema validated you must " + "allow the `null` type.", + ), + # [END section_2] + # You can arrange the entry fields in sections so that you can have a better overview for the user + # Therefore you can add the "section" attribute. + # The benefit of the Params class definition is that the full scope of JSON schema validation + # can be leveraged for form fields and they will be validated before DAG submission. + "checked_text": Param( + "length-checked-field", + type="string", + title="Text field with length check", + description_md="""This field is required. And you need to provide something between 10 and 30 characters. See the JSON [schema description (string)](https://json-schema.org/understanding-json-schema/reference/string.html) for more details""", - minLength=10, - maxLength=20, - section="JSON Schema validation options", - ), - "checked_number": Param( - 100, - type="number", - title="Number field with value check", - description_md="""This field is required. You need to provide any number between 64 and 128. + minLength=10, + maxLength=20, + section="JSON Schema validation options", + ), + "checked_number": Param( + 100, + type="number", + title="Number field with value check", + description_md="""This field is required. You need to provide any number between 64 and 128. See the JSON [schema description (numbers)](https://json-schema.org/understanding-json-schema/reference/numeric.html) for more details""", - minimum=64, - maximum=128, - section="JSON Schema validation options", - ), - # Some further cool stuff as advanced options are also possible - # You can have the user entering a dict object as a JSON with validation - "object": Param( - {"key": "value"}, - type=["object", "null"], - title="JSON entry field", - section="Special advanced stuff with form fields", - ), - "array_of_objects": Param( - [{"name": "account_name", "country": "country_name"}], - description_md="Array with complex objects and validation rules. " - "See [JSON Schema validation options in specs]" - "(https://json-schema.org/understanding-json-schema/reference/array.html#items).", - type="array", - title="JSON array field", - items={ - "type": "object", - "properties": {"name": {"type": "string"}, "country_name": {"type": "string"}}, - "required": ["name"], - }, - section="Special advanced stuff with form fields", - ), - # If you want to have static parameters which are always passed and not editable by the user - # then you can use the JSON schema option of passing constant values. These parameters - # will not be displayed but passed to the DAG - "hidden_secret_field": Param("constant value", const="constant value"), - }, -) as dag: + minimum=64, + maximum=128, + section="JSON Schema validation options", + ), + # Some further cool stuff as advanced options are also possible + # You can have the user entering a dict object as a JSON with validation + "object": Param( + {"key": "value"}, + type=["object", "null"], + title="JSON entry field", + section="Special advanced stuff with form fields", + ), + "array_of_objects": Param( + [{"name": "account_name", "country": "country_name"}], + description_md="Array with complex objects and validation rules. 
" + "See [JSON Schema validation options in specs]" + "(https://json-schema.org/understanding-json-schema/reference/array.html#items).", + type="array", + title="JSON array field", + items={ + "type": "object", + "properties": {"name": {"type": "string"}, "country_name": {"type": "string"}}, + "required": ["name"], + }, + section="Special advanced stuff with form fields", + ), + # If you want to have static parameters which are always passed and not editable by the user + # then you can use the JSON schema option of passing constant values. These parameters + # will not be displayed but passed to the DAG + "hidden_secret_field": Param("constant value", const="constant value"), + }, + ) as dag +): # [START section_3] @task(task_display_name="Show used parameters") def show_params(**kwargs) -> None: diff --git a/airflow/executors/base_executor.py b/airflow/executors/base_executor.py index d24fd4f151691..3e406e3d74d10 100644 --- a/airflow/executors/base_executor.py +++ b/airflow/executors/base_executor.py @@ -21,8 +21,9 @@ import logging import sys from collections import defaultdict, deque +from collections.abc import Sequence from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Any, List, Optional, Sequence, Tuple +from typing import TYPE_CHECKING, Any, Optional import pendulum from deprecated import deprecated @@ -55,20 +56,20 @@ # Command to execute - list of strings # the first element is always "airflow". # It should be result of TaskInstance.generate_command method. - CommandType = List[str] + CommandType = list[str] # Task that is queued. It contains all the information that is # needed to run the task. # # Tuple of: command, priority, queue name, TaskInstance - QueuedTaskInstanceType = Tuple[CommandType, int, Optional[str], TaskInstance] + QueuedTaskInstanceType = tuple[CommandType, int, Optional[str], TaskInstance] # Event_buffer dict value type # Tuple of: state, info - EventBufferValueType = Tuple[Optional[str], Any] + EventBufferValueType = tuple[Optional[str], Any] # Task tuple to send to be executed - TaskTuple = Tuple[TaskInstanceKey, CommandType, Optional[str], Optional[Any]] + TaskTuple = tuple[TaskInstanceKey, CommandType, Optional[str], Optional[Any]] log = logging.getLogger(__name__) diff --git a/airflow/executors/executor_loader.py b/airflow/executors/executor_loader.py index 7fc0bd63e9802..84375a4baeb0b 100644 --- a/airflow/executors/executor_loader.py +++ b/airflow/executors/executor_loader.py @@ -52,8 +52,6 @@ _classname_to_executors: dict[str, ExecutorName] = {} # Used to cache the computed ExecutorNames so that we don't need to read/parse config more than once _executor_names: list[ExecutorName] = [] -# Used to cache executors so that we don't construct executor objects unnecessarily -_loaded_executors: dict[ExecutorName, BaseExecutor] = {} class ExecutorLoader: @@ -165,22 +163,6 @@ def get_default_executor(cls) -> BaseExecutor: return default_executor - @classmethod - def set_default_executor(cls, executor: BaseExecutor) -> None: - """ - Externally set an executor to be the default. 
- - This is used in rare cases such as dag.test which allows, as a user convenience, to provide - the executor by cli/argument instead of Airflow configuration - """ - exec_class_name = executor.__class__.__qualname__ - exec_name = ExecutorName(f"{executor.__module__}.{exec_class_name}") - - _module_to_executors[exec_name.module_path] = exec_name - _classname_to_executors[exec_class_name] = exec_name - _executor_names.insert(0, exec_name) - _loaded_executors[exec_name] = executor - @classmethod def init_executors(cls) -> list[BaseExecutor]: """Create a new instance of all configured executors if not cached already.""" @@ -229,10 +211,6 @@ def load_executor(cls, executor_name: ExecutorName | str | None) -> BaseExecutor else: _executor_name = executor_name - # Check if the executor has been previously loaded. Avoid constructing a new object - if _executor_name in _loaded_executors: - return _loaded_executors[_executor_name] - try: if _executor_name.alias == CELERY_KUBERNETES_EXECUTOR: executor = cls.__load_celery_kubernetes_executor() @@ -255,9 +233,6 @@ def load_executor(cls, executor_name: ExecutorName | str | None) -> BaseExecutor # instance. This makes it easier for the Scheduler, Backfill, etc to # know how we refer to this executor. executor.name = _executor_name - # Cache this executor by name here, so we can look it up later if it is - # requested again, and not have to construct a new object - _loaded_executors[_executor_name] = executor return executor @@ -298,7 +273,7 @@ def import_default_executor_cls(cls, validate: bool = True) -> tuple[type[BaseEx return executor, source @classmethod - @functools.lru_cache(maxsize=None) + @functools.cache def validate_database_executor_compatibility(cls, executor: type[BaseExecutor]) -> None: """ Validate database and executor compatibility. diff --git a/airflow/executors/local_executor.py b/airflow/executors/local_executor.py index 3b8b52176db5b..02e201f7b5bfb 100644 --- a/airflow/executors/local_executor.py +++ b/airflow/executors/local_executor.py @@ -32,7 +32,7 @@ import os import subprocess from multiprocessing import Queue, SimpleQueue -from typing import TYPE_CHECKING, Any, Optional, Tuple +from typing import TYPE_CHECKING, Any, Optional from setproctitle import setproctitle @@ -49,8 +49,8 @@ # This is a work to be executed by a worker. # It can Key and Command - but it can also be None, None which is actually a # "Poison Pill" - worker seeing Poison Pill should take the pill and ... die instantly. 
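Note on the "Poison Pill" comment above: the LocalExecutor workers keep pulling work from a queue and shut down when they dequeue a sentinel instead of a real item. The following is a minimal, self-contained sketch of that pattern, not Airflow's actual worker code; queue contents and names are illustrative.

from multiprocessing import Process, SimpleQueue


def worker(queue) -> None:
    """Consume work items until a poison pill (None) is received."""
    while True:
        item = queue.get()
        if item is None:  # poison pill: no more work, shut down cleanly
            return
        print(f"processing {item}")


if __name__ == "__main__":
    queue = SimpleQueue()
    workers = [Process(target=worker, args=(queue,)) for _ in range(2)]
    for proc in workers:
        proc.start()
    for item in ["task_a", "task_b", "task_c"]:
        queue.put(item)
    for _ in workers:
        queue.put(None)  # one pill per worker so every process exits
    for proc in workers:
        proc.join()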
- ExecutorWorkType = Optional[Tuple[TaskInstanceKey, CommandType]] - TaskInstanceStateType = Tuple[TaskInstanceKey, TaskInstanceState, Optional[Exception]] + ExecutorWorkType = Optional[tuple[TaskInstanceKey, CommandType]] + TaskInstanceStateType = tuple[TaskInstanceKey, TaskInstanceState, Optional[Exception]] def _run_worker( diff --git a/airflow/io/__init__.py b/airflow/io/__init__.py index 49f2711c3c6cd..3229b83101616 100644 --- a/airflow/io/__init__.py +++ b/airflow/io/__init__.py @@ -18,11 +18,11 @@ import inspect import logging +from collections.abc import Mapping from functools import cache from typing import ( TYPE_CHECKING, Callable, - Mapping, ) from fsspec.implementations.local import LocalFileSystem diff --git a/airflow/io/path.py b/airflow/io/path.py index 3526050d12883..5a68789f8ebac 100644 --- a/airflow/io/path.py +++ b/airflow/io/path.py @@ -20,7 +20,8 @@ import os import shutil import typing -from typing import Any, Mapping +from collections.abc import Mapping +from typing import Any from urllib.parse import urlsplit from fsspec.utils import stringify_path diff --git a/airflow/io/typedef.py b/airflow/io/typedef.py index 3fd92564fc1c3..f1a3f8e1ce5f0 100644 --- a/airflow/io/typedef.py +++ b/airflow/io/typedef.py @@ -16,6 +16,4 @@ # under the License. from __future__ import annotations -from typing import Dict - -Properties = Dict[str, str] +Properties = dict[str, str] diff --git a/airflow/jobs/job.py b/airflow/jobs/job.py index 0c2db219ef957..6e802372d8344 100644 --- a/airflow/jobs/job.py +++ b/airflow/jobs/job.py @@ -123,7 +123,6 @@ def __init__(self, executor: BaseExecutor | None = None, heartrate=None, **kwarg self.heartbeat_failed = False self.hostname = get_hostname() if executor: - self.executor = executor self.executors = [executor] self.start_date = timezone.utcnow() self.latest_heartbeat = timezone.utcnow() @@ -135,9 +134,9 @@ def __init__(self, executor: BaseExecutor | None = None, heartrate=None, **kwarg get_listener_manager().hook.on_starting(component=self) super().__init__(**kwargs) - @cached_property + @property def executor(self): - return ExecutorLoader.get_default_executor() + return self.executors[0] @cached_property def executors(self): diff --git a/airflow/jobs/scheduler_job_runner.py b/airflow/jobs/scheduler_job_runner.py index 96a09eb99bb28..da50deb31c972 100644 --- a/airflow/jobs/scheduler_job_runner.py +++ b/airflow/jobs/scheduler_job_runner.py @@ -25,12 +25,13 @@ import sys import time from collections import Counter, defaultdict, deque -from contextlib import suppress +from collections.abc import Collection, Iterable, Iterator +from contextlib import ExitStack, suppress from datetime import timedelta from functools import lru_cache, partial from itertools import groupby from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, Collection, Iterable, Iterator +from typing import TYPE_CHECKING, Any, Callable from deprecated import deprecated from sqlalchemy import and_, delete, exists, func, not_, select, text, update @@ -42,7 +43,7 @@ from airflow.callbacks.callback_requests import DagCallbackRequest, TaskCallbackRequest from airflow.callbacks.pipe_callback_sink import PipeCallbackSink from airflow.configuration import conf -from airflow.exceptions import RemovedInAirflow3Warning, UnknownExecutorException +from airflow.exceptions import RemovedInAirflow3Warning from airflow.executors.executor_loader import ExecutorLoader from airflow.jobs.base_job_runner import BaseJobRunner from airflow.jobs.job import Job, perform_heartbeat @@ 
-218,14 +219,22 @@ def __init__( def heartbeat_callback(self, session: Session = NEW_SESSION) -> None: Stats.incr("scheduler_heartbeat", 1, 1) - def register_signals(self) -> None: + def register_signals(self) -> ExitStack: """Register signals that stop child processes.""" - signal.signal(signal.SIGINT, self._exit_gracefully) - signal.signal(signal.SIGTERM, self._exit_gracefully) - signal.signal(signal.SIGUSR2, self._debug_dump) + resetter = ExitStack() + prev_int = signal.signal(signal.SIGINT, self._exit_gracefully) + prev_term = signal.signal(signal.SIGTERM, self._exit_gracefully) + prev_usr2 = signal.signal(signal.SIGUSR2, self._debug_dump) + + resetter.callback(signal.signal, signal.SIGINT, prev_int) + resetter.callback(signal.signal, signal.SIGTERM, prev_term) + resetter.callback(signal.signal, signal.SIGUSR2, prev_usr2) if self._enable_tracemalloc: - signal.signal(signal.SIGUSR1, self._log_memory_usage) + prev = signal.signal(signal.SIGUSR1, self._log_memory_usage) + resetter.callback(signal.signal, signal.SIGUSR1, prev) + + return resetter def _exit_gracefully(self, signum: int, frame: FrameType | None) -> None: """Clean up processor_agent to avoid leaving orphan processes.""" @@ -926,6 +935,7 @@ def _execute(self) -> int | None: async_mode=async_mode, ) + reset_signals = self.register_signals() try: callback_sink: PipeCallbackSink | DatabaseCallbackSink @@ -943,8 +953,6 @@ def _execute(self) -> int | None: executor.callback_sink = callback_sink executor.start() - self.register_signals() - if self.processor_agent: self.processor_agent.start() @@ -980,6 +988,10 @@ def _execute(self) -> int | None: self.processor_agent.end() except Exception: self.log.exception("Exception when executing DagFileProcessorAgent.end") + + # Under normal execution, this doesn't metter, but by resetting signals it lets us run more things + # in the same process under testing without leaking global state + reset_signals.close() self.log.info("Exited execute loop") return None @@ -1073,9 +1085,10 @@ def _run_scheduler_loop(self) -> None: ) for loop_count in itertools.count(start=1): - with Trace.start_span( - span_name="scheduler_job_loop", component="SchedulerJobRunner" - ) as span, Stats.timer("scheduler.scheduler_loop_duration") as timer: + with ( + Trace.start_span(span_name="scheduler_job_loop", component="SchedulerJobRunner") as span, + Stats.timer("scheduler.scheduler_loop_duration") as timer, + ): span.set_attributes( { "category": "scheduler", @@ -2280,13 +2293,17 @@ def _try_to_load_executor(self, executor_name: str | None) -> BaseExecutor | Non In this context, we don't want to fail if the executor does not exist. Catch the exception and log to the user. """ - try: - return ExecutorLoader.load_executor(executor_name) - except UnknownExecutorException: - # This case should not happen unless some (as of now unknown) edge case occurs or direct DB - # modification, since the DAG parser will validate the tasks in the DAG and ensure the executor - # they request is available and if not, disallow the DAG to be scheduled. - # Keeping this exception handling because this is a critical issue if we do somehow find - # ourselves here and the user should get some feedback about that. 
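The register_signals change above now returns a contextlib.ExitStack so the previously installed handlers can be restored after the scheduler loop finishes, which matters mainly when tests run a scheduler in the same process. A minimal sketch of the same save-and-restore pattern, independent of Airflow (handler and function names are illustrative):

import signal
from contextlib import ExitStack


def install_handlers(handler) -> ExitStack:
    """Install `handler` for SIGINT/SIGTERM and return an ExitStack that puts the old handlers back."""
    resetter = ExitStack()
    for sig in (signal.SIGINT, signal.SIGTERM):
        previous = signal.signal(sig, handler)  # signal.signal returns the handler it replaced
        resetter.callback(signal.signal, sig, previous)  # callbacks run in LIFO order on close()
    return resetter


def noisy_handler(signum, frame):
    print(f"caught signal {signum}")


reset = install_handlers(noisy_handler)
try:
    pass  # ... run the code that needs the custom handlers ...
finally:
    reset.close()  # the original handlers are active again after this point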
- self.log.warning("Executor, %s, was not found but a Task was configured to use it", executor_name) - return None + if executor_name is None: + return self.job.executor + + for e in self.job.executors: + if e.name.alias == executor_name or e.name.module_path == executor_name: + return e + + # This case should not happen unless some (as of now unknown) edge case occurs or direct DB + # modification, since the DAG parser will validate the tasks in the DAG and ensure the executor + # they request is available and if not, disallow the DAG to be scheduled. + # Keeping this exception handling because this is a critical issue if we do somehow find + # ourselves here and the user should get some feedback about that. + self.log.warning("Executor, %s, was not found but a Task was configured to use it", executor_name) + return None diff --git a/airflow/metrics/otel_logger.py b/airflow/metrics/otel_logger.py index c3633212cd278..328effc8ea1e5 100644 --- a/airflow/metrics/otel_logger.py +++ b/airflow/metrics/otel_logger.py @@ -20,8 +20,9 @@ import logging import random import warnings +from collections.abc import Iterable from functools import partial -from typing import TYPE_CHECKING, Callable, Iterable, Union +from typing import TYPE_CHECKING, Callable, Union from opentelemetry import metrics from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter @@ -407,10 +408,7 @@ def get_otel_logger(cls) -> SafeOtelLogger: log.info("[Metric Exporter] Connecting to OpenTelemetry Collector at %s", endpoint) readers = [ PeriodicExportingMetricReader( - OTLPMetricExporter( - endpoint=endpoint, - headers={"Content-Type": "application/json"}, - ), + OTLPMetricExporter(endpoint=endpoint), export_interval_millis=interval, ) ] diff --git a/airflow/metrics/validators.py b/airflow/metrics/validators.py index b47cdac1be551..a570721ffe2b4 100644 --- a/airflow/metrics/validators.py +++ b/airflow/metrics/validators.py @@ -23,8 +23,10 @@ import logging import string import warnings +from collections.abc import Iterable from functools import partial, wraps -from typing import Callable, Iterable, Pattern, cast +from re import Pattern +from typing import Callable, cast import re2 diff --git a/airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py b/airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py index e98bf5a200899..35234894367dd 100644 --- a/airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py +++ b/airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py @@ -80,8 +80,9 @@ def upgrade(): ondelete="CASCADE", ) batch_op.create_unique_constraint("dag_code_dag_version_id_uq", ["dag_version_id"]) - batch_op.drop_column("last_updated") - batch_op.add_column(sa.Column("created_at", UtcDateTime(), nullable=False, default=timezone.utcnow)) + batch_op.add_column(sa.Column("source_code_hash", sa.String(length=32), nullable=False)) + batch_op.drop_column("fileloc_hash") + batch_op.add_column(sa.Column("dag_id", sa.String(length=250), nullable=False)) with op.batch_alter_table( "serialized_dag", recreate="always", naming_convention=naming_convention @@ -131,6 +132,9 @@ def upgrade(): def downgrade(): """Unapply add dag versioning.""" + # Going down from here, the way we serialize DAG changes, so we need to delete the dag_version table + # which in turn deletes the serialized dag and dag code tables. 
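The reworked `_try_to_load_executor` above resolves a task's requested executor against the executors already attached to the job, matching either the alias or the module path, instead of constructing a fresh instance. A rough standalone sketch of that matching rule, with simplified stand-in classes and illustrative executor names:

from __future__ import annotations

from dataclasses import dataclass


@dataclass
class ExecutorName:
    module_path: str
    alias: str | None = None


@dataclass
class Executor:
    name: ExecutorName


def find_executor(executors: list[Executor], requested: str | None) -> Executor | None:
    """Return the executor matching the requested alias/module path; default to the first one."""
    if requested is None:
        return executors[0]
    for executor in executors:
        if requested in (executor.name.alias, executor.name.module_path):
            return executor
    return None  # the caller logs a warning instead of raising


executors = [
    Executor(ExecutorName("example.executors.local_executor.LocalExecutor", "LocalExecutor")),
    Executor(ExecutorName("example.executors.celery_executor.CeleryExecutor", "CeleryExecutor")),
]
assert find_executor(executors, "CeleryExecutor") is executors[1]
assert find_executor(executors, None) is executors[0]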
+ op.execute(sa.text("DELETE FROM dag_version")) with op.batch_alter_table("task_instance_history", schema=None) as batch_op: batch_op.drop_column("dag_version_id") @@ -142,9 +146,10 @@ def downgrade(): batch_op.drop_column("id") batch_op.drop_constraint(batch_op.f("dag_code_dag_version_id_fkey"), type_="foreignkey") batch_op.drop_column("dag_version_id") + batch_op.add_column(sa.Column("fileloc_hash", sa.BigInteger, nullable=False)) batch_op.create_primary_key("dag_code_pkey", ["fileloc_hash"]) - batch_op.drop_column("created_at") - batch_op.add_column(sa.Column("last_updated", UtcDateTime(), nullable=False)) + batch_op.drop_column("source_code_hash") + batch_op.drop_column("dag_id") with op.batch_alter_table("serialized_dag", schema=None, naming_convention=naming_convention) as batch_op: batch_op.drop_column("id") diff --git a/airflow/models/abstractoperator.py b/airflow/models/abstractoperator.py index d8a000352f40b..aa23bf33e131a 100644 --- a/airflow/models/abstractoperator.py +++ b/airflow/models/abstractoperator.py @@ -19,8 +19,9 @@ import datetime import inspect +from collections.abc import Iterable, Iterator, Sequence from functools import cached_property -from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Sequence +from typing import TYPE_CHECKING, Any, Callable import methodtools from sqlalchemy import select diff --git a/airflow/models/asset.py b/airflow/models/asset.py index 126bc5dc2d3fb..d47986a85e560 100644 --- a/airflow/models/asset.py +++ b/airflow/models/asset.py @@ -42,7 +42,7 @@ from airflow.utils.sqlalchemy import UtcDateTime if TYPE_CHECKING: - from typing import Sequence + from collections.abc import Sequence from sqlalchemy.orm import Session diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py index 108d9e51d7588..13eb787b4f86b 100644 --- a/airflow/models/baseoperator.py +++ b/airflow/models/baseoperator.py @@ -28,6 +28,7 @@ import copy import functools import logging +from collections.abc import Collection, Iterable, Sequence from datetime import datetime, timedelta from functools import wraps from threading import local @@ -36,10 +37,7 @@ TYPE_CHECKING, Any, Callable, - Collection, - Iterable, NoReturn, - Sequence, TypeVar, ) @@ -116,7 +114,7 @@ # Todo: AIP-44: Once we get rid of AIP-44 we can remove this. 
But without this here pydantic fails to resolve # types for serialization -from airflow.utils.task_group import TaskGroup # noqa: TCH001 +from airflow.utils.task_group import TaskGroup # noqa: TC001 TaskPreExecuteHook = Callable[[Context], None] TaskPostExecuteHook = Callable[[Context, Any], None] diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 8177025c2d8ed..34ed6694a0695 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -25,18 +25,15 @@ import sys import time from collections import defaultdict +from collections.abc import Collection, Container, Iterable, Sequence from contextlib import ExitStack from datetime import datetime, timedelta from functools import cache +from re import Pattern from typing import ( TYPE_CHECKING, Any, Callable, - Collection, - Container, - Iterable, - Pattern, - Sequence, Union, cast, overload, @@ -1089,7 +1086,7 @@ def _get_task_instances( if include_dependent_dags: # Recursively find external tasks indicated by ExternalTaskMarker - from airflow.sensors.external_task import ExternalTaskMarker + from airflow.providers.standard.sensors.external_task import ExternalTaskMarker query = tis if as_pk_tuple: @@ -1866,6 +1863,7 @@ def bulk_write_to_db( asset_op.add_dag_asset_references(orm_dags, orm_assets, session=session) asset_op.add_dag_asset_alias_references(orm_dags, orm_asset_aliases, session=session) asset_op.add_task_asset_references(orm_dags, orm_assets, session=session) + asset_op.add_asset_trigger_references(orm_assets, session=session) session.flush() @provide_session diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py index 5b57c7983ea14..1e98e4922e4f2 100644 --- a/airflow/models/dagbag.py +++ b/airflow/models/dagbag.py @@ -50,6 +50,7 @@ ) from airflow.listeners.listener import get_listener_manager from airflow.models.base import Base +from airflow.models.dagcode import DagCode from airflow.stats import Stats from airflow.utils import timezone from airflow.utils.dag_cycle_tester import check_cycle @@ -626,6 +627,9 @@ def _serialize_dag_capturing_errors(dag, session, processor_subdir): ) if dag_was_updated: DagBag._sync_perm_for_dag(dag, session=session) + else: + # Check and update DagCode + DagCode.update_source_code(dag) return [] except OperationalError: raise diff --git a/airflow/models/dagcode.py b/airflow/models/dagcode.py index e4e364571a7db..503aeba2b5e19 100644 --- a/airflow/models/dagcode.py +++ b/airflow/models/dagcode.py @@ -17,11 +17,10 @@ from __future__ import annotations import logging -import struct from typing import TYPE_CHECKING import uuid6 -from sqlalchemy import BigInteger, Column, ForeignKey, String, Text, select +from sqlalchemy import Column, ForeignKey, String, Text, select from sqlalchemy.dialects.mysql import MEDIUMTEXT from sqlalchemy.orm import relationship from sqlalchemy.sql.expression import literal @@ -29,15 +28,18 @@ from airflow.configuration import conf from airflow.exceptions import DagCodeNotFound -from airflow.models.base import Base +from airflow.models.base import ID_LEN, Base from airflow.utils import timezone from airflow.utils.file import open_maybe_zipped +from airflow.utils.hashlib_wrapper import md5 from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.sqlalchemy import UtcDateTime if TYPE_CHECKING: from sqlalchemy.orm import Session + from sqlalchemy.sql import Select + from airflow.models.dag import DAG from airflow.models.dag_version import DagVersion log = logging.getLogger(__name__) @@ -54,11 +56,12 @@ class DagCode(Base): 
__tablename__ = "dag_code" id = Column(UUIDType(binary=False), primary_key=True, default=uuid6.uuid7) - fileloc_hash = Column(BigInteger, nullable=False) + dag_id = Column(String(ID_LEN), nullable=False) fileloc = Column(String(2000), nullable=False) # The max length of fileloc exceeds the limit of indexing. - created_at = Column(UtcDateTime, nullable=False, default=timezone.utcnow) + last_updated = Column(UtcDateTime, nullable=False, default=timezone.utcnow, onupdate=timezone.utcnow) source_code = Column(Text().with_variant(MEDIUMTEXT(), "mysql"), nullable=False) + source_code_hash = Column(String(32), nullable=False) dag_version_id = Column( UUIDType(binary=False), ForeignKey("dag_version.id", ondelete="CASCADE"), nullable=False, unique=True ) @@ -67,9 +70,9 @@ class DagCode(Base): def __init__(self, dag_version, full_filepath: str, source_code: str | None = None): self.dag_version = dag_version self.fileloc = full_filepath - self.fileloc_hash = DagCode.dag_fileloc_hash(self.fileloc) - self.last_updated = timezone.utcnow() - self.source_code = source_code or DagCode.code(self.fileloc) + self.source_code = source_code or DagCode.code(self.dag_version.dag_id) + self.source_code_hash = self.dag_source_hash(self.source_code) + self.dag_id = dag_version.dag_id @classmethod @provide_session @@ -81,50 +84,37 @@ def write_code(cls, dag_version: DagVersion, fileloc: str, session: Session = NE :param session: ORM Session """ log.debug("Writing DAG file %s into DagCode table", fileloc) - dag_code = DagCode(dag_version, fileloc, cls._get_code_from_file(fileloc)) + dag_code = DagCode(dag_version, fileloc, cls.get_code_from_file(fileloc)) session.add(dag_code) log.debug("DAG file %s written into DagCode table", fileloc) return dag_code @classmethod @provide_session - def has_dag(cls, fileloc: str, session: Session = NEW_SESSION) -> bool: + def has_dag(cls, dag_id: str, session: Session = NEW_SESSION) -> bool: """ - Check a file exist in dag_code table. + Check a dag exists in dag code table. - :param fileloc: the file to check + :param dag_id: the dag_id of the DAG :param session: ORM Session """ - fileloc_hash = cls.dag_fileloc_hash(fileloc) return ( - session.scalars( - select(literal(True)).where(cls.fileloc_hash == fileloc_hash).limit(1) - ).one_or_none() + session.scalars(select(literal(True)).where(cls.dag_id == dag_id).limit(1)).one_or_none() is not None ) - @classmethod - def get_code_by_fileloc(cls, fileloc: str) -> str: - """ - Return source code for a given fileloc. - - :param fileloc: file path of a DAG - :return: source code as string - """ - return cls.code(fileloc) - @classmethod @provide_session - def code(cls, fileloc, session: Session = NEW_SESSION) -> str: + def code(cls, dag_id, session: Session = NEW_SESSION) -> str: """ Return source code for this DagCode object. 
:return: source code as string """ - return cls._get_code_from_db(fileloc, session) + return cls._get_code_from_db(dag_id, session) @staticmethod - def _get_code_from_file(fileloc): + def get_code_from_file(fileloc): try: with open_maybe_zipped(fileloc, "r") as f: code = f.read() @@ -137,12 +127,9 @@ def _get_code_from_file(fileloc): @classmethod @provide_session - def _get_code_from_db(cls, fileloc, session: Session = NEW_SESSION) -> str: + def _get_code_from_db(cls, dag_id, session: Session = NEW_SESSION) -> str: dag_code = session.scalar( - select(cls) - .where(cls.fileloc_hash == cls.dag_fileloc_hash(fileloc)) - .order_by(cls.created_at.desc()) - .limit(1) + select(cls).where(cls.dag_id == dag_id).order_by(cls.last_updated.desc()).limit(1) ) if not dag_code: raise DagCodeNotFound() @@ -151,21 +138,52 @@ def _get_code_from_db(cls, fileloc, session: Session = NEW_SESSION) -> str: return code @staticmethod - def dag_fileloc_hash(full_filepath: str) -> int: + def dag_source_hash(source: str) -> str: """ - Hashing file location for indexing. + Hash the source code of the DAG. - :param full_filepath: full filepath of DAG file - :return: hashed full_filepath + This is needed so we can update the source on code changes """ - # Hashing is needed because the length of fileloc is 2000 as an Airflow convention, - # which is over the limit of indexing. - import hashlib + return md5(source.encode("utf-8")).hexdigest() - # Only 7 bytes because MySQL BigInteger can hold only 8 bytes (signed). - return ( - struct.unpack( - ">Q", hashlib.sha1(full_filepath.encode("utf-8"), usedforsecurity=False).digest()[-8:] - )[0] - >> 8 - ) + @classmethod + def _latest_dagcode_select(cls, dag_id: str) -> Select: + """ + Get the select object to get the latest dagcode. + + :param dag_id: The DAG ID. + :return: The select object. + """ + return select(cls).where(cls.dag_id == dag_id).order_by(cls.last_updated.desc()).limit(1) + + @classmethod + @provide_session + def get_latest_dagcode(cls, dag_id: str, session: Session = NEW_SESSION) -> DagCode | None: + """ + Get the latest dagcode. + + :param dag_id: The DAG ID. + :param session: The database session. + :return: The latest dagcode or None if not found. + """ + return session.scalar(cls._latest_dagcode_select(dag_id)) + + @classmethod + @provide_session + def update_source_code(cls, dag: DAG, session: Session = NEW_SESSION) -> None: + """ + Check if the source code of the DAG has changed and update it if needed. + + :param dag: The DAG object. + :param session: The database session. 
+ :return: None + """ + latest_dagcode = cls.get_latest_dagcode(dag.dag_id, session) + if not latest_dagcode: + return + new_source_code = cls.get_code_from_file(dag.fileloc) + new_source_code_hash = cls.dag_source_hash(new_source_code) + if new_source_code_hash != latest_dagcode.source_code_hash: + latest_dagcode.source_code = new_source_code + latest_dagcode.source_code_hash = new_source_code_hash + session.merge(latest_dagcode) diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index b535d8729ea7b..e7fa37c82f93a 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -20,7 +20,8 @@ import itertools import os from collections import defaultdict -from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, NamedTuple, Sequence, TypeVar, overload +from collections.abc import Iterable, Iterator, Sequence +from typing import TYPE_CHECKING, Any, Callable, NamedTuple, TypeVar, overload import re2 from sqlalchemy import ( @@ -875,8 +876,9 @@ def recalculate(self) -> _UnfinishedStates: start_dttm = timezone.utcnow() self.last_scheduling_decision = start_dttm - with Stats.timer(f"dagrun.dependency-check.{self.dag_id}"), Stats.timer( - "dagrun.dependency-check", tags=self.stats_tags + with ( + Stats.timer(f"dagrun.dependency-check.{self.dag_id}"), + Stats.timer("dagrun.dependency-check", tags=self.stats_tags), ): dag = self.get_dag() info = self.task_instance_scheduling_decisions(session) diff --git a/airflow/models/expandinput.py b/airflow/models/expandinput.py index 7feb7f76a602e..8d86ec193eb4d 100644 --- a/airflow/models/expandinput.py +++ b/airflow/models/expandinput.py @@ -20,8 +20,8 @@ import collections.abc import functools import operator -from collections.abc import Sized -from typing import TYPE_CHECKING, Any, Dict, Iterable, Mapping, NamedTuple, Sequence, Union +from collections.abc import Iterable, Mapping, Sequence, Sized +from typing import TYPE_CHECKING, Any, NamedTuple, Union import attr @@ -41,7 +41,7 @@ # Each keyword argument to expand() can be an XComArg, sequence, or dict (not # any mapping since we need the value to be ordered). -OperatorExpandArgument = Union["MappedArgument", "XComArg", Sequence, Dict[str, Any]] +OperatorExpandArgument = Union["MappedArgument", "XComArg", Sequence, dict[str, Any]] # The single argument of expand_kwargs() can be an XComArg, or a list with each # element being either an XComArg or a dict. diff --git a/airflow/models/mappedoperator.py b/airflow/models/mappedoperator.py index 9a5c1b563ce4d..524415b848f62 100644 --- a/airflow/models/mappedoperator.py +++ b/airflow/models/mappedoperator.py @@ -21,7 +21,8 @@ import contextlib import copy import warnings -from typing import TYPE_CHECKING, Any, ClassVar, Collection, Iterable, Iterator, Mapping, Sequence, Union +from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence +from typing import TYPE_CHECKING, Any, ClassVar, Union import attr import methodtools @@ -61,7 +62,6 @@ if TYPE_CHECKING: import datetime - from typing import List import jinja2 # Slow import. 
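The DagCode changes above replace the fileloc hash with an MD5 hash of the DAG source, so the stored source is rewritten only when the file content actually changes. A minimal sketch of that compare-and-update idea, using plain hashlib rather than Airflow's md5 wrapper and a stand-in class instead of the ORM model:

import hashlib


def source_hash(source: str) -> str:
    """Hash DAG source; the 32-character hex digest fits the column added by the migration."""
    return hashlib.md5(source.encode("utf-8")).hexdigest()


class FakeDagCodeRow:
    """Stand-in for the dag_code row, just to show the update rule."""

    def __init__(self, source_code: str) -> None:
        self.source_code = source_code
        self.source_code_hash = source_hash(source_code)


def update_if_changed(row: FakeDagCodeRow, new_source: str) -> bool:
    """Rewrite the stored source only when the hash differs; return True if an update happened."""
    new_hash = source_hash(new_source)
    if new_hash == row.source_code_hash:
        return False
    row.source_code = new_source
    row.source_code_hash = new_hash
    return True


row = FakeDagCodeRow("print('v1')")
assert update_if_changed(row, "print('v1')") is False  # unchanged file, no write
assert update_if_changed(row, "print('v2')") is True  # content changed, row updated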
import pendulum @@ -87,7 +87,7 @@ from airflow.utils.task_group import TaskGroup from airflow.utils.trigger_rule import TriggerRule - TaskStateChangeCallbackAttrType = Union[None, TaskStateChangeCallback, List[TaskStateChangeCallback]] + TaskStateChangeCallbackAttrType = Union[None, TaskStateChangeCallback, list[TaskStateChangeCallback]] ValidationSource = Union[Literal["expand"], Literal["partial"]] diff --git a/airflow/models/param.py b/airflow/models/param.py index 55b4ad0f2b6b4..e6150ee50cba5 100644 --- a/airflow/models/param.py +++ b/airflow/models/param.py @@ -20,7 +20,8 @@ import copy import json import logging -from typing import TYPE_CHECKING, Any, ClassVar, ItemsView, Iterable, MutableMapping, ValuesView +from collections.abc import ItemsView, Iterable, MutableMapping, ValuesView +from typing import TYPE_CHECKING, Any, ClassVar from airflow.exceptions import AirflowException, ParamValidationError from airflow.utils.mixins import ResolveMixin diff --git a/airflow/models/skipmixin.py b/airflow/models/skipmixin.py index a67c7cf310ba4..87cd4b2d931e7 100644 --- a/airflow/models/skipmixin.py +++ b/airflow/models/skipmixin.py @@ -17,8 +17,9 @@ # under the License. from __future__ import annotations +from collections.abc import Iterable, Sequence from types import GeneratorType -from typing import TYPE_CHECKING, Iterable, Sequence +from typing import TYPE_CHECKING from sqlalchemy import update diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index a176e0282b603..53e1925234ecb 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -27,11 +27,12 @@ import os import signal from collections import defaultdict +from collections.abc import Collection, Generator, Iterable, Mapping from contextlib import nullcontext from datetime import timedelta from enum import Enum from functools import cache -from typing import TYPE_CHECKING, Any, Callable, Collection, Generator, Iterable, Mapping, Tuple +from typing import TYPE_CHECKING, Any, Callable from urllib.parse import quote import dill @@ -132,7 +133,6 @@ ExtendedJSON, UtcDateTime, tuple_in_condition, - with_row_locks, ) from airflow.utils.state import DagRunState, State, TaskInstanceState from airflow.utils.task_group import MappedTaskGroup @@ -1672,8 +1672,6 @@ def _handle_reschedule( ti = _coalesce_to_orm_ti(ti=ti, session=session) - from airflow.models.dagrun import DagRun # Avoid circular import - ti.refresh_from_db(session) if TYPE_CHECKING: @@ -1682,16 +1680,16 @@ def _handle_reschedule( ti.end_date = timezone.utcnow() ti.set_duration() - # Lock DAG run to be sure not to get into a deadlock situation when trying to insert - # TaskReschedule which apparently also creates lock on corresponding DagRun entity - with_row_locks( - session.query(DagRun).filter_by( - dag_id=ti.dag_id, - run_id=ti.run_id, - ), - session=session, - ).one() - # Log reschedule request + # set state + ti.state = TaskInstanceState.UP_FOR_RESCHEDULE + + ti.clear_next_method_args() + + session.merge(ti) + session.commit() + + # we add this in separate commit to reduce likelihood of deadlock + # see https://github.com/apache/airflow/pull/21362 for more info session.add( TaskReschedule( ti.task_id, @@ -1704,13 +1702,6 @@ def _handle_reschedule( ti.map_index, ) ) - - # set state - ti.state = TaskInstanceState.UP_FOR_RESCHEDULE - - ti.clear_next_method_args() - - session.merge(ti) session.commit() return ti @@ -2891,8 +2882,9 @@ def signal_handler(signum, frame): if not self.next_method: self.clear_xcom_data() - with 
Stats.timer(f"dag.{self.task.dag_id}.{self.task.task_id}.duration"), Stats.timer( - "task.duration", tags=self.stats_tags + with ( + Stats.timer(f"dag.{self.task.dag_id}.{self.task.task_id}.duration"), + Stats.timer("task.duration", tags=self.stats_tags), ): # Set the validated/merged params on the task object. self.task.params = context["params"] @@ -3742,7 +3734,7 @@ def _is_further_mapped_inside(operator: Operator, container: TaskGroup) -> bool: # State of the task instance. # Stores string version of the task state. -TaskInstanceStateType = Tuple[TaskInstanceKey, TaskInstanceState] +TaskInstanceStateType = tuple[TaskInstanceKey, TaskInstanceState] class SimpleTaskInstance: diff --git a/airflow/models/taskmap.py b/airflow/models/taskmap.py index 061f3b22d4805..478f09e0f1148 100644 --- a/airflow/models/taskmap.py +++ b/airflow/models/taskmap.py @@ -21,7 +21,8 @@ import collections.abc import enum -from typing import TYPE_CHECKING, Any, Collection +from collections.abc import Collection +from typing import TYPE_CHECKING, Any from sqlalchemy import CheckConstraint, Column, ForeignKeyConstraint, Integer, String diff --git a/airflow/models/trigger.py b/airflow/models/trigger.py index 27868daf083f6..9d9d78086c811 100644 --- a/airflow/models/trigger.py +++ b/airflow/models/trigger.py @@ -17,8 +17,9 @@ from __future__ import annotations import datetime +from collections.abc import Iterable from traceback import format_exception -from typing import TYPE_CHECKING, Any, Iterable +from typing import TYPE_CHECKING, Any from sqlalchemy import Column, Integer, String, Text, delete, func, or_, select, update from sqlalchemy.orm import relationship, selectinload @@ -38,7 +39,6 @@ from sqlalchemy.orm import Session from sqlalchemy.sql import Select - from airflow.serialization.pydantic.trigger import TriggerPydantic from airflow.triggers.base import BaseTrigger @@ -88,7 +88,7 @@ def __init__( ) -> None: super().__init__() self.classpath = classpath - self.encrypted_kwargs = self._encrypt_kwargs(kwargs) + self.encrypted_kwargs = self.encrypt_kwargs(kwargs) self.created_date = created_date or timezone.utcnow() @property @@ -99,10 +99,10 @@ def kwargs(self) -> dict[str, Any]: @kwargs.setter def kwargs(self, kwargs: dict[str, Any]) -> None: """Set the encrypted kwargs of the trigger.""" - self.encrypted_kwargs = self._encrypt_kwargs(kwargs) + self.encrypted_kwargs = self.encrypt_kwargs(kwargs) @staticmethod - def _encrypt_kwargs(kwargs: dict[str, Any]) -> str: + def encrypt_kwargs(kwargs: dict[str, Any]) -> str: """Encrypt the kwargs of the trigger.""" import json @@ -140,8 +140,7 @@ def rotate_fernet_key(self): @classmethod @internal_api_call - @provide_session - def from_object(cls, trigger: BaseTrigger, session=NEW_SESSION) -> Trigger | TriggerPydantic: + def from_object(cls, trigger: BaseTrigger) -> Trigger: """Alternative constructor that creates a trigger row based directly off of a Trigger object.""" classpath, kwargs = trigger.serialize() return cls(classpath=classpath, kwargs=kwargs) diff --git a/airflow/models/xcom.py b/airflow/models/xcom.py index 45208e353bdc1..098d2d64db5f9 100644 --- a/airflow/models/xcom.py +++ b/airflow/models/xcom.py @@ -20,7 +20,8 @@ import inspect import json import logging -from typing import TYPE_CHECKING, Any, Iterable, cast +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any, cast from sqlalchemy import ( JSON, @@ -34,6 +35,7 @@ select, text, ) +from sqlalchemy.dialects import postgresql from sqlalchemy.ext.associationproxy import 
association_proxy from sqlalchemy.orm import Query, reconstructor, relationship @@ -79,7 +81,7 @@ class BaseXCom(TaskInstanceDependencies, LoggingMixin): dag_id = Column(String(ID_LEN, **COLLATION_ARGS), nullable=False) run_id = Column(String(ID_LEN, **COLLATION_ARGS), nullable=False) - value = Column(JSON) + value = Column(JSON().with_variant(postgresql.JSONB, "postgresql")) timestamp = Column(UtcDateTime, default=timezone.utcnow, nullable=False) __table_args__ = ( diff --git a/airflow/models/xcom_arg.py b/airflow/models/xcom_arg.py index bd8f4555b50c8..df643d22b2167 100644 --- a/airflow/models/xcom_arg.py +++ b/airflow/models/xcom_arg.py @@ -20,7 +20,8 @@ import contextlib import inspect import itertools -from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Mapping, Sequence, Union, overload +from collections.abc import Iterable, Iterator, Mapping, Sequence +from typing import TYPE_CHECKING, Any, Callable, Union, overload from sqlalchemy import func, or_, select diff --git a/airflow/notifications/basenotifier.py b/airflow/notifications/basenotifier.py index 11d7ee17fe939..eaac6d11df36d 100644 --- a/airflow/notifications/basenotifier.py +++ b/airflow/notifications/basenotifier.py @@ -18,7 +18,8 @@ from __future__ import annotations from abc import abstractmethod -from typing import TYPE_CHECKING, Sequence +from collections.abc import Sequence +from typing import TYPE_CHECKING from airflow.template.templater import Templater from airflow.utils.context import context_merge diff --git a/airflow/operators/branch.py b/airflow/operators/branch.py index 0085bfa5af108..088aea23fd338 100644 --- a/airflow/operators/branch.py +++ b/airflow/operators/branch.py @@ -19,7 +19,8 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterable +from collections.abc import Iterable +from typing import TYPE_CHECKING from airflow.models.baseoperator import BaseOperator from airflow.models.skipmixin import SkipMixin diff --git a/airflow/operators/email.py b/airflow/operators/email.py index 98af60afe4afc..e2ae26739c10f 100644 --- a/airflow/operators/email.py +++ b/airflow/operators/email.py @@ -17,7 +17,8 @@ # under the License. 
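One behavioural change in the XCom hunk above: the value column now compiles to JSONB on PostgreSQL via SQLAlchemy's with_variant, while other backends keep plain JSON. A small illustrative table definition showing that pattern (a toy model, not the actual XCom class):

from sqlalchemy import JSON, Column, Integer
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Payload(Base):
    """Toy model: `value` becomes JSONB on PostgreSQL and stays JSON elsewhere."""

    __tablename__ = "payload"

    id = Column(Integer, primary_key=True)
    # with_variant swaps in a dialect-specific type only for the named backend
    value = Column(JSON().with_variant(postgresql.JSONB, "postgresql"))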
from __future__ import annotations -from typing import TYPE_CHECKING, Any, Sequence +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any from airflow.models.baseoperator import BaseOperator from airflow.utils.email import send_email diff --git a/airflow/operators/latest_only.py b/airflow/operators/latest_only.py index 6f2c0288d90d6..ae15ee017b046 100644 --- a/airflow/operators/latest_only.py +++ b/airflow/operators/latest_only.py @@ -19,7 +19,8 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterable +from collections.abc import Iterable +from typing import TYPE_CHECKING import pendulum diff --git a/airflow/plugins_manager.py b/airflow/plugins_manager.py index 881e07a81ed67..3e34f29cc1070 100644 --- a/airflow/plugins_manager.py +++ b/airflow/plugins_manager.py @@ -27,8 +27,9 @@ import os import sys import types +from collections.abc import Iterable from pathlib import Path -from typing import TYPE_CHECKING, Any, Iterable +from typing import TYPE_CHECKING, Any from airflow import settings from airflow.configuration import conf @@ -47,8 +48,8 @@ import importlib_metadata as metadata except ImportError: from importlib import metadata # type: ignore[no-redef] + from collections.abc import Generator from types import ModuleType - from typing import Generator from airflow.listeners.listener import ListenerManager from airflow.timetables.base import Timetable diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py index 5d38454b18917..8d9f93734d7eb 100644 --- a/airflow/providers_manager.py +++ b/airflow/providers_manager.py @@ -27,11 +27,12 @@ import os import traceback import warnings +from collections.abc import MutableMapping from dataclasses import dataclass from functools import wraps from importlib.resources import files as resource_files from time import perf_counter -from typing import TYPE_CHECKING, Any, Callable, MutableMapping, NamedTuple, TypeVar +from typing import TYPE_CHECKING, Any, Callable, NamedTuple, TypeVar from packaging.utils import canonicalize_name diff --git a/airflow/reproducible_build.yaml b/airflow/reproducible_build.yaml index 1c13b028bb4f0..5f2d030fd0b63 100644 --- a/airflow/reproducible_build.yaml +++ b/airflow/reproducible_build.yaml @@ -1,2 +1,2 @@ release-notes-hash: c68f3fa23f84c7fc270d73baaa2cc18d -source-date-epoch: 1731415143 +source-date-epoch: 1732690252 diff --git a/airflow/sensors/base.py b/airflow/sensors/base.py index 3e5a8565e50c0..f117b97d0ce5a 100644 --- a/airflow/sensors/base.py +++ b/airflow/sensors/base.py @@ -22,8 +22,9 @@ import hashlib import time import traceback +from collections.abc import Iterable from datetime import timedelta -from typing import TYPE_CHECKING, Any, Callable, Iterable +from typing import TYPE_CHECKING, Any, Callable from sqlalchemy import select @@ -56,7 +57,7 @@ _MYSQL_TIMESTAMP_MAX = datetime.datetime(2038, 1, 19, 3, 14, 7, tzinfo=timezone.utc) -@functools.lru_cache(maxsize=None) +@functools.cache def _is_metadatabase_mysql() -> bool: if InternalApiConfig.get_use_internal_api(): return False diff --git a/airflow/serialization/json_schema.py b/airflow/serialization/json_schema.py index 32f3b9e0d108c..c4412a49ffcfc 100644 --- a/airflow/serialization/json_schema.py +++ b/airflow/serialization/json_schema.py @@ -20,7 +20,8 @@ from __future__ import annotations import pkgutil -from typing import TYPE_CHECKING, Iterable +from collections.abc import Iterable +from typing import TYPE_CHECKING from airflow.exceptions import AirflowException from airflow.settings 
import json diff --git a/airflow/serialization/pydantic/asset.py b/airflow/serialization/pydantic/asset.py index 611730dd92e47..0e5623099ea49 100644 --- a/airflow/serialization/pydantic/asset.py +++ b/airflow/serialization/pydantic/asset.py @@ -15,7 +15,7 @@ # specific language governing permissions and limitations # under the License. from datetime import datetime -from typing import List, Optional +from typing import Optional from pydantic import BaseModel as BaseModelPydantic, ConfigDict @@ -52,8 +52,8 @@ class AssetPydantic(BaseModelPydantic): created_at: datetime updated_at: datetime - consuming_dags: List[DagScheduleAssetReferencePydantic] - producing_tasks: List[TaskOutletAssetReferencePydantic] + consuming_dags: list[DagScheduleAssetReferencePydantic] + producing_tasks: list[TaskOutletAssetReferencePydantic] model_config = ConfigDict(from_attributes=True) diff --git a/airflow/serialization/pydantic/dag.py b/airflow/serialization/pydantic/dag.py index f222808c3d0bc..5a1199887f99f 100644 --- a/airflow/serialization/pydantic/dag.py +++ b/airflow/serialization/pydantic/dag.py @@ -18,7 +18,7 @@ import pathlib from datetime import datetime -from typing import Annotated, Any, List, Optional +from typing import Annotated, Any, Optional from pydantic import ( BaseModel as BaseModelPydantic, @@ -87,8 +87,8 @@ class DagModelPydantic(BaseModelPydantic): default_view: Optional[str] timetable_summary: Optional[str] timetable_description: Optional[str] - tags: List[DagTagPydantic] # noqa: UP006 - dag_owner_links: List[DagOwnerAttributesPydantic] # noqa: UP006 + tags: list[DagTagPydantic] + dag_owner_links: list[DagOwnerAttributesPydantic] max_active_tasks: int max_active_runs: Optional[int] diff --git a/airflow/serialization/pydantic/dag_run.py b/airflow/serialization/pydantic/dag_run.py index c06f098766584..e9409e3a8ac19 100644 --- a/airflow/serialization/pydantic/dag_run.py +++ b/airflow/serialization/pydantic/dag_run.py @@ -16,8 +16,9 @@ # under the License. from __future__ import annotations +from collections.abc import Iterable from datetime import datetime -from typing import TYPE_CHECKING, Iterable, List, Optional +from typing import TYPE_CHECKING, Optional from uuid import UUID from pydantic import BaseModel as BaseModelPydantic, ConfigDict @@ -56,7 +57,7 @@ class DagRunPydantic(BaseModelPydantic): dag_version_id: Optional[UUID] updated_at: Optional[datetime] dag: Optional[PydanticDag] - consumed_asset_events: List[AssetEventPydantic] # noqa: UP006 + consumed_asset_events: list[AssetEventPydantic] log_template_id: Optional[int] triggered_by: Optional[DagRunTriggeredByType] diff --git a/airflow/serialization/pydantic/taskinstance.py b/airflow/serialization/pydantic/taskinstance.py index 685334426d8bc..87d6f48f111b8 100644 --- a/airflow/serialization/pydantic/taskinstance.py +++ b/airflow/serialization/pydantic/taskinstance.py @@ -16,8 +16,9 @@ # under the License. from __future__ import annotations +from collections.abc import Iterable from datetime import datetime -from typing import TYPE_CHECKING, Annotated, Any, Iterable, Optional +from typing import TYPE_CHECKING, Annotated, Any, Optional from uuid import UUID from pydantic import ( diff --git a/airflow/serialization/pydantic/trigger.py b/airflow/serialization/pydantic/trigger.py index 19a8f6b70c007..4c120148aff39 100644 --- a/airflow/serialization/pydantic/trigger.py +++ b/airflow/serialization/pydantic/trigger.py @@ -16,7 +16,7 @@ # under the License. 
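Several hunks in this patch swap functools.lru_cache(maxsize=None) for functools.cache; the two are equivalent (cache is simply an unbounded lru_cache, available since Python 3.9), so this is a readability change rather than a behavioural one. A tiny example of the decorator as now used:

import functools


@functools.cache  # same behaviour as @functools.lru_cache(maxsize=None)
def fib(n: int) -> int:
    """Naive recursion made cheap by memoisation."""
    return n if n < 2 else fib(n - 1) + fib(n - 2)


print(fib(50))  # fast thanks to the cache
print(fib.cache_info())  # hits/misses, same introspection API as lru_cache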
import datetime -from typing import Any, Dict, Optional +from typing import Any, Optional from pydantic import BaseModel as BaseModelPydantic, ConfigDict @@ -44,20 +44,20 @@ def __init__(self, **kwargs) -> None: # created_date if "kwargs" in kwargs: self.classpath = kwargs.pop("classpath") - self.encrypted_kwargs = Trigger._encrypt_kwargs(kwargs.pop("kwargs")) + self.encrypted_kwargs = Trigger.encrypt_kwargs(kwargs.pop("kwargs")) self.created_date = kwargs.pop("created_date", timezone.utcnow()) super().__init__(**kwargs) @property - def kwargs(self) -> Dict[str, Any]: + def kwargs(self) -> dict[str, Any]: """Return the decrypted kwargs of the trigger.""" from airflow.models import Trigger return Trigger._decrypt_kwargs(self.encrypted_kwargs) @kwargs.setter - def kwargs(self, kwargs: Dict[str, Any]) -> None: + def kwargs(self, kwargs: dict[str, Any]) -> None: """Set the encrypted kwargs of the trigger.""" from airflow.models import Trigger - self.encrypted_kwargs = Trigger._encrypt_kwargs(kwargs) + self.encrypted_kwargs = Trigger.encrypt_kwargs(kwargs) diff --git a/airflow/serialization/serde.py b/airflow/serialization/serde.py index ec779c631c994..7d1b583ce9a65 100644 --- a/airflow/serialization/serde.py +++ b/airflow/serialization/serde.py @@ -24,7 +24,8 @@ import sys from fnmatch import fnmatch from importlib import import_module -from typing import TYPE_CHECKING, Any, Pattern, TypeVar, Union, cast +from re import Pattern +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast import attr import re2 @@ -301,14 +302,14 @@ def _match(classname: str) -> bool: return _match_glob(classname) or _match_regexp(classname) -@functools.lru_cache(maxsize=None) +@functools.cache def _match_glob(classname: str): """Check if the given classname matches a pattern from allowed_deserialization_classes using glob syntax.""" patterns = _get_patterns() return any(fnmatch(classname, p.pattern) for p in patterns) -@functools.lru_cache(maxsize=None) +@functools.cache def _match_regexp(classname: str): """Check if the given classname matches a pattern from allowed_deserialization_classes_regexp using regexp.""" patterns = _get_regexp_patterns() @@ -393,12 +394,12 @@ def _register(): log.debug("loading serializers took %.3f seconds", timer.duration) -@functools.lru_cache(maxsize=None) +@functools.cache def _get_patterns() -> list[Pattern]: return [re2.compile(p) for p in conf.get("core", "allowed_deserialization_classes").split()] -@functools.lru_cache(maxsize=None) +@functools.cache def _get_regexp_patterns() -> list[Pattern]: return [re2.compile(p) for p in conf.get("core", "allowed_deserialization_classes_regexp").split()] diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py index 61d851aaed118..2e1a5cd14f919 100644 --- a/airflow/serialization/serialized_objects.py +++ b/airflow/serialization/serialized_objects.py @@ -24,10 +24,11 @@ import itertools import logging import weakref +from collections.abc import Collection, Iterable, Mapping from functools import cache from inspect import signature from textwrap import dedent -from typing import TYPE_CHECKING, Any, Collection, Iterable, Mapping, NamedTuple, Union, cast +from typing import TYPE_CHECKING, Any, NamedTuple, Union, cast import attrs import lazy_object_proxy @@ -110,9 +111,9 @@ HAS_KUBERNETES: bool try: - from kubernetes.client import models as k8s # noqa: TCH004 + from kubernetes.client import models as k8s # noqa: TC004 - from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator # 
noqa: TCH004 + from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator # noqa: TC004 except ImportError: pass @@ -120,11 +121,11 @@ _OPERATOR_EXTRA_LINKS: set[str] = { "airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunLink", - "airflow.sensors.external_task.ExternalDagLink", + "airflow.providers.standard.sensors.external_task.ExternalDagLink", # Deprecated names, so that existing serialized dags load straight away. - "airflow.sensors.external_task.ExternalTaskSensorLink", + "airflow.providers.standard.sensors.external_task.ExternalTaskSensorLink", "airflow.operators.dagrun_operator.TriggerDagRunLink", - "airflow.sensors.external_task_sensor.ExternalTaskSensorLink", + "airflow.providers.standard.sensors.external_task_sensor.ExternalTaskSensorLink", } @@ -1021,7 +1022,7 @@ class DependencyDetector: def detect_task_dependencies(task: Operator) -> list[DagDependency]: """Detect dependencies caused by tasks.""" from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator - from airflow.sensors.external_task import ExternalTaskSensor + from airflow.providers.standard.sensors.external_task import ExternalTaskSensor deps = [] if isinstance(task, TriggerDagRunOperator): diff --git a/airflow/settings.py b/airflow/settings.py index 5b458efcba473..d4d6346f3f131 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -31,7 +31,7 @@ import pluggy from packaging.version import Version from sqlalchemy import create_engine, exc, text -from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession as SAAsyncSession, create_async_engine from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.pool import NullPool @@ -111,7 +111,7 @@ # this is achieved by the Session factory above. 
NonScopedSession: Callable[..., SASession] async_engine: AsyncEngine -create_async_session: Callable[..., AsyncSession] +AsyncSession: Callable[..., SAAsyncSession] # The JSON library to use for DAG Serialization and De-Serialization json = json @@ -134,7 +134,7 @@ } -@functools.lru_cache(maxsize=None) +@functools.cache def _get_rich_console(file): # Delay imports until we need it import rich.console @@ -469,7 +469,7 @@ def configure_orm(disable_connection_pool=False, pool_class=None): global Session global engine global async_engine - global create_async_session + global AsyncSession global NonScopedSession if os.environ.get("_AIRFLOW_SKIP_DB_TESTS") == "true": @@ -498,11 +498,11 @@ def configure_orm(disable_connection_pool=False, pool_class=None): engine = create_engine(SQL_ALCHEMY_CONN, connect_args=connect_args, **engine_args, future=True) async_engine = create_async_engine(SQL_ALCHEMY_CONN_ASYNC, future=True) - create_async_session = sessionmaker( + AsyncSession = sessionmaker( bind=async_engine, autocommit=False, autoflush=False, - class_=AsyncSession, + class_=SAAsyncSession, expire_on_commit=False, ) mask_secret(engine.url.password) diff --git a/airflow/template/templater.py b/airflow/template/templater.py index ff0a4e51c1958..e81c0877c23d5 100644 --- a/airflow/template/templater.py +++ b/airflow/template/templater.py @@ -17,8 +17,9 @@ from __future__ import annotations +from collections.abc import Collection, Iterable, Sequence from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Collection, Iterable, Sequence +from typing import TYPE_CHECKING, Any from airflow.io.path import ObjectStoragePath from airflow.utils.helpers import render_template_as_native, render_template_to_string diff --git a/airflow/ti_deps/deps/base_ti_dep.py b/airflow/ti_deps/deps/base_ti_dep.py index f2fe5ee89fd32..0e8a6bd395d74 100644 --- a/airflow/ti_deps/deps/base_ti_dep.py +++ b/airflow/ti_deps/deps/base_ti_dep.py @@ -17,7 +17,8 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Any, Iterator, NamedTuple +from collections.abc import Iterator +from typing import TYPE_CHECKING, Any, NamedTuple from airflow.ti_deps.dep_context import DepContext from airflow.utils.session import provide_session diff --git a/airflow/ti_deps/deps/trigger_rule_dep.py b/airflow/ti_deps/deps/trigger_rule_dep.py index 76291c8a057f9..ccd11ab303e66 100644 --- a/airflow/ti_deps/deps/trigger_rule_dep.py +++ b/airflow/ti_deps/deps/trigger_rule_dep.py @@ -20,7 +20,8 @@ import collections.abc import functools from collections import Counter -from typing import TYPE_CHECKING, Iterator, KeysView, NamedTuple +from collections.abc import Iterator, KeysView +from typing import TYPE_CHECKING, NamedTuple from sqlalchemy import and_, func, or_, select diff --git a/airflow/timetables/base.py b/airflow/timetables/base.py index 1a076747ec597..60b1c141209fe 100644 --- a/airflow/timetables/base.py +++ b/airflow/timetables/base.py @@ -16,7 +16,8 @@ # under the License. 
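In the settings.py hunks above, create_async_session is renamed to AsyncSession: the module-level name is now a sessionmaker bound to the async engine, with SQLAlchemy's AsyncSession class imported as SAAsyncSession to avoid the clash. A minimal sketch of how such a factory can be built and consumed; the SQLite DSN is a placeholder (and assumes the aiosqlite driver is installed), whereas Airflow derives its own URL from SQL_ALCHEMY_CONN_ASYNC:

import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession as SAAsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker

# Placeholder connection string for the sketch only
async_engine = create_async_engine("sqlite+aiosqlite:///:memory:")

AsyncSession = sessionmaker(
    bind=async_engine,
    class_=SAAsyncSession,  # produce async sessions instead of the default Session
    expire_on_commit=False,
    autoflush=False,
)


async def main() -> None:
    # Calling the factory yields an async session usable as an async context manager
    async with AsyncSession() as session:
        result = await session.execute(text("SELECT 1"))
        print(result.scalar_one())


asyncio.run(main())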
from __future__ import annotations -from typing import TYPE_CHECKING, Any, Iterator, NamedTuple, Sequence +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Any, NamedTuple from airflow.sdk.definitions.asset import BaseAsset from airflow.typing_compat import Protocol, runtime_checkable diff --git a/airflow/timetables/events.py b/airflow/timetables/events.py index c8fd65c2a9884..567a0e351e437 100644 --- a/airflow/timetables/events.py +++ b/airflow/timetables/events.py @@ -17,7 +17,8 @@ from __future__ import annotations import itertools -from typing import TYPE_CHECKING, Iterable +from collections.abc import Iterable +from typing import TYPE_CHECKING import pendulum diff --git a/airflow/timetables/simple.py b/airflow/timetables/simple.py index 8ce498c9e0491..f282c7fe67f8a 100644 --- a/airflow/timetables/simple.py +++ b/airflow/timetables/simple.py @@ -16,7 +16,8 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Any, Collection, Sequence +from collections.abc import Collection, Sequence +from typing import TYPE_CHECKING, Any from airflow.sdk.definitions.asset import AssetAlias, AssetAliasCondition from airflow.timetables.base import DagRunInfo, DataInterval, Timetable diff --git a/airflow/traces/otel_tracer.py b/airflow/traces/otel_tracer.py index f85d3856ae7b3..b172645850353 100644 --- a/airflow/traces/otel_tracer.py +++ b/airflow/traces/otel_tracer.py @@ -275,10 +275,7 @@ def get_otel_tracer(cls) -> OtelTrace: protocol = "https" if ssl_active else "http" endpoint = f"{protocol}://{host}:{port}/v1/traces" log.info("[OTLPSpanExporter] Connecting to OpenTelemetry Collector at %s", endpoint) - return OtelTrace( - span_exporter=OTLPSpanExporter(endpoint=endpoint, headers={"Content-Type": "application/json"}), - tag_string=tag_string, - ) + return OtelTrace(span_exporter=OTLPSpanExporter(endpoint=endpoint), tag_string=tag_string) class AirflowOtelIdGenerator(IdGenerator): diff --git a/airflow/triggers/base.py b/airflow/triggers/base.py index bc1da861f3c2d..5d2fa10b44f6d 100644 --- a/airflow/triggers/base.py +++ b/airflow/triggers/base.py @@ -18,9 +18,10 @@ import abc import logging +from collections.abc import AsyncIterator from dataclasses import dataclass from datetime import timedelta -from typing import TYPE_CHECKING, Any, AsyncIterator +from typing import TYPE_CHECKING, Any from airflow.callbacks.callback_requests import TaskCallbackRequest from airflow.callbacks.database_callback_sink import DatabaseCallbackSink @@ -115,11 +116,15 @@ async def cleanup(self) -> None: and handle it appropriately (in async-compatible way). 
""" - def __repr__(self) -> str: - classpath, kwargs = self.serialize() + @staticmethod + def repr(classpath: str, kwargs: dict[str, Any]): kwargs_str = ", ".join(f"{k}={v}" for k, v in kwargs.items()) return f"<{classpath} {kwargs_str}>" + def __repr__(self) -> str: + classpath, kwargs = self.serialize() + return self.repr(classpath, kwargs) + class TriggerEvent: """ diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index a8a5ac9b9784e..cb7f5c7a53724 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -14,7 +14,9 @@ import { DagsService, DashboardService, EventLogService, + ExtraLinksService, ImportErrorService, + JobService, MonitorService, PluginService, PoolService, @@ -761,122 +763,54 @@ export const UseEventLogServiceGetEventLogsKeyFn = ( }, ]), ]; -export type ImportErrorServiceGetImportErrorDefaultResponse = Awaited< - ReturnType +export type ExtraLinksServiceGetExtraLinksDefaultResponse = Awaited< + ReturnType >; -export type ImportErrorServiceGetImportErrorQueryResult< - TData = ImportErrorServiceGetImportErrorDefaultResponse, +export type ExtraLinksServiceGetExtraLinksQueryResult< + TData = ExtraLinksServiceGetExtraLinksDefaultResponse, TError = unknown, > = UseQueryResult; -export const useImportErrorServiceGetImportErrorKey = - "ImportErrorServiceGetImportError"; -export const UseImportErrorServiceGetImportErrorKeyFn = ( +export const useExtraLinksServiceGetExtraLinksKey = + "ExtraLinksServiceGetExtraLinks"; +export const UseExtraLinksServiceGetExtraLinksKeyFn = ( { - importErrorId, + dagId, + dagRunId, + taskId, }: { - importErrorId: number; + dagId: string; + dagRunId: string; + taskId: string; }, queryKey?: Array, ) => [ - useImportErrorServiceGetImportErrorKey, - ...(queryKey ?? [{ importErrorId }]), + useExtraLinksServiceGetExtraLinksKey, + ...(queryKey ?? [{ dagId, dagRunId, taskId }]), ]; -export type ImportErrorServiceGetImportErrorsDefaultResponse = Awaited< - ReturnType +export type TaskInstanceServiceGetExtraLinksDefaultResponse = Awaited< + ReturnType >; -export type ImportErrorServiceGetImportErrorsQueryResult< - TData = ImportErrorServiceGetImportErrorsDefaultResponse, +export type TaskInstanceServiceGetExtraLinksQueryResult< + TData = TaskInstanceServiceGetExtraLinksDefaultResponse, TError = unknown, > = UseQueryResult; -export const useImportErrorServiceGetImportErrorsKey = - "ImportErrorServiceGetImportErrors"; -export const UseImportErrorServiceGetImportErrorsKeyFn = ( +export const useTaskInstanceServiceGetExtraLinksKey = + "TaskInstanceServiceGetExtraLinks"; +export const UseTaskInstanceServiceGetExtraLinksKeyFn = ( { - limit, - offset, - orderBy, + dagId, + dagRunId, + taskId, }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, + dagId: string; + dagRunId: string; + taskId: string; + }, queryKey?: Array, ) => [ - useImportErrorServiceGetImportErrorsKey, - ...(queryKey ?? [{ limit, offset, orderBy }]), + useTaskInstanceServiceGetExtraLinksKey, + ...(queryKey ?? 
[{ dagId, dagRunId, taskId }]), ]; -export type PluginServiceGetPluginsDefaultResponse = Awaited< - ReturnType ->; -export type PluginServiceGetPluginsQueryResult< - TData = PluginServiceGetPluginsDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const usePluginServiceGetPluginsKey = "PluginServiceGetPlugins"; -export const UsePluginServiceGetPluginsKeyFn = ( - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, - queryKey?: Array, -) => [usePluginServiceGetPluginsKey, ...(queryKey ?? [{ limit, offset }])]; -export type PoolServiceGetPoolDefaultResponse = Awaited< - ReturnType ->; -export type PoolServiceGetPoolQueryResult< - TData = PoolServiceGetPoolDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const usePoolServiceGetPoolKey = "PoolServiceGetPool"; -export const UsePoolServiceGetPoolKeyFn = ( - { - poolName, - }: { - poolName: string; - }, - queryKey?: Array, -) => [usePoolServiceGetPoolKey, ...(queryKey ?? [{ poolName }])]; -export type PoolServiceGetPoolsDefaultResponse = Awaited< - ReturnType ->; -export type PoolServiceGetPoolsQueryResult< - TData = PoolServiceGetPoolsDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const usePoolServiceGetPoolsKey = "PoolServiceGetPools"; -export const UsePoolServiceGetPoolsKeyFn = ( - { - limit, - offset, - orderBy, - }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: Array, -) => [usePoolServiceGetPoolsKey, ...(queryKey ?? [{ limit, offset, orderBy }])]; -export type ProviderServiceGetProvidersDefaultResponse = Awaited< - ReturnType ->; -export type ProviderServiceGetProvidersQueryResult< - TData = ProviderServiceGetProvidersDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useProviderServiceGetProvidersKey = "ProviderServiceGetProviders"; -export const UseProviderServiceGetProvidersKeyFn = ( - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, - queryKey?: Array, -) => [useProviderServiceGetProvidersKey, ...(queryKey ?? [{ limit, offset }])]; export type TaskInstanceServiceGetTaskInstanceDefaultResponse = Awaited< ReturnType >; @@ -1032,6 +966,57 @@ export const UseTaskInstanceServiceGetTaskInstanceDependencies1KeyFn = ( useTaskInstanceServiceGetTaskInstanceDependencies1Key, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }]), ]; +export type TaskInstanceServiceGetTaskInstanceTriesDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetTaskInstanceTriesQueryResult< + TData = TaskInstanceServiceGetTaskInstanceTriesDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useTaskInstanceServiceGetTaskInstanceTriesKey = + "TaskInstanceServiceGetTaskInstanceTries"; +export const UseTaskInstanceServiceGetTaskInstanceTriesKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetTaskInstanceTriesKey, + ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId }]), +]; +export type TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse = + Awaited>; +export type TaskInstanceServiceGetMappedTaskInstanceTriesQueryResult< + TData = TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useTaskInstanceServiceGetMappedTaskInstanceTriesKey = + "TaskInstanceServiceGetMappedTaskInstanceTries"; +export const UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetMappedTaskInstanceTriesKey, + ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }]), +]; export type TaskInstanceServiceGetMappedTaskInstanceDefaultResponse = Awaited< ReturnType >; @@ -1236,6 +1221,244 @@ export const UseTaskInstanceServiceGetLogKeyFn = ( }, ]), ]; +export type ImportErrorServiceGetImportErrorDefaultResponse = Awaited< + ReturnType +>; +export type ImportErrorServiceGetImportErrorQueryResult< + TData = ImportErrorServiceGetImportErrorDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useImportErrorServiceGetImportErrorKey = + "ImportErrorServiceGetImportError"; +export const UseImportErrorServiceGetImportErrorKeyFn = ( + { + importErrorId, + }: { + importErrorId: number; + }, + queryKey?: Array, +) => [ + useImportErrorServiceGetImportErrorKey, + ...(queryKey ?? [{ importErrorId }]), +]; +export type ImportErrorServiceGetImportErrorsDefaultResponse = Awaited< + ReturnType +>; +export type ImportErrorServiceGetImportErrorsQueryResult< + TData = ImportErrorServiceGetImportErrorsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useImportErrorServiceGetImportErrorsKey = + "ImportErrorServiceGetImportErrors"; +export const UseImportErrorServiceGetImportErrorsKeyFn = ( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: Array, +) => [ + useImportErrorServiceGetImportErrorsKey, + ...(queryKey ?? [{ limit, offset, orderBy }]), +]; +export type JobServiceGetJobsDefaultResponse = Awaited< + ReturnType +>; +export type JobServiceGetJobsQueryResult< + TData = JobServiceGetJobsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useJobServiceGetJobsKey = "JobServiceGetJobs"; +export const UseJobServiceGetJobsKeyFn = ( + { + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }: { + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string; + startDateGte?: string; + startDateLte?: string; + } = {}, + queryKey?: Array, +) => [ + useJobServiceGetJobsKey, + ...(queryKey ?? 
[ + { + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }, + ]), +]; +export type PluginServiceGetPluginsDefaultResponse = Awaited< + ReturnType +>; +export type PluginServiceGetPluginsQueryResult< + TData = PluginServiceGetPluginsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const usePluginServiceGetPluginsKey = "PluginServiceGetPlugins"; +export const UsePluginServiceGetPluginsKeyFn = ( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: Array, +) => [usePluginServiceGetPluginsKey, ...(queryKey ?? [{ limit, offset }])]; +export type PoolServiceGetPoolDefaultResponse = Awaited< + ReturnType +>; +export type PoolServiceGetPoolQueryResult< + TData = PoolServiceGetPoolDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const usePoolServiceGetPoolKey = "PoolServiceGetPool"; +export const UsePoolServiceGetPoolKeyFn = ( + { + poolName, + }: { + poolName: string; + }, + queryKey?: Array, +) => [usePoolServiceGetPoolKey, ...(queryKey ?? [{ poolName }])]; +export type PoolServiceGetPoolsDefaultResponse = Awaited< + ReturnType +>; +export type PoolServiceGetPoolsQueryResult< + TData = PoolServiceGetPoolsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const usePoolServiceGetPoolsKey = "PoolServiceGetPools"; +export const UsePoolServiceGetPoolsKeyFn = ( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: Array, +) => [usePoolServiceGetPoolsKey, ...(queryKey ?? [{ limit, offset, orderBy }])]; +export type ProviderServiceGetProvidersDefaultResponse = Awaited< + ReturnType +>; +export type ProviderServiceGetProvidersQueryResult< + TData = ProviderServiceGetProvidersDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useProviderServiceGetProvidersKey = "ProviderServiceGetProviders"; +export const UseProviderServiceGetProvidersKeyFn = ( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: Array, +) => [useProviderServiceGetProvidersKey, ...(queryKey ?? [{ limit, offset }])]; +export type XcomServiceGetXcomEntryDefaultResponse = Awaited< + ReturnType +>; +export type XcomServiceGetXcomEntryQueryResult< + TData = XcomServiceGetXcomEntryDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useXcomServiceGetXcomEntryKey = "XcomServiceGetXcomEntry"; +export const UseXcomServiceGetXcomEntryKeyFn = ( + { + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + deserialize?: boolean; + mapIndex?: number; + stringify?: boolean; + taskId: string; + xcomKey: string; + }, + queryKey?: Array, +) => [ + useXcomServiceGetXcomEntryKey, + ...(queryKey ?? 
[ + { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, + ]), +]; +export type XcomServiceGetXcomEntriesDefaultResponse = Awaited< + ReturnType +>; +export type XcomServiceGetXcomEntriesQueryResult< + TData = XcomServiceGetXcomEntriesDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useXcomServiceGetXcomEntriesKey = "XcomServiceGetXcomEntries"; +export const UseXcomServiceGetXcomEntriesKeyFn = ( + { + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number; + offset?: number; + taskId: string; + xcomKey?: string; + }, + queryKey?: Array, +) => [ + useXcomServiceGetXcomEntriesKey, + ...(queryKey ?? [ + { dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }, + ]), +]; export type TaskServiceGetTasksDefaultResponse = Awaited< ReturnType >; @@ -1311,39 +1534,6 @@ export const UseVariableServiceGetVariablesKeyFn = ( useVariableServiceGetVariablesKey, ...(queryKey ?? [{ limit, offset, orderBy }]), ]; -export type XcomServiceGetXcomEntryDefaultResponse = Awaited< - ReturnType ->; -export type XcomServiceGetXcomEntryQueryResult< - TData = XcomServiceGetXcomEntryDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useXcomServiceGetXcomEntryKey = "XcomServiceGetXcomEntry"; -export const UseXcomServiceGetXcomEntryKeyFn = ( - { - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, - }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; - }, - queryKey?: Array, -) => [ - useXcomServiceGetXcomEntryKey, - ...(queryKey ?? [ - { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, - ]), -]; export type MonitorServiceGetHealthDefaultResponse = Awaited< ReturnType >; @@ -1383,11 +1573,11 @@ export type ConnectionServiceTestConnectionMutationResult = Awaited< export type DagRunServiceClearDagRunMutationResult = Awaited< ReturnType >; -export type PoolServicePostPoolMutationResult = Awaited< - ReturnType +export type DagRunServiceTriggerDagRunMutationResult = Awaited< + ReturnType >; -export type PoolServicePostPoolsMutationResult = Awaited< - ReturnType +export type DagRunServiceGetListDagRunsBatchMutationResult = Awaited< + ReturnType >; export type TaskInstanceServiceGetTaskInstancesBatchMutationResult = Awaited< ReturnType @@ -1395,6 +1585,12 @@ export type TaskInstanceServiceGetTaskInstancesBatchMutationResult = Awaited< export type TaskInstanceServicePostClearTaskInstancesMutationResult = Awaited< ReturnType >; +export type PoolServicePostPoolMutationResult = Awaited< + ReturnType +>; +export type PoolServicePostPoolsMutationResult = Awaited< + ReturnType +>; export type VariableServicePostVariableMutationResult = Awaited< ReturnType >; @@ -1419,6 +1615,12 @@ export type DagServicePatchDagsMutationResult = Awaited< export type DagServicePatchDagMutationResult = Awaited< ReturnType >; +export type TaskInstanceServicePatchTaskInstanceMutationResult = Awaited< + ReturnType +>; +export type TaskInstanceServicePatchTaskInstance1MutationResult = Awaited< + ReturnType +>; export type PoolServicePatchPoolMutationResult = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index cfc40a363c5ae..8f423b4c083de 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -14,7 +14,9 @@ import { 
DagsService, DashboardService, EventLogService, + ExtraLinksService, ImportErrorService, + JobService, MonitorService, PluginService, PoolService, @@ -992,148 +994,65 @@ export const prefetchUseEventLogServiceGetEventLogs = ( }), }); /** - * Get Import Error - * Get an import error. + * Get Extra Links + * Get extra links for task instance. * @param data The data for the request. - * @param data.importErrorId - * @returns ImportErrorResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns ExtraLinksResponse Successful Response * @throws ApiError */ -export const prefetchUseImportErrorServiceGetImportError = ( +export const prefetchUseExtraLinksServiceGetExtraLinks = ( queryClient: QueryClient, { - importErrorId, + dagId, + dagRunId, + taskId, }: { - importErrorId: number; + dagId: string; + dagRunId: string; + taskId: string; }, ) => queryClient.prefetchQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ - importErrorId, + queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ + dagId, + dagRunId, + taskId, }), - queryFn: () => ImportErrorService.getImportError({ importErrorId }), + queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, taskId }), }); /** - * Get Import Errors - * Get all import errors. + * Get Extra Links + * Get extra links for task instance. * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns ImportErrorCollectionResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns ExtraLinksResponse Successful Response * @throws ApiError */ -export const prefetchUseImportErrorServiceGetImportErrors = ( +export const prefetchUseTaskInstanceServiceGetExtraLinks = ( queryClient: QueryClient, { - limit, - offset, - orderBy, + dagId, + dagRunId, + taskId, }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, + dagId: string; + dagRunId: string; + taskId: string; + }, ) => queryClient.prefetchQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ - limit, - offset, - orderBy, + queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ + dagId, + dagRunId, + taskId, }), queryFn: () => - ImportErrorService.getImportErrors({ limit, offset, orderBy }), - }); -/** - * Get Plugins - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @returns PluginCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUsePluginServiceGetPlugins = ( - queryClient: QueryClient, - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }), - queryFn: () => PluginService.getPlugins({ limit, offset }), - }); -/** - * Get Pool - * Get a pool. - * @param data The data for the request. - * @param data.poolName - * @returns PoolResponse Successful Response - * @throws ApiError - */ -export const prefetchUsePoolServiceGetPool = ( - queryClient: QueryClient, - { - poolName, - }: { - poolName: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }), - queryFn: () => PoolService.getPool({ poolName }), - }); -/** - * Get Pools - * Get all pools entries. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns PoolCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUsePoolServiceGetPools = ( - queryClient: QueryClient, - { - limit, - offset, - orderBy, - }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy }), - queryFn: () => PoolService.getPools({ limit, offset, orderBy }), - }); -/** - * Get Providers - * Get providers. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @returns ProviderCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseProviderServiceGetProviders = ( - queryClient: QueryClient, - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }), - queryFn: () => ProviderService.getProviders({ limit, offset }), + TaskInstanceService.getExtraLinks({ dagId, dagRunId, taskId }), }); /** * Get Task Instance @@ -1366,6 +1285,85 @@ export const prefetchUseTaskInstanceServiceGetTaskInstanceDependencies1 = ( taskId, }), }); +/** + * Get Task Instance Tries + * Get list of task instances history. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetTaskInstanceTries = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + queryFn: () => + TaskInstanceService.getTaskInstanceTries({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + }); +/** + * Get Mapped Task Instance Tries + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetMappedTaskInstanceTries = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + queryFn: () => + TaskInstanceService.getMappedTaskInstanceTries({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + }); /** * Get Mapped Task Instance * Get task instance. @@ -1670,82 +1668,191 @@ export const prefetchUseTaskInstanceServiceGetLog = ( }), }); /** - * Get Tasks - * Get tasks for DAG. + * Get Import Error + * Get an import error. * @param data The data for the request. 
- * @param data.dagId - * @param data.orderBy - * @returns TaskCollectionResponse Successful Response + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response * @throws ApiError */ -export const prefetchUseTaskServiceGetTasks = ( +export const prefetchUseImportErrorServiceGetImportError = ( queryClient: QueryClient, { - dagId, - orderBy, + importErrorId, }: { - dagId: string; - orderBy?: string; + importErrorId: number; }, ) => queryClient.prefetchQuery({ - queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }), - queryFn: () => TaskService.getTasks({ dagId, orderBy }), + queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ + importErrorId, + }), + queryFn: () => ImportErrorService.getImportError({ importErrorId }), }); /** - * Get Task - * Get simplified representation of a task. + * Get Import Errors + * Get all import errors. * @param data The data for the request. - * @param data.dagId - * @param data.taskId - * @returns TaskResponse Successful Response + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response * @throws ApiError */ -export const prefetchUseTaskServiceGetTask = ( +export const prefetchUseImportErrorServiceGetImportErrors = ( queryClient: QueryClient, { - dagId, - taskId, + limit, + offset, + orderBy, }: { - dagId: string; - taskId: unknown; - }, + limit?: number; + offset?: number; + orderBy?: string; + } = {}, ) => queryClient.prefetchQuery({ - queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }), - queryFn: () => TaskService.getTask({ dagId, taskId }), + queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ + limit, + offset, + orderBy, + }), + queryFn: () => + ImportErrorService.getImportErrors({ limit, offset, orderBy }), }); /** - * Get Variable - * Get a variable entry. + * Get Jobs + * Get all jobs. * @param data The data for the request. - * @param data.variableKey - * @returns VariableResponse Successful Response + * @param data.isAlive + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.jobState + * @param data.jobType + * @param data.hostname + * @param data.executorClass + * @returns JobCollectionResponse Successful Response * @throws ApiError */ -export const prefetchUseVariableServiceGetVariable = ( +export const prefetchUseJobServiceGetJobs = ( queryClient: QueryClient, { - variableKey, + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, }: { - variableKey: string; + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string; + startDateGte?: string; + startDateLte?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseJobServiceGetJobsKeyFn({ + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }), + queryFn: () => + JobService.getJobs({ + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }), + }); +/** + * Get Plugins + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUsePluginServiceGetPlugins = ( + queryClient: QueryClient, + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }), + queryFn: () => PluginService.getPlugins({ limit, offset }), + }); +/** + * Get Pool + * Get a pool. + * @param data The data for the request. + * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const prefetchUsePoolServiceGetPool = ( + queryClient: QueryClient, + { + poolName, + }: { + poolName: string; }, ) => queryClient.prefetchQuery({ - queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), - queryFn: () => VariableService.getVariable({ variableKey }), + queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }), + queryFn: () => PoolService.getPool({ poolName }), }); /** - * Get Variables - * Get all Variables entries. + * Get Pools + * Get all pools entries. * @param data The data for the request. * @param data.limit * @param data.offset * @param data.orderBy - * @returns VariableCollectionResponse Successful Response + * @returns PoolCollectionResponse Successful Response * @throws ApiError */ -export const prefetchUseVariableServiceGetVariables = ( +export const prefetchUsePoolServiceGetPools = ( queryClient: QueryClient, { limit, @@ -1758,12 +1865,31 @@ export const prefetchUseVariableServiceGetVariables = ( } = {}, ) => queryClient.prefetchQuery({ - queryKey: Common.UseVariableServiceGetVariablesKeyFn({ - limit, - offset, - orderBy, - }), - queryFn: () => VariableService.getVariables({ limit, offset, orderBy }), + queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy }), + queryFn: () => PoolService.getPools({ limit, offset, orderBy }), + }); +/** + * Get Providers + * Get providers. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseProviderServiceGetProviders = ( + queryClient: QueryClient, + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }), + queryFn: () => ProviderService.getProviders({ limit, offset }), }); /** * Get Xcom Entry @@ -1820,6 +1946,159 @@ export const prefetchUseXcomServiceGetXcomEntry = ( xcomKey, }), }); +/** + * Get Xcom Entries + * Get all XCom entries. + * + * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.xcomKey + * @param data.mapIndex + * @param data.limit + * @param data.offset + * @returns XComCollection Successful Response + * @throws ApiError + */ +export const prefetchUseXcomServiceGetXcomEntries = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number; + offset?: number; + taskId: string; + xcomKey?: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }), + queryFn: () => + XcomService.getXcomEntries({ + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }), + }); +/** + * Get Tasks + * Get tasks for DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.orderBy + * @returns TaskCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskServiceGetTasks = ( + queryClient: QueryClient, + { + dagId, + orderBy, + }: { + dagId: string; + orderBy?: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }), + queryFn: () => TaskService.getTasks({ dagId, orderBy }), + }); +/** + * Get Task + * Get simplified representation of a task. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @returns TaskResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskServiceGetTask = ( + queryClient: QueryClient, + { + dagId, + taskId, + }: { + dagId: string; + taskId: unknown; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }), + queryFn: () => TaskService.getTask({ dagId, taskId }), + }); +/** + * Get Variable + * Get a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const prefetchUseVariableServiceGetVariable = ( + queryClient: QueryClient, + { + variableKey, + }: { + variableKey: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), + queryFn: () => VariableService.getVariable({ variableKey }), + }); +/** + * Get Variables + * Get all Variables entries. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns VariableCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseVariableServiceGetVariables = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseVariableServiceGetVariablesKeyFn({ + limit, + offset, + orderBy, + }), + queryFn: () => VariableService.getVariables({ limit, offset, orderBy }), + }); /** * Get Health * @returns HealthInfoSchema Successful Response diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index fb1d6568cf1b5..d644beb729488 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -19,7 +19,9 @@ import { DagsService, DashboardService, EventLogService, + ExtraLinksService, ImportErrorService, + JobService, MonitorService, PluginService, PoolService, @@ -38,12 +40,15 @@ import { DAGPatchBody, DAGRunClearBody, DAGRunPatchBody, + DAGRunsBatchBody, DagRunState, DagWarningType, + PatchTaskInstanceBody, PoolPatchBody, PoolPostBody, PoolPostBulkBody, TaskInstancesBatchBody, + TriggerDAGRunPostBody, VariableBody, } from "../requests/types.gen"; import * as Common from "./common"; @@ -1218,193 +1223,75 @@ export const useEventLogServiceGetEventLogs = < ...options, }); /** - * Get Import Error - * Get an import error. + * Get Extra Links + * Get extra links for task instance. * @param data The data for the request. - * @param data.importErrorId - * @returns ImportErrorResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns ExtraLinksResponse Successful Response * @throws ApiError */ -export const useImportErrorServiceGetImportError = < - TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, +export const useExtraLinksServiceGetExtraLinks = < + TData = Common.ExtraLinksServiceGetExtraLinksDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - importErrorId, + dagId, + dagRunId, + taskId, }: { - importErrorId: number; + dagId: string; + dagRunId: string; + taskId: string; }, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn( - { importErrorId }, - queryKey, - ), - queryFn: () => - ImportErrorService.getImportError({ importErrorId }) as TData, - ...options, - }); -/** - * Get Import Errors - * Get all import errors. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns ImportErrorCollectionResponse Successful Response - * @throws ApiError - */ -export const useImportErrorServiceGetImportErrors = < - TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - orderBy, - }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn( - { limit, offset, orderBy }, + queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn( + { dagId, dagRunId, taskId }, queryKey, ), queryFn: () => - ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Plugins - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @returns PluginCollectionResponse Successful Response - * @throws ApiError - */ -export const usePluginServiceGetPlugins = < - TData = Common.PluginServiceGetPluginsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UsePluginServiceGetPluginsKeyFn( - { limit, offset }, - queryKey, - ), - queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, + ExtraLinksService.getExtraLinks({ dagId, dagRunId, taskId }) as TData, ...options, }); /** - * Get Pool - * Get a pool. + * Get Extra Links + * Get extra links for task instance. * @param data The data for the request. - * @param data.poolName - * @returns PoolResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns ExtraLinksResponse Successful Response * @throws ApiError */ -export const usePoolServiceGetPool = < - TData = Common.PoolServiceGetPoolDefaultResponse, +export const useTaskInstanceServiceGetExtraLinks = < + TData = Common.TaskInstanceServiceGetExtraLinksDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - poolName, + dagId, + dagRunId, + taskId, }: { - poolName: string; + dagId: string; + dagRunId: string; + taskId: string; }, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useQuery({ - queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), - queryFn: () => PoolService.getPool({ poolName }) as TData, - ...options, - }); -/** - * Get Pools - * Get all pools entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns PoolCollectionResponse Successful Response - * @throws ApiError - */ -export const usePoolServiceGetPools = < - TData = Common.PoolServiceGetPoolsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - orderBy, - }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UsePoolServiceGetPoolsKeyFn( - { limit, offset, orderBy }, - queryKey, - ), - queryFn: () => PoolService.getPools({ limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Providers - * Get providers. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @returns ProviderCollectionResponse Successful Response - * @throws ApiError - */ -export const useProviderServiceGetProviders = < - TData = Common.ProviderServiceGetProvidersDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseProviderServiceGetProvidersKeyFn( - { limit, offset }, + queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn( + { dagId, dagRunId, taskId }, queryKey, ), - queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, + queryFn: () => + TaskInstanceService.getExtraLinks({ dagId, dagRunId, taskId }) as TData, ...options, }); /** @@ -1660,6 +1547,93 @@ export const useTaskInstanceServiceGetTaskInstanceDependencies1 = < }) as TData, ...options, }); +/** + * Get Task Instance Tries + * Get list of task instances history. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceTries = < + TData = Common.TaskInstanceServiceGetTaskInstanceTriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstanceTries({ + dagId, + dagRunId, + mapIndex, + taskId, + }) as TData, + ...options, + }); +/** + * Get Mapped Task Instance Tries + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstanceTries = < + TData = Common.TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getMappedTaskInstanceTries({ + dagId, + dagRunId, + mapIndex, + taskId, + }) as TData, + ...options, + }); /** * Get Mapped Task Instance * Get task instance. @@ -1996,104 +1970,229 @@ export const useTaskInstanceServiceGetLog = < ...options, }); /** - * Get Tasks - * Get tasks for DAG. + * Get Import Error + * Get an import error. * @param data The data for the request. 
- * @param data.dagId - * @param data.orderBy - * @returns TaskCollectionResponse Successful Response - * @throws ApiError + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response + * @throws ApiError */ -export const useTaskServiceGetTasks = < - TData = Common.TaskServiceGetTasksDefaultResponse, +export const useImportErrorServiceGetImportError = < + TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - dagId, + importErrorId, + }: { + importErrorId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn( + { importErrorId }, + queryKey, + ), + queryFn: () => + ImportErrorService.getImportError({ importErrorId }) as TData, + ...options, + }); +/** + * Get Import Errors + * Get all import errors. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response + * @throws ApiError + */ +export const useImportErrorServiceGetImportErrors = < + TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, orderBy, }: { - dagId: string; + limit?: number; + offset?: number; orderBy?: string; - }, + } = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useQuery({ - queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), - queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, + queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn( + { limit, offset, orderBy }, + queryKey, + ), + queryFn: () => + ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, ...options, }); /** - * Get Task - * Get simplified representation of a task. + * Get Jobs + * Get all jobs. * @param data The data for the request. 
- * @param data.dagId - * @param data.taskId - * @returns TaskResponse Successful Response + * @param data.isAlive + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.jobState + * @param data.jobType + * @param data.hostname + * @param data.executorClass + * @returns JobCollectionResponse Successful Response * @throws ApiError */ -export const useTaskServiceGetTask = < - TData = Common.TaskServiceGetTaskDefaultResponse, +export const useJobServiceGetJobs = < + TData = Common.JobServiceGetJobsDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - dagId, - taskId, + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, }: { - dagId: string; - taskId: unknown; - }, + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string; + startDateGte?: string; + startDateLte?: string; + } = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useQuery({ - queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey), - queryFn: () => TaskService.getTask({ dagId, taskId }) as TData, + queryKey: Common.UseJobServiceGetJobsKeyFn( + { + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }, + queryKey, + ), + queryFn: () => + JobService.getJobs({ + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }) as TData, ...options, }); /** - * Get Variable - * Get a variable entry. + * Get Plugins * @param data The data for the request. - * @param data.variableKey - * @returns VariableResponse Successful Response + * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response * @throws ApiError */ -export const useVariableServiceGetVariable = < - TData = Common.VariableServiceGetVariableDefaultResponse, +export const usePluginServiceGetPlugins = < + TData = Common.PluginServiceGetPluginsDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - variableKey, + limit, + offset, }: { - variableKey: string; - }, + limit?: number; + offset?: number; + } = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useQuery({ - queryKey: Common.UseVariableServiceGetVariableKeyFn( - { variableKey }, + queryKey: Common.UsePluginServiceGetPluginsKeyFn( + { limit, offset }, queryKey, ), - queryFn: () => VariableService.getVariable({ variableKey }) as TData, + queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, ...options, }); /** - * Get Variables - * Get all Variables entries. + * Get Pool + * Get a pool. + * @param data The data for the request. 
+ * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const usePoolServiceGetPool = < + TData = Common.PoolServiceGetPoolDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + poolName, + }: { + poolName: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), + queryFn: () => PoolService.getPool({ poolName }) as TData, + ...options, + }); +/** + * Get Pools + * Get all pools entries. * @param data The data for the request. * @param data.limit * @param data.offset * @param data.orderBy - * @returns VariableCollectionResponse Successful Response + * @returns PoolCollectionResponse Successful Response * @throws ApiError */ -export const useVariableServiceGetVariables = < - TData = Common.VariableServiceGetVariablesDefaultResponse, +export const usePoolServiceGetPools = < + TData = Common.PoolServiceGetPoolsDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( @@ -2110,12 +2209,43 @@ export const useVariableServiceGetVariables = < options?: Omit, "queryKey" | "queryFn">, ) => useQuery({ - queryKey: Common.UseVariableServiceGetVariablesKeyFn( + queryKey: Common.UsePoolServiceGetPoolsKeyFn( { limit, offset, orderBy }, queryKey, ), - queryFn: () => - VariableService.getVariables({ limit, offset, orderBy }) as TData, + queryFn: () => PoolService.getPools({ limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Providers + * Get providers. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ +export const useProviderServiceGetProviders = < + TData = Common.ProviderServiceGetProvidersDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseProviderServiceGetProvidersKeyFn( + { limit, offset }, + queryKey, + ), + queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, ...options, }); /** @@ -2132,46 +2262,227 @@ export const useVariableServiceGetVariables = < * @returns unknown Successful Response * @throws ApiError */ -export const useXcomServiceGetXcomEntry = < - TData = Common.XcomServiceGetXcomEntryDefaultResponse, +export const useXcomServiceGetXcomEntry = < + TData = Common.XcomServiceGetXcomEntryDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + deserialize?: boolean; + mapIndex?: number; + stringify?: boolean; + taskId: string; + xcomKey: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseXcomServiceGetXcomEntryKeyFn( + { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, + queryKey, + ), + queryFn: () => + XcomService.getXcomEntry({ + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }) as TData, + ...options, + }); +/** + * Get Xcom Entries + * Get all XCom entries. + * + * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.xcomKey + * @param data.mapIndex + * @param data.limit + * @param data.offset + * @returns XComCollection Successful Response + * @throws ApiError + */ +export const useXcomServiceGetXcomEntries = < + TData = Common.XcomServiceGetXcomEntriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number; + offset?: number; + taskId: string; + xcomKey?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn( + { dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }, + queryKey, + ), + queryFn: () => + XcomService.getXcomEntries({ + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }) as TData, + ...options, + }); +/** + * Get Tasks + * Get tasks for DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.orderBy + * @returns TaskCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskServiceGetTasks = < + TData = Common.TaskServiceGetTasksDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + orderBy, + }: { + dagId: string; + orderBy?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), + queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, + ...options, + }); +/** + * Get Task + * Get simplified representation of a task. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @returns TaskResponse Successful Response + * @throws ApiError + */ +export const useTaskServiceGetTask = < + TData = Common.TaskServiceGetTaskDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + taskId, + }: { + dagId: string; + taskId: unknown; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey), + queryFn: () => TaskService.getTask({ dagId, taskId }) as TData, + ...options, + }); +/** + * Get Variable + * Get a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariable = < + TData = Common.VariableServiceGetVariableDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + variableKey, + }: { + variableKey: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseVariableServiceGetVariableKeyFn( + { variableKey }, + queryKey, + ), + queryFn: () => VariableService.getVariable({ variableKey }) as TData, + ...options, + }); +/** + * Get Variables + * Get all Variables entries. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns VariableCollectionResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariables = < + TData = Common.VariableServiceGetVariablesDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, + limit, + offset, + orderBy, }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; - }, + limit?: number; + offset?: number; + orderBy?: string; + } = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useQuery({ - queryKey: Common.UseXcomServiceGetXcomEntryKeyFn( - { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, + queryKey: Common.UseVariableServiceGetVariablesKeyFn( + { limit, offset, orderBy }, queryKey, ), queryFn: () => - XcomService.getXcomEntry({ - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, - }) as TData, + VariableService.getVariables({ limit, offset, orderBy }) as TData, ...options, }); /** @@ -2417,15 +2728,16 @@ export const useDagRunServiceClearDagRun = < ...options, }); /** - * Post Pool - * Create a Pool. + * Trigger Dag Run + * Trigger a DAG. * @param data The data for the request. + * @param data.dagId * @param data.requestBody - * @returns PoolResponse Successful Response + * @returns DAGRunResponse Successful Response * @throws ApiError */ -export const usePoolServicePostPool = < - TData = Common.PoolServicePostPoolMutationResult, +export const useDagRunServiceTriggerDagRun = < + TData = Common.DagRunServiceTriggerDagRunMutationResult, TError = unknown, TContext = unknown, >( @@ -2434,7 +2746,8 @@ export const usePoolServicePostPool = < TData, TError, { - requestBody: PoolPostBody; + dagId: unknown; + requestBody: TriggerDAGRunPostBody; }, TContext >, @@ -2445,24 +2758,29 @@ export const usePoolServicePostPool = < TData, TError, { - requestBody: PoolPostBody; + dagId: unknown; + requestBody: TriggerDAGRunPostBody; }, TContext >({ - mutationFn: ({ requestBody }) => - PoolService.postPool({ requestBody }) as unknown as Promise, + mutationFn: ({ dagId, requestBody }) => + DagRunService.triggerDagRun({ + dagId, + requestBody, + }) as unknown as Promise, ...options, }); /** - * Post Pools - * Create multiple pools. + * Get List Dag Runs Batch + * Get a list of DAG Runs. * @param data The data for the request. 
+ * @param data.dagId * @param data.requestBody - * @returns PoolCollectionResponse Successful Response + * @returns DAGRunCollectionResponse Successful Response * @throws ApiError */ -export const usePoolServicePostPools = < - TData = Common.PoolServicePostPoolsMutationResult, +export const useDagRunServiceGetListDagRunsBatch = < + TData = Common.DagRunServiceGetListDagRunsBatchMutationResult, TError = unknown, TContext = unknown, >( @@ -2471,7 +2789,8 @@ export const usePoolServicePostPools = < TData, TError, { - requestBody: PoolPostBulkBody; + dagId: "~"; + requestBody: DAGRunsBatchBody; }, TContext >, @@ -2482,12 +2801,16 @@ export const usePoolServicePostPools = < TData, TError, { - requestBody: PoolPostBulkBody; + dagId: "~"; + requestBody: DAGRunsBatchBody; }, TContext >({ - mutationFn: ({ requestBody }) => - PoolService.postPools({ requestBody }) as unknown as Promise, + mutationFn: ({ dagId, requestBody }) => + DagRunService.getListDagRunsBatch({ + dagId, + requestBody, + }) as unknown as Promise, ...options, }); /** @@ -2580,6 +2903,80 @@ export const useTaskInstanceServicePostClearTaskInstances = < }) as unknown as Promise, ...options, }); +/** + * Post Pool + * Create a Pool. + * @param data The data for the request. + * @param data.requestBody + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const usePoolServicePostPool = < + TData = Common.PoolServicePostPoolMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: PoolPostBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: PoolPostBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + PoolService.postPool({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Post Pools + * Create multiple pools. + * @param data The data for the request. + * @param data.requestBody + * @returns PoolCollectionResponse Successful Response + * @throws ApiError + */ +export const usePoolServicePostPools = < + TData = Common.PoolServicePostPoolsMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: PoolPostBulkBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: PoolPostBulkBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + PoolService.postPools({ requestBody }) as unknown as Promise, + ...options, + }); /** * Post Variable * Create a variable. @@ -2964,6 +3361,138 @@ export const useDagServicePatchDag = < }) as unknown as Promise, ...options, }); +/** + * Patch Task Instance + * Update the state of a task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.requestBody + * @param data.mapIndex + * @param data.updateMask + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServicePatchTaskInstance = < + TData = Common.TaskInstanceServicePatchTaskInstanceMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex?: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex?: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ + dagId, + dagRunId, + mapIndex, + requestBody, + taskId, + updateMask, + }) => + TaskInstanceService.patchTaskInstance({ + dagId, + dagRunId, + mapIndex, + requestBody, + taskId, + updateMask, + }) as unknown as Promise, + ...options, + }); +/** + * Patch Task Instance + * Update the state of a task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @param data.requestBody + * @param data.updateMask + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServicePatchTaskInstance1 = < + TData = Common.TaskInstanceServicePatchTaskInstance1MutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ + dagId, + dagRunId, + mapIndex, + requestBody, + taskId, + updateMask, + }) => + TaskInstanceService.patchTaskInstance1({ + dagId, + dagRunId, + mapIndex, + requestBody, + taskId, + updateMask, + }) as unknown as Promise, + ...options, + }); /** * Patch Pool * Update a Pool. diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index af0ddd9a09a34..11386ab5d1fbc 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -14,7 +14,9 @@ import { DagsService, DashboardService, EventLogService, + ExtraLinksService, ImportErrorService, + JobService, MonitorService, PluginService, PoolService, @@ -1198,193 +1200,75 @@ export const useEventLogServiceGetEventLogsSuspense = < ...options, }); /** - * Get Import Error - * Get an import error. + * Get Extra Links + * Get extra links for task instance. * @param data The data for the request. 
- * @param data.importErrorId - * @returns ImportErrorResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns ExtraLinksResponse Successful Response * @throws ApiError */ -export const useImportErrorServiceGetImportErrorSuspense = < - TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, +export const useExtraLinksServiceGetExtraLinksSuspense = < + TData = Common.ExtraLinksServiceGetExtraLinksDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - importErrorId, + dagId, + dagRunId, + taskId, }: { - importErrorId: number; + dagId: string; + dagRunId: string; + taskId: string; }, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useSuspenseQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn( - { importErrorId }, - queryKey, - ), - queryFn: () => - ImportErrorService.getImportError({ importErrorId }) as TData, - ...options, - }); -/** - * Get Import Errors - * Get all import errors. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns ImportErrorCollectionResponse Successful Response - * @throws ApiError - */ -export const useImportErrorServiceGetImportErrorsSuspense = < - TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - orderBy, - }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn( - { limit, offset, orderBy }, + queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn( + { dagId, dagRunId, taskId }, queryKey, ), queryFn: () => - ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Plugins - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @returns PluginCollectionResponse Successful Response - * @throws ApiError - */ -export const usePluginServiceGetPluginsSuspense = < - TData = Common.PluginServiceGetPluginsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UsePluginServiceGetPluginsKeyFn( - { limit, offset }, - queryKey, - ), - queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, + ExtraLinksService.getExtraLinks({ dagId, dagRunId, taskId }) as TData, ...options, }); /** - * Get Pool - * Get a pool. + * Get Extra Links + * Get extra links for task instance. * @param data The data for the request. 
- * @param data.poolName - * @returns PoolResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns ExtraLinksResponse Successful Response * @throws ApiError */ -export const usePoolServiceGetPoolSuspense = < - TData = Common.PoolServiceGetPoolDefaultResponse, +export const useTaskInstanceServiceGetExtraLinksSuspense = < + TData = Common.TaskInstanceServiceGetExtraLinksDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - poolName, + dagId, + dagRunId, + taskId, }: { - poolName: string; + dagId: string; + dagRunId: string; + taskId: string; }, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useSuspenseQuery({ - queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), - queryFn: () => PoolService.getPool({ poolName }) as TData, - ...options, - }); -/** - * Get Pools - * Get all pools entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns PoolCollectionResponse Successful Response - * @throws ApiError - */ -export const usePoolServiceGetPoolsSuspense = < - TData = Common.PoolServiceGetPoolsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - orderBy, - }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UsePoolServiceGetPoolsKeyFn( - { limit, offset, orderBy }, - queryKey, - ), - queryFn: () => PoolService.getPools({ limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Providers - * Get providers. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @returns ProviderCollectionResponse Successful Response - * @throws ApiError - */ -export const useProviderServiceGetProvidersSuspense = < - TData = Common.ProviderServiceGetProvidersDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseProviderServiceGetProvidersKeyFn( - { limit, offset }, + queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn( + { dagId, dagRunId, taskId }, queryKey, ), - queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, + queryFn: () => + TaskInstanceService.getExtraLinks({ dagId, dagRunId, taskId }) as TData, ...options, }); /** @@ -1640,6 +1524,93 @@ export const useTaskInstanceServiceGetTaskInstanceDependencies1Suspense = < }) as TData, ...options, }); +/** + * Get Task Instance Tries + * Get list of task instances history. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceTriesSuspense = < + TData = Common.TaskInstanceServiceGetTaskInstanceTriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstanceTries({ + dagId, + dagRunId, + mapIndex, + taskId, + }) as TData, + ...options, + }); +/** + * Get Mapped Task Instance Tries + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstanceTriesSuspense = < + TData = Common.TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getMappedTaskInstanceTries({ + dagId, + dagRunId, + mapIndex, + taskId, + }) as TData, + ...options, + }); /** * Get Mapped Task Instance * Get task instance. @@ -1976,104 +1947,229 @@ export const useTaskInstanceServiceGetLogSuspense = < ...options, }); /** - * Get Tasks - * Get tasks for DAG. + * Get Import Error + * Get an import error. * @param data The data for the request. - * @param data.dagId - * @param data.orderBy - * @returns TaskCollectionResponse Successful Response - * @throws ApiError + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response + * @throws ApiError */ -export const useTaskServiceGetTasksSuspense = < - TData = Common.TaskServiceGetTasksDefaultResponse, +export const useImportErrorServiceGetImportErrorSuspense = < + TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - dagId, + importErrorId, + }: { + importErrorId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn( + { importErrorId }, + queryKey, + ), + queryFn: () => + ImportErrorService.getImportError({ importErrorId }) as TData, + ...options, + }); +/** + * Get Import Errors + * Get all import errors. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response + * @throws ApiError + */ +export const useImportErrorServiceGetImportErrorsSuspense = < + TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, orderBy, }: { - dagId: string; + limit?: number; + offset?: number; orderBy?: string; - }, + } = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useSuspenseQuery({ - queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), - queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, + queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn( + { limit, offset, orderBy }, + queryKey, + ), + queryFn: () => + ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, ...options, }); /** - * Get Task - * Get simplified representation of a task. + * Get Jobs + * Get all jobs. * @param data The data for the request. - * @param data.dagId - * @param data.taskId - * @returns TaskResponse Successful Response + * @param data.isAlive + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.jobState + * @param data.jobType + * @param data.hostname + * @param data.executorClass + * @returns JobCollectionResponse Successful Response * @throws ApiError */ -export const useTaskServiceGetTaskSuspense = < - TData = Common.TaskServiceGetTaskDefaultResponse, +export const useJobServiceGetJobsSuspense = < + TData = Common.JobServiceGetJobsDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - dagId, - taskId, + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, }: { - dagId: string; - taskId: unknown; - }, + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string; + startDateGte?: string; + startDateLte?: string; + } = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useSuspenseQuery({ - queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey), - queryFn: () => TaskService.getTask({ dagId, taskId }) as TData, + queryKey: Common.UseJobServiceGetJobsKeyFn( + { + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }, + queryKey, + ), + queryFn: () => + JobService.getJobs({ + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }) as TData, ...options, }); /** - * Get Variable - * Get a variable entry. + * Get Plugins * @param data The data for the request. 
- * @param data.variableKey - * @returns VariableResponse Successful Response + * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response * @throws ApiError */ -export const useVariableServiceGetVariableSuspense = < - TData = Common.VariableServiceGetVariableDefaultResponse, +export const usePluginServiceGetPluginsSuspense = < + TData = Common.PluginServiceGetPluginsDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - variableKey, + limit, + offset, }: { - variableKey: string; - }, + limit?: number; + offset?: number; + } = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => useSuspenseQuery({ - queryKey: Common.UseVariableServiceGetVariableKeyFn( - { variableKey }, + queryKey: Common.UsePluginServiceGetPluginsKeyFn( + { limit, offset }, queryKey, ), - queryFn: () => VariableService.getVariable({ variableKey }) as TData, + queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, ...options, }); /** - * Get Variables - * Get all Variables entries. + * Get Pool + * Get a pool. + * @param data The data for the request. + * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const usePoolServiceGetPoolSuspense = < + TData = Common.PoolServiceGetPoolDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + poolName, + }: { + poolName: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), + queryFn: () => PoolService.getPool({ poolName }) as TData, + ...options, + }); +/** + * Get Pools + * Get all pools entries. * @param data The data for the request. * @param data.limit * @param data.offset * @param data.orderBy - * @returns VariableCollectionResponse Successful Response + * @returns PoolCollectionResponse Successful Response * @throws ApiError */ -export const useVariableServiceGetVariablesSuspense = < - TData = Common.VariableServiceGetVariablesDefaultResponse, +export const usePoolServiceGetPoolsSuspense = < + TData = Common.PoolServiceGetPoolsDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( @@ -2090,12 +2186,43 @@ export const useVariableServiceGetVariablesSuspense = < options?: Omit, "queryKey" | "queryFn">, ) => useSuspenseQuery({ - queryKey: Common.UseVariableServiceGetVariablesKeyFn( + queryKey: Common.UsePoolServiceGetPoolsKeyFn( { limit, offset, orderBy }, queryKey, ), - queryFn: () => - VariableService.getVariables({ limit, offset, orderBy }) as TData, + queryFn: () => PoolService.getPools({ limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Providers + * Get providers. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ +export const useProviderServiceGetProvidersSuspense = < + TData = Common.ProviderServiceGetProvidersDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseProviderServiceGetProvidersKeyFn( + { limit, offset }, + queryKey, + ), + queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, ...options, }); /** @@ -2154,6 +2281,187 @@ export const useXcomServiceGetXcomEntrySuspense = < }) as TData, ...options, }); +/** + * Get Xcom Entries + * Get all XCom entries. + * + * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.xcomKey + * @param data.mapIndex + * @param data.limit + * @param data.offset + * @returns XComCollection Successful Response + * @throws ApiError + */ +export const useXcomServiceGetXcomEntriesSuspense = < + TData = Common.XcomServiceGetXcomEntriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number; + offset?: number; + taskId: string; + xcomKey?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn( + { dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }, + queryKey, + ), + queryFn: () => + XcomService.getXcomEntries({ + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }) as TData, + ...options, + }); +/** + * Get Tasks + * Get tasks for DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.orderBy + * @returns TaskCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskServiceGetTasksSuspense = < + TData = Common.TaskServiceGetTasksDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + orderBy, + }: { + dagId: string; + orderBy?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), + queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, + ...options, + }); +/** + * Get Task + * Get simplified representation of a task. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @returns TaskResponse Successful Response + * @throws ApiError + */ +export const useTaskServiceGetTaskSuspense = < + TData = Common.TaskServiceGetTaskDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + taskId, + }: { + dagId: string; + taskId: unknown; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey), + queryFn: () => TaskService.getTask({ dagId, taskId }) as TData, + ...options, + }); +/** + * Get Variable + * Get a variable entry. + * @param data The data for the request. 
+ * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariableSuspense = < + TData = Common.VariableServiceGetVariableDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + variableKey, + }: { + variableKey: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseVariableServiceGetVariableKeyFn( + { variableKey }, + queryKey, + ), + queryFn: () => VariableService.getVariable({ variableKey }) as TData, + ...options, + }); +/** + * Get Variables + * Get all Variables entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns VariableCollectionResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariablesSuspense = < + TData = Common.VariableServiceGetVariablesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseVariableServiceGetVariablesKeyFn( + { limit, offset, orderBy }, + queryKey, + ), + queryFn: () => + VariableService.getVariables({ limit, offset, orderBy }) as TData, + ...options, + }); /** * Get Health * @returns HealthInfoSchema Successful Response diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 98a15b3172b9a..04bd8548f4f6d 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -244,10 +244,18 @@ export const $AssetResponse = { type: "integer", title: "Id", }, + name: { + type: "string", + title: "Name", + }, uri: { type: "string", title: "Uri", }, + group: { + type: "string", + title: "Group", + }, extra: { anyOf: [ { @@ -295,7 +303,9 @@ export const $AssetResponse = { type: "object", required: [ "id", + "name", "uri", + "group", "created_at", "updated_at", "consuming_dags", @@ -1694,7 +1704,7 @@ export const $DAGRunPatchStates = { export const $DAGRunResponse = { properties: { - run_id: { + dag_run_id: { anyOf: [ { type: "string", @@ -1703,7 +1713,7 @@ export const $DAGRunResponse = { type: "null", }, ], - title: "Run Id", + title: "Dag Run Id", }, dag_id: { type: "string", @@ -1825,7 +1835,7 @@ export const $DAGRunResponse = { additionalProperties: false, type: "object", required: [ - "run_id", + "dag_run_id", "dag_id", "logical_date", "queued_at", @@ -1897,6 +1907,144 @@ export const $DAGRunTypes = { description: "DAG Run Types for responses.", } as const; +export const $DAGRunsBatchBody = { + properties: { + order_by: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Order By", + }, + page_offset: { + type: "integer", + minimum: 0, + title: "Page Offset", + default: 0, + }, + page_limit: { + type: "integer", + minimum: 0, + title: "Page Limit", + default: 100, + }, + dag_ids: { + anyOf: [ + { + items: { + type: "string", + }, + type: "array", + }, + { + type: "null", + }, + ], + title: "Dag Ids", + }, + states: { + anyOf: [ + { + items: { + anyOf: [ + { + $ref: "#/components/schemas/DagRunState", + }, + { + type: "null", + }, + ], + }, + type: "array", + }, + { + type: "null", + }, + ], + title: "States", + }, + logical_date_gte: { + anyOf: [ + { + type: "string", + format: 
"date-time", + }, + { + type: "null", + }, + ], + title: "Logical Date Gte", + }, + logical_date_lte: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Logical Date Lte", + }, + start_date_gte: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Start Date Gte", + }, + start_date_lte: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Start Date Lte", + }, + end_date_gte: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "End Date Gte", + }, + end_date_lte: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "End Date Lte", + }, + }, + type: "object", + title: "DAGRunsBatchBody", + description: "List DAG Runs body for batch endpoint.", +} as const; + export const $DAGSourceResponse = { properties: { content: { @@ -2632,6 +2780,22 @@ export const $EventLogResponse = { description: "Event Log Response.", } as const; +export const $ExtraLinksResponse = { + additionalProperties: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + }, + type: "object", + title: "ExtraLinksResponse", + description: "Extra Links Response.", +} as const; + export const $FastAPIAppResponse = { properties: { app: { @@ -2777,6 +2941,26 @@ export const $ImportErrorResponse = { description: "Import Error Response.", } as const; +export const $JobCollectionResponse = { + properties: { + jobs: { + items: { + $ref: "#/components/schemas/JobResponse", + }, + type: "array", + title: "Jobs", + }, + total_entries: { + type: "integer", + title: "Total Entries", + }, + }, + type: "object", + required: ["jobs", "total_entries"], + title: "JobCollectionResponse", + description: "Job Collection Response.", +} as const; + export const $JobResponse = { properties: { id: { @@ -2905,6 +3089,62 @@ export const $JobResponse = { description: "Job serializer for responses.", } as const; +export const $PatchTaskInstanceBody = { + properties: { + dry_run: { + type: "boolean", + title: "Dry Run", + default: true, + }, + new_state: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "New State", + }, + note: { + anyOf: [ + { + type: "string", + maxLength: 1000, + }, + { + type: "null", + }, + ], + title: "Note", + }, + include_upstream: { + type: "boolean", + title: "Include Upstream", + default: false, + }, + include_downstream: { + type: "boolean", + title: "Include Downstream", + default: false, + }, + include_future: { + type: "boolean", + title: "Include Future", + default: false, + }, + include_past: { + type: "boolean", + title: "Include Past", + default: false, + }, + }, + type: "object", + title: "PatchTaskInstanceBody", + description: "Request body for Clear Task Instances endpoint.", +} as const; + export const $PluginCollectionResponse = { properties: { plugins: { @@ -3427,6 +3667,26 @@ export const $TaskInstanceCollectionResponse = { description: "Task Instance Collection serializer for responses.", } as const; +export const $TaskInstanceHistoryCollectionResponse = { + properties: { + task_instances: { + items: { + $ref: "#/components/schemas/TaskInstanceHistoryResponse", + }, + type: "array", + title: "Task Instances", + }, + total_entries: { + type: "integer", + title: "Total Entries", + }, + }, + type: "object", + required: ["task_instances", "total_entries"], + title: "TaskInstanceHistoryCollectionResponse", + description: 
"TaskInstanceHistory Collection serializer for responses.", +} as const; + export const $TaskInstanceHistoryResponse = { properties: { task_id: { @@ -4672,6 +4932,64 @@ export const $TimeDelta = { "TimeDelta can be used to interact with datetime.timedelta objects.", } as const; +export const $TriggerDAGRunPostBody = { + properties: { + dag_run_id: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Dag Run Id", + }, + data_interval_start: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Data Interval Start", + }, + data_interval_end: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Data Interval End", + }, + conf: { + type: "object", + title: "Conf", + }, + note: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Note", + }, + }, + type: "object", + title: "TriggerDAGRunPostBody", + description: "Trigger DAG Run Serializer for POST body.", +} as const; + export const $TriggerResponse = { properties: { id: { @@ -4890,6 +5208,68 @@ export const $VersionInfo = { description: "Version information serializer for responses.", } as const; +export const $XComCollection = { + properties: { + xcom_entries: { + items: { + $ref: "#/components/schemas/XComResponse", + }, + type: "array", + title: "Xcom Entries", + }, + total_entries: { + type: "integer", + title: "Total Entries", + }, + }, + type: "object", + required: ["xcom_entries", "total_entries"], + title: "XComCollection", + description: "List of XCom items.", +} as const; + +export const $XComResponse = { + properties: { + key: { + type: "string", + title: "Key", + }, + timestamp: { + type: "string", + format: "date-time", + title: "Timestamp", + }, + logical_date: { + type: "string", + format: "date-time", + title: "Logical Date", + }, + map_index: { + type: "integer", + title: "Map Index", + }, + task_id: { + type: "string", + title: "Task Id", + }, + dag_id: { + type: "string", + title: "Dag Id", + }, + }, + type: "object", + required: [ + "key", + "timestamp", + "logical_date", + "map_index", + "task_id", + "dag_id", + ], + title: "XComResponse", + description: "Serializer for a xcom item.", +} as const; + export const $XComResponseNative = { properties: { key: { diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 71e9c729a7f70..4c8944447cec2 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -70,6 +70,10 @@ import type { ClearDagRunResponse, GetDagRunsData, GetDagRunsResponse, + TriggerDagRunData, + TriggerDagRunResponse, + GetListDagRunsBatchData, + GetListDagRunsBatchResponse, GetDagSourceData, GetDagSourceResponse, GetDagStatsData, @@ -94,36 +98,26 @@ import type { GetEventLogResponse, GetEventLogsData, GetEventLogsResponse, - GetImportErrorData, - GetImportErrorResponse, - GetImportErrorsData, - GetImportErrorsResponse, - GetPluginsData, - GetPluginsResponse, - DeletePoolData, - DeletePoolResponse, - GetPoolData, - GetPoolResponse, - PatchPoolData, - PatchPoolResponse, - GetPoolsData, - GetPoolsResponse, - PostPoolData, - PostPoolResponse, - PostPoolsData, - PostPoolsResponse, - GetProvidersData, - GetProvidersResponse, + GetExtraLinksData, + GetExtraLinksResponse, GetTaskInstanceData, GetTaskInstanceResponse, + PatchTaskInstanceData, + PatchTaskInstanceResponse, GetMappedTaskInstancesData, GetMappedTaskInstancesResponse, 
GetTaskInstanceDependenciesData, GetTaskInstanceDependenciesResponse, GetTaskInstanceDependencies1Data, GetTaskInstanceDependencies1Response, + GetTaskInstanceTriesData, + GetTaskInstanceTriesResponse, + GetMappedTaskInstanceTriesData, + GetMappedTaskInstanceTriesResponse, GetMappedTaskInstanceData, GetMappedTaskInstanceResponse, + PatchTaskInstance1Data, + PatchTaskInstance1Response, GetTaskInstancesData, GetTaskInstancesResponse, GetTaskInstancesBatchData, @@ -136,6 +130,32 @@ import type { PostClearTaskInstancesResponse, GetLogData, GetLogResponse, + GetImportErrorData, + GetImportErrorResponse, + GetImportErrorsData, + GetImportErrorsResponse, + GetJobsData, + GetJobsResponse, + GetPluginsData, + GetPluginsResponse, + DeletePoolData, + DeletePoolResponse, + GetPoolData, + GetPoolResponse, + PatchPoolData, + PatchPoolResponse, + GetPoolsData, + GetPoolsResponse, + PostPoolData, + PostPoolResponse, + PostPoolsData, + PostPoolsResponse, + GetProvidersData, + GetProvidersResponse, + GetXcomEntryData, + GetXcomEntryResponse, + GetXcomEntriesData, + GetXcomEntriesResponse, GetTasksData, GetTasksResponse, GetTaskData, @@ -150,8 +170,6 @@ import type { GetVariablesResponse, PostVariableData, PostVariableResponse, - GetXcomEntryData, - GetXcomEntryResponse, GetHealthResponse, GetVersionResponse, } from "./types.gen"; @@ -1176,6 +1194,66 @@ export class DagRunService { }, }); } + + /** + * Trigger Dag Run + * Trigger a DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.requestBody + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ + public static triggerDagRun( + data: TriggerDagRunData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "POST", + url: "/public/dags/{dag_id}/dagRuns", + path: { + dag_id: data.dagId, + }, + body: data.requestBody, + mediaType: "application/json", + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 409: "Conflict", + 422: "Validation Error", + }, + }); + } + + /** + * Get List Dag Runs Batch + * Get a list of DAG Runs. + * @param data The data for the request. + * @param data.dagId + * @param data.requestBody + * @returns DAGRunCollectionResponse Successful Response + * @throws ApiError + */ + public static getListDagRunsBatch( + data: GetListDagRunsBatchData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "POST", + url: "/public/dags/{dag_id}/dagRuns/list", + path: { + dag_id: data.dagId, + }, + body: data.requestBody, + mediaType: "application/json", + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } } export class DagSourceService { @@ -1593,23 +1671,27 @@ export class EventLogService { } } -export class ImportErrorService { +export class ExtraLinksService { /** - * Get Import Error - * Get an import error. + * Get Extra Links + * Get extra links for task instance. * @param data The data for the request. 
- * @param data.importErrorId - * @returns ImportErrorResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns ExtraLinksResponse Successful Response * @throws ApiError */ - public static getImportError( - data: GetImportErrorData, - ): CancelablePromise { + public static getExtraLinks( + data: GetExtraLinksData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/importErrors/{import_error_id}", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links", path: { - import_error_id: data.importErrorId, + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, }, errors: { 401: "Unauthorized", @@ -1619,85 +1701,31 @@ export class ImportErrorService { }, }); } - - /** - * Get Import Errors - * Get all import errors. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns ImportErrorCollectionResponse Successful Response - * @throws ApiError - */ - public static getImportErrors( - data: GetImportErrorsData = {}, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/public/importErrors", - query: { - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } } -export class PluginService { +export class TaskInstanceService { /** - * Get Plugins + * Get Extra Links + * Get extra links for task instance. * @param data The data for the request. - * @param data.limit - * @param data.offset - * @returns PluginCollectionResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns ExtraLinksResponse Successful Response * @throws ApiError */ - public static getPlugins( - data: GetPluginsData = {}, - ): CancelablePromise { + public static getExtraLinks( + data: GetExtraLinksData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/plugins", - query: { - limit: data.limit, - offset: data.offset, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } -} - -export class PoolService { - /** - * Delete Pool - * Delete a pool entry. - * @param data The data for the request. - * @param data.poolName - * @returns void Successful Response - * @throws ApiError - */ - public static deletePool( - data: DeletePoolData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "DELETE", - url: "/public/pools/{pool_name}", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links", path: { - pool_name: data.poolName, + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, }, errors: { - 400: "Bad Request", 401: "Unauthorized", 403: "Forbidden", 404: "Not Found", @@ -1707,19 +1735,25 @@ export class PoolService { } /** - * Get Pool - * Get a pool. + * Get Task Instance + * Get task instance. * @param data The data for the request. 
- * @param data.poolName - * @returns PoolResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns TaskInstanceResponse Successful Response * @throws ApiError */ - public static getPool(data: GetPoolData): CancelablePromise { + public static getTaskInstance( + data: GetTaskInstanceData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/pools/{pool_name}", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}", path: { - pool_name: data.poolName, + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, }, errors: { 401: "Unauthorized", @@ -1731,25 +1765,31 @@ export class PoolService { } /** - * Patch Pool - * Update a Pool. + * Patch Task Instance + * Update the state of a task instance. * @param data The data for the request. - * @param data.poolName + * @param data.dagId + * @param data.dagRunId + * @param data.taskId * @param data.requestBody + * @param data.mapIndex * @param data.updateMask - * @returns PoolResponse Successful Response + * @returns TaskInstanceResponse Successful Response * @throws ApiError */ - public static patchPool( - data: PatchPoolData, - ): CancelablePromise { + public static patchTaskInstance( + data: PatchTaskInstanceData, + ): CancelablePromise { return __request(OpenAPI, { method: "PATCH", - url: "/public/pools/{pool_name}", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}", path: { - pool_name: data.poolName, + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, }, query: { + map_index: data.mapIndex, update_mask: data.updateMask, }, body: data.requestBody, @@ -1765,24 +1805,60 @@ export class PoolService { } /** - * Get Pools - * Get all pools entries. + * Get Mapped Task Instances + * Get list of mapped task instances. * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.durationGte + * @param data.durationLte + * @param data.state + * @param data.pool + * @param data.queue + * @param data.executor * @param data.limit * @param data.offset * @param data.orderBy - * @returns PoolCollectionResponse Successful Response + * @returns TaskInstanceCollectionResponse Successful Response * @throws ApiError */ - public static getPools( - data: GetPoolsData = {}, - ): CancelablePromise { + public static getMappedTaskInstances( + data: GetMappedTaskInstancesData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/pools", - query: { - limit: data.limit, - offset: data.offset, + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/listMapped", + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + }, + query: { + logical_date_gte: data.logicalDateGte, + logical_date_lte: data.logicalDateLte, + start_date_gte: data.startDateGte, + start_date_lte: data.startDateLte, + end_date_gte: data.endDateGte, + end_date_lte: data.endDateLte, + updated_at_gte: data.updatedAtGte, + updated_at_lte: data.updatedAtLte, + duration_gte: data.durationGte, + duration_lte: data.durationLte, + state: data.state, + pool: data.pool, + queue: data.queue, + executor: data.executor, + limit: data.limit, + offset: data.offset, order_by: data.orderBy, }, errors: { @@ -1795,107 +1871,62 @@ export class PoolService { } /** - * Post Pool - * Create a Pool. - * @param data The data for the request. - * @param data.requestBody - * @returns PoolResponse Successful Response - * @throws ApiError - */ - public static postPool( - data: PostPoolData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/public/pools", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * Post Pools - * Create multiple pools. - * @param data The data for the request. - * @param data.requestBody - * @returns PoolCollectionResponse Successful Response - * @throws ApiError - */ - public static postPools( - data: PostPoolsData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/public/pools/bulk", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } -} - -export class ProviderService { - /** - * Get Providers - * Get providers. + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @returns ProviderCollectionResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response * @throws ApiError */ - public static getProviders( - data: GetProvidersData = {}, - ): CancelablePromise { + public static getTaskInstanceDependencies( + data: GetTaskInstanceDependenciesData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/providers", - query: { - limit: data.limit, - offset: data.offset, + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies", + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex, }, errors: { 401: "Unauthorized", 403: "Forbidden", + 404: "Not Found", 422: "Validation Error", }, }); } -} -export class TaskInstanceService { /** - * Get Task Instance - * Get task instance. + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. * @param data The data for the request. * @param data.dagId * @param data.dagRunId * @param data.taskId - * @returns TaskInstanceResponse Successful Response + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response * @throws ApiError */ - public static getTaskInstance( - data: GetTaskInstanceData, - ): CancelablePromise { + public static getTaskInstanceDependencies1( + data: GetTaskInstanceDependencies1Data, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies", path: { dag_id: data.dagId, dag_run_id: data.dagRunId, task_id: data.taskId, }, + query: { + map_index: data.mapIndex, + }, errors: { 401: "Unauthorized", 403: "Forbidden", @@ -1906,61 +1937,29 @@ export class TaskInstanceService { } /** - * Get Mapped Task Instances - * Get list of mapped task instances. + * Get Task Instance Tries + * Get list of task instances history. * @param data The data for the request. 
* @param data.dagId * @param data.dagRunId * @param data.taskId - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.durationGte - * @param data.durationLte - * @param data.state - * @param data.pool - * @param data.queue - * @param data.executor - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns TaskInstanceCollectionResponse Successful Response + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response * @throws ApiError */ - public static getMappedTaskInstances( - data: GetMappedTaskInstancesData, - ): CancelablePromise { + public static getTaskInstanceTries( + data: GetTaskInstanceTriesData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/listMapped", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries", path: { dag_id: data.dagId, dag_run_id: data.dagRunId, task_id: data.taskId, }, query: { - logical_date_gte: data.logicalDateGte, - logical_date_lte: data.logicalDateLte, - start_date_gte: data.startDateGte, - start_date_lte: data.startDateLte, - end_date_gte: data.endDateGte, - end_date_lte: data.endDateLte, - updated_at_gte: data.updatedAtGte, - updated_at_lte: data.updatedAtLte, - duration_gte: data.durationGte, - duration_lte: data.durationLte, - state: data.state, - pool: data.pool, - queue: data.queue, - executor: data.executor, - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, + map_index: data.mapIndex, }, errors: { 401: "Unauthorized", @@ -1972,22 +1971,21 @@ export class TaskInstanceService { } /** - * Get Task Instance Dependencies - * Get dependencies blocking task from getting scheduled. + * Get Mapped Task Instance Tries * @param data The data for the request. * @param data.dagId * @param data.dagRunId * @param data.taskId * @param data.mapIndex - * @returns TaskDependencyCollectionResponse Successful Response + * @returns TaskInstanceHistoryCollectionResponse Successful Response * @throws ApiError */ - public static getTaskInstanceDependencies( - data: GetTaskInstanceDependenciesData, - ): CancelablePromise { + public static getMappedTaskInstanceTries( + data: GetMappedTaskInstanceTriesData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries", path: { dag_id: data.dagId, dag_run_id: data.dagRunId, @@ -2004,28 +2002,26 @@ export class TaskInstanceService { } /** - * Get Task Instance Dependencies - * Get dependencies blocking task from getting scheduled. + * Get Mapped Task Instance + * Get task instance. * @param data The data for the request. 
* @param data.dagId * @param data.dagRunId * @param data.taskId * @param data.mapIndex - * @returns TaskDependencyCollectionResponse Successful Response + * @returns TaskInstanceResponse Successful Response * @throws ApiError */ - public static getTaskInstanceDependencies1( - data: GetTaskInstanceDependencies1Data, - ): CancelablePromise { + public static getMappedTaskInstance( + data: GetMappedTaskInstanceData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}", path: { dag_id: data.dagId, dag_run_id: data.dagRunId, task_id: data.taskId, - }, - query: { map_index: data.mapIndex, }, errors: { @@ -2038,21 +2034,23 @@ export class TaskInstanceService { } /** - * Get Mapped Task Instance - * Get task instance. + * Patch Task Instance + * Update the state of a task instance. * @param data The data for the request. * @param data.dagId * @param data.dagRunId * @param data.taskId * @param data.mapIndex + * @param data.requestBody + * @param data.updateMask * @returns TaskInstanceResponse Successful Response * @throws ApiError */ - public static getMappedTaskInstance( - data: GetMappedTaskInstanceData, - ): CancelablePromise { + public static patchTaskInstance1( + data: PatchTaskInstance1Data, + ): CancelablePromise { return __request(OpenAPI, { - method: "GET", + method: "PATCH", url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}", path: { dag_id: data.dagId, @@ -2060,7 +2058,13 @@ export class TaskInstanceService { task_id: data.taskId, map_index: data.mapIndex, }, + query: { + update_mask: data.updateMask, + }, + body: data.requestBody, + mediaType: "application/json", errors: { + 400: "Bad Request", 401: "Unauthorized", 403: "Forbidden", 404: "Not Found", @@ -2308,30 +2312,25 @@ export class TaskInstanceService { } } -export class TaskService { +export class ImportErrorService { /** - * Get Tasks - * Get tasks for DAG. + * Get Import Error + * Get an import error. * @param data The data for the request. - * @param data.dagId - * @param data.orderBy - * @returns TaskCollectionResponse Successful Response + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response * @throws ApiError */ - public static getTasks( - data: GetTasksData, - ): CancelablePromise { + public static getImportError( + data: GetImportErrorData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/dags/{dag_id}/tasks", + url: "/public/importErrors/{import_error_id}", path: { - dag_id: data.dagId, - }, - query: { - order_by: data.orderBy, + import_error_id: data.importErrorId, }, errors: { - 400: "Bad Request", 401: "Unauthorized", 403: "Forbidden", 404: "Not Found", @@ -2341,52 +2340,133 @@ export class TaskService { } /** - * Get Task - * Get simplified representation of a task. + * Get Import Errors + * Get all import errors. * @param data The data for the request. 
- * @param data.dagId - * @param data.taskId - * @returns TaskResponse Successful Response + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response * @throws ApiError */ - public static getTask(data: GetTaskData): CancelablePromise { + public static getImportErrors( + data: GetImportErrorsData = {}, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/dags/{dag_id}/tasks/{task_id}", - path: { - dag_id: data.dagId, - task_id: data.taskId, + url: "/public/importErrors", + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, }, errors: { - 400: "Bad Request", 401: "Unauthorized", 403: "Forbidden", - 404: "Not Found", 422: "Validation Error", }, }); } } -export class VariableService { +export class JobService { /** - * Delete Variable - * Delete a variable entry. + * Get Jobs + * Get all jobs. * @param data The data for the request. - * @param data.variableKey + * @param data.isAlive + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.jobState + * @param data.jobType + * @param data.hostname + * @param data.executorClass + * @returns JobCollectionResponse Successful Response + * @throws ApiError + */ + public static getJobs( + data: GetJobsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/jobs", + query: { + is_alive: data.isAlive, + start_date_gte: data.startDateGte, + start_date_lte: data.startDateLte, + end_date_gte: data.endDateGte, + end_date_lte: data.endDateLte, + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + job_state: data.jobState, + job_type: data.jobType, + hostname: data.hostname, + executor_class: data.executorClass, + }, + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } +} + +export class PluginService { + /** + * Get Plugins + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response + * @throws ApiError + */ + public static getPlugins( + data: GetPluginsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/plugins", + query: { + limit: data.limit, + offset: data.offset, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } +} + +export class PoolService { + /** + * Delete Pool + * Delete a pool entry. + * @param data The data for the request. + * @param data.poolName * @returns void Successful Response * @throws ApiError */ - public static deleteVariable( - data: DeleteVariableData, - ): CancelablePromise { + public static deletePool( + data: DeletePoolData, + ): CancelablePromise { return __request(OpenAPI, { method: "DELETE", - url: "/public/variables/{variable_key}", + url: "/public/pools/{pool_name}", path: { - variable_key: data.variableKey, + pool_name: data.poolName, }, errors: { + 400: "Bad Request", 401: "Unauthorized", 403: "Forbidden", 404: "Not Found", @@ -2396,21 +2476,19 @@ export class VariableService { } /** - * Get Variable - * Get a variable entry. + * Get Pool + * Get a pool. * @param data The data for the request. 
- * @param data.variableKey - * @returns VariableResponse Successful Response + * @param data.poolName + * @returns PoolResponse Successful Response * @throws ApiError */ - public static getVariable( - data: GetVariableData, - ): CancelablePromise { + public static getPool(data: GetPoolData): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/variables/{variable_key}", + url: "/public/pools/{pool_name}", path: { - variable_key: data.variableKey, + pool_name: data.poolName, }, errors: { 401: "Unauthorized", @@ -2422,23 +2500,23 @@ export class VariableService { } /** - * Patch Variable - * Update a variable by key. + * Patch Pool + * Update a Pool. * @param data The data for the request. - * @param data.variableKey + * @param data.poolName * @param data.requestBody * @param data.updateMask - * @returns VariableResponse Successful Response + * @returns PoolResponse Successful Response * @throws ApiError */ - public static patchVariable( - data: PatchVariableData, - ): CancelablePromise { + public static patchPool( + data: PatchPoolData, + ): CancelablePromise { return __request(OpenAPI, { method: "PATCH", - url: "/public/variables/{variable_key}", + url: "/public/pools/{pool_name}", path: { - variable_key: data.variableKey, + pool_name: data.poolName, }, query: { update_mask: data.updateMask, @@ -2456,21 +2534,21 @@ export class VariableService { } /** - * Get Variables - * Get all Variables entries. + * Get Pools + * Get all pools entries. * @param data The data for the request. * @param data.limit * @param data.offset * @param data.orderBy - * @returns VariableCollectionResponse Successful Response + * @returns PoolCollectionResponse Successful Response * @throws ApiError */ - public static getVariables( - data: GetVariablesData = {}, - ): CancelablePromise { + public static getPools( + data: GetPoolsData = {}, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/variables", + url: "/public/pools", query: { limit: data.limit, offset: data.offset, @@ -2479,27 +2557,83 @@ export class VariableService { errors: { 401: "Unauthorized", 403: "Forbidden", + 404: "Not Found", 422: "Validation Error", }, }); } /** - * Post Variable - * Create a variable. + * Post Pool + * Create a Pool. * @param data The data for the request. * @param data.requestBody - * @returns VariableResponse Successful Response + * @returns PoolResponse Successful Response * @throws ApiError */ - public static postVariable( - data: PostVariableData, - ): CancelablePromise { + public static postPool( + data: PostPoolData, + ): CancelablePromise { return __request(OpenAPI, { method: "POST", - url: "/public/variables", + url: "/public/pools", + body: data.requestBody, + mediaType: "application/json", + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 409: "Conflict", + 422: "Validation Error", + }, + }); + } + + /** + * Post Pools + * Create multiple pools. + * @param data The data for the request. + * @param data.requestBody + * @returns PoolCollectionResponse Successful Response + * @throws ApiError + */ + public static postPools( + data: PostPoolsData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "POST", + url: "/public/pools/bulk", body: data.requestBody, mediaType: "application/json", + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 409: "Conflict", + 422: "Validation Error", + }, + }); + } +} + +export class ProviderService { + /** + * Get Providers + * Get providers. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ + public static getProviders( + data: GetProvidersData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/providers", + query: { + limit: data.limit, + offset: data.offset, + }, errors: { 401: "Unauthorized", 403: "Forbidden", @@ -2550,6 +2684,250 @@ export class XcomService { }, }); } + + /** + * Get Xcom Entries + * Get all XCom entries. + * + * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.xcomKey + * @param data.mapIndex + * @param data.limit + * @param data.offset + * @returns XComCollection Successful Response + * @throws ApiError + */ + public static getXcomEntries( + data: GetXcomEntriesData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries", + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + }, + query: { + xcom_key: data.xcomKey, + map_index: data.mapIndex, + limit: data.limit, + offset: data.offset, + }, + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } +} + +export class TaskService { + /** + * Get Tasks + * Get tasks for DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.orderBy + * @returns TaskCollectionResponse Successful Response + * @throws ApiError + */ + public static getTasks( + data: GetTasksData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/dags/{dag_id}/tasks", + path: { + dag_id: data.dagId, + }, + query: { + order_by: data.orderBy, + }, + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Get Task + * Get simplified representation of a task. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @returns TaskResponse Successful Response + * @throws ApiError + */ + public static getTask(data: GetTaskData): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/dags/{dag_id}/tasks/{task_id}", + path: { + dag_id: data.dagId, + task_id: data.taskId, + }, + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } +} + +export class VariableService { + /** + * Delete Variable + * Delete a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns void Successful Response + * @throws ApiError + */ + public static deleteVariable( + data: DeleteVariableData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "DELETE", + url: "/public/variables/{variable_key}", + path: { + variable_key: data.variableKey, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Get Variable + * Get a variable entry. + * @param data The data for the request. 
+ * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static getVariable( + data: GetVariableData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/variables/{variable_key}", + path: { + variable_key: data.variableKey, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Patch Variable + * Update a variable by key. + * @param data The data for the request. + * @param data.variableKey + * @param data.requestBody + * @param data.updateMask + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static patchVariable( + data: PatchVariableData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "PATCH", + url: "/public/variables/{variable_key}", + path: { + variable_key: data.variableKey, + }, + query: { + update_mask: data.updateMask, + }, + body: data.requestBody, + mediaType: "application/json", + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Get Variables + * Get all Variables entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns VariableCollectionResponse Successful Response + * @throws ApiError + */ + public static getVariables( + data: GetVariablesData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/variables", + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } + + /** + * Post Variable + * Create a variable. + * @param data The data for the request. + * @param data.requestBody + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static postVariable( + data: PostVariableData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "POST", + url: "/public/variables", + body: data.requestBody, + mediaType: "application/json", + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } } export class MonitorService { diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index c844b3b56f8d3..1adcdc8eba8f4 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -68,7 +68,9 @@ export type AssetEventResponse = { */ export type AssetResponse = { id: number; + name: string; uri: string; + group: string; extra?: { [key: string]: unknown; } | null; @@ -384,7 +386,7 @@ export type DAGRunPatchStates = "queued" | "success" | "failed"; * DAG Run serializer for responses. */ export type DAGRunResponse = { - run_id: string | null; + dag_run_id: string | null; dag_id: string; logical_date: string | null; queued_at: string | null; @@ -423,6 +425,23 @@ export type DAGRunTypes = { asset_triggered: number; }; +/** + * List DAG Runs body for batch endpoint. 
+ */ +export type DAGRunsBatchBody = { + order_by?: string | null; + page_offset?: number; + page_limit?: number; + dag_ids?: Array | null; + states?: Array | null; + logical_date_gte?: string | null; + logical_date_lte?: string | null; + start_date_gte?: string | null; + start_date_lte?: string | null; + end_date_gte?: string | null; + end_date_lte?: string | null; +}; + /** * DAG Source serializer for responses. */ @@ -626,6 +645,13 @@ export type EventLogResponse = { extra: string | null; }; +/** + * Extra Links Response. + */ +export type ExtraLinksResponse = { + [key: string]: string | null; +}; + /** * Serializer for Plugin FastAPI App responses. */ @@ -688,6 +714,14 @@ export type ImportErrorResponse = { stack_trace: string; }; +/** + * Job Collection Response. + */ +export type JobCollectionResponse = { + jobs: Array; + total_entries: number; +}; + /** * Job serializer for responses. */ @@ -704,6 +738,19 @@ export type JobResponse = { unixname: string | null; }; +/** + * Request body for Clear Task Instances endpoint. + */ +export type PatchTaskInstanceBody = { + dry_run?: boolean; + new_state?: string | null; + note?: string | null; + include_upstream?: boolean; + include_downstream?: boolean; + include_future?: boolean; + include_past?: boolean; +}; + /** * Plugin Collection serializer. */ @@ -861,6 +908,14 @@ export type TaskInstanceCollectionResponse = { total_entries: number; }; +/** + * TaskInstanceHistory Collection serializer for responses. + */ +export type TaskInstanceHistoryCollectionResponse = { + task_instances: Array; + total_entries: number; +}; + /** * TaskInstanceHistory serializer for responses. */ @@ -1073,6 +1128,19 @@ export type TimeDelta = { microseconds: number; }; +/** + * Trigger DAG Run Serializer for POST body. + */ +export type TriggerDAGRunPostBody = { + dag_run_id?: string | null; + data_interval_start?: string | null; + data_interval_end?: string | null; + conf?: { + [key: string]: unknown; + }; + note?: string | null; +}; + /** * Trigger serializer for responses. */ @@ -1132,6 +1200,26 @@ export type VersionInfo = { git_version: string | null; }; +/** + * List of XCom items. + */ +export type XComCollection = { + xcom_entries: Array; + total_entries: number; +}; + +/** + * Serializer for a xcom item. + */ +export type XComResponse = { + key: string; + timestamp: string; + logical_date: string; + map_index: number; + task_id: string; + dag_id: string; +}; + /** * XCom response serializer with native return type. 
*/ @@ -1421,6 +1509,20 @@ export type GetDagRunsData = { export type GetDagRunsResponse = DAGRunCollectionResponse; +export type TriggerDagRunData = { + dagId: unknown; + requestBody: TriggerDAGRunPostBody; +}; + +export type TriggerDagRunResponse = DAGRunResponse; + +export type GetListDagRunsBatchData = { + dagId: "~"; + requestBody: DAGRunsBatchBody; +}; + +export type GetListDagRunsBatchResponse = DAGRunCollectionResponse; + export type GetDagSourceData = { accept?: "application/json" | "text/plain" | "*/*"; dagId: string; @@ -1535,81 +1637,32 @@ export type GetEventLogsData = { export type GetEventLogsResponse = EventLogCollectionResponse; -export type GetImportErrorData = { - importErrorId: number; -}; - -export type GetImportErrorResponse = ImportErrorResponse; - -export type GetImportErrorsData = { - limit?: number; - offset?: number; - orderBy?: string; -}; - -export type GetImportErrorsResponse = ImportErrorCollectionResponse; - -export type GetPluginsData = { - limit?: number; - offset?: number; -}; - -export type GetPluginsResponse = PluginCollectionResponse; - -export type DeletePoolData = { - poolName: string; -}; - -export type DeletePoolResponse = void; - -export type GetPoolData = { - poolName: string; -}; - -export type GetPoolResponse = PoolResponse; - -export type PatchPoolData = { - poolName: string; - requestBody: PoolPatchBody; - updateMask?: Array | null; -}; - -export type PatchPoolResponse = PoolResponse; - -export type GetPoolsData = { - limit?: number; - offset?: number; - orderBy?: string; -}; - -export type GetPoolsResponse = PoolCollectionResponse; - -export type PostPoolData = { - requestBody: PoolPostBody; -}; - -export type PostPoolResponse = PoolResponse; - -export type PostPoolsData = { - requestBody: PoolPostBulkBody; +export type GetExtraLinksData = { + dagId: string; + dagRunId: string; + taskId: string; }; -export type PostPoolsResponse = PoolCollectionResponse; +export type GetExtraLinksResponse = ExtraLinksResponse; -export type GetProvidersData = { - limit?: number; - offset?: number; +export type GetTaskInstanceData = { + dagId: string; + dagRunId: string; + taskId: string; }; -export type GetProvidersResponse = ProviderCollectionResponse; +export type GetTaskInstanceResponse = TaskInstanceResponse; -export type GetTaskInstanceData = { +export type PatchTaskInstanceData = { dagId: string; dagRunId: string; + mapIndex?: number; + requestBody: PatchTaskInstanceBody; taskId: string; + updateMask?: Array | null; }; -export type GetTaskInstanceResponse = TaskInstanceResponse; +export type PatchTaskInstanceResponse = TaskInstanceResponse; export type GetMappedTaskInstancesData = { dagId: string; @@ -1656,6 +1709,26 @@ export type GetTaskInstanceDependencies1Data = { export type GetTaskInstanceDependencies1Response = TaskDependencyCollectionResponse; +export type GetTaskInstanceTriesData = { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}; + +export type GetTaskInstanceTriesResponse = + TaskInstanceHistoryCollectionResponse; + +export type GetMappedTaskInstanceTriesData = { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}; + +export type GetMappedTaskInstanceTriesResponse = + TaskInstanceHistoryCollectionResponse; + export type GetMappedTaskInstanceData = { dagId: string; dagRunId: string; @@ -1665,6 +1738,17 @@ export type GetMappedTaskInstanceData = { export type GetMappedTaskInstanceResponse = TaskInstanceResponse; +export type PatchTaskInstance1Data = { + dagId: string; + dagRunId: string; 
+ mapIndex: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: Array | null; +}; + +export type PatchTaskInstance1Response = TaskInstanceResponse; + export type GetTaskInstancesData = { dagId: string; dagRunId: string; @@ -1739,53 +1823,90 @@ export type GetLogData = { export type GetLogResponse = TaskInstancesLogResponse; -export type GetTasksData = { - dagId: string; +export type GetImportErrorData = { + importErrorId: number; +}; + +export type GetImportErrorResponse = ImportErrorResponse; + +export type GetImportErrorsData = { + limit?: number; + offset?: number; orderBy?: string; }; -export type GetTasksResponse = TaskCollectionResponse; +export type GetImportErrorsResponse = ImportErrorCollectionResponse; -export type GetTaskData = { - dagId: string; - taskId: unknown; +export type GetJobsData = { + endDateGte?: string | null; + endDateLte?: string | null; + executorClass?: string | null; + hostname?: string | null; + isAlive?: boolean | null; + jobState?: string | null; + jobType?: string | null; + limit?: number; + offset?: number; + orderBy?: string; + startDateGte?: string | null; + startDateLte?: string | null; }; -export type GetTaskResponse = TaskResponse; +export type GetJobsResponse = JobCollectionResponse; -export type DeleteVariableData = { - variableKey: string; +export type GetPluginsData = { + limit?: number; + offset?: number; }; -export type DeleteVariableResponse = void; +export type GetPluginsResponse = PluginCollectionResponse; -export type GetVariableData = { - variableKey: string; +export type DeletePoolData = { + poolName: string; }; -export type GetVariableResponse = VariableResponse; +export type DeletePoolResponse = void; -export type PatchVariableData = { - requestBody: VariableBody; +export type GetPoolData = { + poolName: string; +}; + +export type GetPoolResponse = PoolResponse; + +export type PatchPoolData = { + poolName: string; + requestBody: PoolPatchBody; updateMask?: Array | null; - variableKey: string; }; -export type PatchVariableResponse = VariableResponse; +export type PatchPoolResponse = PoolResponse; -export type GetVariablesData = { +export type GetPoolsData = { limit?: number; offset?: number; orderBy?: string; }; -export type GetVariablesResponse = VariableCollectionResponse; +export type GetPoolsResponse = PoolCollectionResponse; -export type PostVariableData = { - requestBody: VariableBody; +export type PostPoolData = { + requestBody: PoolPostBody; }; -export type PostVariableResponse = VariableResponse; +export type PostPoolResponse = PoolResponse; + +export type PostPoolsData = { + requestBody: PoolPostBulkBody; +}; + +export type PostPoolsResponse = PoolCollectionResponse; + +export type GetProvidersData = { + limit?: number; + offset?: number; +}; + +export type GetProvidersResponse = ProviderCollectionResponse; export type GetXcomEntryData = { dagId: string; @@ -1799,16 +1920,76 @@ export type GetXcomEntryData = { export type GetXcomEntryResponse = XComResponseNative | XComResponseString; -export type GetHealthResponse = HealthInfoSchema; +export type GetXcomEntriesData = { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number | null; + offset?: number; + taskId: string; + xcomKey?: string | null; +}; -export type GetVersionResponse = VersionInfo; +export type GetXcomEntriesResponse = XComCollection; -export type $OpenApiTs = { - "/ui/next_run_assets/{dag_id}": { - get: { - req: NextRunAssetsData; - res: { - /** +export type GetTasksData = { + dagId: string; + orderBy?: string; +}; + 
+export type GetTasksResponse = TaskCollectionResponse; + +export type GetTaskData = { + dagId: string; + taskId: unknown; +}; + +export type GetTaskResponse = TaskResponse; + +export type DeleteVariableData = { + variableKey: string; +}; + +export type DeleteVariableResponse = void; + +export type GetVariableData = { + variableKey: string; +}; + +export type GetVariableResponse = VariableResponse; + +export type PatchVariableData = { + requestBody: VariableBody; + updateMask?: Array | null; + variableKey: string; +}; + +export type PatchVariableResponse = VariableResponse; + +export type GetVariablesData = { + limit?: number; + offset?: number; + orderBy?: string; +}; + +export type GetVariablesResponse = VariableCollectionResponse; + +export type PostVariableData = { + requestBody: VariableBody; +}; + +export type PostVariableResponse = VariableResponse; + +export type GetHealthResponse = HealthInfoSchema; + +export type GetVersionResponse = VersionInfo; + +export type $OpenApiTs = { + "/ui/next_run_assets/{dag_id}": { + get: { + req: NextRunAssetsData; + res: { + /** * Successful Response */ 200: { @@ -2694,6 +2875,66 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; + post: { + req: TriggerDagRunData; + res: { + /** + * Successful Response + */ + 200: DAGRunResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/dags/{dag_id}/dagRuns/list": { + post: { + req: GetListDagRunsBatchData; + res: { + /** + * Successful Response + */ + 200: DAGRunCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; "/public/dagSources/{dag_id}": { get: { @@ -3029,14 +3270,14 @@ export type $OpenApiTs = { }; }; }; - "/public/importErrors/{import_error_id}": { + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links": { get: { - req: GetImportErrorData; + req: GetExtraLinksData; res: { /** * Successful Response */ - 200: ImportErrorResponse; + 200: ExtraLinksResponse; /** * Unauthorized */ @@ -3056,14 +3297,14 @@ export type $OpenApiTs = { }; }; }; - "/public/importErrors": { + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}": { get: { - req: GetImportErrorsData; + req: GetTaskInstanceData; res: { /** * Successful Response */ - 200: ImportErrorCollectionResponse; + 200: TaskInstanceResponse; /** * Unauthorized */ @@ -3073,43 +3314,22 @@ export type $OpenApiTs = { */ 403: HTTPExceptionResponse; /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; - "/public/plugins": { - get: { - req: GetPluginsData; - res: { - /** - * Successful Response - */ - 200: PluginCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden + * Not Found */ - 403: HTTPExceptionResponse; + 404: HTTPExceptionResponse; /** * Validation Error */ 422: HTTPValidationError; }; }; - }; - "/public/pools/{pool_name}": { - delete: { - req: DeletePoolData; + patch: { + req: PatchTaskInstanceData; res: { /** * Successful Response */ - 204: void; + 200: TaskInstanceResponse; /** * Bad Request */ @@ -3132,13 +3352,15 @@ export 
type $OpenApiTs = { 422: HTTPValidationError; }; }; + }; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/listMapped": { get: { - req: GetPoolData; + req: GetMappedTaskInstancesData; res: { /** * Successful Response */ - 200: PoolResponse; + 200: TaskInstanceCollectionResponse; /** * Unauthorized */ @@ -3157,17 +3379,15 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; - patch: { - req: PatchPoolData; + }; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies": { + get: { + req: GetTaskInstanceDependenciesData; res: { /** * Successful Response */ - 200: PoolResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; + 200: TaskDependencyCollectionResponse; /** * Unauthorized */ @@ -3187,14 +3407,14 @@ export type $OpenApiTs = { }; }; }; - "/public/pools": { + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies": { get: { - req: GetPoolsData; + req: GetTaskInstanceDependencies1Data; res: { /** * Successful Response */ - 200: PoolCollectionResponse; + 200: TaskDependencyCollectionResponse; /** * Unauthorized */ @@ -3213,13 +3433,15 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; - post: { - req: PostPoolData; + }; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries": { + get: { + req: GetTaskInstanceTriesData; res: { /** * Successful Response */ - 201: PoolResponse; + 200: TaskInstanceHistoryCollectionResponse; /** * Unauthorized */ @@ -3229,9 +3451,9 @@ export type $OpenApiTs = { */ 403: HTTPExceptionResponse; /** - * Conflict + * Not Found */ - 409: HTTPExceptionResponse; + 404: HTTPExceptionResponse; /** * Validation Error */ @@ -3239,14 +3461,14 @@ export type $OpenApiTs = { }; }; }; - "/public/pools/bulk": { - post: { - req: PostPoolsData; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries": { + get: { + req: GetMappedTaskInstanceTriesData; res: { /** * Successful Response */ - 201: PoolCollectionResponse; + 200: TaskInstanceHistoryCollectionResponse; /** * Unauthorized */ @@ -3256,9 +3478,9 @@ export type $OpenApiTs = { */ 403: HTTPExceptionResponse; /** - * Conflict + * Not Found */ - 409: HTTPExceptionResponse; + 404: HTTPExceptionResponse; /** * Validation Error */ @@ -3266,14 +3488,14 @@ export type $OpenApiTs = { }; }; }; - "/public/providers": { + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}": { get: { - req: GetProvidersData; + req: GetMappedTaskInstanceData; res: { /** * Successful Response */ - 200: ProviderCollectionResponse; + 200: TaskInstanceResponse; /** * Unauthorized */ @@ -3282,21 +3504,27 @@ export type $OpenApiTs = { * Forbidden */ 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; /** * Validation Error */ 422: HTTPValidationError; }; }; - }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}": { - get: { - req: GetTaskInstanceData; + patch: { + req: PatchTaskInstance1Data; res: { /** * Successful Response */ 200: TaskInstanceResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; /** * Unauthorized */ @@ -3316,9 +3544,9 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/listMapped": { + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances": { get: { - req: GetMappedTaskInstancesData; + req: GetTaskInstancesData; res: { /** * Successful Response @@ -3343,14 +3571,14 @@ export type $OpenApiTs = { }; }; }; - 
"/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies": { - get: { - req: GetTaskInstanceDependenciesData; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/list": { + post: { + req: GetTaskInstancesBatchData; res: { /** * Successful Response */ - 200: TaskDependencyCollectionResponse; + 200: TaskInstanceCollectionResponse; /** * Unauthorized */ @@ -3370,14 +3598,14 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies": { + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries/{task_try_number}": { get: { - req: GetTaskInstanceDependencies1Data; + req: GetTaskInstanceTryDetailsData; res: { /** * Successful Response */ - 200: TaskDependencyCollectionResponse; + 200: TaskInstanceHistoryResponse; /** * Unauthorized */ @@ -3397,14 +3625,14 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}": { + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries/{task_try_number}": { get: { - req: GetMappedTaskInstanceData; + req: GetMappedTaskInstanceTryDetailsData; res: { /** * Successful Response */ - 200: TaskInstanceResponse; + 200: TaskInstanceHistoryResponse; /** * Unauthorized */ @@ -3424,14 +3652,14 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances": { - get: { - req: GetTaskInstancesData; + "/public/dags/{dag_id}/clearTaskInstances": { + post: { + req: PostClearTaskInstancesData; res: { /** * Successful Response */ - 200: TaskInstanceCollectionResponse; + 200: TaskInstanceReferenceCollectionResponse; /** * Unauthorized */ @@ -3451,14 +3679,14 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/list": { - post: { - req: GetTaskInstancesBatchData; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{try_number}": { + get: { + req: GetLogData; res: { /** * Successful Response */ - 200: TaskInstanceCollectionResponse; + 200: TaskInstancesLogResponse; /** * Unauthorized */ @@ -3478,14 +3706,14 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries/{task_try_number}": { + "/public/importErrors/{import_error_id}": { get: { - req: GetTaskInstanceTryDetailsData; + req: GetImportErrorData; res: { /** * Successful Response */ - 200: TaskInstanceHistoryResponse; + 200: ImportErrorResponse; /** * Unauthorized */ @@ -3505,14 +3733,145 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries/{task_try_number}": { + "/public/importErrors": { get: { - req: GetMappedTaskInstanceTryDetailsData; + req: GetImportErrorsData; res: { /** * Successful Response */ - 200: TaskInstanceHistoryResponse; + 200: ImportErrorCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/jobs": { + get: { + req: GetJobsData; + res: { + /** + * Successful Response + */ + 200: JobCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/plugins": { + get: { + req: GetPluginsData; + res: 
{ + /** + * Successful Response + */ + 200: PluginCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/pools/{pool_name}": { + delete: { + req: DeletePoolData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetPoolData; + res: { + /** + * Successful Response + */ + 200: PoolResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: PatchPoolData; + res: { + /** + * Successful Response + */ + 200: PoolResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; /** * Unauthorized */ @@ -3532,14 +3891,120 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/clearTaskInstances": { + "/public/pools": { + get: { + req: GetPoolsData; + res: { + /** + * Successful Response + */ + 200: PoolCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; post: { - req: PostClearTaskInstancesData; + req: PostPoolData; res: { /** * Successful Response */ - 200: TaskInstanceReferenceCollectionResponse; + 201: PoolResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/pools/bulk": { + post: { + req: PostPoolsData; + res: { + /** + * Successful Response + */ + 201: PoolCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/providers": { + get: { + req: GetProvidersData; + res: { + /** + * Successful Response + */ + 200: ProviderCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}": { + get: { + req: GetXcomEntryData; + res: { + /** + * Successful Response + */ + 200: XComResponseNative | XComResponseString; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; /** * Unauthorized */ @@ -3559,14 +4024,18 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{try_number}": { + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries": { get: { - req: GetLogData; + req: GetXcomEntriesData; res: { /** * Successful Response */ - 200: TaskInstancesLogResponse; + 200: XComCollection; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; /** * Unauthorized */ @@ -3773,37 
+4242,6 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}": { - get: { - req: GetXcomEntryData; - res: { - /** - * Successful Response - */ - 200: XComResponseNative | XComResponseString; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; "/public/monitor/health": { get: { res: { diff --git a/airflow/ui/src/components/DataTable/useTableUrlState.ts b/airflow/ui/src/components/DataTable/useTableUrlState.ts index bb249b735c8d8..286882bd02528 100644 --- a/airflow/ui/src/components/DataTable/useTableUrlState.ts +++ b/airflow/ui/src/components/DataTable/useTableUrlState.ts @@ -27,9 +27,7 @@ import type { TableState } from "./types"; export const useTableURLState = (defaultState?: Partial) => { const [searchParams, setSearchParams] = useSearchParams(); - const configPageSize = useConfig("webserver", "page_size"); - const pageSize = - typeof configPageSize === "string" ? parseInt(configPageSize, 10) : 50; + const pageSize = useConfig("page_size") as number; const defaultTableState = { pagination: { diff --git a/airflow/ui/src/components/TogglePause.tsx b/airflow/ui/src/components/TogglePause.tsx index 9670eb372c6fb..87dea1cec6b2f 100644 --- a/airflow/ui/src/components/TogglePause.tsx +++ b/airflow/ui/src/components/TogglePause.tsx @@ -60,8 +60,9 @@ export const TogglePause = ({ onSuccess, }); - const showConfirmation = - useConfig("webserver", "require_confirmation_dag_change") === "True"; + const showConfirmation = Boolean( + useConfig("require_confirmation_dag_change"), + ); const onToggle = useCallback(() => { mutate({ diff --git a/airflow/ui/src/context/timezone/TimezoneProvider.tsx b/airflow/ui/src/context/timezone/TimezoneProvider.tsx index 5b1ef11558d4b..6d4dd5eb29ee6 100644 --- a/airflow/ui/src/context/timezone/TimezoneProvider.tsx +++ b/airflow/ui/src/context/timezone/TimezoneProvider.tsx @@ -33,7 +33,7 @@ export const TimezoneContext = createContext( const TIMEZONE_KEY = "timezone"; export const TimezoneProvider = ({ children }: PropsWithChildren) => { - const defaultUITimezone = useConfig("webserver", "default_ui_timezone"); + const defaultUITimezone = useConfig("default_ui_timezone"); const [selectedTimezone, setSelectedTimezone] = useLocalStorage( TIMEZONE_KEY, diff --git a/airflow/ui/src/layouts/BaseLayout.tsx b/airflow/ui/src/layouts/BaseLayout.tsx index 73b43a40a25b5..ddc3dc16ba85b 100644 --- a/airflow/ui/src/layouts/BaseLayout.tsx +++ b/airflow/ui/src/layouts/BaseLayout.tsx @@ -25,7 +25,7 @@ import { useConfig } from "src/queries/useConfig"; import { Nav } from "./Nav"; export const BaseLayout = ({ children }: PropsWithChildren) => { - const instanceName = useConfig("webserver", "instance_name"); + const instanceName = useConfig("instance_name"); // const instanceNameHasMarkup = // webserverConfig?.options.find( // ({ key }) => key === "instance_name_has_markup", diff --git a/airflow/ui/src/layouts/Nav/DocsButton.tsx b/airflow/ui/src/layouts/Nav/DocsButton.tsx index 8b305e87f0ee1..b6e494b4fc4ab 100644 --- a/airflow/ui/src/layouts/Nav/DocsButton.tsx +++ b/airflow/ui/src/layouts/Nav/DocsButton.tsx @@ -40,7 +40,7 @@ const links = [ ]; export const DocsButton = () => { - const showAPIDocs = useConfig("webserver", "enable_swagger_ui") === "True"; + const 
showAPIDocs = Boolean(useConfig("enable_swagger_ui")); return ( diff --git a/airflow/ui/src/main.tsx b/airflow/ui/src/main.tsx index 8d6d3bf0a6955..524efdb6695ce 100644 --- a/airflow/ui/src/main.tsx +++ b/airflow/ui/src/main.tsx @@ -47,11 +47,13 @@ const queryClient = new QueryClient({ axios.interceptors.response.use( (response) => response, (error: AxiosError) => { - if (error.response?.status === 403 || error.response?.status === 401) { + if (error.response?.status === 401) { const params = new URLSearchParams(); params.set("next", globalThis.location.href); - globalThis.location.replace(`/login?${params.toString()}`); + globalThis.location.replace( + `${import.meta.env.VITE_LEGACY_API_URL}/login?${params.toString()}`, + ); } return Promise.reject(error); diff --git a/airflow/ui/src/pages/DagsList/Dag/Code/Code.tsx b/airflow/ui/src/pages/DagsList/Dag/Code/Code.tsx index 8dd9a68472d35..e224db5a36218 100644 --- a/airflow/ui/src/pages/DagsList/Dag/Code/Code.tsx +++ b/airflow/ui/src/pages/DagsList/Dag/Code/Code.tsx @@ -60,7 +60,7 @@ export const Code = () => { dagId: dagId ?? "", }); - const defaultWrap = useConfig("webserver", "default_wrap") === "True"; + const defaultWrap = Boolean(useConfig("default_wrap")); const [wrap, setWrap] = useState(defaultWrap); diff --git a/airflow/ui/src/pages/DagsList/DagsFilters.tsx b/airflow/ui/src/pages/DagsList/DagsFilters.tsx index 9ec45dff16492..fbce68971b27c 100644 --- a/airflow/ui/src/pages/DagsList/DagsFilters.tsx +++ b/airflow/ui/src/pages/DagsList/DagsFilters.tsx @@ -70,12 +70,10 @@ export const DagsFilters = () => { orderBy: "name", }); - const hidePausedDagsByDefault = useConfig( - "webserver", - "hide_paused_dags_by_default", + const hidePausedDagsByDefault = Boolean( + useConfig("hide_paused_dags_by_default"), ); - const defaultShowPaused = - hidePausedDagsByDefault === "True" ? "false" : "all"; + const defaultShowPaused = hidePausedDagsByDefault ? "false" : "all"; const { setTableURLState, tableURLState } = useTableURLState(); const { pagination, sorting } = tableURLState; diff --git a/airflow/ui/src/pages/DagsList/DagsList.tsx b/airflow/ui/src/pages/DagsList/DagsList.tsx index daa931e5464d6..66f1bd8a7007e 100644 --- a/airflow/ui/src/pages/DagsList/DagsList.tsx +++ b/airflow/ui/src/pages/DagsList/DagsList.tsx @@ -159,12 +159,10 @@ export const DagsList = () => { "card", ); - const hidePausedDagsByDefault = useConfig( - "webserver", - "hide_paused_dags_by_default", + const hidePausedDagsByDefault = Boolean( + useConfig("hide_paused_dags_by_default"), ); - const defaultShowPaused = - hidePausedDagsByDefault === "True" ? false : undefined; + const defaultShowPaused = hidePausedDagsByDefault ? 
false : undefined; const showPaused = searchParams.get(PAUSED_PARAM); diff --git a/airflow/ui/src/pages/DagsList/RecentRuns.tsx b/airflow/ui/src/pages/DagsList/RecentRuns.tsx index fd9f45cef028b..355c68c9d09b0 100644 --- a/airflow/ui/src/pages/DagsList/RecentRuns.tsx +++ b/airflow/ui/src/pages/DagsList/RecentRuns.tsx @@ -63,7 +63,7 @@ export const RecentRuns = ({ Duration: {run.duration.toFixed(2)}s } - key={run.run_id} + key={run.dag_run_id} positioning={{ offset: { crossAxis: 5, diff --git a/airflow/ui/src/pages/Dashboard/Health/Health.tsx b/airflow/ui/src/pages/Dashboard/Health/Health.tsx index 42829a69581a3..0694283930ce9 100644 --- a/airflow/ui/src/pages/Dashboard/Health/Health.tsx +++ b/airflow/ui/src/pages/Dashboard/Health/Health.tsx @@ -21,16 +21,12 @@ import { MdOutlineHealthAndSafety } from "react-icons/md"; import { useMonitorServiceGetHealth } from "openapi/queries"; import { ErrorAlert } from "src/components/ErrorAlert"; -import { useConfig } from "src/queries/useConfig"; import { HealthTag } from "./HealthTag"; export const Health = () => { const { data, error, isLoading } = useMonitorServiceGetHealth(); - const isStandaloneDagProcessor = - useConfig("scheduler", "standalone_dag_processor") === "True"; - return ( @@ -58,14 +54,15 @@ export const Health = () => { status={data?.triggerer.status} title="Triggerer" /> - {isStandaloneDagProcessor ? ( + {/* TODO: Update this to match the API when we move the config check to the API level */} + {data?.dag_processor.status === undefined ? undefined : ( - ) : undefined} + )} ); diff --git a/airflow/ui/src/queries/useConfig.tsx b/airflow/ui/src/queries/useConfig.tsx index 7dd17ef27bf34..a77a6613bfa6e 100644 --- a/airflow/ui/src/queries/useConfig.tsx +++ b/airflow/ui/src/queries/useConfig.tsx @@ -16,17 +16,11 @@ * specific language governing permissions and limitations * under the License. */ -import { useConfigServiceGetConfig } from "openapi/queries"; +import { useConfigServiceGetConfigs } from "openapi/queries"; +import type { ConfigResponse } from "openapi/requests/types.gen"; -export const useConfig = (sectionName: string, configKey: string) => { - // TODO: replace with a ui/config endpoint which will always return what the UI need to render - const { data: config } = useConfigServiceGetConfig({ - accept: "application/json", - }); +export const useConfig = (configKey: keyof ConfigResponse) => { + const { data: config } = useConfigServiceGetConfigs(); - const configSection = config?.sections.find( - (section) => section.name === sectionName, - ); - - return configSection?.options.find(({ key }) => key === configKey)?.value; + return config?.[configKey]; }; diff --git a/airflow/utils/context.py b/airflow/utils/context.py index b954a5e1f2f91..90232e7b2efd8 100644 --- a/airflow/utils/context.py +++ b/airflow/utils/context.py @@ -23,17 +23,11 @@ import copy import functools import warnings +from collections.abc import Container, ItemsView, Iterator, KeysView, Mapping, MutableMapping, ValuesView from typing import ( TYPE_CHECKING, Any, - Container, - ItemsView, - Iterator, - KeysView, - Mapping, - MutableMapping, SupportsIndex, - ValuesView, ) import attrs diff --git a/airflow/utils/context.pyi b/airflow/utils/context.pyi index 069dba2f8f191..d0ea2132f1c95 100644 --- a/airflow/utils/context.pyi +++ b/airflow/utils/context.pyi @@ -26,7 +26,8 @@ # declare "these are defined, but don't error if others are accessed" someday. 
from __future__ import annotations -from typing import Any, Collection, Container, Iterable, Iterator, Mapping, Sequence, overload +from collections.abc import Collection, Container, Iterable, Iterator, Mapping, Sequence +from typing import Any, overload from pendulum import DateTime from sqlalchemy.orm import Session diff --git a/airflow/utils/db.py b/airflow/utils/db.py index d8939a117317f..5748adf0e46e6 100644 --- a/airflow/utils/db.py +++ b/airflow/utils/db.py @@ -27,16 +27,13 @@ import sys import time import warnings +from collections.abc import Generator, Iterable, Iterator, Sequence from tempfile import gettempdir from typing import ( TYPE_CHECKING, Any, Callable, - Generator, - Iterable, - Iterator, Protocol, - Sequence, TypeVar, overload, ) @@ -44,7 +41,6 @@ import attrs from sqlalchemy import ( Table, - delete, exc, func, inspect, @@ -70,6 +66,7 @@ from alembic.runtime.environment import EnvironmentContext from alembic.script import ScriptDirectory from sqlalchemy.engine import Row + from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Session from sqlalchemy.sql.elements import ClauseElement, TextClause from sqlalchemy.sql.selectable import Select @@ -857,10 +854,13 @@ def _configured_alembic_environment() -> Generator[EnvironmentContext, None, Non config = _get_alembic_config() script = _get_script_object(config) - with EnvironmentContext( - config, - script, - ) as env, settings.engine.connect() as connection: + with ( + EnvironmentContext( + config, + script, + ) as env, + settings.engine.connect() as connection, + ): alembic_logger = logging.getLogger("alembic") level = alembic_logger.level alembic_logger.setLevel(logging.WARNING) @@ -923,9 +923,7 @@ def check_and_run_migrations(): def _reserialize_dags(*, session: Session) -> None: from airflow.models.dagbag import DagBag - from airflow.models.serialized_dag import SerializedDagModel - session.execute(delete(SerializedDagModel).execution_options(synchronize_session=False)) dagbag = DagBag(collect_dags=False) dagbag.collect_dags(only_if_updated=False) dagbag.sync_to_db(session=session) @@ -1223,19 +1221,6 @@ def resetdb(session: Session = NEW_SESSION, skip_init: bool = False): initdb(session=session) -@provide_session -def bootstrap_dagbag(session: Session = NEW_SESSION): - from airflow.models.dag import DAG - from airflow.models.dagbag import DagBag - - dagbag = DagBag() - # Save DAGs in the ORM - dagbag.sync_to_db(session=session) - - # Deactivate the unknown ones - DAG.deactivate_unknown_dags(dagbag.dags.keys(), session=session) - - @provide_session def downgrade(*, to_revision, from_revision=None, show_sql_only=False, session: Session = NEW_SESSION): """ @@ -1447,6 +1432,21 @@ def get_query_count(query_stmt: Select, *, session: Session) -> int: return session.scalar(count_stmt) +async def get_query_count_async(statement: Select, *, session: AsyncSession) -> int: + """ + Get count of a query. + + A SELECT COUNT() FROM is issued against the subquery built from the + given statement. The ORDER BY clause is stripped from the statement + since it's unnecessary for COUNT, and can impact query planning and + degrade performance. + + :meta private: + """ + count_stmt = select(func.count()).select_from(statement.order_by(None).subquery()) + return await session.scalar(count_stmt) + + def check_query_exists(query_stmt: Select, *, session: Session) -> bool: """ Check whether there is at least one row matching a query. 
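For context on the async helpers introduced in `airflow/utils/db.py` above (`get_query_count_async`) and in `airflow/utils/session.py` later in this diff (`create_session_async`), here is a minimal usage sketch. It assumes an async SQLAlchemy engine is configured so that `airflow.settings.AsyncSession` (imported by `create_session_async`) is available; `DagModel` and its `is_paused` column are used purely for illustration and are not part of this change.

```python
from sqlalchemy import select

from airflow.models.dag import DagModel
from airflow.utils.db import get_query_count_async
from airflow.utils.session import create_session_async


async def count_unpaused_dags() -> int:
    # Any SELECT works here; get_query_count_async drops the ORDER BY clause
    # and wraps the statement in a SELECT COUNT(*) before executing it.
    stmt = select(DagModel).where(DagModel.is_paused.is_(False))
    async with create_session_async() as session:
        # The helper commits on clean exit and rolls back on error,
        # mirroring the synchronous create_session() context manager.
        return await get_query_count_async(stmt, session=session)
```

Both helpers are marked `:meta private:` in the patch, so treat this as a sketch of the intended call pattern rather than a public API.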
diff --git a/airflow/utils/db_cleanup.py b/airflow/utils/db_cleanup.py index 9f0f8d63fe12c..a429826a40674 100644 --- a/airflow/utils/db_cleanup.py +++ b/airflow/utils/db_cleanup.py @@ -53,6 +53,10 @@ logger = logging.getLogger(__name__) ARCHIVE_TABLE_PREFIX = "_airflow_deleted__" +# Archived tables created by DB migrations +ARCHIVED_TABLES_FROM_DB_MIGRATIONS = [ + "_xcom_archive" # Table created by the AF 2 -> 3.0.0 migration when the XComs had pickled values +] @dataclass @@ -116,10 +120,12 @@ def readable_config(self): _TableConfig(table_name="task_instance_history", recency_column_name="start_date"), _TableConfig(table_name="task_reschedule", recency_column_name="start_date"), _TableConfig(table_name="xcom", recency_column_name="timestamp"), + _TableConfig(table_name="_xcom_archive", recency_column_name="timestamp"), _TableConfig(table_name="callback_request", recency_column_name="created_at"), _TableConfig(table_name="celery_taskmeta", recency_column_name="date_done"), _TableConfig(table_name="celery_tasksetmeta", recency_column_name="date_done"), _TableConfig(table_name="trigger", recency_column_name="created_date"), + _TableConfig(table_name="dag_version", recency_column_name="created_at"), ] if conf.get("webserver", "session_backend") == "database": @@ -380,13 +386,20 @@ def _effective_table_names(*, table_names: list[str] | None) -> tuple[set[str], def _get_archived_table_names(table_names: list[str] | None, session: Session) -> list[str]: inspector = inspect(session.bind) - db_table_names = [x for x in inspector.get_table_names() if x.startswith(ARCHIVE_TABLE_PREFIX)] + db_table_names = [ + x + for x in inspector.get_table_names() + if x.startswith(ARCHIVE_TABLE_PREFIX) or x in ARCHIVED_TABLES_FROM_DB_MIGRATIONS + ] effective_table_names, _ = _effective_table_names(table_names=table_names) # Filter out tables that don't start with the archive prefix archived_table_names = [ table_name for table_name in db_table_names - if any("__" + x + "__" in table_name for x in effective_table_names) + if ( + any("__" + x + "__" in table_name for x in effective_table_names) + or table_name in ARCHIVED_TABLES_FROM_DB_MIGRATIONS + ) ] return archived_table_names diff --git a/airflow/utils/email.py b/airflow/utils/email.py index 3a63b4180472b..455b135c23e6c 100644 --- a/airflow/utils/email.py +++ b/airflow/utils/email.py @@ -22,11 +22,12 @@ import os import smtplib import ssl +from collections.abc import Iterable from email.mime.application import MIMEApplication from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from email.utils import formatdate -from typing import Any, Iterable +from typing import Any import re2 diff --git a/airflow/utils/entry_points.py b/airflow/utils/entry_points.py index 443d3e13f588e..55e7dc789dacb 100644 --- a/airflow/utils/entry_points.py +++ b/airflow/utils/entry_points.py @@ -20,7 +20,7 @@ import logging import sys from collections import defaultdict -from typing import Iterator, Tuple +from collections.abc import Iterator if sys.version_info >= (3, 12): from importlib import metadata @@ -29,10 +29,10 @@ log = logging.getLogger(__name__) -EPnD = Tuple[metadata.EntryPoint, metadata.Distribution] +EPnD = tuple[metadata.EntryPoint, metadata.Distribution] -@functools.lru_cache(maxsize=None) +@functools.cache def _get_grouped_entry_points() -> dict[str, list[EPnD]]: mapping: dict[str, list[EPnD]] = defaultdict(list) for dist in metadata.distributions(): diff --git a/airflow/utils/file.py b/airflow/utils/file.py index 
09b39e98ccf99..5c3e454e294af 100644 --- a/airflow/utils/file.py +++ b/airflow/utils/file.py @@ -22,9 +22,11 @@ import logging import os import zipfile +from collections.abc import Generator from io import TextIOWrapper from pathlib import Path -from typing import Generator, NamedTuple, Pattern, Protocol, overload +from re import Pattern +from typing import NamedTuple, Protocol, overload import re2 from pathspec.patterns import GitWildMatchPattern diff --git a/airflow/utils/helpers.py b/airflow/utils/helpers.py index 1a6b7396e3225..4cfc62acdd21a 100644 --- a/airflow/utils/helpers.py +++ b/airflow/utils/helpers.py @@ -21,9 +21,10 @@ import itertools import re import signal +from collections.abc import Generator, Iterable, Mapping, MutableMapping from datetime import datetime from functools import reduce -from typing import TYPE_CHECKING, Any, Callable, Generator, Iterable, Mapping, MutableMapping, TypeVar, cast +from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast from lazy_object_proxy import Proxy diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py index c5b60b762d4b1..9ec14c98dbbd2 100644 --- a/airflow/utils/log/file_task_handler.py +++ b/airflow/utils/log/file_task_handler.py @@ -21,11 +21,12 @@ import logging import os +from collections.abc import Iterable from contextlib import suppress from enum import Enum from functools import cached_property from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, Iterable +from typing import TYPE_CHECKING, Any, Callable from urllib.parse import urljoin import pendulum diff --git a/airflow/utils/log/log_reader.py b/airflow/utils/log/log_reader.py index c99efc350bb19..cc60500532fb1 100644 --- a/airflow/utils/log/log_reader.py +++ b/airflow/utils/log/log_reader.py @@ -18,8 +18,9 @@ import logging import time +from collections.abc import Iterator from functools import cached_property -from typing import TYPE_CHECKING, Iterator +from typing import TYPE_CHECKING from airflow.configuration import conf from airflow.utils.helpers import render_log_filename diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py index 4f9604aced7f4..12019fbad6dcd 100644 --- a/airflow/utils/log/secrets_masker.py +++ b/airflow/utils/log/secrets_masker.py @@ -21,20 +21,15 @@ import collections.abc import logging import sys +from collections.abc import Generator, Iterable, Iterator from enum import Enum from functools import cache, cached_property +from re import Pattern from typing import ( TYPE_CHECKING, Any, Callable, - Dict, - Generator, - Iterable, - Iterator, - List, - Pattern, TextIO, - Tuple, TypeVar, Union, ) @@ -48,7 +43,7 @@ from airflow.typing_compat import TypeGuard -Redactable = TypeVar("Redactable", str, "V1EnvVar", Dict[Any, Any], Tuple[Any, ...], List[Any]) +Redactable = TypeVar("Redactable", str, "V1EnvVar", dict[Any, Any], tuple[Any, ...], list[Any]) Redacted = Union[Redactable, str] log = logging.getLogger(__name__) diff --git a/airflow/utils/net.py b/airflow/utils/net.py index 9fc79b3842c3a..efdc509a7f505 100644 --- a/airflow/utils/net.py +++ b/airflow/utils/net.py @@ -18,11 +18,11 @@ from __future__ import annotations import socket -from functools import lru_cache +from functools import cache # patched version of socket.getfqdn() - see https://github.com/python/cpython/issues/49254 -@lru_cache(maxsize=None) +@cache def getfqdn(name=""): """ Get fully qualified domain name from name. 
diff --git a/airflow/utils/operator_helpers.py b/airflow/utils/operator_helpers.py index f841d968ad6e4..e5e304bb4a414 100644 --- a/airflow/utils/operator_helpers.py +++ b/airflow/utils/operator_helpers.py @@ -19,8 +19,9 @@ import inspect import logging +from collections.abc import Collection, Mapping from datetime import datetime -from typing import TYPE_CHECKING, Any, Callable, Collection, Mapping, Protocol, TypeVar +from typing import TYPE_CHECKING, Any, Callable, Protocol, TypeVar from airflow import settings from airflow.sdk.definitions.asset.metadata import Metadata diff --git a/airflow/utils/process_utils.py b/airflow/utils/process_utils.py index 7120bd5df860e..99d0034975b61 100644 --- a/airflow/utils/process_utils.py +++ b/airflow/utils/process_utils.py @@ -35,8 +35,8 @@ import termios import tty +from collections.abc import Generator from contextlib import contextmanager -from typing import Generator import psutil from lockfile.pidlockfile import PIDLockFile diff --git a/airflow/utils/session.py b/airflow/utils/session.py index a63d3f3f937a8..fde56354b4782 100644 --- a/airflow/utils/session.py +++ b/airflow/utils/session.py @@ -18,9 +18,10 @@ import contextlib import os +from collections.abc import Generator from functools import wraps from inspect import signature -from typing import Callable, Generator, TypeVar, cast +from typing import Callable, TypeVar, cast from sqlalchemy.orm import Session as SASession @@ -65,6 +66,24 @@ def create_session(scoped: bool = True) -> Generator[SASession, None, None]: session.close() +@contextlib.asynccontextmanager +async def create_session_async(): + """ + Context manager to create async session. + + :meta private: + """ + from airflow.settings import AsyncSession + + async with AsyncSession() as session: + try: + yield session + await session.commit() + except Exception: + await session.rollback() + raise + + PS = ParamSpec("PS") RT = TypeVar("RT") diff --git a/airflow/utils/sqlalchemy.py b/airflow/utils/sqlalchemy.py index c81ecf037a62e..541898c1a1ccf 100644 --- a/airflow/utils/sqlalchemy.py +++ b/airflow/utils/sqlalchemy.py @@ -21,8 +21,9 @@ import copy import datetime import logging +from collections.abc import Generator, Iterable from importlib import metadata -from typing import TYPE_CHECKING, Any, Generator, Iterable, overload +from typing import TYPE_CHECKING, Any, overload from packaging import version from sqlalchemy import TIMESTAMP, PickleType, event, nullsfirst, tuple_ @@ -311,6 +312,7 @@ def with_row_locks( *, nowait: bool = False, skip_locked: bool = False, + key_share: bool = True, **kwargs, ) -> Query: """ @@ -328,6 +330,7 @@ def with_row_locks( :param session: ORM Session :param nowait: If set to True, will pass NOWAIT to supported database backends. :param skip_locked: If set to True, will pass SKIP LOCKED to supported database backends. + :param key_share: If true, will lock with FOR KEY SHARE UPDATE (at least on postgres). 
:param kwargs: Extra kwargs to pass to with_for_update (of, nowait, skip_locked, etc) :return: updated query """ @@ -342,6 +345,8 @@ def with_row_locks( kwargs["nowait"] = True if skip_locked: kwargs["skip_locked"] = True + if key_share: + kwargs["key_share"] = True return query.with_for_update(**kwargs) diff --git a/airflow/utils/task_group.py b/airflow/utils/task_group.py index 1f94880902c93..3597c7f893cd2 100644 --- a/airflow/utils/task_group.py +++ b/airflow/utils/task_group.py @@ -21,7 +21,8 @@ import functools import operator -from typing import TYPE_CHECKING, Iterator +from collections.abc import Iterator +from typing import TYPE_CHECKING import airflow.sdk.definitions.taskgroup diff --git a/airflow/utils/timeout.py b/airflow/utils/timeout.py index 59330e7c0784a..11a5e1bfa1e22 100644 --- a/airflow/utils/timeout.py +++ b/airflow/utils/timeout.py @@ -19,14 +19,14 @@ import os import signal +from contextlib import AbstractContextManager from threading import Timer -from typing import ContextManager from airflow.exceptions import AirflowTaskTimeout from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.platform import IS_WINDOWS -_timeout = ContextManager[None] +_timeout = AbstractContextManager[None] class TimeoutWindows(_timeout, LoggingMixin): diff --git a/airflow/utils/weekday.py b/airflow/utils/weekday.py index d148858cc5fef..a8782a9e436be 100644 --- a/airflow/utils/weekday.py +++ b/airflow/utils/weekday.py @@ -19,7 +19,7 @@ from __future__ import annotations import enum -from typing import Iterable +from collections.abc import Iterable @enum.unique diff --git a/airflow/www/auth.py b/airflow/www/auth.py index d4b8ad619e6e5..2b864273e5fa0 100644 --- a/airflow/www/auth.py +++ b/airflow/www/auth.py @@ -18,8 +18,9 @@ import functools import logging +from collections.abc import Sequence from functools import wraps -from typing import TYPE_CHECKING, Callable, Sequence, TypeVar, cast +from typing import TYPE_CHECKING, Callable, TypeVar, cast from flask import flash, redirect, render_template, request, url_for from flask_appbuilder._compat import as_unicode diff --git a/airflow/www/extensions/init_wsgi_middlewares.py b/airflow/www/extensions/init_wsgi_middlewares.py index 27368e71eed9b..922d06ff8004a 100644 --- a/airflow/www/extensions/init_wsgi_middlewares.py +++ b/airflow/www/extensions/init_wsgi_middlewares.py @@ -17,7 +17,8 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterable +from collections.abc import Iterable +from typing import TYPE_CHECKING from urllib.parse import urlsplit from werkzeug.middleware.dispatcher import DispatcherMiddleware diff --git a/airflow/www/forms.py b/airflow/www/forms.py index 57b4fde3a6e6e..7028e2026e449 100644 --- a/airflow/www/forms.py +++ b/airflow/www/forms.py @@ -20,8 +20,8 @@ import datetime import json import operator +from collections.abc import Iterator from functools import cache -from typing import Iterator import pendulum from flask_appbuilder.fieldwidgets import ( diff --git a/airflow/www/utils.py b/airflow/www/utils.py index ce8174253f1ab..8a74a74d5a8d9 100644 --- a/airflow/www/utils.py +++ b/airflow/www/utils.py @@ -21,7 +21,8 @@ import logging import textwrap import time -from typing import TYPE_CHECKING, Any, Callable, Sequence +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any, Callable from urllib.parse import urlencode from flask import request, url_for diff --git a/airflow/www/views.py b/airflow/www/views.py index c7704ce394ee8..35cfa45a85a11 100644 --- 
a/airflow/www/views.py +++ b/airflow/www/views.py @@ -32,10 +32,11 @@ import warnings from bisect import insort_left from collections import defaultdict +from collections.abc import Collection, Iterator, Mapping, MutableMapping, Sequence from functools import cache, cached_property from json import JSONDecodeError from pathlib import Path -from typing import TYPE_CHECKING, Any, Collection, Iterator, Mapping, MutableMapping, Sequence +from typing import TYPE_CHECKING, Any from urllib.parse import unquote, urlencode, urljoin, urlparse, urlsplit import configupdater @@ -317,7 +318,10 @@ def dag_to_grid(dag: DagModel, dag_runs: Sequence[DagRun], session: Session) -> TaskInstance.task_id, TaskInstance.run_id, TaskInstance.state, - TaskInstance.try_number, + case( + (TaskInstance.map_index == -1, TaskInstance.try_number), + else_=None, + ).label("try_number"), func.min(TaskInstanceNote.content).label("note"), func.count(func.coalesce(TaskInstance.state, sqla.literal("no_status"))).label("state_count"), func.min(TaskInstance.queued_dttm).label("queued_dttm"), @@ -329,7 +333,15 @@ def dag_to_grid(dag: DagModel, dag_runs: Sequence[DagRun], session: Session) -> TaskInstance.dag_id == dag.dag_id, TaskInstance.run_id.in_([dag_run.run_id for dag_run in dag_runs]), ) - .group_by(TaskInstance.task_id, TaskInstance.run_id, TaskInstance.state, TaskInstance.try_number) + .group_by( + TaskInstance.task_id, + TaskInstance.run_id, + TaskInstance.state, + case( + (TaskInstance.map_index == -1, TaskInstance.try_number), + else_=None, + ), + ) .order_by(TaskInstance.task_id, TaskInstance.run_id) ) diff --git a/chart/templates/NOTES.txt b/chart/templates/NOTES.txt index 846159d6ac659..531e6e29aba8b 100644 --- a/chart/templates/NOTES.txt +++ b/chart/templates/NOTES.txt @@ -81,7 +81,7 @@ Airflow Webserver: http{{ if $tlsEnabled }}s{{ end }}://{{ (tpl $hostname $) }}{{ $.Values.ingress.web.path }}/ {{- end }} {{- end }} -{{- if and (or .Values.ingress.flower.enabled .Values.ingress.enabled) (or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor")) }} +{{- if and (or .Values.ingress.flower.enabled .Values.ingress.enabled) (or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor)) }} Flower dashboard: {{- range .Values.ingress.flower.hosts | default (list .Values.ingress.flower.host) }} {{- $tlsEnabled := $.Values.ingress.flower.tls.enabled -}} @@ -101,7 +101,7 @@ You can now access your dashboard(s) by executing the following command(s) and v Airflow Webserver: kubectl port-forward svc/{{ include "airflow.fullname" . }}-webserver {{ .Values.ports.airflowUI }}:{{ .Values.ports.airflowUI }} --namespace {{ .Release.Namespace }} {{- if .Values.flower.enabled }} -{{- if or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor")}} +{{- if or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor)}} Flower dashboard: kubectl port-forward svc/{{ include "airflow.fullname" . 
}}-flower {{ .Values.ports.flowerUI }}:{{ .Values.ports.flowerUI }} --namespace {{ .Release.Namespace }} {{- end }} @@ -131,7 +131,7 @@ You can get Fernet Key value by running the following: {{- end }} -{{- if or (eq .Values.executor "KubernetesExecutor") (eq .Values.executor "CeleryKubernetesExecutor") }} +{{- if or (contains "KubernetesExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor) }} {{- if and (not .Values.logs.persistence.enabled) (eq (lower (tpl .Values.config.logging.remote_logging .)) "false") }} WARNING: diff --git a/chart/templates/_helpers.yaml b/chart/templates/_helpers.yaml index 93802bd723b3f..25a70cb780222 100644 --- a/chart/templates/_helpers.yaml +++ b/chart/templates/_helpers.yaml @@ -98,7 +98,7 @@ If release name contains chart name it will be used as a full name. name: {{ template "webserver_secret_key_secret" . }} key: webserver-secret-key {{- end }} - {{- if or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor") }} + {{- if or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor) }} {{- if or (semverCompare "<2.4.0" .Values.airflowVersion) (.Values.data.resultBackendSecretName) (.Values.data.resultBackendConnection) }} {{- if .Values.enableBuiltInSecretEnvVars.AIRFLOW__CELERY__CELERY_RESULT_BACKEND }} # (Airflow 1.10.* variant) @@ -150,7 +150,7 @@ If release name contains chart name it will be used as a full name. {{- range $i, $config := .Values.env }} - name: {{ $config.name }} value: {{ $config.value | quote }} - {{- if or (eq $.Values.executor "KubernetesExecutor") (eq $.Values.executor "LocalKubernetesExecutor") (eq $.Values.executor "CeleryKubernetesExecutor") }} + {{- if or (contains "KubernetesExecutor" $.Values.executor) (contains "LocalKubernetesExecutor" $.Values.executor) (contains "CeleryKubernetesExecutor" $.Values.executor) }} - name: AIRFLOW__KUBERNETES_ENVIRONMENT_VARIABLES__{{ $config.name }} value: {{ $config.value | quote }} {{- end }} @@ -163,7 +163,7 @@ If release name contains chart name it will be used as a full name. 
name: {{ $config.secretName }} key: {{ default "value" $config.secretKey }} {{- end }} - {{- if or (eq $.Values.executor "LocalKubernetesExecutor") (eq $.Values.executor "KubernetesExecutor") (eq $.Values.executor "CeleryKubernetesExecutor") }} + {{- if or (contains "LocalKubernetesExecutor" $.Values.executor) (contains "KubernetesExecutor" $.Values.executor) (contains "CeleryKubernetesExecutor" $.Values.executor) }} {{- range $i, $config := .Values.secret }} - name: AIRFLOW__KUBERNETES_SECRETS__{{ $config.envName }} value: {{ printf "%s=%s" $config.secretName $config.secretKey }} @@ -1056,7 +1056,7 @@ capabilities: key: {{ $config.valueFrom.configMapKeyRef.key }} {{- end }} {{- end }} - {{- if or (eq $.Values.executor "KubernetesExecutor") (eq $.Values.executor "LocalKubernetesExecutor") (eq $.Values.executor "CeleryKubernetesExecutor") }} + {{- if or (contains "KubernetesExecutor" $.Values.executor) (contains "LocalKubernetesExecutor" $.Values.executor) (contains "CeleryKubernetesExecutor" $.Values.executor) }} - name: AIRFLOW__KUBERNETES_ENVIRONMENT_VARIABLES__{{ $config.name }} {{- if $config.value }} value: {{ $config.value | quote }} diff --git a/chart/templates/check-values.yaml b/chart/templates/check-values.yaml index b1c27240500c2..6dfbbd9554108 100644 --- a/chart/templates/check-values.yaml +++ b/chart/templates/check-values.yaml @@ -27,7 +27,7 @@ The sole purpose of this yaml file is it to check the values file is consistent ############################# */ -}} - {{- if or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor") }} + {{- if or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor) }} {{- if .Values.redis.enabled }} {{- if .Values.redis.passwordSecretName }} diff --git a/chart/templates/configmaps/configmap.yaml b/chart/templates/configmaps/configmap.yaml index b93c1cbe1e51d..cb8139cc6eeb6 100644 --- a/chart/templates/configmaps/configmap.yaml +++ b/chart/templates/configmaps/configmap.yaml @@ -57,7 +57,7 @@ data: {{- .Values.dags.gitSync.knownHosts | nindent 4 }} {{- end }} -{{- if or (eq $.Values.executor "LocalKubernetesExecutor") (eq $.Values.executor "KubernetesExecutor") (eq $.Values.executor "CeleryKubernetesExecutor") }} +{{- if or (contains "LocalKubernetesExecutor" $.Values.executor) (contains "KubernetesExecutor" $.Values.executor) (contains "CeleryKubernetesExecutor" $.Values.executor) }} {{- if semverCompare ">=1.10.12" .Values.airflowVersion }} pod_template_file.yaml: |- {{- if .Values.podTemplate }} diff --git a/chart/templates/flower/flower-deployment.yaml b/chart/templates/flower/flower-deployment.yaml index 62f961160ed9c..1f2b1b1fa1f72 100644 --- a/chart/templates/flower/flower-deployment.yaml +++ b/chart/templates/flower/flower-deployment.yaml @@ -21,7 +21,7 @@ ## Airflow Flower Deployment ################################# {{- if .Values.flower.enabled }} -{{- if or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor") }} +{{- if or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor) }} {{- $nodeSelector := or .Values.flower.nodeSelector .Values.nodeSelector }} {{- $affinity := or .Values.flower.affinity .Values.affinity }} {{- $tolerations := or .Values.flower.tolerations .Values.tolerations }} diff --git a/chart/templates/flower/flower-ingress.yaml b/chart/templates/flower/flower-ingress.yaml index 1b24d82588069..fde9db6ecb4a4 100644 --- a/chart/templates/flower/flower-ingress.yaml +++ 
b/chart/templates/flower/flower-ingress.yaml @@ -21,7 +21,7 @@ ## Airflow Flower Ingress ################################# {{- if .Values.flower.enabled }} -{{- if and (or .Values.ingress.flower.enabled .Values.ingress.enabled) (or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor")) }} +{{- if and (or .Values.ingress.flower.enabled .Values.ingress.enabled) (or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor)) }} {{- $fullname := (include "airflow.fullname" .) }} apiVersion: networking.k8s.io/v1 kind: Ingress diff --git a/chart/templates/flower/flower-service.yaml b/chart/templates/flower/flower-service.yaml index 0847ff6274272..1a023d5575a17 100644 --- a/chart/templates/flower/flower-service.yaml +++ b/chart/templates/flower/flower-service.yaml @@ -21,7 +21,7 @@ ## Airflow Flower Service Component ################################# {{- if .Values.flower.enabled }} -{{- if or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor") }} +{{- if or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor) }} apiVersion: v1 kind: Service metadata: diff --git a/chart/templates/flower/flower-serviceaccount.yaml b/chart/templates/flower/flower-serviceaccount.yaml index efe621c625293..7eae8d52bed8b 100644 --- a/chart/templates/flower/flower-serviceaccount.yaml +++ b/chart/templates/flower/flower-serviceaccount.yaml @@ -20,7 +20,7 @@ ###################################### ## Airflow Flower ServiceAccount ###################################### -{{- if and .Values.flower.enabled (or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor")) .Values.flower.serviceAccount.create }} +{{- if and .Values.flower.enabled (or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor)) .Values.flower.serviceAccount.create }} apiVersion: v1 kind: ServiceAccount automountServiceAccountToken: {{ .Values.flower.serviceAccount.automountServiceAccountToken }} diff --git a/chart/templates/rbac/security-context-constraint-rolebinding.yaml b/chart/templates/rbac/security-context-constraint-rolebinding.yaml index bd95c5b779bb0..0ac7542c16751 100644 --- a/chart/templates/rbac/security-context-constraint-rolebinding.yaml +++ b/chart/templates/rbac/security-context-constraint-rolebinding.yaml @@ -67,7 +67,7 @@ subjects: name: {{ include "statsd.serviceAccountName" . }} namespace: "{{ .Release.Namespace }}" {{- end }} - {{- if and .Values.flower.enabled (or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor")) }} + {{- if and .Values.flower.enabled (or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor)) }} - kind: ServiceAccount name: {{ include "flower.serviceAccountName" . 
}} namespace: "{{ .Release.Namespace }}" diff --git a/chart/templates/redis/redis-networkpolicy.yaml b/chart/templates/redis/redis-networkpolicy.yaml index 457d18309911d..6a186a4b6855c 100644 --- a/chart/templates/redis/redis-networkpolicy.yaml +++ b/chart/templates/redis/redis-networkpolicy.yaml @@ -20,7 +20,7 @@ ################################ ## Airflow Redis NetworkPolicy ################################# -{{- if and .Values.redis.enabled .Values.networkPolicies.enabled (or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor")) }} +{{- if and .Values.redis.enabled .Values.networkPolicies.enabled (or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor)) }} apiVersion: networking.k8s.io/v1 kind: NetworkPolicy metadata: diff --git a/chart/templates/redis/redis-service.yaml b/chart/templates/redis/redis-service.yaml index ee010901ef84e..40424a7313e99 100644 --- a/chart/templates/redis/redis-service.yaml +++ b/chart/templates/redis/redis-service.yaml @@ -20,7 +20,7 @@ ################################ ## Airflow Redis Service ################################# -{{- if and .Values.redis.enabled (or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor")) }} +{{- if and .Values.redis.enabled (or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor)) }} apiVersion: v1 kind: Service metadata: diff --git a/chart/templates/redis/redis-serviceaccount.yaml b/chart/templates/redis/redis-serviceaccount.yaml index 42921f3f30d19..06f33e12f5d38 100644 --- a/chart/templates/redis/redis-serviceaccount.yaml +++ b/chart/templates/redis/redis-serviceaccount.yaml @@ -20,7 +20,7 @@ ###################################### ## Airflow Redis ServiceAccount ###################################### -{{- if and .Values.redis.enabled .Values.redis.serviceAccount.create (or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor")) }} +{{- if and .Values.redis.enabled .Values.redis.serviceAccount.create (or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor)) }} apiVersion: v1 kind: ServiceAccount automountServiceAccountToken: {{ .Values.redis.serviceAccount.automountServiceAccountToken }} diff --git a/chart/templates/redis/redis-statefulset.yaml b/chart/templates/redis/redis-statefulset.yaml index 95df10ccf2371..d1e1edef808eb 100644 --- a/chart/templates/redis/redis-statefulset.yaml +++ b/chart/templates/redis/redis-statefulset.yaml @@ -20,7 +20,7 @@ ################################ ## Airflow Redis StatefulSet ################################# -{{- if and .Values.redis.enabled (or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor")) }} +{{- if and .Values.redis.enabled (or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor)) }} {{- $nodeSelector := or .Values.redis.nodeSelector .Values.nodeSelector }} {{- $affinity := or .Values.redis.affinity .Values.affinity }} {{- $tolerations := or .Values.redis.tolerations .Values.tolerations }} diff --git a/chart/templates/scheduler/scheduler-networkpolicy.yaml b/chart/templates/scheduler/scheduler-networkpolicy.yaml index f1c04ad4bca7d..4327e8f89b547 100644 --- a/chart/templates/scheduler/scheduler-networkpolicy.yaml +++ b/chart/templates/scheduler/scheduler-networkpolicy.yaml @@ -43,7 +43,7 @@ spec: release: {{ .Release.Name }} policyTypes: - Ingress - {{- if eq 
.Values.executor "LocalExecutor" }} + {{- if contains "LocalExecutor" .Values.executor }} ingress: - from: - podSelector: diff --git a/chart/templates/scheduler/scheduler-service.yaml b/chart/templates/scheduler/scheduler-service.yaml index aa28563deb8d1..1ac300648cd28 100644 --- a/chart/templates/scheduler/scheduler-service.yaml +++ b/chart/templates/scheduler/scheduler-service.yaml @@ -21,7 +21,7 @@ ## Airflow Scheduler Service ################################# {{- if .Values.scheduler.enabled }} -{{- if or (eq .Values.executor "LocalExecutor") (eq .Values.executor "LocalKubernetesExecutor") }} +{{- if or (contains "LocalExecutor" .Values.executor) (contains "LocalKubernetesExecutor" .Values.executor) }} apiVersion: v1 kind: Service metadata: diff --git a/chart/templates/secrets/result-backend-connection-secret.yaml b/chart/templates/secrets/result-backend-connection-secret.yaml index e89046d3dde35..5a1b3171e6ef3 100644 --- a/chart/templates/secrets/result-backend-connection-secret.yaml +++ b/chart/templates/secrets/result-backend-connection-secret.yaml @@ -21,7 +21,7 @@ ## Airflow Result Backend Secret ################################# {{- if not .Values.data.resultBackendSecretName }} -{{- if or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor") }} +{{- if or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor) }} {{- if or (semverCompare "<2.4.0" .Values.airflowVersion) (and (semverCompare ">=2.4.0" .Values.airflowVersion) .Values.data.resultBackendConnection) }} {{- $connection := .Values.data.resultBackendConnection | default .Values.data.metadataConnection }} {{- $resultBackendHost := $connection.host | default (printf "%s-%s" .Release.Name "postgresql") }} diff --git a/chart/templates/workers/worker-deployment.yaml b/chart/templates/workers/worker-deployment.yaml index 5499e64ebece5..a029b1c142569 100644 --- a/chart/templates/workers/worker-deployment.yaml +++ b/chart/templates/workers/worker-deployment.yaml @@ -23,7 +23,7 @@ {{- $persistence := .Values.workers.persistence.enabled }} {{- $keda := .Values.workers.keda.enabled }} {{- $hpa := and .Values.workers.hpa.enabled (not .Values.workers.keda.enabled) }} -{{- if or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor") }} +{{- if or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor) }} {{- $nodeSelector := or .Values.workers.nodeSelector .Values.nodeSelector }} {{- $affinity := or .Values.workers.affinity .Values.affinity }} {{- $tolerations := or .Values.workers.tolerations .Values.tolerations }} diff --git a/chart/templates/workers/worker-kedaautoscaler.yaml b/chart/templates/workers/worker-kedaautoscaler.yaml index 74af09b174589..0f1ca7a1cbada 100644 --- a/chart/templates/workers/worker-kedaautoscaler.yaml +++ b/chart/templates/workers/worker-kedaautoscaler.yaml @@ -20,7 +20,7 @@ ################################ ## Airflow Worker KEDA Scaler ################################# -{{- if and .Values.workers.keda.enabled (has .Values.executor (list "CeleryExecutor" "CeleryKubernetesExecutor")) }} +{{- if and .Values.workers.keda.enabled (or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor) ) }} apiVersion: keda.sh/v1alpha1 kind: ScaledObject metadata: diff --git a/chart/templates/workers/worker-networkpolicy.yaml b/chart/templates/workers/worker-networkpolicy.yaml index 05de79311450f..41bdb5d0e7ff4 100644 --- 
a/chart/templates/workers/worker-networkpolicy.yaml +++ b/chart/templates/workers/worker-networkpolicy.yaml @@ -20,7 +20,7 @@ ################################ ## Airflow Worker NetworkPolicy ################################# -{{- if and .Values.networkPolicies.enabled (or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor")) }} +{{- if and .Values.networkPolicies.enabled (or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor)) }} apiVersion: networking.k8s.io/v1 kind: NetworkPolicy metadata: diff --git a/chart/templates/workers/worker-service.yaml b/chart/templates/workers/worker-service.yaml index 463f51c0bb287..41f96d6ab36d3 100644 --- a/chart/templates/workers/worker-service.yaml +++ b/chart/templates/workers/worker-service.yaml @@ -20,7 +20,7 @@ ################################ ## Airflow Worker Service ################################# -{{- if or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor") }} +{{- if or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor) }} apiVersion: v1 kind: Service metadata: diff --git a/chart/templates/workers/worker-serviceaccount.yaml b/chart/templates/workers/worker-serviceaccount.yaml index 87dbb9728478c..0feec8de3d99f 100644 --- a/chart/templates/workers/worker-serviceaccount.yaml +++ b/chart/templates/workers/worker-serviceaccount.yaml @@ -20,7 +20,7 @@ ################################ ## Airflow Worker ServiceAccount ################################# -{{- if and .Values.workers.serviceAccount.create (or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor") (eq .Values.executor "KubernetesExecutor") (eq .Values.executor "LocalKubernetesExecutor")) }} +{{- if and .Values.workers.serviceAccount.create (or (contains "CeleryExecutor" .Values.executor) (contains "CeleryKubernetesExecutor" .Values.executor) (contains "KubernetesExecutor" .Values.executor) (contains "LocalKubernetesExecutor" .Values.executor)) }} apiVersion: v1 kind: ServiceAccount automountServiceAccountToken: {{ .Values.workers.serviceAccount.automountServiceAccountToken }} diff --git a/chart/values.schema.json b/chart/values.schema.json index 2408355ce7c60..6bfa5fe8b5e7b 100644 --- a/chart/values.schema.json +++ b/chart/values.schema.json @@ -687,15 +687,7 @@ "type": "string", "x-docsSection": "Common", "default": "CeleryExecutor", - "enum": [ - "LocalExecutor", - "LocalKubernetesExecutor", - "CeleryExecutor", - "KubernetesExecutor", - "CeleryKubernetesExecutor", - "airflow.providers.amazon.aws.executors.batch.AwsBatchExecutor", - "airflow.providers.amazon.aws.executors.ecs.AwsEcsExecutor" - ] + "pattern": "^(LocalExecutor|LocalKubernetesExecutor|CeleryExecutor|KubernetesExecutor|CeleryKubernetesExecutor|airflow.providers.amazon.aws.executors.batch.AwsBatchExecutor|airflow.providers.amazon.aws.executors.ecs.AwsEcsExecutor)(,(LocalExecutor|LocalKubernetesExecutor|CeleryExecutor|KubernetesExecutor|CeleryKubernetesExecutor|airflow.providers.amazon.aws.executors.batch.AwsBatchExecutor|airflow.providers.amazon.aws.executors.ecs.AwsEcsExecutor))*$" }, "allowPodLaunching": { "description": "Whether various Airflow components launch pods.", @@ -1717,7 +1709,7 @@ "query": { "description": "Query to use for KEDA autoscaling. 
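The ``values.schema.json`` change above replaces the executor ``enum`` with a ``pattern`` so that a comma-separated, prioritized list of executors validates; this is also why the chart templates switch from ``eq`` to ``contains``. A small illustration, not part of the chart, of the behavioural difference:

```python
# Illustrative only: with a multi-executor value such as
# "CeleryExecutor,KubernetesExecutor", an equality check no longer matches,
# while a substring check (what Helm's `contains` does) still does.
executor = "CeleryExecutor,KubernetesExecutor"

is_exactly_celery = executor == "CeleryExecutor"   # mirrors the old `eq` test  -> False
uses_celery = "CeleryExecutor" in executor         # mirrors the new `contains` -> True

assert uses_celery and not is_exactly_celery
```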
Must return a single integer.", "type": "string", - "default": "SELECT ceil(COUNT(*)::decimal / {{ .Values.config.celery.worker_concurrency }}) FROM task_instance WHERE (state='running' OR state='queued') {{- if eq .Values.executor \"CeleryKubernetesExecutor\" }} AND queue != '{{ .Values.config.celery_kubernetes_executor.kubernetes_queue }}' {{- end }}" + "default": "SELECT ceil(COUNT(*)::decimal / {{ .Values.config.celery.worker_concurrency }}) FROM task_instance WHERE (state='running' OR state='queued') {{- if or (contains \"CeleryKubernetesExecutor\" .Values.executor) (contains \"KubernetesExecutor\" .Values.executor) }} AND queue != '{{ .Values.config.celery_kubernetes_executor.kubernetes_queue }}' {{- end }}" }, "usePgbouncer": { "description": "Weather to use PGBouncer to connect to the database or not when it is enabled. This configuration will be ignored if PGBouncer is not enabled.", diff --git a/chart/values.yaml b/chart/values.yaml index d0db1f7d2e91e..495fa25749cf3 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -324,6 +324,7 @@ rbac: # Airflow executor # One of: LocalExecutor, LocalKubernetesExecutor, CeleryExecutor, KubernetesExecutor, CeleryKubernetesExecutor +# Specify executors in a prioritized list to leverage multiple execution environments as needed. executor: "CeleryExecutor" # If this is true and using LocalExecutor/KubernetesExecutor/CeleryKubernetesExecutor, the scheduler's @@ -655,7 +656,8 @@ workers: SELECT ceil(COUNT(*)::decimal / {{ .Values.config.celery.worker_concurrency }}) FROM task_instance WHERE (state='running' OR state='queued') - {{- if eq .Values.executor "CeleryKubernetesExecutor" }} + {{- if or (contains "CeleryKubernetesExecutor" .Values.executor) + (contains "KubernetesExecutor" .Values.executor) }} AND queue != '{{ .Values.config.celery_kubernetes_executor.kubernetes_queue }}' {{- end }} diff --git a/clients/python/test_python_client.py b/clients/python/test_python_client.py index 5d0accdc019ff..87ceb656b7db8 100644 --- a/clients/python/test_python_client.py +++ b/clients/python/test_python_client.py @@ -27,9 +27,11 @@ from __future__ import annotations import sys +import time import uuid import airflow_client.client +import pytest try: # If you have rich installed, you will have nice colored output of the API responses @@ -63,68 +65,81 @@ # or AIRFLOW__CORE__LOAD_EXAMPLES environment variable set to True DAG_ID = "example_bash_operator" + # Enter a context with an instance of the API client -with airflow_client.client.ApiClient(configuration) as api_client: - errors = False - - print("[blue]Getting DAG list") - dag_api_instance = dag_api.DAGApi(api_client) - try: - api_response = dag_api_instance.get_dags() - print(api_response) - except airflow_client.client.OpenApiException as e: - print(f"[red]Exception when calling DagAPI->get_dags: {e}\n") - errors = True - else: - print("[green]Getting DAG list successful") - - print("[blue]Getting Tasks for a DAG") - try: - api_response = dag_api_instance.get_tasks(DAG_ID) - print(api_response) - except airflow_client.client.exceptions.OpenApiException as e: - print(f"[red]Exception when calling DagAPI->get_tasks: {e}\n") - errors = True - else: - print("[green]Getting Tasks successful") - - print("[blue]Triggering a DAG run") - dag_run_api_instance = dag_run_api.DAGRunApi(api_client) - try: - # Create a DAGRun object (no dag_id should be specified because it is read-only property of DAGRun) - # dag_run id is generated randomly to allow multiple executions of the script - dag_run = DAGRun( - 
dag_run_id="some_test_run_" + uuid.uuid4().hex, - ) - api_response = dag_run_api_instance.post_dag_run(DAG_ID, dag_run) - print(api_response) - except airflow_client.client.exceptions.OpenApiException as e: - print(f"[red]Exception when calling DAGRunAPI->post_dag_run: {e}\n") - errors = True - else: - print("[green]Posting DAG Run successful") - - # Get current configuration. Note, this is disabled by default with most installation. - # You need to set `expose_config = True` in Airflow configuration in order to retrieve configuration. - conf_api_instance = config_api.ConfigApi(api_client) - try: - api_response = conf_api_instance.get_config() - print(api_response) - except airflow_client.client.OpenApiException as e: - if "FORBIDDEN" in str(e): - print( - "[yellow]You need to set `expose_config = True` in Airflow configuration" - " in order to retrieve configuration." - ) - print("[bright_blue]This is OK. Exposing config is disabled by default.") +@pytest.mark.execution_timeout(400) +def test_python_client(): + with airflow_client.client.ApiClient(configuration) as api_client: + errors = False + + print("[blue]Getting DAG list") + max_retries = 10 + while max_retries > 0: + try: + dag_api_instance = dag_api.DAGApi(api_client) + api_response = dag_api_instance.get_dags() + print(api_response) + except airflow_client.client.OpenApiException as e: + print(f"[red]Exception when calling DagAPI->get_dags: {e}\n") + errors = True + time.sleep(6) + max_retries -= 1 + else: + errors = False + print("[green]Getting DAG list successful") + break + + print("[blue]Getting Tasks for a DAG") + try: + api_response = dag_api_instance.get_tasks(DAG_ID) + print(api_response) + except airflow_client.client.exceptions.OpenApiException as e: + print(f"[red]Exception when calling DagAPI->get_tasks: {e}\n") + errors = True else: + print("[green]Getting Tasks successful") + + print("[blue]Triggering a DAG run") + dag_run_api_instance = dag_run_api.DAGRunApi(api_client) + try: + # Create a DAGRun object (no dag_id should be specified because it is read-only property of DAGRun) + # dag_run id is generated randomly to allow multiple executions of the script + dag_run = DAGRun( + dag_run_id="some_test_run_" + uuid.uuid4().hex, + ) + api_response = dag_run_api_instance.post_dag_run(DAG_ID, dag_run) + print(api_response) + except airflow_client.client.exceptions.OpenApiException as e: print(f"[red]Exception when calling DAGRunAPI->post_dag_run: {e}\n") errors = True - else: - print("[green]Config retrieved successfully") - - if errors: - print("\n[red]There were errors while running the script - see above for details") - sys.exit(1) - else: - print("\n[green]Everything went well") + else: + print("[green]Posting DAG Run successful") + + # Get current configuration. Note, this is disabled by default with most installation. + # You need to set `expose_config = True` in Airflow configuration in order to retrieve configuration. + conf_api_instance = config_api.ConfigApi(api_client) + try: + api_response = conf_api_instance.get_config() + print(api_response) + except airflow_client.client.OpenApiException as e: + if "FORBIDDEN" in str(e): + print( + "[yellow]You need to set `expose_config = True` in Airflow configuration" + " in order to retrieve configuration." + ) + print("[bright_blue]This is OK. 
Exposing config is disabled by default.") + else: + print(f"[red]Exception when calling DAGRunAPI->post_dag_run: {e}\n") + errors = True + else: + print("[green]Config retrieved successfully") + + if errors: + print("\n[red]There were errors while running the script - see above for details") + sys.exit(1) + else: + print("\n[green]Everything went well") + + +if __name__ == "__main__": + test_python_client() diff --git a/contributing-docs/05_pull_requests.rst b/contributing-docs/05_pull_requests.rst index 1e14167943497..db25643d74bae 100644 --- a/contributing-docs/05_pull_requests.rst +++ b/contributing-docs/05_pull_requests.rst @@ -88,7 +88,8 @@ these guidelines: to the changed code (for example for ``airflow/cli/cli_parser.py`` changes you have tests in ``tests/cli/test_cli_parser.py``). However there are a number of cases where the tests that should run are placed elsewhere - you can either run tests for the whole ``TEST_TYPE`` that is relevant (see - ``breeze testing tests --help`` output for available test types) or you can run all tests, or eventually + ``breeze testing core-tests --help`` or ``breeze testing providers-tests --help`` output for + available test types for each of the testing commands) or you can run all tests, or eventually you can push your code to PR and see results of the tests in the CI. - You can use any supported python version to run the tests, but the best is to check diff --git a/contributing-docs/08_static_code_checks.rst b/contributing-docs/08_static_code_checks.rst index fc0d4280b9d0e..a74f24e5d6732 100644 --- a/contributing-docs/08_static_code_checks.rst +++ b/contributing-docs/08_static_code_checks.rst @@ -208,7 +208,7 @@ require Breeze Docker image to be built locally. +-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-pre-commit-information-consistent | Validate hook IDs & names and sync with docs | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ -| check-provide-create-sessions-imports | Check provide_session and create_session imports | | +| check-provide-create-sessions-imports | Check session util imports | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-provider-docs-valid | Validate provider doc files | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ diff --git a/contributing-docs/09_testing.rst b/contributing-docs/09_testing.rst index 895fd1b964f09..18a43e9d6b21c 100644 --- a/contributing-docs/09_testing.rst +++ b/contributing-docs/09_testing.rst @@ -49,6 +49,9 @@ You can also run other kinds of tests when you are developing airflow packages: * `Testing packages `__ is a document that describes how to manually build and test pre-release candidate packages of airflow and providers. +* `Python client tests `__ are tests we run to check if the Python API + client works correctly. + * `DAG testing `__ is a document that describes how to test DAGs in a local environment with ``DebugExecutor``. Note, that this is a legacy method - you can now use dag.test() method to test DAGs. 
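As a quick illustration of the ``dag.test()`` method mentioned above, here is a minimal sketch (not taken from the repository) of testing a DAG locally without ``DebugExecutor``:

```python
# Minimal sketch of dag.test(): runs the DAG in-process, without a scheduler,
# which is why it supersedes the DebugExecutor-based workflow.
import datetime

from airflow.decorators import dag, task


@dag(schedule=None, start_date=datetime.datetime(2024, 1, 1), catchup=False)
def my_debug_dag():
    @task
    def hello():
        print("hello from dag.test()")

    hello()


if __name__ == "__main__":
    my_debug_dag().test()
```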
diff --git a/contributing-docs/11_provider_packages.rst b/contributing-docs/11_provider_packages.rst index 1b70d91205dae..e96d5e40d4160 100644 --- a/contributing-docs/11_provider_packages.rst +++ b/contributing-docs/11_provider_packages.rst @@ -36,9 +36,17 @@ repository and project. It has also some disadvantages as this introduces some coupling between those - so contributing to providers might interfere with contributing to Airflow. Python ecosystem does not yet have proper monorepo support for keeping -several packages in one repository and being able to work on multiple of them at the same time, but we have -high hopes Hatch project that use as our recommended packaging frontend -will `solve this problem in the future `__ +several packages in one repository and being able to work on more than one of them at the same time. The tool ``uv`` is +recommended to help manage this through it's ``workspace`` feature. While developing, dependencies and extras for a +provider can be installed using ``uv``'s ``sync`` command. Here is an example for the microsoft.azure provider: + +.. code:: bash + + uv sync --extra devel --extra devel-tests --extra microsoft.azure + +This will synchronize all extras that you need for development and testing of Airflow and the Microsoft Azure provider +dependencies including runtime dependencies. See `local virtualenv <../07_local_virtualenv.rst>`_ or the uv project +for more information. Therefore, until we can introduce multiple ``pyproject.toml`` for providers information/meta-data about the providers is kept in ``provider.yaml`` file in the right sub-directory of ``airflow\providers``. This file contains: diff --git a/contributing-docs/16_contribution_workflow.rst b/contributing-docs/16_contribution_workflow.rst index 8c941f8afc7a4..4b3e77a706dc1 100644 --- a/contributing-docs/16_contribution_workflow.rst +++ b/contributing-docs/16_contribution_workflow.rst @@ -196,7 +196,13 @@ Step 4: Prepare PR and place in either `newsfragments `__ for core newsfragments, or `chart/newsfragments `__ for helm chart newsfragments. - In general newsfragments must be one line. For newsfragment type ``significant``, you may include summary and body separated by a blank line, similar to ``git`` commit messages. + In general newsfragments must be one line. For newsfragment type ``significant``, + you should follow the template in ``newsfragments/template.significant.rst`` to include summary, body, change type and migrations rules needed. + This can also be done by the following command. + + .. code-block:: bash + + uv tool run towncrier create --dir . --config newsfragments/config.toml --content "`cat newsfragments/template.significant.rst`" 2. Rebase your fork, squash commits, and resolve all conflicts. See `How to rebase PR <#how-to-rebase-pr>`_ if you need help with rebasing your change. Remember to rebase often if your PR takes a lot of time to diff --git a/contributing-docs/testing/python_client_tests.rst b/contributing-docs/testing/python_client_tests.rst new file mode 100644 index 0000000000000..5bc492734bff3 --- /dev/null +++ b/contributing-docs/testing/python_client_tests.rst @@ -0,0 +1,47 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Airflow Python API Client Tests +=============================== + +This document describes how to run tests for the Airflow Python API client. + +Running Python API Client Tests with Breeze +------------------------------------------- + +The Python API client tests are run in the Breeze environment. The tests are run in the same way as the other tests in Breeze. + +The way the tests work: + +1. The Airflow Python API client package is first built into a wheel file and placed in the dist folder. +2. The ``breeze testing python-api-client-tests`` command is used to initiate the tests. +3. This command installs the package from the dist folder. +4. Example DAGs are then parsed and executed to validate the Python API client. +5. The webserver is started with the credentials admin/admin, and tests are run against the webserver. + +If you have python client repository not cloned, you can clone it by running the following command: + +.. code-block:: bash + + git clone https://github.com/apache/airflow-client-python.git + +To build the package, you can run the following command: + +.. code-block:: bash + + breeze release-management prepare-python-client --package-format both + --version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python diff --git a/dev/README_AIRFLOW3_DEV.md b/dev/README_AIRFLOW3_DEV.md index 2db87b4b059bd..1c221bfa80358 100644 --- a/dev/README_AIRFLOW3_DEV.md +++ b/dev/README_AIRFLOW3_DEV.md @@ -44,7 +44,7 @@ # Main branch is Airflow 3 -The main branch is for development of Airflow 3. +The `main` branch is for development of Airflow 3. Airflow 2.10.x releases will be cut from `v2-10-stable` branch. Airflow 2.11.x releases will be cut from `v2-11-stable` branch. @@ -58,29 +58,33 @@ PRs should target `main` branch. Make sure your code is only about Providers or Helm chart. Avoid mixing core changes into the same PR +> [!NOTE] +> Please note that providers have been relocated from `airflow/providers` to `providers/src/airflow/providers`. + ## Developing for Airflow 3 and 2.10.x / 2.11.x If the PR is relevant for both Airflow 3 and 2, it should target `main` branch. -Note: The mental model of Airflow 2.11 is bridge release for Airflow 3. -As a result, Airflow 2.11 is not planned to introduce new features other than ones relevant as bridge release for Airflow 3. -That said, we recognize that there may be exceptions. -If you believe a specific feature is a must-have for Airflow 2.11, you will need to raise this as discussion thread on the mailing list. -Points to address to make your case: - -1. You must clarify what is the urgency (i.e., why it can't wait for Airflow 3). -2. You need be willing to deliver the feature for both main branch and Airflow 2.11 branch. -3. You must be willing to provide support future bug fixes as needed. - -Points to consider on how PMC members evaluate the request of exception: - -1. Feature impact - Is it really urgent? How many are affected? -2. Workarounds - Are there any ? -3. Scope of change - Both in code lines / number of files and components changed. -4. 
Centrality - Is the feature at the heart of Airflow (scheduler, dag parser) or peripheral. -5. Identity of the requester - Is the request from/supported by a member of the community? -6. Similar previous cases approved. -7. Other considerations that may raise by PMC members depending on the case. +> [!IMPORTANT] +> The mental model of Airflow 2.11 is a bridge release for Airflow 3. +> As a result, Airflow 2.11 is not planned to introduce new features other than ones relevant to the bridge release for Airflow 3. +> That said, we recognize that there may be exceptions. +> If you believe a specific feature is a must-have for Airflow 2.11, you will need to raise this as a discussion thread on the mailing list. +> Points to address to make your case: +> +> 1. You must clarify the urgency, specifically why it can't wait for Airflow 3. +> 2. You need to be willing to deliver the feature for both the `main` branch and the Airflow 2.11 branch. +> 3. You must be willing to provide support for future bug fixes as needed. +> +> Points to consider on how PMC members evaluate the request for exception: +> +> 1. Feature impact - Is it really urgent? How many are affected? +> 2. Workarounds - Are there any? +> 3. Scope of change - Both in code lines / number of files and components changed. +> 4. Centrality - Is the feature at the heart of Airflow (scheduler, dag parser) or peripheral. +> 5. Identity of the requester - Is the request from/supported by a member of the community? +> 6. Approved cases with similar details in the past. +> 7. Other considerations that may be raised by PMC members depending on the case. ## Developing for Airflow 3 @@ -96,8 +100,10 @@ PR should never target `v2-10-stable` unless specifically instructed by release Version 2.11 is planned to be cut from `v2-10-stable` branch. The version will contain features relevant as bridge release for Airflow 3. -We will not backport otherwise features from main branch to 2.11 -Note that 2.11 policy may change as 2.11 becomes closer. +We will not backport other features from `main` branch to 2.11. + +> [!WARNING] +> Airflow 2.11 policy may change as its release becomes closer. # Committers / PMCs @@ -106,8 +112,8 @@ The following sections explains the protocol for merging PRs. ## Merging PRs for providers and Helm chart Make sure PR targets `main` branch. -Avoid merging PRs that involve providers + core / helm chart + core -Core part should be extracted to a separated PR. +Avoid merging PRs that involve (providers + core) or (helm chart + core). +Core parts should be extracted to a separate PR. Exclusions should be pre-approved specifically with a comment by release manager. Do not treat PR approval (Green V) as exclusion approval. @@ -135,9 +141,9 @@ When you want to backport commit via GitHub actions (you need to be a committer) should use "Backport commit" action. You need to know the commit hash of the commit you want to backport. You can pin the workflow from the list of workflows for easy access to it. -[!NOTE] -It should be the commit hash of the commit in the `main` branch, not in the original PR - you can find it -via `git log` or looking up main History. +> [!NOTE] +> It should be the commit hash of the commit in the `main` branch, not in the original PR - you can find it +> via `git log` or looking up main History. ![Backport commit](images/backport_commit_action.png) @@ -218,8 +224,7 @@ Make sure PR target `main` branch. 
### PRs that involve breaking changes -Make sure it has newsfragment, please allow time for community members to review. -Our goal is to avoid breaking changes whenever possible. Therefore, we should allow time for community members to review PRs that contain such changes - please avoid rushing to merge them. In addition, ensure that these PRs include a newsfragment. +Our goal is to avoid breaking changes whenever possible. Therefore, we should allow time for community members to review PRs that contain such changes - please avoid rushing to merge them. Also, please make sure that such PRs contain a `significant` newsfragment that contains `**Breaking Change**`. ## Merging PRs for Airflow 2.11 diff --git a/dev/backport/update_backport_status.py b/dev/backport/update_backport_status.py index e6e3ce064ce98..c7317d1f37bf9 100644 --- a/dev/backport/update_backport_status.py +++ b/dev/backport/update_backport_status.py @@ -41,18 +41,35 @@ def get_success_comment(branch: str, pr_url: str, pr_number: str): def get_failure_comment(branch: str, commit_sha_url: str, commit_sha: str): commit_shield_url = f"https://img.shields.io/badge/Commit-{commit_sha[:7]}-red" - comment = f"""### Backport failed to create: {branch}. View the failure log Run details \n\n - - - - - - - - - - -
-<table>
-  <tr>
-    <th>Status</th>
-    <th>Branch</th>
-    <th>Result</th>
-  </tr>
-  <tr>
-    <td><img src="{commit_shield_url}"></td>
-    <td>{branch}</td>
-    <td><a href="{commit_sha_url}">Commit Link</a></td>
-  </tr>
-</table>
""" + comment = f"""### Backport failed to create: {branch}. View the failure log Run details \n + + + + + + + + + + + +
+<table>
+  <tr>
+    <th>Status</th>
+    <th>Branch</th>
+    <th>Result</th>
+  </tr>
+  <tr>
+    <td><img src="{commit_shield_url}"></td>
+    <td>{branch}</td>
+    <td><a href="{commit_sha_url}">Commit Link</a></td>
+  </tr>
+</table>
+ +You can attempt to backport this manually by running: + +```bash +cherry_picker {commit_sha[:7]} {branch} +``` + +This should apply the commit to the {branch} branch and leave the commit in conflict state marking +the files that need manual conflict resolution. + +After you have resolved the conflicts, you can continue the backport process by running: + +```bash +cherry_picker --continue +``` +""" return comment diff --git a/dev/breeze/README.md b/dev/breeze/README.md index 655aaf5407e2c..6be020408bc1e 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -128,6 +128,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. --------------------------------------------------------------------------------------------------------- -Package config hash: 1a6bdff24f910175038dbd62c1c18dd091958ee2ffbb55ac7d5c93cc43f8f9ad5176093c135ac72031574292397164402a2c17a7c4f7f5fdb3c02e3d576109bf +Package config hash: d58974d3f120f707d02ad2594b03c96cdda42fe07621d940dbb357ef5eafce5a49dc9725a0e1a076800a126616196205ecb2a2a6e6f6541e12c1284aaf307df2 --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/doc/03_developer_tasks.rst b/dev/breeze/doc/03_developer_tasks.rst index 87bb2713b93fa..ad1a4fe0a6f99 100644 --- a/dev/breeze/doc/03_developer_tasks.rst +++ b/dev/breeze/doc/03_developer_tasks.rst @@ -419,7 +419,7 @@ are several reasons why you might want to do that. Breeze uses docker images heavily and those images are rebuild periodically and might leave dangling, unused images in docker cache. This might cause extra disk usage. Also running various docker compose commands -(for example running tests with ``breeze testing tests``) might create additional docker networks that might +(for example running tests with ``breeze testing core-tests``) might create additional docker networks that might prevent new networks from being created. Those networks are not removed automatically by docker-compose. Also Breeze uses it's own cache to keep information about all images. diff --git a/dev/breeze/doc/05_test_commands.rst b/dev/breeze/doc/05_test_commands.rst index 3ce2f366db9b9..819ac80224525 100644 --- a/dev/breeze/doc/05_test_commands.rst +++ b/dev/breeze/doc/05_test_commands.rst @@ -106,9 +106,9 @@ For example this will run API and WWW tests in parallel: .. code-block:: bash - breeze testing tests --parallel-test-types "API WWW" --run-in-parallel + breeze testing core-tests --parallel-test-types "API WWW" --run-in-parallel -Here is the detailed set of options for the ``breeze testing tests`` command. +Here is the detailed set of options for the ``breeze testing core-tests`` command. .. image:: ./images/output_testing_core-tests.svg :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_core-tests.svg @@ -143,11 +143,11 @@ You can also run parallel tests with ``--run-in-parallel`` flag - by default it in parallel, but you can specify the test type that you want to run with space separated list of test types passed to ``--parallel-test-types`` flag. -For example this will run API and WWW tests in parallel: +For example this will run ``amazon`` and ``google`` tests in parallel: .. 
code-block:: bash - breeze testing tests --parallel-test-types "Providers[amazon] Providers[google]" --run-in-parallel + breeze testing providers-tests --parallel-test-types "Providers[amazon] Providers[google]" --run-in-parallel Here is the detailed set of options for the ``breeze testing providers-test`` command. @@ -218,6 +218,29 @@ Here is the detailed set of options for the ``breeze testing providers-integrati :alt: Breeze testing providers-integration-tests +Running Python API client tests +............................... + +To run Python API client tests, you need to have airflow python client packaged in dist folder. +To package the client, clone the airflow-python-client repository and run the following command: + +.. code-block:: bash + + breeze release-management prepare-python-client --package-format both + --version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python + +.. code-block:: bash + + breeze testing python-api-client-tests + +Here is the detailed set of options for the ``breeze testing python-api-client-tests`` command. + +.. image:: ./images/output_testing_python-api-client-tests.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_python-api-client-tests.svg + :width: 100% + :alt: Breeze testing python-api-client-tests + + Running system tests .................... diff --git a/dev/breeze/doc/ci/02_images.md b/dev/breeze/doc/ci/02_images.md index 7616fa4fbf833..fbf47d3352c3d 100644 --- a/dev/breeze/doc/ci/02_images.md +++ b/dev/breeze/doc/ci/02_images.md @@ -448,7 +448,7 @@ can be used for CI images: | `ADDITIONAL_DEV_APT_DEPS` | | Additional apt dev dependencies installed in the first part of the image | | `ADDITIONAL_DEV_APT_ENV` | | Additional env variables defined when installing dev deps | | `AIRFLOW_PIP_VERSION` | `24.3.1` | PIP version used. | -| `AIRFLOW_UV_VERSION` | `0.5.3` | UV version used. | +| `AIRFLOW_UV_VERSION` | `0.5.4` | UV version used. | | `AIRFLOW_USE_UV` | `true` | Whether to use UV for installation. | | `PIP_PROGRESS_BAR` | `on` | Progress bar for PIP installation | diff --git a/dev/breeze/doc/ci/04_selective_checks.md b/dev/breeze/doc/ci/04_selective_checks.md index c39bc5df55fa5..08e5906745cbf 100644 --- a/dev/breeze/doc/ci/04_selective_checks.md +++ b/dev/breeze/doc/ci/04_selective_checks.md @@ -225,7 +225,7 @@ Github Actions to pass the list of parameters to a command to execute | postgres-exclude | Which versions of Postgres to exclude for tests as JSON array | [] | | | postgres-versions | Which versions of Postgres to use for tests as JSON array | \['12'\] | | | prod-image-build | Whether PROD image build is needed | true | | -| providers-compatibility-checks | List of dicts: (python_version, airflow_version, removed_providers) for compatibility checks | \[{}\] | | +| providers-compatibility-tests-matrix | Matrix of providers compatibility tests: (python_version, airflow_version, removed_providers) | \[{}\] | | | providers-test-types-list-as-string | Which test types should be run for unit tests for providers | Providers Providers\[-google\] | * | | pyproject-toml-changed | When pyproject.toml changed in the PR. 
| false | | | python-versions | List of python versions to use for that build | \['3.9'\] | | diff --git a/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg b/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg index 2281855ed930d..6a312f5b20d2c 100644 --- a/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg +++ b/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg @@ -202,8 +202,8 @@ setup:check-all-params-in-groups | setup:config | setup:regenerate-command-images | setup:self-upgrade  | setup:synchronize-local-mounts | setup:version | shell | start-airflow | static-checks | testing |    testing:core-integration-tests | testing:core-tests | testing:docker-compose-tests | testing:helm-tests -| testing:providers-integration-tests | testing:providers-tests | testing:system-tests |                -testing:task-sdk-tests)                                                                                 +| testing:providers-integration-tests | testing:providers-tests | testing:python-api-client-tests |     +testing:system-tests | testing:task-sdk-tests)                                                          ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ --verbose-vPrint verbose information about performed steps. diff --git a/dev/breeze/doc/images/output_setup_check-all-params-in-groups.txt b/dev/breeze/doc/images/output_setup_check-all-params-in-groups.txt index 8af1fd4596083..6bc1fa3014426 100644 --- a/dev/breeze/doc/images/output_setup_check-all-params-in-groups.txt +++ b/dev/breeze/doc/images/output_setup_check-all-params-in-groups.txt @@ -1 +1 @@ -5b1f2b4c7ccd4fb99efaa8f48c721c34 +4f2ddff749749897c4bdd3b36b5b3c32 diff --git a/dev/breeze/doc/images/output_setup_regenerate-command-images.svg b/dev/breeze/doc/images/output_setup_regenerate-command-images.svg index 7a3bcdedd717e..2956a66972df4 100644 --- a/dev/breeze/doc/images/output_setup_regenerate-command-images.svg +++ b/dev/breeze/doc/images/output_setup_regenerate-command-images.svg @@ -1,4 +1,4 @@ - + setup:self-upgrade | setup:synchronize-local-mounts | setup:version | shell | start-airflow |        static-checks | testing | testing:core-integration-tests | testing:core-tests |                      testing:docker-compose-tests | testing:helm-tests | testing:providers-integration-tests |            -testing:providers-tests | testing:system-tests | testing:task-sdk-tests)                             ---check-onlyOnly check if some images need to be regenerated. Return 0 if no need or 1 if needed. Cannot be used -together with --command flag or --force.                                                             -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +testing:providers-tests | testing:python-api-client-tests | testing:system-tests |                   +testing:task-sdk-tests)                                                                              +--check-onlyOnly check if some images need to be regenerated. Return 0 if no need or 1 if needed. Cannot be used +together with --command flag or --force.                                                             +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_setup_regenerate-command-images.txt b/dev/breeze/doc/images/output_setup_regenerate-command-images.txt index 8cc4669293051..d26de039adee2 100644 --- a/dev/breeze/doc/images/output_setup_regenerate-command-images.txt +++ b/dev/breeze/doc/images/output_setup_regenerate-command-images.txt @@ -1 +1 @@ -cef764d3eaa21feef72d2f259627ade6 +9001008210b2148e49e4fd11f11bb25a diff --git a/dev/breeze/doc/images/output_shell.svg b/dev/breeze/doc/images/output_shell.svg index 86892b0ea2ec3..aefd48c1531eb 100644 --- a/dev/breeze/doc/images/output_shell.svg +++ b/dev/breeze/doc/images/output_shell.svg @@ -1,4 +1,4 @@ - +