Merge branch 'main' into main
bareketamir authored Nov 14, 2024
2 parents c269e21 + de15523 commit d7cb982
Showing 242 changed files with 11,771 additions and 3,721 deletions.
21 changes: 8 additions & 13 deletions .github/workflows/basic-tests.yml
@@ -194,19 +194,14 @@ jobs:
  env:
  HATCH_ENV: "test"
  working-directory: ./clients/python
- - name: "Prepare FAB+standard provider packages: wheel"
- run: >
-   breeze release-management prepare-provider-packages fab standard \
-   --package-format wheel --skip-tag-check
- - name: "Install Airflow with fab for webserver tests"
- run: pip install . dist/apache_airflow_providers_fab-*.whl
- - name: "Install Airflow with standard provider for webserver tests"
- run: pip install . dist/apache_airflow_providers_standard-*.whl
- - name: "Prepare Task SDK package: wheel"
- run: >
-   breeze release-management prepare-task-sdk-package --package-format wheel
- - name: "Install Task SDK package"
- run: pip install ./dist/apache_airflow_task_sdk-*.whl
+ - name: "Install source version of required packages"
+ run: |
+   breeze release-management prepare-provider-packages fab standard common.sql --package-format \
+   wheel --skip-tag-check --version-suffix-for-pypi dev0
+   pip install . dist/apache_airflow_providers_fab-*.whl \
+   dist/apache_airflow_providers_standard-*.whl dist/apache_airflow_providers_common_sql-*.whl
+   breeze release-management prepare-task-sdk-package --package-format wheel
+   pip install ./dist/apache_airflow_task_sdk-*.whl
  - name: "Install Python client"
  run: pip install ./dist/apache_airflow_client-*.whl
  - name: "Initialize Airflow DB and start webserver"
16 changes: 10 additions & 6 deletions .github/workflows/ci.yml
@@ -127,6 +127,7 @@ jobs:
  latest-versions-only: ${{ steps.selective-checks.outputs.latest-versions-only }}
  chicken-egg-providers: ${{ steps.selective-checks.outputs.chicken-egg-providers }}
  has-migrations: ${{ steps.selective-checks.outputs.has-migrations }}
+ only-new-ui-files: ${{ steps.selective-checks.outputs.only-new-ui-files }}
  source-head-repo: ${{ steps.source-run-info.outputs.source-head-repo }}
  pull-request-labels: ${{ steps.source-run-info.outputs.pr-labels }}
  in-workflow-build: ${{ steps.source-run-info.outputs.in-workflow-build }}
@@ -279,7 +280,9 @@ jobs:
  name: "Generate constraints"
  needs: [build-info, wait-for-ci-images]
  uses: ./.github/workflows/generate-constraints.yml
- if: needs.build-info.outputs.ci-image-build == 'true'
+ if: >
+   needs.build-info.outputs.ci-image-build == 'true' &&
+   needs.build-info.outputs.only-new-ui-files != 'true'
  with:
  runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }}
  python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }}
@@ -381,7 +384,7 @@ jobs:
  run-migration-tests: "true"
  run-coverage: ${{ needs.build-info.outputs.run-coverage }}
  debug-resources: ${{ needs.build-info.outputs.debug-resources }}
- if: needs.build-info.outputs.run-tests == 'true'
+ if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true'

  tests-mysql:
  name: "MySQL tests"
@@ -406,7 +409,7 @@ jobs:
  run-coverage: ${{ needs.build-info.outputs.run-coverage }}
  run-migration-tests: "true"
  debug-resources: ${{ needs.build-info.outputs.debug-resources }}
- if: needs.build-info.outputs.run-tests == 'true'
+ if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true'

  tests-sqlite:
  name: "Sqlite tests"
@@ -433,7 +436,7 @@ jobs:
  run-coverage: ${{ needs.build-info.outputs.run-coverage }}
  run-migration-tests: "true"
  debug-resources: ${{ needs.build-info.outputs.debug-resources }}
- if: needs.build-info.outputs.run-tests == 'true'
+ if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true'

  tests-non-db:
  name: "Non-DB tests"
@@ -459,7 +462,7 @@ jobs:
  include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }}
  run-coverage: ${{ needs.build-info.outputs.run-coverage }}
  debug-resources: ${{ needs.build-info.outputs.debug-resources }}
- if: needs.build-info.outputs.run-tests == 'true'
+ if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true'

  tests-special:
  name: "Special tests"
@@ -664,7 +667,8 @@ jobs:
  run-task-sdk-tests: ${{ needs.build-info.outputs.run-task-sdk-tests }}
  if: >
  ( needs.build-info.outputs.run-task-sdk-tests == 'true' ||
- needs.build-info.outputs.run-tests == 'true')
+ needs.build-info.outputs.run-tests == 'true' &&
+ needs.build-info.outputs.only-new-ui-files != 'true')
  finalize-tests:
  name: Finalize tests
1 change: 1 addition & 0 deletions .github/workflows/news-fragment.yml
@@ -38,6 +38,7 @@ jobs:

  - name: Check news fragment
  run: >
+   python -m pip install --upgrade uv &&
  uv tool run towncrier check
  --dir .
  --config newsfragments/config.toml
10 changes: 5 additions & 5 deletions .pre-commit-config.yaml
@@ -360,7 +360,7 @@ repos:
  types_or: [python, pyi]
  args: [--fix]
  require_serial: true
- additional_dependencies: ["ruff==0.7.2"]
+ additional_dependencies: ["ruff==0.7.3"]
  exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py|^performance/tests/test_.*.py
  - id: ruff-format
  name: Run 'ruff format'
@@ -370,7 +370,7 @@ repos:
  types_or: [python, pyi]
  args: []
  require_serial: true
- additional_dependencies: ["ruff==0.7.2"]
+ additional_dependencies: ["ruff==0.7.3"]
  exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py$
  - id: replace-bad-characters
  name: Replace bad characters
@@ -477,21 +477,21 @@ repos:
  files: ^docs/apache-airflow/extra-packages-ref\.rst$|^hatch_build.py
  pass_filenames: false
  entry: ./scripts/ci/pre_commit/check_extra_packages_ref.py
- additional_dependencies: ['rich>=12.4.4', 'hatchling==1.26.1', 'tabulate']
+ additional_dependencies: ['rich>=12.4.4', 'hatchling==1.26.3', 'tabulate']
  - id: check-hatch-build-order
  name: Check order of dependencies in hatch_build.py
  language: python
  files: ^hatch_build.py$
  pass_filenames: false
  entry: ./scripts/ci/pre_commit/check_order_hatch_build.py
- additional_dependencies: ['rich>=12.4.4', 'hatchling==1.26.1']
+ additional_dependencies: ['rich>=12.4.4', 'hatchling==1.26.3']
  - id: update-extras
  name: Update extras in documentation
  entry: ./scripts/ci/pre_commit/insert_extras.py
  language: python
  files: ^contributing-docs/12_airflow_dependencies_and_extras.rst$|^INSTALL$|^providers/src/airflow/providers/.*/provider\.yaml$|^Dockerfile.*
  pass_filenames: false
- additional_dependencies: ['rich>=12.4.4', 'hatchling==1.26.1']
+ additional_dependencies: ['rich>=12.4.4', 'hatchling==1.26.3']
  - id: check-extras-order
  name: Check order of extras in Dockerfile
  entry: ./scripts/ci/pre_commit/check_order_dockerfile_extras.py
2 changes: 1 addition & 1 deletion Dockerfile
@@ -890,7 +890,7 @@ function install_airflow() {

  # Similarly we need _a_ file for task_sdk too
  mkdir -p ./task_sdk/src/airflow/sdk/
- touch ./task_sdk/src/airflow/__init__.py
+ touch ./task_sdk/src/airflow/sdk/__init__.py

  trap 'rm -f ./providers/src/airflow/providers/__init__.py ./task_sdk/src/airflow/__init__.py 2>/dev/null' EXIT

2 changes: 1 addition & 1 deletion Dockerfile.ci
@@ -660,7 +660,7 @@ function install_airflow() {

  # Similarly we need _a_ file for task_sdk too
  mkdir -p ./task_sdk/src/airflow/sdk/
- touch ./task_sdk/src/airflow/__init__.py
+ touch ./task_sdk/src/airflow/sdk/__init__.py

  trap 'rm -f ./providers/src/airflow/providers/__init__.py ./task_sdk/src/airflow/__init__.py 2>/dev/null' EXIT

4 changes: 4 additions & 0 deletions airflow/api_connexion/endpoints/asset_endpoint.py
@@ -47,6 +47,7 @@
  from airflow.assets.manager import asset_manager
  from airflow.models.asset import AssetDagRunQueue, AssetEvent, AssetModel
  from airflow.utils import timezone
+ from airflow.utils.api_migration import mark_fastapi_migration_done
  from airflow.utils.db import get_query_count
  from airflow.utils.session import NEW_SESSION, provide_session
  from airflow.www.decorators import action_logging
@@ -60,6 +61,7 @@
  RESOURCE_EVENT_PREFIX = "asset"


+ @mark_fastapi_migration_done
  @security.requires_access_asset("GET")
  @provide_session
  def get_asset(*, uri: str, session: Session = NEW_SESSION) -> APIResponse:
@@ -77,6 +79,7 @@ def get_asset(*, uri: str, session: Session = NEW_SESSION) -> APIResponse:
  return asset_schema.dump(asset)


+ @mark_fastapi_migration_done
  @security.requires_access_asset("GET")
  @format_parameters({"limit": check_limit})
  @provide_session
@@ -112,6 +115,7 @@ def get_assets(
  return asset_collection_schema.dump(AssetCollection(assets=assets, total_entries=total_entries))


+ @mark_fastapi_migration_done
  @security.requires_access_asset("GET")
  @provide_session
  @format_parameters({"limit": check_limit})
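Note: the hunks above, and the matching ones in the connection, task, and XCom endpoint files below, only attach a `@mark_fastapi_migration_done` marker to existing Connexion handlers. As a rough sketch of the idea, and not Airflow's actual `airflow.utils.api_migration` implementation, a marker decorator of this kind simply records the wrapped handler and returns it unchanged, so the legacy routes keep working while the migration tooling can see which endpoints already have a FastAPI counterpart (all names in the sketch are illustrative):

    from collections.abc import Callable
    from typing import TypeVar

    F = TypeVar("F", bound=Callable)

    # Illustrative registry; the real tracking mechanism may differ.
    _MIGRATED_TO_FASTAPI: set[str] = set()

    def mark_fastapi_migration_done(func: F) -> F:
        """Record that this legacy endpoint has a FastAPI counterpart; leave it unchanged."""
        _MIGRATED_TO_FASTAPI.add(f"{func.__module__}.{func.__qualname__}")
        return func

Because the decorator returns the original function object, stacking it above the existing `@security.*` and `@provide_session` decorators has no runtime effect on the endpoint itself.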
1 change: 1 addition & 0 deletions airflow/api_connexion/endpoints/connection_endpoint.py
@@ -182,6 +182,7 @@ def post_connection(*, session: Session = NEW_SESSION) -> APIResponse:
  raise AlreadyExists(detail=f"Connection already exist. ID: {conn_id}")


+ @mark_fastapi_migration_done
  @security.requires_access_connection("POST")
  def test_connection() -> APIResponse:
  """
2 changes: 2 additions & 0 deletions airflow/api_connexion/endpoints/task_endpoint.py
@@ -25,12 +25,14 @@
  from airflow.auth.managers.models.resource_details import DagAccessEntity
  from airflow.exceptions import TaskNotFound
  from airflow.utils.airflow_flask_app import get_airflow_app
+ from airflow.utils.api_migration import mark_fastapi_migration_done

  if TYPE_CHECKING:
  from airflow import DAG
  from airflow.api_connexion.types import APIResponse


+ @mark_fastapi_migration_done
  @security.requires_access_dag("GET", DagAccessEntity.TASK)
  def get_task(*, dag_id: str, task_id: str) -> APIResponse:
  """Get simplified representation of a task."""
7 changes: 6 additions & 1 deletion airflow/api_connexion/endpoints/task_instance_endpoint.py
@@ -810,7 +810,12 @@ def _query(orm_object):
  )
  return query

- task_instances = session.scalars(_query(TIH)).all() + session.scalars(_query(TI)).all()
+ # Exclude TaskInstance with state UP_FOR_RETRY since they have been recorded in TaskInstanceHistory
+ tis = session.scalars(
+     _query(TI).where(or_(TI.state != TaskInstanceState.UP_FOR_RETRY, TI.state.is_(None)))
+ ).all()
+
+ task_instances = session.scalars(_query(TIH)).all() + tis
  return task_instance_history_collection_schema.dump(
  TaskInstanceHistoryCollection(task_instances=task_instances, total_entries=len(task_instances))
  )
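Note: the subtle point in the hunk above is the explicit `TI.state.is_(None)` branch. In SQL, `state != 'up_for_retry'` evaluates to unknown (not true) for rows whose state is NULL, so without the OR those rows would silently drop out of the result. A self-contained demonstration of the difference, using plain SQLAlchemy rather than Airflow code, with an illustrative stand-in table:

    from sqlalchemy import Column, Integer, String, create_engine, or_, select
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class TI(Base):  # stand-in for Airflow's TaskInstance model
        __tablename__ = "task_instance"
        id = Column(Integer, primary_key=True)
        state = Column(String, nullable=True)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add_all([TI(state="success"), TI(state="up_for_retry"), TI(state=None)])
        session.commit()

        # Naive filter: the NULL-state row disappears along with the retry row.
        naive = session.scalars(select(TI).where(TI.state != "up_for_retry")).all()

        # NULL-safe filter, mirroring the new query: only the retry row is excluded.
        null_safe = session.scalars(
            select(TI).where(or_(TI.state != "up_for_retry", TI.state.is_(None)))
        ).all()

        print(len(naive), len(null_safe))  # 1 2

The excluded rows are not lost from the endpoint's response: as the added comment in the diff states, task instances in UP_FOR_RETRY already have a corresponding record in TaskInstanceHistory, which the first query returns.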
2 changes: 2 additions & 0 deletions airflow/api_connexion/endpoints/xcom_endpoint.py
@@ -34,6 +34,7 @@
  from airflow.auth.managers.models.resource_details import DagAccessEntity
  from airflow.models import DagRun as DR, XCom
  from airflow.settings import conf
+ from airflow.utils.api_migration import mark_fastapi_migration_done
  from airflow.utils.db import get_query_count
  from airflow.utils.session import NEW_SESSION, provide_session
  from airflow.www.extensions.init_auth_manager import get_auth_manager
@@ -83,6 +84,7 @@ def get_xcom_entries(
  return xcom_collection_schema.dump(XComCollection(xcom_entries=query, total_entries=total_entries))


+ @mark_fastapi_migration_done
  @security.requires_access_dag("GET", DagAccessEntity.XCOM)
  @provide_session
  def get_xcom_entry(
10 changes: 1 addition & 9 deletions airflow/api_connexion/openapi/v1.yaml
@@ -2284,7 +2284,7 @@ paths:
  properties:
  content:
  type: string
- plain/text:
+ text/plain:
  schema:
  type: string

@@ -2921,14 +2921,6 @@ components:
  Time when the DAG last received a refresh signal
  (e.g. the DAG's "refresh" button was clicked in the web UI)
  *New in version 2.3.0*
- pickle_id:
- type: string
- readOnly: true
- nullable: true
- description: |
-   Foreign key to the latest pickle_id
-   *New in version 2.3.0*
  default_view:
  type: string
(Diffs for the remaining changed files are not shown here.)
