diff --git a/.github/workflows/news-fragment.yml b/.github/workflows/news-fragment.yml index 979a1148a6ea7..bb9d30bf1db6c 100644 --- a/.github/workflows/news-fragment.yml +++ b/.github/workflows/news-fragment.yml @@ -68,6 +68,7 @@ jobs: 'Behaviour changes' 'Plugin changes' 'Dependency changes' + 'Code interface changes' ) news_fragment_content=`git diff origin/${BASE_REF} newsfragments/*.significant.rst` diff --git a/airflow/api_fastapi/core_api/datamodels/assets.py b/airflow/api_fastapi/core_api/datamodels/assets.py index 9721157998564..c7b7bec034c06 100644 --- a/airflow/api_fastapi/core_api/datamodels/assets.py +++ b/airflow/api_fastapi/core_api/datamodels/assets.py @@ -102,6 +102,9 @@ class AssetEventResponse(BaseModel): id: int asset_id: int + uri: str | None = Field(alias="uri", default=None) + name: str | None = Field(alias="name", default=None) + group: str | None = Field(alias="group", default=None) extra: dict | None = None source_task_id: str | None = None source_dag_id: str | None = None diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index 4a6ff7f2bbed4..6075d0c886ef2 100644 --- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -6447,6 +6447,21 @@ components: asset_id: type: integer title: Asset Id + uri: + anyOf: + - type: string + - type: 'null' + title: Uri + name: + anyOf: + - type: string + - type: 'null' + title: Name + group: + anyOf: + - type: string + - type: 'null' + title: Group extra: anyOf: - type: object diff --git a/airflow/api_fastapi/execution_api/datamodels/asset.py b/airflow/api_fastapi/execution_api/datamodels/asset.py index 6d3a53c3e4ca8..29b260c291c2b 100644 --- a/airflow/api_fastapi/execution_api/datamodels/asset.py +++ b/airflow/api_fastapi/execution_api/datamodels/asset.py @@ -34,3 +34,18 @@ class AssetAliasResponse(BaseModel): name: str group: str + + +class AssetProfile(BaseModel): + """ + Profile of an Asset. + + Asset will have name, uri and asset_type defined. + AssetNameRef will have name and asset_type defined. + AssetUriRef will have uri and asset_type defined. 
+ + """ + + name: str | None = None + uri: str | None = None + asset_type: str diff --git a/airflow/api_fastapi/execution_api/datamodels/taskinstance.py b/airflow/api_fastapi/execution_api/datamodels/taskinstance.py index 5e8c267b82fde..6cc82259cf758 100644 --- a/airflow/api_fastapi/execution_api/datamodels/taskinstance.py +++ b/airflow/api_fastapi/execution_api/datamodels/taskinstance.py @@ -21,10 +21,19 @@ from datetime import timedelta from typing import Annotated, Any, Literal, Union -from pydantic import AwareDatetime, Discriminator, Field, Tag, TypeAdapter, WithJsonSchema, field_validator +from pydantic import ( + AwareDatetime, + Discriminator, + Field, + Tag, + TypeAdapter, + WithJsonSchema, + field_validator, +) from airflow.api_fastapi.common.types import UtcDateTime from airflow.api_fastapi.core_api.base import BaseModel +from airflow.api_fastapi.execution_api.datamodels.asset import AssetProfile from airflow.api_fastapi.execution_api.datamodels.connection import ConnectionResponse from airflow.api_fastapi.execution_api.datamodels.variable import VariableResponse from airflow.utils.state import IntermediateTIState, TaskInstanceState as TIState, TerminalTIState @@ -52,14 +61,41 @@ class TIEnterRunningPayload(BaseModel): class TITerminalStatePayload(BaseModel): - """Schema for updating TaskInstance to a terminal state (e.g., SUCCESS or FAILED).""" + """Schema for updating TaskInstance to a terminal state except SUCCESS state.""" - state: TerminalTIState + state: Literal[ + TerminalTIState.FAILED, + TerminalTIState.SKIPPED, + TerminalTIState.REMOVED, + TerminalTIState.FAIL_WITHOUT_RETRY, + ] end_date: UtcDateTime """When the task completed executing""" +class TISuccessStatePayload(BaseModel): + """Schema for updating TaskInstance to success state.""" + + state: Annotated[ + Literal[TerminalTIState.SUCCESS], + # Specify a default in the schema, but not in code, so Pydantic marks it as required. 
+ WithJsonSchema( + { + "type": "string", + "enum": [TerminalTIState.SUCCESS], + "default": TerminalTIState.SUCCESS, + } + ), + ] + + end_date: UtcDateTime + """When the task completed executing""" + + task_outlets: Annotated[list[AssetProfile], Field(default_factory=list)] + outlet_events: Annotated[list[Any], Field(default_factory=list)] + + class TITargetStatePayload(BaseModel): """Schema for updating TaskInstance to a target state, excluding terminal and running states.""" @@ -123,7 +159,10 @@ def ti_state_discriminator(v: dict[str, str] | BaseModel) -> str: state = v.get("state") else: state = getattr(v, "state", None) - if state in set(TerminalTIState): + + if state == TIState.SUCCESS: + return "success" + elif state in set(TerminalTIState): return "_terminal_" elif state == TIState.DEFERRED: return "deferred" @@ -137,6 +176,7 @@ def ti_state_discriminator(v: dict[str, str] | BaseModel) -> str: TIStateUpdate = Annotated[ Union[ Annotated[TITerminalStatePayload, Tag("_terminal_")], + Annotated[TISuccessStatePayload, Tag("success")], Annotated[TITargetStatePayload, Tag("_other_")], Annotated[TIDeferredStatePayload, Tag("deferred")], Annotated[TIRescheduleStatePayload, Tag("up_for_reschedule")], diff --git a/airflow/api_fastapi/execution_api/routes/task_instances.py b/airflow/api_fastapi/execution_api/routes/task_instances.py index 4899e93c61252..899017e612d5a 100644 --- a/airflow/api_fastapi/execution_api/routes/task_instances.py +++ b/airflow/api_fastapi/execution_api/routes/task_instances.py @@ -38,6 +38,7 @@ TIRescheduleStatePayload, TIRunContext, TIStateUpdate, + TISuccessStatePayload, TITerminalStatePayload, ) from airflow.models.dagrun import DagRun as DR @@ -226,7 +227,7 @@ def ti_update_state( ) # We exclude_unset to avoid updating fields that are not set in the payload - data = ti_patch_payload.model_dump(exclude_unset=True) + data = ti_patch_payload.model_dump(exclude={"task_outlets", "outlet_events"}, exclude_unset=True) query = update(TI).where(TI.id == ti_id_str).values(data) @@ -243,6 +244,17 @@ def ti_update_state( else: updated_state = State.FAILED query = query.values(state=updated_state) + elif isinstance(ti_patch_payload, TISuccessStatePayload): + query = TI.duration_expression_update(ti_patch_payload.end_date, query, session.bind) + updated_state = ti_patch_payload.state + task_instance = session.get(TI, ti_id_str) + TI.register_asset_changes_in_db( + task_instance, + ti_patch_payload.task_outlets, # type: ignore + ti_patch_payload.outlet_events, + session, + ) + query = query.values(state=updated_state) elif isinstance(ti_patch_payload, TIDeferredStatePayload): # Calculate timeout if it was passed timeout = None diff --git a/airflow/auth/managers/simple/ui/package-lock.json b/airflow/auth/managers/simple/ui/package-lock.json index 690bebb444dcc..a0087cc944ce1 100644 --- a/airflow/auth/managers/simple/ui/package-lock.json +++ b/airflow/auth/managers/simple/ui/package-lock.json @@ -23,6 +23,7 @@ "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.10.0", "happy-dom": "^15.10.2", + "vite": "^5.4.14", "vite-plugin-css-injected-by-js": "^3.5.2", "vitest": "^2.1.1" } @@ -5364,10 +5365,11 @@ } }, "node_modules/vite": { - "version": "5.4.11", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.11.tgz", - "integrity": "sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==", + "version": "5.4.14", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.14.tgz", + "integrity": 
"sha512-EK5cY7Q1D8JNhSaPKVK4pwBFvaTmZxEnoKXLG/U9gmdDcihQGNzFlgIvaxezFR4glP1LsuiedwMBqCXH3wZccA==", "dev": true, + "license": "MIT", "dependencies": { "esbuild": "^0.21.3", "postcss": "^8.4.43", diff --git a/airflow/auth/managers/simple/ui/package.json b/airflow/auth/managers/simple/ui/package.json index 3bb9d5e0508e3..2001ef7162626 100644 --- a/airflow/auth/managers/simple/ui/package.json +++ b/airflow/auth/managers/simple/ui/package.json @@ -27,7 +27,7 @@ "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.10.0", "happy-dom": "^15.10.2", - "vite": "^5.4.6", + "vite": "^5.4.14", "vite-plugin-css-injected-by-js": "^3.5.2", "vitest": "^2.1.1" } diff --git a/airflow/auth/managers/simple/ui/pnpm-lock.yaml b/airflow/auth/managers/simple/ui/pnpm-lock.yaml index c1931f0cde77f..1676d87998832 100644 --- a/airflow/auth/managers/simple/ui/pnpm-lock.yaml +++ b/airflow/auth/managers/simple/ui/pnpm-lock.yaml @@ -49,7 +49,7 @@ importers: specifier: ^15.10.2 version: 15.11.7 vite: - specifier: ^5.4.6 + specifier: ^5.4.14 version: 5.4.14 vite-plugin-css-injected-by-js: specifier: ^3.5.2 @@ -437,98 +437,98 @@ packages: resolution: {integrity: sha512-KeBYSwohb8g4/wCcnksvKTYlg69O62sQeLynn2YE+5z7JWEj95if27kclW9QqbrlsQ2DINI8fjbV3zyuKfwjKg==} engines: {node: '>=14.0.0'} - '@rollup/rollup-android-arm-eabi@4.30.1': - resolution: {integrity: sha512-pSWY+EVt3rJ9fQ3IqlrEUtXh3cGqGtPDH1FQlNZehO2yYxCHEX1SPsz1M//NXwYfbTlcKr9WObLnJX9FsS9K1Q==} + '@rollup/rollup-android-arm-eabi@4.31.0': + resolution: {integrity: sha512-9NrR4033uCbUBRgvLcBrJofa2KY9DzxL2UKZ1/4xA/mnTNyhZCWBuD8X3tPm1n4KxcgaraOYgrFKSgwjASfmlA==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.30.1': - resolution: {integrity: sha512-/NA2qXxE3D/BRjOJM8wQblmArQq1YoBVJjrjoTSBS09jgUisq7bqxNHJ8kjCHeV21W/9WDGwJEWSN0KQ2mtD/w==} + '@rollup/rollup-android-arm64@4.31.0': + resolution: {integrity: sha512-iBbODqT86YBFHajxxF8ebj2hwKm1k8PTBQSojSt3d1FFt1gN+xf4CowE47iN0vOSdnd+5ierMHBbu/rHc7nq5g==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.30.1': - resolution: {integrity: sha512-r7FQIXD7gB0WJ5mokTUgUWPl0eYIH0wnxqeSAhuIwvnnpjdVB8cRRClyKLQr7lgzjctkbp5KmswWszlwYln03Q==} + '@rollup/rollup-darwin-arm64@4.31.0': + resolution: {integrity: sha512-WHIZfXgVBX30SWuTMhlHPXTyN20AXrLH4TEeH/D0Bolvx9PjgZnn4H677PlSGvU6MKNsjCQJYczkpvBbrBnG6g==} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.30.1': - resolution: {integrity: sha512-x78BavIwSH6sqfP2xeI1hd1GpHL8J4W2BXcVM/5KYKoAD3nNsfitQhvWSw+TFtQTLZ9OmlF+FEInEHyubut2OA==} + '@rollup/rollup-darwin-x64@4.31.0': + resolution: {integrity: sha512-hrWL7uQacTEF8gdrQAqcDy9xllQ0w0zuL1wk1HV8wKGSGbKPVjVUv/DEwT2+Asabf8Dh/As+IvfdU+H8hhzrQQ==} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.30.1': - resolution: {integrity: sha512-HYTlUAjbO1z8ywxsDFWADfTRfTIIy/oUlfIDmlHYmjUP2QRDTzBuWXc9O4CXM+bo9qfiCclmHk1x4ogBjOUpUQ==} + '@rollup/rollup-freebsd-arm64@4.31.0': + resolution: {integrity: sha512-S2oCsZ4hJviG1QjPY1h6sVJLBI6ekBeAEssYKad1soRFv3SocsQCzX6cwnk6fID6UQQACTjeIMB+hyYrFacRew==} cpu: [arm64] os: [freebsd] - '@rollup/rollup-freebsd-x64@4.30.1': - resolution: {integrity: sha512-1MEdGqogQLccphhX5myCJqeGNYTNcmTyaic9S7CG3JhwuIByJ7J05vGbZxsizQthP1xpVx7kd3o31eOogfEirw==} + '@rollup/rollup-freebsd-x64@4.31.0': + resolution: {integrity: sha512-pCANqpynRS4Jirn4IKZH4tnm2+2CqCNLKD7gAdEjzdLGbH1iO0zouHz4mxqg0uEMpO030ejJ0aA6e1PJo2xrPA==} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.30.1': - resolution: {integrity: sha512-PaMRNBSqCx7K3Wc9QZkFx5+CX27WFpAMxJNiYGAXfmMIKC7jstlr32UhTgK6T07OtqR+wYlWm9IxzennjnvdJg==} 
+ '@rollup/rollup-linux-arm-gnueabihf@4.31.0': + resolution: {integrity: sha512-0O8ViX+QcBd3ZmGlcFTnYXZKGbFu09EhgD27tgTdGnkcYXLat4KIsBBQeKLR2xZDCXdIBAlWLkiXE1+rJpCxFw==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm-musleabihf@4.30.1': - resolution: {integrity: sha512-B8Rcyj9AV7ZlEFqvB5BubG5iO6ANDsRKlhIxySXcF1axXYUyqwBok+XZPgIYGBgs7LDXfWfifxhw0Ik57T0Yug==} + '@rollup/rollup-linux-arm-musleabihf@4.31.0': + resolution: {integrity: sha512-w5IzG0wTVv7B0/SwDnMYmbr2uERQp999q8FMkKG1I+j8hpPX2BYFjWe69xbhbP6J9h2gId/7ogesl9hwblFwwg==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.30.1': - resolution: {integrity: sha512-hqVyueGxAj3cBKrAI4aFHLV+h0Lv5VgWZs9CUGqr1z0fZtlADVV1YPOij6AhcK5An33EXaxnDLmJdQikcn5NEw==} + '@rollup/rollup-linux-arm64-gnu@4.31.0': + resolution: {integrity: sha512-JyFFshbN5xwy6fulZ8B/8qOqENRmDdEkcIMF0Zz+RsfamEW+Zabl5jAb0IozP/8UKnJ7g2FtZZPEUIAlUSX8cA==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-arm64-musl@4.30.1': - resolution: {integrity: sha512-i4Ab2vnvS1AE1PyOIGp2kXni69gU2DAUVt6FSXeIqUCPIR3ZlheMW3oP2JkukDfu3PsexYRbOiJrY+yVNSk9oA==} + '@rollup/rollup-linux-arm64-musl@4.31.0': + resolution: {integrity: sha512-kpQXQ0UPFeMPmPYksiBL9WS/BDiQEjRGMfklVIsA0Sng347H8W2iexch+IEwaR7OVSKtr2ZFxggt11zVIlZ25g==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-loongarch64-gnu@4.30.1': - resolution: {integrity: sha512-fARcF5g296snX0oLGkVxPmysetwUk2zmHcca+e9ObOovBR++9ZPOhqFUM61UUZ2EYpXVPN1redgqVoBB34nTpQ==} + '@rollup/rollup-linux-loongarch64-gnu@4.31.0': + resolution: {integrity: sha512-pMlxLjt60iQTzt9iBb3jZphFIl55a70wexvo8p+vVFK+7ifTRookdoXX3bOsRdmfD+OKnMozKO6XM4zR0sHRrQ==} cpu: [loong64] os: [linux] - '@rollup/rollup-linux-powerpc64le-gnu@4.30.1': - resolution: {integrity: sha512-GLrZraoO3wVT4uFXh67ElpwQY0DIygxdv0BNW9Hkm3X34wu+BkqrDrkcsIapAY+N2ATEbvak0XQ9gxZtCIA5Rw==} + '@rollup/rollup-linux-powerpc64le-gnu@4.31.0': + resolution: {integrity: sha512-D7TXT7I/uKEuWiRkEFbed1UUYZwcJDU4vZQdPTcepK7ecPhzKOYk4Er2YR4uHKme4qDeIh6N3XrLfpuM7vzRWQ==} cpu: [ppc64] os: [linux] - '@rollup/rollup-linux-riscv64-gnu@4.30.1': - resolution: {integrity: sha512-0WKLaAUUHKBtll0wvOmh6yh3S0wSU9+yas923JIChfxOaaBarmb/lBKPF0w/+jTVozFnOXJeRGZ8NvOxvk/jcw==} + '@rollup/rollup-linux-riscv64-gnu@4.31.0': + resolution: {integrity: sha512-wal2Tc8O5lMBtoePLBYRKj2CImUCJ4UNGJlLwspx7QApYny7K1cUYlzQ/4IGQBLmm+y0RS7dwc3TDO/pmcneTw==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-s390x-gnu@4.30.1': - resolution: {integrity: sha512-GWFs97Ruxo5Bt+cvVTQkOJ6TIx0xJDD/bMAOXWJg8TCSTEK8RnFeOeiFTxKniTc4vMIaWvCplMAFBt9miGxgkA==} + '@rollup/rollup-linux-s390x-gnu@4.31.0': + resolution: {integrity: sha512-O1o5EUI0+RRMkK9wiTVpk2tyzXdXefHtRTIjBbmFREmNMy7pFeYXCFGbhKFwISA3UOExlo5GGUuuj3oMKdK6JQ==} cpu: [s390x] os: [linux] - '@rollup/rollup-linux-x64-gnu@4.30.1': - resolution: {integrity: sha512-UtgGb7QGgXDIO+tqqJ5oZRGHsDLO8SlpE4MhqpY9Llpzi5rJMvrK6ZGhsRCST2abZdBqIBeXW6WPD5fGK5SDwg==} + '@rollup/rollup-linux-x64-gnu@4.31.0': + resolution: {integrity: sha512-zSoHl356vKnNxwOWnLd60ixHNPRBglxpv2g7q0Cd3Pmr561gf0HiAcUBRL3S1vPqRC17Zo2CX/9cPkqTIiai1g==} cpu: [x64] os: [linux] - '@rollup/rollup-linux-x64-musl@4.30.1': - resolution: {integrity: sha512-V9U8Ey2UqmQsBT+xTOeMzPzwDzyXmnAoO4edZhL7INkwQcaW1Ckv3WJX3qrrp/VHaDkEWIBWhRwP47r8cdrOow==} + '@rollup/rollup-linux-x64-musl@4.31.0': + resolution: {integrity: sha512-ypB/HMtcSGhKUQNiFwqgdclWNRrAYDH8iMYH4etw/ZlGwiTVxBz2tDrGRrPlfZu6QjXwtd+C3Zib5pFqID97ZA==} cpu: [x64] os: [linux] - '@rollup/rollup-win32-arm64-msvc@4.30.1': - resolution: {integrity: 
sha512-WabtHWiPaFF47W3PkHnjbmWawnX/aE57K47ZDT1BXTS5GgrBUEpvOzq0FI0V/UYzQJgdb8XlhVNH8/fwV8xDjw==} + '@rollup/rollup-win32-arm64-msvc@4.31.0': + resolution: {integrity: sha512-JuhN2xdI/m8Hr+aVO3vspO7OQfUFO6bKLIRTAy0U15vmWjnZDLrEgCZ2s6+scAYaQVpYSh9tZtRijApw9IXyMw==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.30.1': - resolution: {integrity: sha512-pxHAU+Zv39hLUTdQQHUVHf4P+0C47y/ZloorHpzs2SXMRqeAWmGghzAhfOlzFHHwjvgokdFAhC4V+6kC1lRRfw==} + '@rollup/rollup-win32-ia32-msvc@4.31.0': + resolution: {integrity: sha512-U1xZZXYkvdf5MIWmftU8wrM5PPXzyaY1nGCI4KI4BFfoZxHamsIe+BtnPLIvvPykvQWlVbqUXdLa4aJUuilwLQ==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.30.1': - resolution: {integrity: sha512-D6qjsXGcvhTjv0kI4fU8tUuBDF/Ueee4SVX79VfNDXZa64TfCW1Slkb6Z7O1p7vflqZjcmOVdZlqf8gvJxc6og==} + '@rollup/rollup-win32-x64-msvc@4.31.0': + resolution: {integrity: sha512-ul8rnCsUumNln5YWwz0ted2ZHFhzhRRnkpBZ+YRuHoRAlUji9KChpOUOndY7uykrPEPXVbHLlsdo6v5yXo/TXw==} cpu: [x64] os: [win32] @@ -1692,8 +1692,8 @@ packages: resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - rollup@4.30.1: - resolution: {integrity: sha512-mlJ4glW020fPuLi7DkM/lN97mYEZGWeqBnrljzN0gs7GLctqX3lNWxKQ7Gl712UAX+6fog/L3jh4gb7R6aVi3w==} + rollup@4.31.0: + resolution: {integrity: sha512-9cCE8P4rZLx9+PjoyqHLs31V9a9Vpvfo4qNcs6JCiGWYhw2gijSetFbH6SSy1whnkgcefnUwr8sad7tgqsGvnw==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true @@ -2353,61 +2353,61 @@ snapshots: '@remix-run/router@1.21.1': {} - '@rollup/rollup-android-arm-eabi@4.30.1': + '@rollup/rollup-android-arm-eabi@4.31.0': optional: true - '@rollup/rollup-android-arm64@4.30.1': + '@rollup/rollup-android-arm64@4.31.0': optional: true - '@rollup/rollup-darwin-arm64@4.30.1': + '@rollup/rollup-darwin-arm64@4.31.0': optional: true - '@rollup/rollup-darwin-x64@4.30.1': + '@rollup/rollup-darwin-x64@4.31.0': optional: true - '@rollup/rollup-freebsd-arm64@4.30.1': + '@rollup/rollup-freebsd-arm64@4.31.0': optional: true - '@rollup/rollup-freebsd-x64@4.30.1': + '@rollup/rollup-freebsd-x64@4.31.0': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.30.1': + '@rollup/rollup-linux-arm-gnueabihf@4.31.0': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.30.1': + '@rollup/rollup-linux-arm-musleabihf@4.31.0': optional: true - '@rollup/rollup-linux-arm64-gnu@4.30.1': + '@rollup/rollup-linux-arm64-gnu@4.31.0': optional: true - '@rollup/rollup-linux-arm64-musl@4.30.1': + '@rollup/rollup-linux-arm64-musl@4.31.0': optional: true - '@rollup/rollup-linux-loongarch64-gnu@4.30.1': + '@rollup/rollup-linux-loongarch64-gnu@4.31.0': optional: true - '@rollup/rollup-linux-powerpc64le-gnu@4.30.1': + '@rollup/rollup-linux-powerpc64le-gnu@4.31.0': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.30.1': + '@rollup/rollup-linux-riscv64-gnu@4.31.0': optional: true - '@rollup/rollup-linux-s390x-gnu@4.30.1': + '@rollup/rollup-linux-s390x-gnu@4.31.0': optional: true - '@rollup/rollup-linux-x64-gnu@4.30.1': + '@rollup/rollup-linux-x64-gnu@4.31.0': optional: true - '@rollup/rollup-linux-x64-musl@4.30.1': + '@rollup/rollup-linux-x64-musl@4.31.0': optional: true - '@rollup/rollup-win32-arm64-msvc@4.30.1': + '@rollup/rollup-win32-arm64-msvc@4.31.0': optional: true - '@rollup/rollup-win32-ia32-msvc@4.30.1': + '@rollup/rollup-win32-ia32-msvc@4.31.0': optional: true - '@rollup/rollup-win32-x64-msvc@4.30.1': + '@rollup/rollup-win32-x64-msvc@4.31.0': optional: true 
'@swc/core-darwin-arm64@1.10.7': @@ -3814,29 +3814,29 @@ snapshots: reusify@1.0.4: {} - rollup@4.30.1: + rollup@4.31.0: dependencies: '@types/estree': 1.0.6 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.30.1 - '@rollup/rollup-android-arm64': 4.30.1 - '@rollup/rollup-darwin-arm64': 4.30.1 - '@rollup/rollup-darwin-x64': 4.30.1 - '@rollup/rollup-freebsd-arm64': 4.30.1 - '@rollup/rollup-freebsd-x64': 4.30.1 - '@rollup/rollup-linux-arm-gnueabihf': 4.30.1 - '@rollup/rollup-linux-arm-musleabihf': 4.30.1 - '@rollup/rollup-linux-arm64-gnu': 4.30.1 - '@rollup/rollup-linux-arm64-musl': 4.30.1 - '@rollup/rollup-linux-loongarch64-gnu': 4.30.1 - '@rollup/rollup-linux-powerpc64le-gnu': 4.30.1 - '@rollup/rollup-linux-riscv64-gnu': 4.30.1 - '@rollup/rollup-linux-s390x-gnu': 4.30.1 - '@rollup/rollup-linux-x64-gnu': 4.30.1 - '@rollup/rollup-linux-x64-musl': 4.30.1 - '@rollup/rollup-win32-arm64-msvc': 4.30.1 - '@rollup/rollup-win32-ia32-msvc': 4.30.1 - '@rollup/rollup-win32-x64-msvc': 4.30.1 + '@rollup/rollup-android-arm-eabi': 4.31.0 + '@rollup/rollup-android-arm64': 4.31.0 + '@rollup/rollup-darwin-arm64': 4.31.0 + '@rollup/rollup-darwin-x64': 4.31.0 + '@rollup/rollup-freebsd-arm64': 4.31.0 + '@rollup/rollup-freebsd-x64': 4.31.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.31.0 + '@rollup/rollup-linux-arm-musleabihf': 4.31.0 + '@rollup/rollup-linux-arm64-gnu': 4.31.0 + '@rollup/rollup-linux-arm64-musl': 4.31.0 + '@rollup/rollup-linux-loongarch64-gnu': 4.31.0 + '@rollup/rollup-linux-powerpc64le-gnu': 4.31.0 + '@rollup/rollup-linux-riscv64-gnu': 4.31.0 + '@rollup/rollup-linux-s390x-gnu': 4.31.0 + '@rollup/rollup-linux-x64-gnu': 4.31.0 + '@rollup/rollup-linux-x64-musl': 4.31.0 + '@rollup/rollup-win32-arm64-msvc': 4.31.0 + '@rollup/rollup-win32-ia32-msvc': 4.31.0 + '@rollup/rollup-win32-x64-msvc': 4.31.0 fsevents: 2.3.3 run-parallel@1.2.0: @@ -3976,7 +3976,7 @@ snapshots: dependencies: esbuild: 0.21.5 postcss: 8.5.1 - rollup: 4.30.1 + rollup: 4.31.0 optionalDependencies: fsevents: 2.3.3 diff --git a/airflow/executors/executor_loader.py b/airflow/executors/executor_loader.py index 2651718bbad23..6d6b8d115bcc1 100644 --- a/airflow/executors/executor_loader.py +++ b/airflow/executors/executor_loader.py @@ -231,6 +231,10 @@ def init_executors(cls) -> list[BaseExecutor]: @classmethod def lookup_executor_name_by_str(cls, executor_name_str: str) -> ExecutorName: # lookup the executor by alias first, if not check if we're given a module path + if not _classname_to_executors or not _module_to_executors or not _alias_to_executors: + # if we haven't loaded the executors yet, such as directly calling load_executor + cls._get_executor_names() + if executor_name := _alias_to_executors.get(executor_name_str): return executor_name elif executor_name := _module_to_executors.get(executor_name_str): diff --git a/airflow/models/asset.py b/airflow/models/asset.py index 1ac14fb6ec2f2..212a0b3a84c6c 100644 --- a/airflow/models/asset.py +++ b/airflow/models/asset.py @@ -714,6 +714,14 @@ class AssetEvent(Base): def uri(self): return self.asset.uri + @property + def group(self): + return self.asset.group + + @property + def name(self): + return self.asset.name + def __repr__(self) -> str: args = [] for attr in [ diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index c0f6d20703b61..f1aa3a8236e9c 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -105,6 +105,7 @@ from airflow.models.taskreschedule import TaskReschedule from airflow.models.xcom import 
LazyXComSelectSequence, XCom
 from airflow.plugins_manager import integrate_macros_plugins
+from airflow.sdk.api.datamodels._generated import AssetProfile
 from airflow.sdk.definitions._internal.templater import SandboxedEnvironment
 from airflow.sdk.definitions.asset import Asset, AssetAlias, AssetNameRef, AssetUniqueKey, AssetUriRef
 from airflow.sdk.definitions.taskgroup import MappedTaskGroup
@@ -160,7 +161,7 @@
     from airflow.models.dagrun import DagRun
     from airflow.sdk.definitions._internal.abstractoperator import Operator
     from airflow.sdk.definitions.dag import DAG
-    from airflow.sdk.types import OutletEventAccessorsProtocol, RuntimeTaskInstanceProtocol
+    from airflow.sdk.types import RuntimeTaskInstanceProtocol
     from airflow.typing_compat import Literal, TypeGuard
     from airflow.utils.task_group import TaskGroup
@@ -352,7 +353,29 @@ def _run_raw_task(
         if not test_mode:
             _add_log(event=ti.state, task_instance=ti, session=session)
         if ti.state == TaskInstanceState.SUCCESS:
-            ti._register_asset_changes(events=context["outlet_events"], session=session)
+            added_alias_to_task_outlet = False
+            task_outlets = []
+            outlet_events = []
+            events = context["outlet_events"]
+            for obj in ti.task.outlets or []:
+                # Lineage can have other types of objects besides assets
+                asset_type = type(obj).__name__
+                if isinstance(obj, Asset):
+                    task_outlets.append(AssetProfile(name=obj.name, uri=obj.uri, asset_type=asset_type))
+                    outlet_events.append(attrs.asdict(events[obj]))  # type: ignore
+                elif isinstance(obj, AssetNameRef):
+                    task_outlets.append(AssetProfile(name=obj.name, asset_type=asset_type))
+                    outlet_events.append(attrs.asdict(events))  # type: ignore
+                elif isinstance(obj, AssetUriRef):
+                    task_outlets.append(AssetProfile(uri=obj.uri, asset_type=asset_type))
+                    outlet_events.append(attrs.asdict(events))  # type: ignore
+                elif isinstance(obj, AssetAlias):
+                    if not added_alias_to_task_outlet:
+                        task_outlets.append(AssetProfile(asset_type=asset_type))
+                        added_alias_to_task_outlet = True
+                    for asset_alias_event in events[obj].asset_alias_events:
+                        outlet_events.append(attrs.asdict(asset_alias_event))
+            TaskInstance.register_asset_changes_in_db(ti, task_outlets, outlet_events, session=session)

         TaskInstance.save_to_db(ti=ti, session=session)
         if ti.state == TaskInstanceState.SUCCESS:
@@ -2733,49 +2756,46 @@ def _run_raw_task(
             session=session,
         )

-    def _register_asset_changes(
-        self, *, events: OutletEventAccessorsProtocol, session: Session | None = None
-    ) -> None:
-        if session:
-            TaskInstance._register_asset_changes_int(ti=self, events=events, session=session)
-        else:
-            TaskInstance._register_asset_changes_int(ti=self, events=events)
-
     @staticmethod
     @provide_session
-    def _register_asset_changes_int(
-        ti: TaskInstance, *, events: OutletEventAccessorsProtocol, session: Session = NEW_SESSION
+    def register_asset_changes_in_db(
+        ti: TaskInstance,
+        task_outlets: list[AssetProfile],
+        outlet_events: list[Any],
+        session: Session = NEW_SESSION,
     ) -> None:
-        if TYPE_CHECKING:
-            assert ti.task
-
         # One task triggers at most one asset event per asset for a given extra.
         # This mapping from (asset unique key, frozen extra) to the set of alias names is used to
         # detect assets that share a key but carry different extras, which must emit separate asset events.
asset_alias_names: dict[tuple[AssetUniqueKey, frozenset], set[str]] = defaultdict(set) - asset_name_refs: set[str] = set() asset_uri_refs: set[str] = set() - for obj in ti.task.outlets or []: + for obj in task_outlets: ti.log.debug("outlet obj %s", obj) # Lineage can have other types of objects besides assets - if isinstance(obj, Asset): + if obj.asset_type == Asset.__name__: asset_manager.register_asset_change( task_instance=ti, - asset=obj, - extra=events[obj].extra, + asset=Asset(name=obj.name, uri=obj.uri), # type: ignore + extra=outlet_events[0]["extra"], session=session, ) - elif isinstance(obj, AssetNameRef): - asset_name_refs.add(obj.name) - elif isinstance(obj, AssetUriRef): - asset_uri_refs.add(obj.uri) - elif isinstance(obj, AssetAlias): - for asset_alias_event in events[obj].asset_alias_events: - asset_alias_name = asset_alias_event.source_alias_name - asset_unique_key = asset_alias_event.dest_asset_key - frozen_extra = frozenset(asset_alias_event.extra.items()) + elif obj.asset_type == AssetNameRef.__name__: + asset_name_refs.add(obj.name) # type: ignore + elif obj.asset_type == AssetUriRef.__name__: + asset_uri_refs.add(obj.uri) # type: ignore + elif obj.asset_type == AssetAlias.__name__: + outlet_events = list( + map( + lambda event: {**event, "dest_asset_key": AssetUniqueKey(**event["dest_asset_key"])}, + outlet_events, + ) + ) + for asset_alias_event in outlet_events: + asset_alias_name = asset_alias_event["source_alias_name"] + asset_unique_key = asset_alias_event["dest_asset_key"] + frozen_extra = frozenset(asset_alias_event["extra"].items()) asset_alias_names[(asset_unique_key, frozen_extra)].add(asset_alias_name) asset_unique_keys = {key for key, _ in asset_alias_names} @@ -2827,7 +2847,7 @@ def _register_asset_changes_int( asset_manager.register_asset_change( task_instance=ti, asset=asset_model, - extra=events[asset_model].extra, + extra=outlet_events[asset_model].extra, session=session, ) asset_stmt = select(AssetModel).where(AssetModel.uri.in_(asset_uri_refs), AssetModel.active.has()) @@ -2836,7 +2856,7 @@ def _register_asset_changes_int( asset_manager.register_asset_change( task_instance=ti, asset=asset_model, - extra=events[asset_model].extra, + extra=outlet_events[asset_model].extra, session=session, ) diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 186467dd4097c..1873dbbd9e206 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -180,6 +180,39 @@ export const $AssetEventResponse = { type: "integer", title: "Asset Id", }, + uri: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Uri", + }, + name: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Name", + }, + group: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Group", + }, extra: { anyOf: [ { diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 842377ad8a647..1b597efdb07aa 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -60,6 +60,9 @@ export type AssetEventCollectionResponse = { export type AssetEventResponse = { id: number; asset_id: number; + uri?: string | null; + name?: string | null; + group?: string | null; extra?: { [key: string]: unknown; } | null; diff --git a/airflow/ui/package.json b/airflow/ui/package.json index b3063b4858859..34592143802b6 100644 --- 
a/airflow/ui/package.json +++ b/airflow/ui/package.json @@ -40,6 +40,7 @@ "react-dom": "^18.3.1", "react-hook-form": "^7.20.0", "react-icons": "^5.4.0", + "react-json-view": "^1.21.3", "react-markdown": "^9.0.1", "react-router-dom": "^6.26.2", "react-syntax-highlighter": "^15.5.6", diff --git a/airflow/ui/pnpm-lock.yaml b/airflow/ui/pnpm-lock.yaml index 6e277d5c6dacc..3096fd332d360 100644 --- a/airflow/ui/pnpm-lock.yaml +++ b/airflow/ui/pnpm-lock.yaml @@ -80,6 +80,9 @@ importers: react-icons: specifier: ^5.4.0 version: 5.4.0(react@18.3.1) + react-json-view: + specifier: ^1.21.3 + version: 1.21.3(@types/react@18.3.5)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react-markdown: specifier: ^9.0.1 version: 9.0.1(@types/react@18.3.5)(react@18.3.1) @@ -1637,6 +1640,9 @@ packages: resolution: {integrity: sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==} engines: {node: '>= 0.4'} + asap@2.0.6: + resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} + assertion-error@2.0.1: resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} engines: {node: '>=12'} @@ -1672,6 +1678,9 @@ packages: balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + base16@1.0.0: + resolution: {integrity: sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==} + binary-extensions@2.3.0: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} @@ -1859,6 +1868,9 @@ packages: crelt@1.0.6: resolution: {integrity: sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==} + cross-fetch@3.2.0: + resolution: {integrity: sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q==} + cross-spawn@7.0.5: resolution: {integrity: sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==} engines: {node: '>= 8'} @@ -2269,6 +2281,15 @@ packages: fault@1.0.4: resolution: {integrity: sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==} + fbemitter@3.0.0: + resolution: {integrity: sha512-KWKaceCwKQU0+HPoop6gn4eOHk50bBv/VxjJtGMfwmJt3D29JpN4H4eisCtIPA+a8GVBam+ldMMpMjJUvpDyHw==} + + fbjs-css-vars@1.0.2: + resolution: {integrity: sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ==} + + fbjs@3.0.5: + resolution: {integrity: sha512-ztsSx77JBtkuMrEypfhgc3cI0+0h+svqeie7xHbh1k/IKdcydnvadp/mUaGgjAOXQmQSxsqgaRhS3q9fy+1kxg==} + file-entry-cache@8.0.0: resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} engines: {node: '>=16.0.0'} @@ -2295,6 +2316,11 @@ packages: flatted@3.3.1: resolution: {integrity: sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==} + flux@4.0.4: + resolution: {integrity: sha512-NCj3XlayA2UsapRpM7va6wU1+9rE5FIL7qoMcmxWHRzbp0yujihMBm9BBHZ1MDIk5h5o2Bl6eGiCe8rYELAmYw==} + peerDependencies: + react: ^15.0.2 || ^16.0.0 || ^17.0.0 + focus-trap@7.6.0: resolution: {integrity: sha512-1td0l3pMkWJLFipobUcGaf+5DTY4PLDDrcqoSaKP8ediO/CoWCCYk/fT/Y2A4e6TNB+Sh6clRJCjOPPnKoNHnQ==} @@ -2760,9 +2786,15 @@ packages: resolution: {integrity: 
sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} engines: {node: '>=10'} + lodash.curry@4.1.1: + resolution: {integrity: sha512-/u14pXGviLaweY5JI0IUzgzF2J6Ne8INyzAZjImcryjgkZ+ebruBxy2/JaOOkTqScddcYtakjhSaeemV8lR0tA==} + lodash.debounce@4.0.8: resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} + lodash.flow@3.5.0: + resolution: {integrity: sha512-ff3BX/tSioo+XojX4MOsOMhJw0nZoUEF011LX8g8d3gvjVbxd89cCio4BCXronjxcTUIJUoqKEUA+n4CqvvRPw==} + lodash.merge@4.6.2: resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} @@ -3037,6 +3069,15 @@ packages: node-fetch-native@1.6.4: resolution: {integrity: sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ==} + node-fetch@2.7.0: + resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + node-releases@2.0.18: resolution: {integrity: sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==} @@ -3233,6 +3274,9 @@ packages: resolution: {integrity: sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==} engines: {node: '>=6'} + promise@7.3.1: + resolution: {integrity: sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==} + prop-types@15.8.1: resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} @@ -3255,12 +3299,18 @@ packages: resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} engines: {node: '>=6'} + pure-color@1.3.0: + resolution: {integrity: sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA==} + queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} rc9@2.1.2: resolution: {integrity: sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg==} + react-base16-styling@0.6.0: + resolution: {integrity: sha512-yvh/7CArceR/jNATXOKDlvTnPKPmGZz7zsenQ3jUwLzHkNUR0CvY3yGYJbWJ/nnxsL8Sgmt5cO3/SILVuPO6TQ==} + react-chartjs-2@5.2.0: resolution: {integrity: sha512-98iN5aguJyVSxp5U3CblRLH67J8gkfyGNbiK3c+l1QI/G4irHMPQw44aEPmjVag+YKTyQ260NcF82GTQ3bdscA==} peerDependencies: @@ -3289,6 +3339,15 @@ packages: react-is@17.0.2: resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} + react-json-view@1.21.3: + resolution: {integrity: sha512-13p8IREj9/x/Ye4WI/JpjhoIwuzEgUAtgJZNBJckfzJt1qyh24BdTm6UQNGnyTq9dapQdrqvquZTo3dz1X6Cjw==} + peerDependencies: + react: ^17.0.0 || ^16.3.0 || ^15.5.4 + react-dom: ^17.0.0 || ^16.3.0 || ^15.5.4 + + react-lifecycles-compat@3.0.4: + resolution: {integrity: sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==} + react-markdown@9.0.1: resolution: {integrity: sha512-186Gw/vF1uRkydbsOIkcGXw7aHq0sZOCRFFjGrr7b9+nVZg4UfA4enXCaxm4fUzecU38sWfrNDitGhshuU7rdg==} peerDependencies: @@ -3319,6 +3378,12 @@ packages: peerDependencies: react: '>= 0.14.0' + react-textarea-autosize@8.5.7: + resolution: {integrity: 
sha512-2MqJ3p0Jh69yt9ktFIaZmORHXw4c4bxSIhCeWiFwmJ9EYKgLmuNII3e9c9b2UO+ijl4StnpZdqpxNIhTdHvqtQ==} + engines: {node: '>=10'} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-transition-group@4.4.5: resolution: {integrity: sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==} peerDependencies: @@ -3438,6 +3503,9 @@ packages: resolution: {integrity: sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==} engines: {node: '>= 0.4'} + setimmediate@1.0.5: + resolution: {integrity: sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==} + shebang-command@2.0.0: resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} engines: {node: '>=8'} @@ -3618,6 +3686,9 @@ packages: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} + tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + trim-lines@3.0.1: resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==} @@ -3678,6 +3749,10 @@ packages: engines: {node: '>=14.17'} hasBin: true + ua-parser-js@1.0.40: + resolution: {integrity: sha512-z6PJ8Lml+v3ichVojCiB8toQJBuwR42ySM4ezjXIqXK3M0HczmKQ3LF4rhU55PfD99KEEXQG6yb7iOMyvYuHew==} + hasBin: true + ufo@1.5.4: resolution: {integrity: sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==} @@ -3722,6 +3797,15 @@ packages: uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + use-composed-ref@1.4.0: + resolution: {integrity: sha512-djviaxuOOh7wkj0paeO1Q/4wMZ8Zrnag5H6yBvzN7AKKe8beOaED9SF5/ByLqsku8NP4zQqsvM2u3ew/tJK8/w==} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + use-debounce@10.0.3: resolution: {integrity: sha512-DxQSI9ZKso689WM1mjgGU3ozcxU1TJElBJ3X6S4SMzMNcm2lVH0AHmyXB+K7ewjz2BSUKJTDqTcwtSMRfB89dg==} engines: {node: '>= 16.0.0'} @@ -3737,6 +3821,15 @@ packages: '@types/react': optional: true + use-latest@1.3.0: + resolution: {integrity: sha512-mhg3xdm9NaM8q+gLT8KryJPnRFOz1/5XPBhmDEVZK1webPzDjrPk7f/mbpeLqTgB9msytYWANxgALOCJKnLvcQ==} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + use-sync-external-store@1.2.2: resolution: {integrity: sha512-PElTlVMwpblvbNqQ82d2n6RjStvdSoNe9FG28kNfz3WiXilJm4DdNkEzRhCZuIDwY8U08WVihhGR5iRqAwfDiw==} peerDependencies: @@ -3829,6 +3922,9 @@ packages: web-worker@1.3.0: resolution: {integrity: sha512-BSR9wyRsy/KOValMgd5kMyr3JzpdeoR9KVId8u5GVlTTAtNChlsE4yTxeY7zMdNSyOmoKBv8NH2qeRY9Tg+IaA==} + webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + webidl-conversions@7.0.0: resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==} engines: {node: '>=12'} @@ -3837,6 +3933,9 @@ packages: resolution: {integrity: sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==} engines: {node: '>=12'} + whatwg-url@5.0.0: + 
resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + which-boxed-primitive@1.0.2: resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==} @@ -6019,6 +6118,8 @@ snapshots: is-array-buffer: 3.0.4 is-shared-array-buffer: 1.0.3 + asap@2.0.6: {} + assertion-error@2.0.1: {} ast-types-flow@0.0.8: {} @@ -6051,6 +6152,8 @@ snapshots: balanced-match@1.0.2: {} + base16@1.0.0: {} + binary-extensions@2.3.0: {} brace-expansion@1.1.11: @@ -6251,6 +6354,12 @@ snapshots: crelt@1.0.6: {} + cross-fetch@3.2.0: + dependencies: + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + cross-spawn@7.0.5: dependencies: path-key: 3.1.1 @@ -6809,6 +6918,26 @@ snapshots: dependencies: format: 0.2.2 + fbemitter@3.0.0: + dependencies: + fbjs: 3.0.5 + transitivePeerDependencies: + - encoding + + fbjs-css-vars@1.0.2: {} + + fbjs@3.0.5: + dependencies: + cross-fetch: 3.2.0 + fbjs-css-vars: 1.0.2 + loose-envify: 1.4.0 + object-assign: 4.1.1 + promise: 7.3.1 + setimmediate: 1.0.5 + ua-parser-js: 1.0.40 + transitivePeerDependencies: + - encoding + file-entry-cache@8.0.0: dependencies: flat-cache: 4.0.1 @@ -6836,6 +6965,14 @@ snapshots: flatted@3.3.1: {} + flux@4.0.4(react@18.3.1): + dependencies: + fbemitter: 3.0.0 + fbjs: 3.0.5 + react: 18.3.1 + transitivePeerDependencies: + - encoding + focus-trap@7.6.0: dependencies: tabbable: 6.2.0 @@ -7302,8 +7439,12 @@ snapshots: dependencies: p-locate: 5.0.0 + lodash.curry@4.1.1: {} + lodash.debounce@4.0.8: {} + lodash.flow@3.5.0: {} + lodash.merge@4.6.2: {} lodash@4.17.21: {} @@ -7767,6 +7908,10 @@ snapshots: node-fetch-native@1.6.4: {} + node-fetch@2.7.0: + dependencies: + whatwg-url: 5.0.0 + node-releases@2.0.18: {} normalize-package-data@2.5.0: @@ -7966,6 +8111,10 @@ snapshots: prismjs@1.29.0: {} + promise@7.3.1: + dependencies: + asap: 2.0.6 + prop-types@15.8.1: dependencies: loose-envify: 1.4.0 @@ -7988,6 +8137,8 @@ snapshots: punycode@2.3.1: {} + pure-color@1.3.0: {} + queue-microtask@1.2.3: {} rc9@2.1.2: @@ -7995,6 +8146,13 @@ snapshots: defu: 6.1.4 destr: 2.0.3 + react-base16-styling@0.6.0: + dependencies: + base16: 1.0.0 + lodash.curry: 4.1.1 + lodash.flow: 3.5.0 + pure-color: 1.3.0 + react-chartjs-2@5.2.0(chart.js@4.4.6)(react@18.3.1): dependencies: chart.js: 4.4.6 @@ -8018,6 +8176,20 @@ snapshots: react-is@17.0.2: {} + react-json-view@1.21.3(@types/react@18.3.5)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + dependencies: + flux: 4.0.4(react@18.3.1) + react: 18.3.1 + react-base16-styling: 0.6.0 + react-dom: 18.3.1(react@18.3.1) + react-lifecycles-compat: 3.0.4 + react-textarea-autosize: 8.5.7(@types/react@18.3.5)(react@18.3.1) + transitivePeerDependencies: + - '@types/react' + - encoding + + react-lifecycles-compat@3.0.4: {} + react-markdown@9.0.1(@types/react@18.3.5)(react@18.3.1): dependencies: '@types/hast': 3.0.4 @@ -8074,6 +8246,15 @@ snapshots: react: 18.3.1 refractor: 3.6.0 + react-textarea-autosize@8.5.7(@types/react@18.3.5)(react@18.3.1): + dependencies: + '@babel/runtime': 7.25.6 + react: 18.3.1 + use-composed-ref: 1.4.0(@types/react@18.3.5)(react@18.3.1) + use-latest: 1.3.0(@types/react@18.3.5)(react@18.3.1) + transitivePeerDependencies: + - '@types/react' + react-transition-group@4.4.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.25.6 @@ -8260,6 +8441,8 @@ snapshots: functions-have-names: 1.2.3 has-property-descriptors: 1.0.2 + setimmediate@1.0.5: {} + shebang-command@2.0.0: 
dependencies: shebang-regex: 3.0.0 @@ -8446,6 +8629,8 @@ snapshots: dependencies: is-number: 7.0.0 + tr46@0.0.3: {} + trim-lines@3.0.1: {} trough@2.2.0: {} @@ -8514,6 +8699,8 @@ snapshots: typescript@5.5.4: {} + ua-parser-js@1.0.40: {} + ufo@1.5.4: {} uglify-js@3.19.2: @@ -8573,6 +8760,12 @@ snapshots: dependencies: punycode: 2.3.1 + use-composed-ref@1.4.0(@types/react@18.3.5)(react@18.3.1): + dependencies: + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.5 + use-debounce@10.0.3(react@18.3.1): dependencies: react: 18.3.1 @@ -8583,6 +8776,13 @@ snapshots: optionalDependencies: '@types/react': 18.3.5 + use-latest@1.3.0(@types/react@18.3.5)(react@18.3.1): + dependencies: + react: 18.3.1 + use-isomorphic-layout-effect: 1.1.2(@types/react@18.3.5)(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.5 + use-sync-external-store@1.2.2(react@18.3.1): dependencies: react: 18.3.1 @@ -8676,10 +8876,17 @@ snapshots: web-worker@1.3.0: {} + webidl-conversions@3.0.1: {} + webidl-conversions@7.0.0: {} whatwg-mimetype@3.0.0: {} + whatwg-url@5.0.0: + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + which-boxed-primitive@1.0.2: dependencies: is-bigint: 1.0.4 diff --git a/airflow/ui/src/components/RenderedJsonField.tsx b/airflow/ui/src/components/RenderedJsonField.tsx new file mode 100644 index 0000000000000..950f7152c1a21 --- /dev/null +++ b/airflow/ui/src/components/RenderedJsonField.tsx @@ -0,0 +1,57 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Flex, Spacer, type FlexProps } from "@chakra-ui/react"; +import { useTheme } from "next-themes"; +import ReactJson, { type ReactJsonViewProps } from "react-json-view"; + +import { ClipboardRoot, ClipboardButton } from "src/components/ui"; + +type Props = { + readonly content: object; + readonly jsonProps?: Omit; +} & FlexProps; + +const RenderedJsonField = ({ content, jsonProps, ...rest }: Props) => { + const contentFormatted = JSON.stringify(content, null, 4); + const { theme } = useTheme(); + + return ( + + + + + + + + ); +}; + +export default RenderedJsonField; diff --git a/airflow/ui/src/pages/Dashboard/HistoricalMetrics/AssetEvent.tsx b/airflow/ui/src/pages/Dashboard/HistoricalMetrics/AssetEvent.tsx new file mode 100644 index 0000000000000..51dd47ecacfdf --- /dev/null +++ b/airflow/ui/src/pages/Dashboard/HistoricalMetrics/AssetEvent.tsx @@ -0,0 +1,75 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Box, Text, HStack } from "@chakra-ui/react"; +import { FiDatabase } from "react-icons/fi"; +import { MdOutlineAccountTree } from "react-icons/md"; +import { Link } from "react-router-dom"; + +import type { AssetEventResponse } from "openapi/requests/types.gen"; +import Time from "src/components/Time"; +import { Tooltip } from "src/components/ui"; + +export const AssetEvent = ({ event }: { readonly event: AssetEventResponse }) => { + const hasDagRuns = event.created_dagruns.length > 0; + const source = event.extra?.from_rest_api === true ? "API" : ""; + + return ( + + + + + + + group: {event.group ?? ""} + uri: {event.uri ?? ""} + + } + showArrow + > + {event.name ?? ""} + + + + Source: + {source === "" ? ( + + {event.source_dag_id} + + ) : ( + source + )} + + + Triggered Dag Runs: + {hasDagRuns ? ( + + {event.created_dagruns[0]?.dag_id} + + ) : ( + "~" + )} + + + ); +}; diff --git a/airflow/ui/src/pages/Dashboard/HistoricalMetrics/AssetEvents.tsx b/airflow/ui/src/pages/Dashboard/HistoricalMetrics/AssetEvents.tsx new file mode 100644 index 0000000000000..21c425f0c5a76 --- /dev/null +++ b/airflow/ui/src/pages/Dashboard/HistoricalMetrics/AssetEvents.tsx @@ -0,0 +1,93 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { Box, Heading, Flex, HStack, VStack, StackSeparator, Skeleton } from "@chakra-ui/react"; +import { createListCollection } from "@chakra-ui/react/collection"; + +import { useAssetServiceGetAssetEvents } from "openapi/queries"; +import { MetricsBadge } from "src/components/MetricsBadge"; +import { Select } from "src/components/ui"; + +import { AssetEvent } from "./AssetEvent"; + +type AssetEventProps = { + readonly assetSortBy: string; + readonly endDate: string; + readonly setAssetSortBy: React.Dispatch>; + readonly startDate: string; +}; + +export const AssetEvents = ({ assetSortBy, endDate, setAssetSortBy, startDate }: AssetEventProps) => { + const { data, isLoading } = useAssetServiceGetAssetEvents({ + limit: 6, + orderBy: assetSortBy, + timestampGte: startDate, + timestampLte: endDate, + }); + + const assetSortOptions = createListCollection({ + items: [ + { label: "Newest first", value: "-timestamp" }, + { label: "Oldest first", value: "timestamp" }, + ], + }); + + return ( + + + + + + Asset Events + + + setAssetSortBy(option.value[0] as string)} + width={130} + > + + + + + + {assetSortOptions.items.map((option) => ( + + {option.label} + + ))} + + + + {isLoading ? ( + }> + {Array.from({ length: 5 }, (_, index) => index).map((index) => ( + + ))} + + ) : ( + }> + {data?.asset_events.map((event) => )} + + )} + + ); +}; diff --git a/airflow/ui/src/pages/Dashboard/HistoricalMetrics/HistoricalMetrics.tsx b/airflow/ui/src/pages/Dashboard/HistoricalMetrics/HistoricalMetrics.tsx index ac2201bb22c23..76f6c6b9ef1cf 100644 --- a/airflow/ui/src/pages/Dashboard/HistoricalMetrics/HistoricalMetrics.tsx +++ b/airflow/ui/src/pages/Dashboard/HistoricalMetrics/HistoricalMetrics.tsx @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import { Box, VStack } from "@chakra-ui/react"; +import { Box, VStack, SimpleGrid, GridItem } from "@chakra-ui/react"; import dayjs from "dayjs"; import { useState } from "react"; @@ -24,6 +24,7 @@ import { useDashboardServiceHistoricalMetrics } from "openapi/queries"; import { ErrorAlert } from "src/components/ErrorAlert"; import TimeRangeSelector from "src/components/TimeRangeSelector"; +import { AssetEvents } from "./AssetEvents"; import { DagRunMetrics } from "./DagRunMetrics"; import { MetricSectionSkeleton } from "./MetricSectionSkeleton"; import { TaskInstanceMetrics } from "./TaskInstanceMetrics"; @@ -34,6 +35,7 @@ export const HistoricalMetrics = () => { const now = dayjs(); const [startDate, setStartDate] = useState(now.subtract(Number(defaultHour), "hour").toISOString()); const [endDate, setEndDate] = useState(now.toISOString()); + const [assetSortBy, setAssetSortBy] = useState("-timestamp"); const { data, error, isLoading } = useDashboardServiceHistoricalMetrics({ endDate, @@ -59,13 +61,25 @@ export const HistoricalMetrics = () => { setStartDate={setStartDate} startDate={startDate} /> - {isLoading ? : undefined} - {!isLoading && data !== undefined && ( - - - - - )} + + + {isLoading ? : undefined} + {!isLoading && data !== undefined && ( + + + + + )} + + + + + ); diff --git a/airflow/ui/src/pages/Pools/PoolBar.tsx b/airflow/ui/src/pages/Pools/PoolBar.tsx index d14db7a6db8e0..1fead035016c9 100644 --- a/airflow/ui/src/pages/Pools/PoolBar.tsx +++ b/airflow/ui/src/pages/Pools/PoolBar.tsx @@ -19,7 +19,8 @@ * under the License. 
*/ import { Box, Flex, HStack, Text, VStack } from "@chakra-ui/react"; -import { FiClock } from "react-icons/fi"; +import React from "react"; +import { FiActivity, FiCalendar, FiCheckCircle, FiClock, FiList, FiXCircle } from "react-icons/fi"; import type { PoolResponse } from "openapi/requests/types.gen"; import { Tooltip } from "src/components/ui"; @@ -27,12 +28,12 @@ import { capitalize } from "src/utils"; import { stateColor } from "src/utils/stateColor"; const slots = { - open_slots: stateColor.success, - occupied_slots: stateColor.up_for_retry, - running_slots: stateColor.running, - queued_slots: stateColor.queued, - scheduled_slots: stateColor.scheduled, - deferred_slots: stateColor.deferred, + open_slots: { color: stateColor.success, icon: FiCheckCircle }, + occupied_slots: { color: stateColor.up_for_retry, icon: FiXCircle }, + running_slots: { color: stateColor.running, icon: FiActivity }, + queued_slots: { color: stateColor.queued, icon: FiList }, + scheduled_slots: { color: stateColor.scheduled, icon: FiCalendar }, + deferred_slots: { color: stateColor.deferred, icon: FiClock }, }; type PoolBarProps = { @@ -63,14 +64,27 @@ const PoolBar = ({ pool }: PoolBarProps) => ( - {Object.entries(slots).map(([slotKey, color]) => { + {Object.entries(slots).map(([slotKey, { color, icon }]) => { const rawSlotValue = pool[slotKey as keyof PoolResponse]; const slotValue = typeof rawSlotValue === "number" ? rawSlotValue : 0; const flexValue = slotValue / pool.slots || 0; + if (flexValue === 0) { + return undefined; // Skip rendering if no value for this slot + } + return ( - + + {React.createElement(icon, { size: 16, color: "white" })} + ); })} diff --git a/airflow/ui/src/pages/Run/Details.tsx b/airflow/ui/src/pages/Run/Details.tsx index bb526041a26a4..757a996e3b4bf 100644 --- a/airflow/ui/src/pages/Run/Details.tsx +++ b/airflow/ui/src/pages/Run/Details.tsx @@ -16,178 +16,120 @@ * specific language governing permissions and limitations * under the License. */ -import { Box, Flex, HStack, Table } from "@chakra-ui/react"; -import { useParams, useSearchParams } from "react-router-dom"; +import { Box, Flex, HStack, Table, Text } from "@chakra-ui/react"; +import { useParams } from "react-router-dom"; -import { - useTaskInstanceServiceGetMappedTaskInstance, - useTaskInstanceServiceGetTaskInstanceTryDetails, -} from "openapi/queries"; -import { TaskTrySelect } from "src/components/TaskTrySelect"; +import { useDagRunServiceGetDagRun } from "openapi/queries"; +import RenderedJsonField from "src/components/RenderedJsonField"; +import { RunTypeIcon } from "src/components/RunTypeIcon"; import Time from "src/components/Time"; import { ClipboardRoot, ClipboardIconButton, Status } from "src/components/ui"; import { getDuration } from "src/utils"; export const Details = () => { - const { dagId = "", runId = "", taskId = "" } = useParams(); - const [searchParams, setSearchParams] = useSearchParams(); + const { dagId = "", runId = "" } = useParams(); - const mapIndexParam = searchParams.get("map_index"); - const tryNumberParam = searchParams.get("try_number"); - const mapIndex = parseInt(mapIndexParam ?? 
"-1", 10); - - const { data: taskInstance } = useTaskInstanceServiceGetMappedTaskInstance({ - dagId, - dagRunId: runId, - mapIndex, - taskId, - }); - - const onSelectTryNumber = (newTryNumber: number) => { - if (newTryNumber === taskInstance?.try_number) { - searchParams.delete("try_number"); - } else { - searchParams.set("try_number", newTryNumber.toString()); - } - setSearchParams(searchParams); - }; - - const tryNumber = tryNumberParam === null ? taskInstance?.try_number : parseInt(tryNumberParam, 10); - - const { data: tryInstance } = useTaskInstanceServiceGetTaskInstanceTryDetails({ + const { data: dagRun } = useDagRunServiceGetDagRun({ dagId, dagRunId: runId, - mapIndex, - taskId, - taskTryNumber: tryNumber ?? 1, }); + // TODO : Render DagRun configuration object return ( - {taskInstance === undefined || tryNumber === undefined || taskInstance.try_number <= 1 ? ( + {dagRun === undefined ? (
) : ( - + + + + Status + + + + {dagRun.state} + + + + + Run ID + + + {dagRun.dag_run_id} + + + + + + + + Run Type + + + + {dagRun.run_type} + + + + + Run Duration + {getDuration(dagRun.start_date, dagRun.end_date)}s + + + Last Scheduling Decision + + + + + Queued at + + + + + Start Date + + + + + End Date + + + + + Data Interval Start + + + + + Data Interval End + + + + + Externally Triggered + {dagRun.external_trigger ? "True" : "False"} + + {dagRun.external_trigger ? ( + + Externally Trigger Source + {dagRun.triggered_by} + + ) : undefined} + + Run Config + + + + + + )} - - - - Status - - - - {tryInstance?.state ?? "no status"} - - - - - Task ID - - - {tryInstance?.task_id} - - - - - - - - Run ID - - - {tryInstance?.dag_run_id} - - - - - - - - Map Index - {tryInstance?.map_index} - - - Operator - {tryInstance?.operator} - - - Duration - - {getDuration(tryInstance?.start_date ?? null, tryInstance?.end_date ?? null)}s - - - - Started - - - - - Ended - - - - - Process ID (PID) - - - {tryInstance?.pid} - - - - - - - - Hostname - - - {tryInstance?.hostname} - - - - - - - - Pool - {tryInstance?.pool} - - - Pool Slots - {tryInstance?.pool_slots} - - - Executor - {tryInstance?.executor} - - - Executor Config - {tryInstance?.executor_config} - - - Unix Name - {tryInstance?.unixname} - - - Max Tries - {tryInstance?.max_tries} - - - Queue - {tryInstance?.queue} - - - Priority Weight - {tryInstance?.priority_weight} - - - ); }; diff --git a/airflow/ui/src/pages/Run/Run.tsx b/airflow/ui/src/pages/Run/Run.tsx index 74e0fcc5e4e20..abc935a3513c2 100644 --- a/airflow/ui/src/pages/Run/Run.tsx +++ b/airflow/ui/src/pages/Run/Run.tsx @@ -29,6 +29,7 @@ const tabs = [ { label: "Task Instances", value: "" }, { label: "Events", value: "events" }, { label: "Code", value: "code" }, + { label: "Details", value: "details" }, ]; export const Run = () => { diff --git a/airflow/ui/src/pages/TaskInstance/Details.tsx b/airflow/ui/src/pages/TaskInstance/Details.tsx new file mode 100644 index 0000000000000..bb526041a26a4 --- /dev/null +++ b/airflow/ui/src/pages/TaskInstance/Details.tsx @@ -0,0 +1,193 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { Box, Flex, HStack, Table } from "@chakra-ui/react"; +import { useParams, useSearchParams } from "react-router-dom"; + +import { + useTaskInstanceServiceGetMappedTaskInstance, + useTaskInstanceServiceGetTaskInstanceTryDetails, +} from "openapi/queries"; +import { TaskTrySelect } from "src/components/TaskTrySelect"; +import Time from "src/components/Time"; +import { ClipboardRoot, ClipboardIconButton, Status } from "src/components/ui"; +import { getDuration } from "src/utils"; + +export const Details = () => { + const { dagId = "", runId = "", taskId = "" } = useParams(); + const [searchParams, setSearchParams] = useSearchParams(); + + const mapIndexParam = searchParams.get("map_index"); + const tryNumberParam = searchParams.get("try_number"); + const mapIndex = parseInt(mapIndexParam ?? "-1", 10); + + const { data: taskInstance } = useTaskInstanceServiceGetMappedTaskInstance({ + dagId, + dagRunId: runId, + mapIndex, + taskId, + }); + + const onSelectTryNumber = (newTryNumber: number) => { + if (newTryNumber === taskInstance?.try_number) { + searchParams.delete("try_number"); + } else { + searchParams.set("try_number", newTryNumber.toString()); + } + setSearchParams(searchParams); + }; + + const tryNumber = tryNumberParam === null ? taskInstance?.try_number : parseInt(tryNumberParam, 10); + + const { data: tryInstance } = useTaskInstanceServiceGetTaskInstanceTryDetails({ + dagId, + dagRunId: runId, + mapIndex, + taskId, + taskTryNumber: tryNumber ?? 1, + }); + + return ( + + {taskInstance === undefined || tryNumber === undefined || taskInstance.try_number <= 1 ? ( +
+ ) : ( + + )} + + + + Status + + + + {tryInstance?.state ?? "no status"} + + + + + Task ID + + + {tryInstance?.task_id} + + + + + + + + Run ID + + + {tryInstance?.dag_run_id} + + + + + + + + Map Index + {tryInstance?.map_index} + + + Operator + {tryInstance?.operator} + + + Duration + + {getDuration(tryInstance?.start_date ?? null, tryInstance?.end_date ?? null)}s + + + + Started + + + + + Ended + + + + + Process ID (PID) + + + {tryInstance?.pid} + + + + + + + + Hostname + + + {tryInstance?.hostname} + + + + + + + + Pool + {tryInstance?.pool} + + + Pool Slots + {tryInstance?.pool_slots} + + + Executor + {tryInstance?.executor} + + + Executor Config + {tryInstance?.executor_config} + + + Unix Name + {tryInstance?.unixname} + + + Max Tries + {tryInstance?.max_tries} + + + Queue + {tryInstance?.queue} + + + Priority Weight + {tryInstance?.priority_weight} + + + + + ); +}; diff --git a/airflow/ui/src/router.tsx b/airflow/ui/src/router.tsx index baac26d1e2f53..63a0be2317178 100644 --- a/airflow/ui/src/router.tsx +++ b/airflow/ui/src/router.tsx @@ -32,10 +32,11 @@ import { Dashboard } from "src/pages/Dashboard"; import { ErrorPage } from "src/pages/Error"; import { Events } from "src/pages/Events"; import { Run } from "src/pages/Run"; -import { Details } from "src/pages/Run/Details"; +import { Details as DagRunDetails } from "src/pages/Run/Details"; import { TaskInstances } from "src/pages/Run/TaskInstances"; import { Task, Instances } from "src/pages/Task"; import { TaskInstance, Logs } from "src/pages/TaskInstance"; +import { Details } from "src/pages/TaskInstance/Details"; import { XCom } from "src/pages/XCom"; import { Pools } from "./pages/Pools"; @@ -86,6 +87,7 @@ export const router = createBrowserRouter( { element: , index: true }, { element: , path: "events" }, { element: , path: "code" }, + { element: , path: "details" }, ], element: , path: "dags/:dagId/runs/:runId", diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py index 73ee79126a9ee..21b745affbccd 100644 --- a/airflow/utils/log/file_task_handler.py +++ b/airflow/utils/log/file_task_handler.py @@ -24,7 +24,6 @@ from collections.abc import Iterable from contextlib import suppress from enum import Enum -from functools import cached_property from pathlib import Path from typing import TYPE_CHECKING, Any, Callable from urllib.parse import urljoin @@ -44,6 +43,7 @@ if TYPE_CHECKING: from pendulum import DateTime + from airflow.executors.base_executor import BaseExecutor from airflow.models.taskinstance import TaskInstance from airflow.models.taskinstancekey import TaskInstanceKey @@ -179,6 +179,8 @@ class FileTaskHandler(logging.Handler): inherits_from_empty_operator_log_message = ( "Operator inherits from empty operator and thus does not have logs" ) + executor_instances: dict[str, BaseExecutor] = {} + DEFAULT_EXECUTOR_KEY = "_default_executor" def __init__( self, @@ -314,11 +316,27 @@ def _render_filename(self, ti: TaskInstance, try_number: int, session=NEW_SESSIO def _read_grouped_logs(self): return False - @cached_property - def _executor_get_task_log(self) -> Callable[[TaskInstance, int], tuple[list[str], list[str]]]: - """This cached property avoids loading executor repeatedly.""" - executor = ExecutorLoader.get_default_executor() - return executor.get_task_log + def _get_executor_get_task_log( + self, ti: TaskInstance + ) -> Callable[[TaskInstance, int], tuple[list[str], list[str]]]: + """ + Get the get_task_log method from executor of current task instance. 
+ + Since there might be multiple executors, we need to get the executor of the current task instance instead of the default executor. + + :param ti: task instance object + :return: get_task_log method of the executor + """ + executor_name = ti.executor or self.DEFAULT_EXECUTOR_KEY + executor = self.executor_instances.get(executor_name) + if executor is not None: + return executor.get_task_log + + if executor_name == self.DEFAULT_EXECUTOR_KEY: + self.executor_instances[executor_name] = ExecutorLoader.get_default_executor() + else: + self.executor_instances[executor_name] = ExecutorLoader.load_executor(executor_name) + return self.executor_instances[executor_name].get_task_log def _read( self, @@ -360,7 +378,8 @@ def _read( messages_list.extend(remote_messages) has_k8s_exec_pod = False if ti.state == TaskInstanceState.RUNNING: - response = self._executor_get_task_log(ti, try_number) + executor_get_task_log = self._get_executor_get_task_log(ti) + response = executor_get_task_log(ti, try_number) if response: executor_messages, executor_logs = response if executor_messages: diff --git a/newsfragments/24842.significant.rst b/newsfragments/24842.significant.rst index 4e02531b3e6b7..f1b5e57cba6d5 100644 --- a/newsfragments/24842.significant.rst +++ b/newsfragments/24842.significant.rst @@ -4,3 +4,14 @@ When a *schedule* parameter is not passed to the ``DAG`` constructor, Airflow now defaults to never automatically schedule the DAG at all. The created DAG can still be manually triggered, either by the user directly, or from another DAG with ``TriggerDagRunOperator``. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/40029.significant.rst b/newsfragments/40029.significant.rst index e64f5170efda1..1d9bc26ef858a 100644 --- a/newsfragments/40029.significant.rst +++ b/newsfragments/40029.significant.rst @@ -1 +1,18 @@ -Removed deprecated ``allow_raw_html_descriptions`` option from UI Trigger forms. +Removed deprecated Airflow configuration ``webserver.allow_raw_html_descriptions`` from UI Trigger forms. + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``webserver.allow_raw_html_descriptions`` diff --git a/newsfragments/40931.significant.rst b/newsfragments/40931.significant.rst index b5d47f0c36b8a..7893e6dc0a014 100644 --- a/newsfragments/40931.significant.rst +++ b/newsfragments/40931.significant.rst @@ -1 +1,12 @@ Removed deprecated dagbag parameter ``store_serialized_dags``. Please use the ``read_dags_from_db`` parameter instead. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41096.significant.rst b/newsfragments/41096.significant.rst index 1d63fa77409df..e08520b14255d 100644 --- a/newsfragments/41096.significant.rst +++ b/newsfragments/41096.significant.rst @@ -1 +1,18 @@ Removed deprecated ``processor_poll_interval`` configuration parameter from the ``scheduler`` section. Please use the ``scheduler_idle_sleep_time`` configuration parameter instead.
+ +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``scheduler.processor_poll_interval`` → ``scheduler.scheduler_idle_sleep_time`` diff --git a/newsfragments/41348.significant.rst b/newsfragments/41348.significant.rst index eca66b78708f9..f7637f3676ecd 100644 --- a/newsfragments/41348.significant.rst +++ b/newsfragments/41348.significant.rst @@ -1,240 +1,324 @@ -**Breaking Change** +Rename ``Dataset`` as ``Asset`` -* Rename module ``airflow.api_connexion.schemas.dataset_schema`` as ``airflow.api_connexion.schemas.asset_schema`` +* list of renamed objects - * Rename variable ``create_dataset_event_schema`` as ``create_asset_event_schema`` - * Rename variable ``dataset_collection_schema`` as ``asset_collection_schema`` - * Rename variable ``dataset_event_collection_schema`` as ``asset_event_collection_schema`` - * Rename variable ``dataset_event_schema`` as ``asset_event_schema`` - * Rename variable ``dataset_schema`` as ``asset_schema`` - * Rename class ``TaskOutletDatasetReferenceSchema`` as ``TaskOutletAssetReferenceSchema`` - * Rename class ``DagScheduleDatasetReferenceSchema`` as ``DagScheduleAssetReferenceSchema`` - * Rename class ``DatasetAliasSchema`` as ``AssetAliasSchema`` - * Rename class ``DatasetSchema`` as ``AssetSchema`` - * Rename class ``DatasetCollection`` as ``AssetCollection`` - * Rename class ``DatasetEventSchema`` as ``AssetEventSchema`` - * Rename class ``DatasetEventCollection`` as ``AssetEventCollection`` - * Rename class ``DatasetEventCollectionSchema`` as ``AssetEventCollectionSchema`` - * Rename class ``CreateDatasetEventSchema`` as ``CreateAssetEventSchema`` + * Rename module ``airflow.api_connexion.schemas.dataset_schema`` as ``airflow.api_connexion.schemas.asset_schema`` -* Move module ``airflow.datasets`` to ``airflow.sdk.definitions.asset`` + * Rename variable ``create_dataset_event_schema`` as ``create_asset_event_schema`` + * Rename variable ``dataset_collection_schema`` as ``asset_collection_schema`` + * Rename variable ``dataset_event_collection_schema`` as ``asset_event_collection_schema`` + * Rename variable ``dataset_event_schema`` as ``asset_event_schema`` + * Rename variable ``dataset_schema`` as ``asset_schema`` + * Rename class ``TaskOutletDatasetReferenceSchema`` as ``TaskOutletAssetReferenceSchema`` + * Rename class ``DagScheduleDatasetReferenceSchema`` as ``DagScheduleAssetReferenceSchema`` + * Rename class ``DatasetAliasSchema`` as ``AssetAliasSchema`` + * Rename class ``DatasetSchema`` as ``AssetSchema`` + * Rename class ``DatasetCollection`` as ``AssetCollection`` + * Rename class ``DatasetEventSchema`` as ``AssetEventSchema`` + * Rename class ``DatasetEventCollection`` as ``AssetEventCollection`` + * Rename class ``DatasetEventCollectionSchema`` as ``AssetEventCollectionSchema`` + * Rename class ``CreateDatasetEventSchema`` as ``CreateAssetEventSchema`` - * Rename class ``DatasetAlias`` as ``AssetAlias`` - * Rename class ``DatasetAll`` as ``AssetAll`` - * Rename class ``DatasetAny`` as ``AssetAny`` - * Rename function ``expand_alias_to_datasets`` as ``expand_alias_to_assets`` - * Rename class ``DatasetAliasEvent`` as ``AssetAliasEvent`` + * Move module ``airflow.datasets`` to ``airflow.sdk.definitions.asset`` - * Rename attribute ``dest_dataset_uri`` as ``dest_asset_uri`` + * Rename class ``DatasetAlias`` as 
``AssetAlias`` + * Rename class ``DatasetAll`` as ``AssetAll`` + * Rename class ``DatasetAny`` as ``AssetAny`` + * Rename function ``expand_alias_to_datasets`` as ``expand_alias_to_assets`` + * Rename class ``DatasetAliasEvent`` as ``AssetAliasEvent`` - * Rename class ``BaseDataset`` as ``BaseAsset`` + * Rename attribute ``dest_dataset_uri`` as ``dest_asset_uri`` - * Rename method ``iter_datasets`` as ``iter_assets`` - * Rename method ``iter_dataset_aliases`` as ``iter_asset_aliases`` + * Rename class ``BaseDataset`` as ``BaseAsset`` - * Rename class ``Dataset`` as ``Asset`` + * Rename method ``iter_datasets`` as ``iter_assets`` + * Rename method ``iter_dataset_aliases`` as ``iter_asset_aliases`` - * Rename method ``iter_datasets`` as ``iter_assets`` - * Rename method ``iter_dataset_aliases`` as ``iter_asset_aliases`` + * Rename class ``Dataset`` as ``Asset`` - * Rename class ``_DatasetBooleanCondition`` as ``_AssetBooleanCondition`` + * Rename method ``iter_datasets`` as ``iter_assets`` + * Rename method ``iter_dataset_aliases`` as ``iter_asset_aliases`` - * Rename method ``iter_datasets`` as ``iter_assets`` - * Rename method ``iter_dataset_aliases`` as ``iter_asset_aliases`` + * Rename class ``_DatasetBooleanCondition`` as ``_AssetBooleanCondition`` -* Rename module ``airflow.datasets.manager`` as ``airflow.assets.manager`` + * Rename method ``iter_datasets`` as ``iter_assets`` + * Rename method ``iter_dataset_aliases`` as ``iter_asset_aliases`` - * Rename variable ``dataset_manager`` as ``asset_manager`` - * Rename function ``resolve_dataset_manager`` as ``resolve_asset_manager`` - * Rename class ``DatasetManager`` as ``AssetManager`` + * Rename module ``airflow.datasets.manager`` as ``airflow.assets.manager`` - * Rename method ``register_dataset_change`` as ``register_asset_change`` - * Rename method ``create_datasets`` as ``create_assets`` - * Rename method ``register_dataset_change`` as ``notify_asset_created`` - * Rename method ``notify_dataset_changed`` as ``notify_asset_changed`` - * Rename method ``notify_dataset_alias_created`` as ``notify_asset_alias_created`` + * Rename variable ``dataset_manager`` as ``asset_manager`` + * Rename function ``resolve_dataset_manager`` as ``resolve_asset_manager`` + * Rename class ``DatasetManager`` as ``AssetManager`` -* Rename module ``airflow.models.dataset`` as ``airflow.models.asset`` + * Rename method ``register_dataset_change`` as ``register_asset_change`` + * Rename method ``create_datasets`` as ``create_assets`` + * Rename method ``notify_dataset_created`` as ``notify_asset_created`` + * Rename method ``notify_dataset_changed`` as ``notify_asset_changed`` + * Rename method ``notify_dataset_alias_created`` as ``notify_asset_alias_created`` - * Rename class ``DatasetDagRunQueue`` as ``AssetDagRunQueue`` - * Rename class ``DatasetEvent`` as ``AssetEvent`` - * Rename class ``DatasetModel`` as ``AssetModel`` + * Rename module ``airflow.models.dataset`` as ``airflow.models.asset`` +
* Rename class ``DatasetAliasModel`` as ``AssetAliasModel`` + * Rename class ``DagScheduleDatasetReference`` as ``DagScheduleAssetReference`` + * Rename class ``TaskOutletDatasetReference`` as ``TaskOutletAssetReference`` + * Rename class ``DagScheduleDatasetAliasReference`` as ``DagScheduleAssetAliasReference`` - * Rename variable ``dataset_router`` as ``asset_rounter`` + * Rename module ``airflow.api_ui.views.datasets`` as ``airflow.api_ui.views.assets`` -* Rename module ``airflow.listeners.spec.dataset`` as ``airflow.listeners.spec.asset`` + * Rename variable ``dataset_router`` as ``asset_router`` - * Rename function ``on_dataset_created`` as ``on_asset_created`` - * Rename function ``on_dataset_changed`` as ``on_asset_changed`` + * Rename module ``airflow.listeners.spec.dataset`` as ``airflow.listeners.spec.asset`` -* Rename module ``airflow.timetables.datasets`` as ``airflow.timetables.assets`` + * Rename function ``on_dataset_created`` as ``on_asset_created`` + * Rename function ``on_dataset_changed`` as ``on_asset_changed`` - * Rename class ``DatasetOrTimeSchedule`` as ``AssetOrTimeSchedule`` + * Rename module ``airflow.timetables.datasets`` as ``airflow.timetables.assets`` -* Rename module ``airflow.serialization.pydantic.dataset`` as ``airflow.serialization.pydantic.asset`` + * Rename class ``DatasetOrTimeSchedule`` as ``AssetOrTimeSchedule`` - * Rename class ``DagScheduleDatasetReferencePydantic`` as ``DagScheduleAssetReferencePydantic`` - * Rename class ``TaskOutletDatasetReferencePydantic`` as ``TaskOutletAssetReferencePydantic`` - * Rename class ``DatasetPydantic`` as ``AssetPydantic`` - * Rename class ``DatasetEventPydantic`` as ``AssetEventPydantic`` + * Rename module ``airflow.serialization.pydantic.dataset`` as ``airflow.serialization.pydantic.asset`` -* Rename module ``airflow.datasets.metadata`` as ``airflow.sdk.definitions.asset.metadata`` + * Rename class ``DagScheduleDatasetReferencePydantic`` as ``DagScheduleAssetReferencePydantic`` + * Rename class ``TaskOutletDatasetReferencePydantic`` as ``TaskOutletAssetReferencePydantic`` + * Rename class ``DatasetPydantic`` as ``AssetPydantic`` + * Rename class ``DatasetEventPydantic`` as ``AssetEventPydantic`` -* In module ``airflow.jobs.scheduler_job_runner`` + * Rename module ``airflow.datasets.metadata`` as ``airflow.sdk.definitions.asset.metadata`` - * and its class ``SchedulerJobRunner`` + * In module ``airflow.jobs.scheduler_job_runner`` - * Rename method ``_create_dag_runs_dataset_triggered`` as ``_create_dag_runs_asset_triggered`` - * Rename method ``_orphan_unreferenced_datasets`` as ``_orphan_unreferenced_datasets`` + * and its class ``SchedulerJobRunner`` -* In module ``airflow.api_connexion.security`` + * Rename method ``_create_dag_runs_dataset_triggered`` as ``_create_dag_runs_asset_triggered`` + * Rename method ``_orphan_unreferenced_datasets`` as ``_orphan_unreferenced_assets`` - * Rename decorator ``requires_access_dataset`` as ``requires_access_asset`` + * In module ``airflow.api_connexion.security`` -* In module ``airflow.auth.managers.models.resource_details`` + * Rename decorator ``requires_access_dataset`` as ``requires_access_asset`` - * Rename class ``DatasetDetails`` as ``AssetDetails`` + * In module ``airflow.auth.managers.models.resource_details`` -* In module ``airflow.auth.managers.base_auth_manager`` + * Rename class ``DatasetDetails`` as ``AssetDetails`` - * Rename function ``is_authorized_dataset`` as ``is_authorized_asset`` + * In module ``airflow.auth.managers.base_auth_manager`` -* In
module ``airflow.timetables.simple`` + * Rename function ``is_authorized_dataset`` as ``is_authorized_asset`` - * Rename class ``DatasetTriggeredTimetable`` as ``AssetTriggeredTimetable`` + * In module ``airflow.timetables.simple`` -* In module ``airflow.lineage.hook`` + * Rename class ``DatasetTriggeredTimetable`` as ``AssetTriggeredTimetable`` - * Rename class ``DatasetLineageInfo`` as ``AssetLineageInfo`` + * In module ``airflow.lineage.hook`` - * Rename attribute ``dataset`` as ``asset`` + * Rename class ``DatasetLineageInfo`` as ``AssetLineageInfo`` - * In its class ``HookLineageCollector`` + * Rename attribute ``dataset`` as ``asset`` - * Rename method ``create_dataset`` as ``create_asset`` - * Rename method ``add_input_dataset`` as ``add_input_asset`` - * Rename method ``add_output_dataset`` as ``add_output_asset`` - * Rename method ``collected_datasets`` as ``collected_assets`` + * In its class ``HookLineageCollector`` -* In module ``airflow.models.dag`` + * Rename method ``create_dataset`` as ``create_asset`` + * Rename method ``add_input_dataset`` as ``add_input_asset`` + * Rename method ``add_output_dataset`` as ``add_output_asset`` + * Rename method ``collected_datasets`` as ``collected_assets`` - * Rename function ``get_dataset_triggered_next_run_info`` as ``get_asset_triggered_next_run_info`` + * In module ``airflow.models.dag`` - * In its class ``DagModel`` + * Rename function ``get_dataset_triggered_next_run_info`` as ``get_asset_triggered_next_run_info`` - * Rename method ``get_dataset_triggered_next_run_info`` as ``get_asset_triggered_next_run_info`` + * In its class ``DagModel`` -* In module ``airflow.models.taskinstance`` + * Rename method ``get_dataset_triggered_next_run_info`` as ``get_asset_triggered_next_run_info`` - * and its class ``TaskInstance`` + * In module ``airflow.models.taskinstance`` - * Rename method ``_register_dataset_changes`` as ``_register_asset_changes`` + * and its class ``TaskInstance`` -* In module ``airflow.providers_manager`` + * Rename method ``_register_dataset_changes`` as ``_register_asset_changes`` - * and its class ``ProvidersManager`` + * In module ``airflow.providers_manager`` - * Rename method ``initialize_providers_dataset_uri_resources`` as ``initialize_providers_asset_uri_resources`` - * Rename attribute ``_discover_dataset_uri_resources`` as ``_discover_asset_uri_resources`` - * Rename property ``dataset_factories`` as ``asset_factories`` - * Rename property ``dataset_uri_handlers`` as ``asset_uri_handlers`` - * Rename property ``dataset_to_openlineage_converters`` as ``asset_to_openlineage_converters`` + * and its class ``ProvidersManager`` -* In module ``airflow.security.permissions`` + * Rename method ``initialize_providers_dataset_uri_resources`` as ``initialize_providers_asset_uri_resources`` + * Rename attribute ``_discover_dataset_uri_resources`` as ``_discover_asset_uri_resources`` + * Rename property ``dataset_factories`` as ``asset_factories`` + * Rename property ``dataset_uri_handlers`` as ``asset_uri_handlers`` + * Rename property ``dataset_to_openlineage_converters`` as ``asset_to_openlineage_converters`` - * Rename constant ``RESOURCE_DATASET`` as ``RESOURCE_ASSET`` + * In module ``airflow.security.permissions`` -* In module ``airflow.serialization.enums`` + * Rename constant ``RESOURCE_DATASET`` as ``RESOURCE_ASSET`` - * and its class DagAttributeTypes + * In module ``airflow.serialization.enums`` - * Rename attribute ``DATASET_EVENT_ACCESSORS`` as ``ASSET_EVENT_ACCESSORS`` - * Rename attribute 
``DATASET_EVENT_ACCESSOR`` as ``ASSET_EVENT_ACCESSOR`` - * Rename attribute ``DATASET`` as ``ASSET`` - * Rename attribute ``DATASET_ALIAS`` as ``ASSET_ALIAS`` - * Rename attribute ``DATASET_ANY`` as ``ASSET_ANY`` - * Rename attribute ``DATASET_ALL`` as ``ASSET_ALL`` + * and its class ``DagAttributeTypes`` -* In module ``airflow.serialization.pydantic.taskinstance`` + * Rename attribute ``DATASET_EVENT_ACCESSORS`` as ``ASSET_EVENT_ACCESSORS`` + * Rename attribute ``DATASET_EVENT_ACCESSOR`` as ``ASSET_EVENT_ACCESSOR`` + * Rename attribute ``DATASET`` as ``ASSET`` + * Rename attribute ``DATASET_ALIAS`` as ``ASSET_ALIAS`` + * Rename attribute ``DATASET_ANY`` as ``ASSET_ANY`` + * Rename attribute ``DATASET_ALL`` as ``ASSET_ALL`` - * and its class ``TaskInstancePydantic`` + * In module ``airflow.serialization.pydantic.taskinstance`` - * Rename method ``_register_dataset_changes`` as ``_register_dataset_changes`` + * and its class ``TaskInstancePydantic`` -* In module ``airflow.serialization.serialized_objects`` + * Rename method ``_register_dataset_changes`` as ``_register_asset_changes`` - * Rename function ``encode_dataset_condition`` as ``encode_asset_condition`` - * Rename function ``decode_dataset_condition`` as ``decode_asset_condition`` + * In module ``airflow.serialization.serialized_objects`` -* In module ``airflow.timetables.base`` + * Rename function ``encode_dataset_condition`` as ``encode_asset_condition`` + * Rename function ``decode_dataset_condition`` as ``decode_asset_condition`` - * Rename class ```_NullDataset``` as ```_NullAsset``` + * In module ``airflow.timetables.base`` - * Rename method ``iter_datasets`` as ``iter_assets`` - * Rename method ``iter_dataset_aliases`` as ``iter_assets_aliases`` + * Rename class ``_NullDataset`` as ``_NullAsset`` -* In module ``airflow.utils.context`` + * Rename method ``iter_datasets`` as ``iter_assets`` + * Rename method ``iter_dataset_aliases`` as ``iter_asset_aliases`` - * Rename class ``LazyDatasetEventSelectSequence`` as ``LazyAssetEventSelectSequence`` + * In module ``airflow.utils.context`` -* In module ``airflow.www.auth`` - * Rename class ``LazyDatasetEventSelectSequence`` as ``LazyAssetEventSelectSequence`` - * Rename function ``has_access_dataset`` as ``has_access_asset`` + * In module ``airflow.www.auth`` -* Rename configuration ``core.dataset_manager_class`` as ``core.asset_manager_class`` and ``core.dataset_manager_class`` as ``core.asset_manager_class`` -* Rename example dags ``example_dataset_alias.py``, ``example_dataset_alias_with_no_taskflow.py``, ``example_datasets.py`` as ``example_asset_alias.py``, ``example_asset_alias_with_no_taskflow.py``, ``example_assets.py`` -* Rename DagDependency name ``dataset-alias``, ``dataset`` as ``asset-alias``, ``asset`` -* Rename context key ``triggering_dataset_events`` as ``triggering_asset_events`` -* Rename resource key ``dataset-uris`` as ``asset-uris`` for providers amazon, common.io, mysql, fab, postgres, trino + * Rename function ``has_access_dataset`` as ``has_access_asset`` -* In provider ``airflow.providers.amazon.aws`` + * Rename configuration ``core.dataset_manager_class`` as ``core.asset_manager_class`` and ``core.dataset_manager_kwargs`` as ``core.asset_manager_kwargs`` + * Rename example dags ``example_dataset_alias.py``, ``example_dataset_alias_with_no_taskflow.py``, ``example_datasets.py`` as ``example_asset_alias.py``, ``example_asset_alias_with_no_taskflow.py``, ``example_assets.py`` + * Rename DagDependency name ``dataset-alias``, ``dataset`` as ``asset-alias``,
``asset`` + * Rename context key ``triggering_dataset_events`` as ``triggering_asset_events`` + * Rename resource key ``dataset-uris`` as ``asset-uris`` for providers amazon, common.io, mysql, fab, postgres, trino - * Rename package ``datasets`` as ``assets`` + * In provider ``airflow.providers.amazon.aws`` - * In its module ``s3`` + * Rename package ``datasets`` as ``assets`` - * Rename method ``create_dataset`` as ``create_asset`` - * Rename method ``convert_dataset_to_openlineage`` as ``convert_asset_to_openlineage`` + * In its module ``s3`` - * and its module ``auth_manager.avp.entities`` + * Rename method ``create_dataset`` as ``create_asset`` + * Rename method ``convert_dataset_to_openlineage`` as ``convert_asset_to_openlineage`` - * Rename attribute ``AvpEntities.DATASET`` as ``AvpEntities.ASSET`` + * and its module ``auth_manager.avp.entities`` - * and its module ``auth_manager.auth_manager.aws_auth_manager`` + * Rename attribute ``AvpEntities.DATASET`` as ``AvpEntities.ASSET`` - * Rename function ``is_authorized_dataset`` as ``is_authorized_asset`` + * and its module ``auth_manager.aws_auth_manager`` -* In provider ``airflow.providers.common.io`` + * Rename function ``is_authorized_dataset`` as ``is_authorized_asset`` - * Rename package ``datasets`` as ``assets`` + * In provider ``airflow.providers.common.io`` - * in its module ``file`` + * Rename package ``datasets`` as ``assets`` - * Rename method ``create_dataset`` as ``create_asset`` - * Rename method ``convert_dataset_to_openlineage`` as ``convert_asset_to_openlineage`` + * in its module ``file`` -* In provider ``airflow.providers.fab`` + * Rename method ``create_dataset`` as ``create_asset`` + * Rename method ``convert_dataset_to_openlineage`` as ``convert_asset_to_openlineage`` - * in its module ``auth_manager.fab_auth_manager`` + * In provider ``airflow.providers.fab`` - * Rename function ``is_authorized_dataset`` as ``is_authorized_asset`` + * in its module ``auth_manager.fab_auth_manager`` -* In provider ``airflow.providers.openlineage`` + * Rename function ``is_authorized_dataset`` as ``is_authorized_asset`` - * in its module ``utils.utils`` + * In provider ``airflow.providers.openlineage`` - * Rename class ``DatasetInfo`` as ``AssetInfo`` - * Rename function ``translate_airflow_dataset`` as ``translate_airflow_asset`` + * in its module ``utils.utils`` -* Rename package ``airflow.providers.postgres.datasets`` as ``airflow.providers.postgres.assets`` -* Rename package ``airflow.providers.mysql.datasets`` as ``airflow.providers.mysql.assets`` -* Rename package ``airflow.providers.trino.datasets`` as ``airflow.providers.trino.assets`` -* Add module ``airflow.providers.common.compat.assets`` -* Add module ``airflow.providers.common.compat.openlineage.utils.utils`` -* Add module ``airflow.providers.common.compat.security.permissions`` + * Rename class ``DatasetInfo`` as ``AssetInfo`` + * Rename function ``translate_airflow_dataset`` as ``translate_airflow_asset`` + + * Rename package ``airflow.providers.postgres.datasets`` as ``airflow.providers.postgres.assets`` + * Rename package ``airflow.providers.mysql.datasets`` as ``airflow.providers.mysql.assets`` + * Rename package ``airflow.providers.trino.datasets`` as ``airflow.providers.trino.assets`` + * Add module ``airflow.providers.common.compat.assets`` + * Add module ``airflow.providers.common.compat.openlineage.utils.utils`` + * Add module ``airflow.providers.common.compat.security.permissions`` + +* Types of change + + * [x] Dag changes + * [x] Config
changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``core.dataset_manager_class`` → ``core.asset_manager_class`` + * [x] ``core.dataset_manager_kwargs`` → ``core.asset_manager_kwargs`` + + * ruff + + * AIR302 + + * [ ] context key ``triggering_dataset_events`` → ``triggering_asset_events`` + * [x] ``airflow.api_connexion.security.requires_access_dataset`` → ``airflow.api_connexion.security.requires_access_asset`` + * [x] ``airflow.auth.managers.base_auth_manager.is_authorized_dataset`` → ``airflow.auth.managers.base_auth_manager.is_authorized_asset`` + * [x] ``airflow.auth.managers.models.resource_details.DatasetDetails`` → ``airflow.auth.managers.models.resource_details.AssetDetails`` + * [x] ``airflow.lineage.hook.DatasetLineageInfo`` → ``airflow.lineage.hook.AssetLineageInfo`` + * [x] ``airflow.security.permissions.RESOURCE_DATASET`` → ``airflow.security.permissions.RESOURCE_ASSET`` + * [x] ``airflow.www.auth.has_access_dataset`` → ``airflow.www.auth.has_access_asset`` + * [x] ``airflow.datasets.DatasetAliasEvent`` + * [x] ``airflow.datasets.Dataset`` → ``airflow.sdk.definitions.asset.Asset`` + * [x] ``airflow.Dataset`` → ``airflow.sdk.definitions.asset.Asset`` + * [x] ``airflow.datasets.DatasetAlias`` → ``airflow.sdk.definitions.asset.AssetAlias`` + * [x] ``airflow.datasets.DatasetAll`` → ``airflow.sdk.definitions.asset.AssetAll`` + * [x] ``airflow.datasets.DatasetAny`` → ``airflow.sdk.definitions.asset.AssetAny`` + * [x] ``airflow.datasets.metadata`` → ``airflow.sdk.definitions.asset.metadata`` + * [x] ``airflow.datasets.expand_alias_to_datasets`` → ``airflow.sdk.definitions.asset.expand_alias_to_assets`` + * [x] ``airflow.datasets.manager.dataset_manager`` → ``airflow.assets.manager.asset_manager`` + * [x] ``airflow.datasets.manager.resolve_dataset_manager`` → ``airflow.assets.manager.resolve_asset_manager`` + * [x] ``airflow.datasets.manager.DatasetManager`` → ``airflow.assets.manager.AssetManager`` + * [x] ``airflow.listeners.spec.dataset.on_dataset_created`` → ``airflow.listeners.spec.asset.on_asset_created`` + * [x] ``airflow.listeners.spec.dataset.on_dataset_changed`` → ``airflow.listeners.spec.asset.on_asset_changed`` + * [x] ``airflow.timetables.simple.DatasetTriggeredTimetable`` → ``airflow.timetables.simple.AssetTriggeredTimetable`` + * [x] ``airflow.timetables.datasets.DatasetOrTimeSchedule`` → ``airflow.timetables.assets.AssetOrTimeSchedule`` + * [x] ``airflow.providers.amazon.auth_manager.avp.entities.AvpEntities.DATASET`` → ``airflow.providers.amazon.auth_manager.avp.entities.AvpEntities.ASSET`` + * [x] ``airflow.providers.amazon.aws.datasets.s3.create_dataset`` → ``airflow.providers.amazon.aws.assets.s3.create_asset`` + * [x] ``airflow.providers.amazon.aws.datasets.s3.convert_dataset_to_openlineage`` → ``airflow.providers.amazon.aws.assets.s3.convert_asset_to_openlineage`` + * [x] ``airflow.providers.amazon.aws.datasets.s3.sanitize_uri`` → ``airflow.providers.amazon.aws.assets.s3.sanitize_uri`` + * [x] ``airflow.providers.common.io.datasets.file.convert_dataset_to_openlineage`` → ``airflow.providers.common.io.assets.file.convert_asset_to_openlineage`` + * [x] ``airflow.providers.common.io.datasets.file.sanitize_uri`` → ``airflow.providers.common.io.assets.file.sanitize_uri`` + * [x] ``airflow.providers.common.io.datasets.file.create_dataset`` → ``airflow.providers.common.io.assets.file.create_asset`` + * [x]
``airflow.providers.google.datasets.bigquery.sanitize_uri`` → ``airflow.providers.google.assets.bigquery.sanitize_uri`` + * [x] ``airflow.providers.google.datasets.gcs.create_dataset`` → ``airflow.providers.google.assets.gcs.create_asset`` + * [x] ``airflow.providers.google.datasets.gcs.sanitize_uri`` → ``airflow.providers.google.assets.gcs.sanitize_uri`` + * [x] ``airflow.providers.google.datasets.gcs.convert_dataset_to_openlineage`` → ``airflow.providers.google.assets.gcs.convert_asset_to_openlineage`` + * [x] ``airflow.providers.fab.auth_manager.fab_auth_manager.is_authorized_dataset`` → ``airflow.providers.fab.auth_manager.fab_auth_manager.is_authorized_asset`` + * [x] ``airflow.providers.openlineage.utils.utils.DatasetInfo`` → ``airflow.providers.openlineage.utils.utils.AssetInfo`` + * [x] ``airflow.providers.openlineage.utils.utils.translate_airflow_dataset`` → ``airflow.providers.openlineage.utils.utils.translate_airflow_asset`` + * [x] ``airflow.providers.postgres.datasets.postgres.sanitize_uri`` → ``airflow.providers.postgres.assets.postgres.sanitize_uri`` + * [x] ``airflow.providers.mysql.datasets.mysql.sanitize_uri`` → ``airflow.providers.mysql.assets.mysql.sanitize_uri`` + * [x] ``airflow.providers.trino.datasets.trino.sanitize_uri`` → ``airflow.providers.trino.assets.trino.sanitize_uri`` + * [x] property ``airflow.providers_manager.ProvidersManager.dataset_factories`` → ``airflow.providers_manager.ProvidersManager.asset_factories`` + * [x] property ``airflow.providers_manager.ProvidersManager.dataset_uri_handlers`` → ``airflow.providers_manager.ProvidersManager.asset_uri_handlers`` + * [x] property ``airflow.providers_manager.ProvidersManager.dataset_to_openlineage_converters`` → ``airflow.providers_manager.ProvidersManager.asset_to_openlineage_converters`` + * [x] class attribute ``airflow.lineage.hook.DatasetLineageInfo.dataset`` → ``airflow.lineage.hook.AssetLineageInfo.asset`` + * [x] method ``airflow.datasets.manager.DatasetManager.register_dataset_change`` → ``airflow.assets.manager.AssetManager.register_asset_change`` + * [x] method ``airflow.datasets.manager.DatasetManager.create_datasets`` → ``airflow.assets.manager.AssetManager.create_assets`` + * [x] method ``airflow.datasets.manager.DatasetManager.notify_dataset_created`` → ``airflow.assets.manager.AssetManager.notify_asset_created`` + * [x] method ``airflow.datasets.manager.DatasetManager.notify_dataset_changed`` → ``airflow.assets.manager.AssetManager.notify_asset_changed`` + * [x] method ``airflow.datasets.manager.DatasetManager.notify_dataset_alias_created`` → ``airflow.assets.manager.AssetManager.notify_asset_alias_created`` + * [x] method ``airflow.providers.amazon.auth_manager.aws_auth_manager.AwsAuthManager.is_authorized_dataset`` → ``airflow.providers.amazon.auth_manager.aws_auth_manager.AwsAuthManager.is_authorized_asset`` + * [x] method ``airflow.lineage.hook.HookLineageCollector.create_dataset`` → ``airflow.lineage.hook.HookLineageCollector.create_asset`` + * [x] method ``airflow.lineage.hook.HookLineageCollector.add_input_dataset`` → ``airflow.lineage.hook.HookLineageCollector.add_input_asset`` + * [x] method ``airflow.lineage.hook.HookLineageCollector.add_output_dataset`` → ``airflow.lineage.hook.HookLineageCollector.add_output_asset`` + * [x] method ``airflow.lineage.hook.HookLineageCollector.collected_datasets`` → ``airflow.lineage.hook.HookLineageCollector.collected_assets`` + * [x] method ``airflow.providers_manager.ProvidersManager.initialize_providers_dataset_uri_resources`` →
``airflow.providers_manager.ProvidersManager.initialize_providers_asset_uri_resources`` + * [x] method ``airflow.secrets.base_secrets.BaseSecretsBackend.get_conn_uri`` → ``airflow.secrets.base_secrets.BaseSecretsBackend.get_conn_value`` + * [x] method ``airflow.secrets.base_secrets.BaseSecretsBackend.get_connections`` → ``airflow.secrets.base_secrets.BaseSecretsBackend.get_connection`` + * [x] method ``airflow.hooks.base.BaseHook.get_connections`` → ``airflow.hooks.base.BaseHook.get_connection`` + * [x] method ``airflow.datasets.BaseDataset.iter_datasets`` → ``airflow.sdk.definitions.asset.BaseAsset.iter_assets`` + * [x] method ``airflow.datasets.BaseDataset.iter_dataset_aliases`` → ``airflow.sdk.definitions.asset.BaseAsset.iter_asset_aliases`` diff --git a/newsfragments/41366.significant.rst b/newsfragments/41366.significant.rst index c552325d8a70f..edd588d90bd75 100644 --- a/newsfragments/41366.significant.rst +++ b/newsfragments/41366.significant.rst @@ -1,3 +1,22 @@ ``airflow.contrib`` modules have been removed All modules from ``airflow.contrib``, which were deprecated in Airflow 2, have been removed. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.contrib.*`` diff --git a/newsfragments/41367.significant.rst b/newsfragments/41367.significant.rst index 410d5941bfab7..1e7405bd8e2ee 100644 --- a/newsfragments/41367.significant.rst +++ b/newsfragments/41367.significant.rst @@ -2,3 +2,14 @@ Deprecated ``ImportError`` removed from ``airflow.models`` The deprecated ``ImportError`` class can no longer be imported from ``airflow.models``. It has been moved to ``airflow.models.errors.ParseImportError``. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41368.significant.rst b/newsfragments/41368.significant.rst index 26b76b0f3a1ec..2ae517e7ce430 100644 --- a/newsfragments/41368.significant.rst +++ b/newsfragments/41368.significant.rst @@ -10,3 +10,162 @@ Support for importing classes etc from the following locations was deprecated at Instead, import from the right provider or more specific module instead. For example, instead of ``from airflow.sensors import TimeDeltaSensor``, use ``from airflow.sensors.time_delta import TimeDeltaSensor``. 
+ +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.operators.bash_operator.BashOperator`` → ``airflow.operators.bash.BashOperator`` + * [x] ``airflow.operators.branch_operator.BaseBranchOperator`` → ``airflow.operators.branch.BaseBranchOperator`` + * [x] ``airflow.operators....EmptyOperator`` → ``airflow.operators.empty.EmptyOperator`` + * [x] ``airflow.operators....DummyOperator`` → ``airflow.operators.empty.EmptyOperator`` + * [x] ``airflow.operators.dummy_operator.EmptyOperator`` → ``airflow.operators.empty.EmptyOperator`` + * [x] ``airflow.operators.dummy_operator.DummyOperator`` → ``airflow.operators.empty.EmptyOperator`` + * [x] ``airflow.operators.email_operator.EmailOperator`` → ``airflow.operators.email.EmailOperator`` + * [x] ``airflow.sensors.base_sensor_operator.BaseSensorOperator`` → ``airflow.sensors.base.BaseSensorOperator`` + * [x] ``airflow.sensors.date_time_sensor.DateTimeSensor`` → ``airflow.sensors.date_time.DateTimeSensor`` + * [x] ``airflow.sensors.external_task_sensor.ExternalTaskMarker`` → ``airflow.sensors.external_task.ExternalTaskMarker`` + * [x] ``airflow.sensors.external_task_sensor.ExternalTaskSensor`` → ``airflow.sensors.external_task.ExternalTaskSensor`` + * [x] ``airflow.sensors.external_task_sensor.ExternalTaskSensorLink`` → ``airflow.sensors.external_task.ExternalTaskSensorLink`` + * [x] ``airflow.sensors.time_delta_sensor.TimeDeltaSensor`` → ``airflow.sensors.time_delta.TimeDeltaSensor`` + * [x] ``airflow.hooks.base_hook.BaseHook`` → ``airflow.hooks.base.BaseHook`` + * [x] ``airflow.operators.dagrun_operator.TriggerDagRunLink`` → ``airflow.operators.trigger_dagrun.TriggerDagRunLink`` + * [x] ``airflow.operators.dagrun_operator.TriggerDagRunOperator`` → ``airflow.operators.trigger_dagrun.TriggerDagRunOperator`` + * [x] ``airflow.operators.python_operator.BranchPythonOperator`` → ``airflow.operators.python.BranchPythonOperator`` + * [x] ``airflow.operators.python_operator.PythonOperator`` → ``airflow.operators.python.PythonOperator`` + * [x] ``airflow.operators.python_operator.PythonVirtualenvOperator`` → ``airflow.operators.python.PythonVirtualenvOperator`` + * [x] ``airflow.operators.python_operator.ShortCircuitOperator`` → ``airflow.operators.python.ShortCircuitOperator`` + * [x] ``airflow.operators.latest_only_operator.LatestOnlyOperator`` → ``airflow.operators.latest_only.LatestOnlyOperator`` + + * AIR303 + + * [x] ``airflow.executors.celery_executor.CeleryExecutor`` → ``airflow.providers.celery.executors.celery_executor.CeleryExecutor`` + * [x] ``airflow.executors.celery_kubernetes_executor.CeleryKubernetesExecutor`` → ``airflow.providers.celery.executors.celery_kubernetes_executor.CeleryKubernetesExecutor`` + * [x] ``airflow.executors.dask_executor.DaskExecutor`` → ``airflow.providers.daskexecutor.executors.dask_executor.DaskExecutor`` + * [x] ``airflow.executors.kubernetes_executor.KubernetesExecutor`` → ``airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor`` + * [x] ``airflow.executors.kubernetes_executor_utils.AirflowKubernetesScheduler`` → ``airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.AirflowKubernetesScheduler`` + * [x] ``airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher`` → 
``airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher`` + * [x] ``airflow.executors.kubernetes_executor_utils.ResourceVersion`` → ``airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.ResourceVersion`` + * [x] ``airflow.executors.local_kubernetes_executor.LocalKubernetesExecutor`` → ``airflow.providers.cncf.kubernetes.executors.local_kubernetes_executor.LocalKubernetesExecutor`` + * [x] ``airflow.hooks.S3_hook.S3Hook`` → ``airflow.providers.amazon.aws.hooks.s3.S3Hook`` + * [x] ``airflow.hooks.S3_hook.provide_bucket_name`` → ``airflow.providers.amazon.aws.hooks.s3.provide_bucket_name`` + * [x] ``airflow.hooks.base_hook.BaseHook`` → ``airflow.hooks.base.BaseHook`` + * [x] ``airflow.hooks.dbapi_hook.DbApiHook`` → ``airflow.providers.common.sql.hooks.sql.DbApiHook`` + * [x] ``airflow.hooks.docker_hook.DockerHook`` → ``airflow.providers.docker.hooks.docker.DockerHook`` + * [x] ``airflow.hooks.druid_hook.DruidDbApiHook`` → ``airflow.providers.apache.druid.hooks.druid.DruidDbApiHook`` + * [x] ``airflow.hooks.druid_hook.DruidHook`` → ``airflow.providers.apache.druid.hooks.druid.DruidHook`` + * [x] ``airflow.hooks.hive_hooks.HIVE_QUEUE_PRIORITIES`` → ``airflow.providers.apache.hive.hooks.hive.HIVE_QUEUE_PRIORITIES`` + * [x] ``airflow.hooks.hive_hooks.HiveCliHook`` → ``airflow.providers.apache.hive.hooks.hive.HiveCliHook`` + * [x] ``airflow.hooks.hive_hooks.HiveMetastoreHook`` → ``airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook`` + * [x] ``airflow.hooks.hive_hooks.HiveServer2Hook`` → ``airflow.providers.apache.hive.hooks.hive.HiveServer2Hook`` + * [x] ``airflow.hooks.http_hook.HttpHook`` → ``airflow.providers.http.hooks.http.HttpHook`` + * [x] ``airflow.hooks.jdbc_hook.JdbcHook`` → ``airflow.providers.jdbc.hooks.jdbc.JdbcHook`` + * [x] ``airflow.hooks.jdbc_hook.jaydebeapi`` → ``airflow.providers.jdbc.hooks.jdbc.jaydebeapi`` + * [x] ``airflow.hooks.mssql_hook.MsSqlHook`` → ``airflow.providers.microsoft.mssql.hooks.mssql.MsSqlHook`` + * [x] ``airflow.hooks.mysql_hook.MySqlHook`` → ``airflow.providers.mysql.hooks.mysql.MySqlHook`` + * [x] ``airflow.hooks.oracle_hook.OracleHook`` → ``airflow.providers.oracle.hooks.oracle.OracleHook`` + * [x] ``airflow.hooks.pig_hook.PigCliHook`` → ``airflow.providers.apache.pig.hooks.pig.PigCliHook`` + * [x] ``airflow.hooks.postgres_hook.PostgresHook`` → ``airflow.providers.postgres.hooks.postgres.PostgresHook`` + * [x] ``airflow.hooks.presto_hook.PrestoHook`` → ``airflow.providers.presto.hooks.presto.PrestoHook`` + * [x] ``airflow.hooks.samba_hook.SambaHook`` → ``airflow.providers.samba.hooks.samba.SambaHook`` + * [x] ``airflow.hooks.slack_hook.SlackHook`` → ``airflow.providers.slack.hooks.slack.SlackHook`` + * [x] ``airflow.hooks.sqlite_hook.SqliteHook`` → ``airflow.providers.sqlite.hooks.sqlite.SqliteHook`` + * [x] ``airflow.hooks.webhdfs_hook.WebHDFSHook`` → ``airflow.providers.apache.hdfs.hooks.webhdfs.WebHDFSHook`` + * [x] ``airflow.hooks.zendesk_hook.ZendeskHook`` → ``airflow.providers.zendesk.hooks.zendesk.ZendeskHook`` + * [x] ``airflow.operators.check_operator.SQLCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLCheckOperator`` + * [x] ``airflow.operators.check_operator.SQLIntervalCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator`` + * [x] ``airflow.operators.check_operator.SQLThresholdCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLThresholdCheckOperator`` + * [x] ``airflow.operators.check_operator.SQLValueCheckOperator`` →
``airflow.providers.common.sql.operators.sql.SQLValueCheckOperator`` + * [x] ``airflow.operators.check_operator.CheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLCheckOperator`` + * [x] ``airflow.operators.check_operator.IntervalCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator`` + * [x] ``airflow.operators.check_operator.ThresholdCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLThresholdCheckOperator`` + * [x] ``airflow.operators.check_operator.ValueCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLValueCheckOperator`` + * [x] ``airflow.operators.dagrun_operator.TriggerDagRunLink`` → ``airflow.operators.trigger_dagrun.TriggerDagRunLink`` + * [x] ``airflow.operators.dagrun_operator.TriggerDagRunOperator`` → ``airflow.operators.trigger_dagrun.TriggerDagRunOperator`` + * [x] ``airflow.operators.docker_operator.DockerOperator`` → ``airflow.providers.docker.operators.docker.DockerOperator`` + * [x] ``airflow.operators.druid_check_operator.DruidCheckOperator`` → ``airflow.providers.apache.druid.operators.druid_check.DruidCheckOperator`` + * [x] ``airflow.operators.gcs_to_s3.GCSToS3Operator`` → ``airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator`` + * [x] ``airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Operator`` → ``airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleApiToS3Operator`` + * [x] ``airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Transfer`` → ``airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleApiToS3Operator`` + * [x] ``airflow.operators.hive_operator.HiveOperator`` → ``airflow.providers.apache.hive.operators.hive.HiveOperator`` + * [x] ``airflow.operators.hive_stats_operator.HiveStatsCollectionOperator`` → ``airflow.providers.apache.hive.operators.hive_stats.HiveStatsCollectionOperator`` + * [x] ``airflow.operators.hive_to_druid.HiveToDruidOperator`` → ``airflow.providers.apache.druid.transfers.hive_to_druid.HiveToDruidOperator`` + * [x] ``airflow.operators.hive_to_druid.HiveToDruidTransfer`` → ``airflow.providers.apache.druid.transfers.hive_to_druid.HiveToDruidOperator`` + * [x] ``airflow.operators.hive_to_mysql.HiveToMySqlOperator`` → ``airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator`` + * [x] ``airflow.operators.hive_to_mysql.HiveToMySqlTransfer`` → ``airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator`` + * [x] ``airflow.operators.hive_to_samba_operator.HiveToSambaOperator`` → ``airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator`` + * [x] ``airflow.operators.http_operator.SimpleHttpOperator`` → ``airflow.providers.http.operators.http.SimpleHttpOperator`` + * [x] ``airflow.operators.jdbc_operator.JdbcOperator`` → ``airflow.providers.jdbc.operators.jdbc.JdbcOperator`` + * [x] ``airflow.operators.latest_only_operator.LatestOnlyOperator`` → ``airflow.operators.latest_only.LatestOnlyOperator`` + * [x] ``airflow.operators.mssql_operator.MsSqlOperator`` → ``airflow.providers.microsoft.mssql.operators.mssql.MsSqlOperator`` + * [x] ``airflow.operators.mssql_to_hive.MsSqlToHiveOperator`` → ``airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator`` + * [x] ``airflow.operators.mssql_to_hive.MsSqlToHiveTransfer`` → ``airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator`` + * [x] ``airflow.operators.mysql_operator.MySqlOperator`` → ``airflow.providers.mysql.operators.mysql.MySqlOperator`` + * [x]
``airflow.operators.mysql_to_hive.MySqlToHiveOperator`` → ``airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator`` + * [x] ``airflow.operators.mysql_to_hive.MySqlToHiveTransfer`` → ``airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator`` + * [x] ``airflow.operators.oracle_operator.OracleOperator`` → ``airflow.providers.oracle.operators.oracle.OracleOperator`` + * [x] ``airflow.operators.papermill_operator.PapermillOperator`` → ``airflow.providers.papermill.operators.papermill.PapermillOperator`` + * [x] ``airflow.operators.pig_operator.PigOperator`` → ``airflow.providers.apache.pig.operators.pig.PigOperator`` + * [x] ``airflow.operators.postgres_operator.Mapping`` → ``airflow.providers.postgres.operators.postgres.Mapping`` + * [x] ``airflow.operators.postgres_operator.PostgresOperator`` → ``airflow.providers.postgres.operators.postgres.PostgresOperator`` + * [x] ``airflow.operators.presto_check_operator.SQLCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLCheckOperator`` + * [x] ``airflow.operators.presto_check_operator.SQLIntervalCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator`` + * [x] ``airflow.operators.presto_check_operator.SQLValueCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLValueCheckOperator`` + * [x] ``airflow.operators.presto_check_operator.PrestoCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLCheckOperator`` + * [x] ``airflow.operators.presto_check_operator.PrestoIntervalCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator`` + * [x] ``airflow.operators.presto_check_operator.PrestoValueCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLValueCheckOperator`` + * [x] ``airflow.operators.presto_to_mysql.PrestoToMySqlOperator`` → ``airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator`` + * [x] ``airflow.operators.presto_to_mysql.PrestoToMySqlTransfer`` → ``airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator`` + * [x] ``airflow.operators.python_operator.BranchPythonOperator`` → ``airflow.operators.python.BranchPythonOperator`` + * [x] ``airflow.operators.python_operator.PythonOperator`` → ``airflow.operators.python.PythonOperator`` + * [x] ``airflow.operators.python_operator.PythonVirtualenvOperator`` → ``airflow.operators.python.PythonVirtualenvOperator`` + * [x] ``airflow.operators.python_operator.ShortCircuitOperator`` → ``airflow.operators.python.ShortCircuitOperator`` + * [x] ``airflow.operators.redshift_to_s3_operator.RedshiftToS3Operator`` → ``airflow.providers.amazon.aws.transfers.redshift_to_s3.RedshiftToS3Operator`` + * [x] ``airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer`` → ``airflow.providers.amazon.aws.transfers.redshift_to_s3.RedshiftToS3Operator`` + * [x] ``airflow.operators.s3_file_transform_operator.S3FileTransformOperator`` → ``airflow.providers.amazon.aws.operators.s3_file_transform.S3FileTransformOperator`` + * [x] ``airflow.operators.s3_to_hive_operator.S3ToHiveOperator`` → ``airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator`` + * [x] ``airflow.operators.s3_to_hive_operator.S3ToHiveTransfer`` → ``airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator`` + * [x] ``airflow.operators.s3_to_redshift_operator.S3ToRedshiftOperator`` → ``airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator`` + * [x] ``airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer`` → 
``airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator`` + * [x] ``airflow.operators.slack_operator.SlackAPIOperator`` → ``airflow.providers.slack.operators.slack.SlackAPIOperator`` + * [x] ``airflow.operators.slack_operator.SlackAPIPostOperator`` → ``airflow.providers.slack.operators.slack.SlackAPIPostOperator`` + * [x] ``airflow.operators.sql.BaseSQLOperator`` → ``airflow.providers.common.sql.operators.sql.BaseSQLOperator`` + * [x] ``airflow.operators.sql.BranchSQLOperator`` → ``airflow.providers.common.sql.operators.sql.BranchSQLOperator`` + * [x] ``airflow.operators.sql.SQLCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLCheckOperator`` + * [x] ``airflow.operators.sql.SQLColumnCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLColumnCheckOperator`` + * [x] ``airflow.operators.sql.SQLIntervalCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator`` + * [x] ``airflow.operators.sql.SQLTableCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLTableCheckOperator`` + * [x] ``airflow.operators.sql.SQLThresholdCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLThresholdCheckOperator`` + * [x] ``airflow.operators.sql.SQLValueCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLValueCheckOperator`` + * [x] ``airflow.operators.sql._convert_to_float_if_possible`` → ``airflow.providers.common.sql.operators.sql._convert_to_float_if_possible`` + * [x] ``airflow.operators.sql.parse_boolean`` → ``airflow.providers.common.sql.operators.sql.parse_boolean`` + * [x] ``airflow.operators.sql_branch_operator.BranchSQLOperator`` → ``airflow.providers.common.sql.operators.sql.BranchSQLOperator`` + * [x] ``airflow.operators.sql_branch_operator.BranchSqlOperator`` → ``airflow.providers.common.sql.operators.sql.BranchSQLOperator`` + * [x] ``airflow.operators.sqlite_operator.SqliteOperator`` → ``airflow.providers.sqlite.operators.sqlite.SqliteOperator`` + * [x] ``airflow.sensors.hive_partition_sensor.HivePartitionSensor`` → ``airflow.providers.apache.hive.sensors.hive_partition.HivePartitionSensor`` + * [x] ``airflow.sensors.http_sensor.HttpSensor`` → ``airflow.providers.http.sensors.http.HttpSensor`` + * [x] ``airflow.sensors.metastore_partition_sensor.MetastorePartitionSensor`` → ``airflow.providers.apache.hive.sensors.metastore_partition.MetastorePartitionSensor`` + * [x] ``airflow.sensors.named_hive_partition_sensor.NamedHivePartitionSensor`` → ``airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor`` + * [x] ``airflow.sensors.s3_key_sensor.S3KeySensor`` → ``airflow.providers.amazon.aws.sensors.s3.S3KeySensor`` + * [x] ``airflow.sensors.sql.SqlSensor`` → ``airflow.providers.common.sql.sensors.sql.SqlSensor`` + * [x] ``airflow.sensors.sql_sensor.SqlSensor`` → ``airflow.providers.common.sql.sensors.sql.SqlSensor`` + * [x] ``airflow.sensors.web_hdfs_sensor.WebHdfsSensor`` → ``airflow.providers.apache.hdfs.sensors.web_hdfs.WebHdfsSensor`` + * [x] ``airflow.executors.kubernetes_executor_types.ALL_NAMESPACES`` → ``airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types.ALL_NAMESPACES`` + * [x] ``airflow.executors.kubernetes_executor_types.POD_EXECUTOR_DONE_KEY`` → ``airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types.POD_EXECUTOR_DONE_KEY`` + * [x] ``airflow.hooks.hive_hooks.HIVE_QUEUE_PRIORITIES`` → ``airflow.providers.apache.hive.hooks.hive.HIVE_QUEUE_PRIORITIES`` + * [x] ``airflow.executors.celery_executor.app`` → 
``airflow.providers.celery.executors.celery_executor_utils.app`` + * [x] ``airflow.macros.hive.closest_ds_partition`` → ``airflow.providers.apache.hive.macros.hive.closest_ds_partition`` + * [x] ``airflow.macros.hive.max_partition`` → ``airflow.providers.apache.hive.macros.hive.max_partition`` diff --git a/newsfragments/41390.significant.rst b/newsfragments/41390.significant.rst index 37ddf0732449e..5a88c547048fe 100644 --- a/newsfragments/41390.significant.rst +++ b/newsfragments/41390.significant.rst @@ -1,7 +1,5 @@ Support for SubDags is removed -**Breaking Change** - Subdags have been removed from the following locations: - CLI @@ -12,3 +10,22 @@ This removal marks the end of Subdag support across all interfaces. Users should transition to using TaskGroups as a more efficient and maintainable alternative. Please ensure your DAGs are updated to remove any usage of Subdags to maintain compatibility with future Airflow releases. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [x] API changes + * [x] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.operators.subdag.*`` diff --git a/newsfragments/41391.significant.rst b/newsfragments/41391.significant.rst index 0513aa3ab93b3..de169be0e4835 100644 --- a/newsfragments/41391.significant.rst +++ b/newsfragments/41391.significant.rst @@ -1,5 +1,23 @@ -**Breaking Change** - The ``airflow.providers.standard.sensors.external_task.ExternalTaskSensorLink`` class has been removed. + This class was deprecated and is no longer available. Users should now use the ``airflow.providers.standard.sensors.external_task.ExternalDagLink`` class directly. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.sensors.external_task.ExternalTaskSensorLink`` → ``airflow.sensors.external_task.ExternalDagLink`` diff --git a/newsfragments/41393.significant.rst b/newsfragments/41393.significant.rst index d81e2858bb59b..26b3724ca4aa0 100644 --- a/newsfragments/41393.significant.rst +++ b/newsfragments/41393.significant.rst @@ -1,7 +1,4 @@ -**Breaking Change** - -The ``use_task_execution_day`` parameter has been removed from the ``DayOfWeekSensor`` class. -This parameter was previously deprecated in favor of ``use_task_logical_date``. +The ``use_task_execution_day`` parameter has been removed from the ``DayOfWeekSensor`` class. This parameter was previously deprecated in favor of ``use_task_logical_date``. If your code still uses ``use_task_execution_day``, you should update it to use ``use_task_logical_date`` instead to ensure compatibility with future Airflow versions. 
@@ -16,3 +13,22 @@ Example update: use_task_logical_date=True, dag=dag, ) + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] argument ``use_task_execution_day`` → ``use_task_logical_date`` in ``airflow.operators.weekday.DayOfWeekSensor`` diff --git a/newsfragments/41394.significant.rst b/newsfragments/41394.significant.rst index d973efb81daef..49414a50b8e31 100644 --- a/newsfragments/41394.significant.rst +++ b/newsfragments/41394.significant.rst @@ -1,6 +1,15 @@ -**Breaking Change** +The ``airflow.models.taskMixin.TaskMixin`` class has been removed. It was previously deprecated in favor of the ``airflow.models.taskMixin.DependencyMixin`` class. -The ``airflow.models.taskMixin.TaskMixin`` class has been removed. It was previously -deprecated in favor of the ``airflow.models.taskMixin.DependencyMixin`` class. If your code relies on ``TaskMixin``, please update it to use ``DependencyMixin`` instead to ensure compatibility with Airflow 3.0 and beyond. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41395.significant.rst b/newsfragments/41395.significant.rst index 77be51aff89d3..be9427b8b9d6f 100644 --- a/newsfragments/41395.significant.rst +++ b/newsfragments/41395.significant.rst @@ -8,3 +8,26 @@ The following deprecated functions, constants, and classes have been removed as - ``airflow.utils.file.mkdirs`` function: Use ``pathlib.Path.mkdir`` instead. - ``airflow.utils.state.SHUTDOWN`` state: No action needed; this state is no longer used. - ``airflow.utils.state.terminating_states`` constant: No action needed; this constant is no longer used. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.utils.file.TemporaryDirectory`` → ``tempfile.TemporaryDirectory`` + * [x] ``airflow.utils.file.mkdirs`` → ``pathlib.Path({path}).mkdir`` + * [x] ``airflow.utils.dag_cycle_tester.test_cycle`` + * [x] ``airflow.utils.state.SHUTDOWN`` + * [x] ``airflow.utils.state.terminating_states`` diff --git a/newsfragments/41420.significant.rst b/newsfragments/41420.significant.rst index 361b8c7ea9c48..b2d05c97c60b4 100644 --- a/newsfragments/41420.significant.rst +++ b/newsfragments/41420.significant.rst @@ -1,5 +1,3 @@ -**Breaking Change** - Replaced Python's ``list`` with ``MutableSet`` for the property ``DAG.tags``. At the constructor you can still use a list, @@ -9,3 +7,14 @@ you actually can use any data structure that implements the The ``tags`` property of the ``DAG`` model is now of type ``MutableSet`` instead of ``list``, as there are no actual duplicates among the tags.
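To make the new set semantics concrete, a minimal sketch (the DAG id and tag names are illustrative, not part of the change itself):

.. code-block:: python

    from airflow.models.dag import DAG

    # A list is still accepted at the constructor; duplicates collapse on assignment.
    dag = DAG(dag_id="example_tags", tags=["team-a", "nightly", "team-a"])

    assert dag.tags == {"team-a", "nightly"}  # set semantics, no guaranteed order
    dag.tags.add("critical")     # MutableSet operations such as add/discard now work
    dag.tags.discard("nightly")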
+ +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41434.significant.rst b/newsfragments/41434.significant.rst index e0aa50ecd1865..7a4eed657d11d 100644 --- a/newsfragments/41434.significant.rst +++ b/newsfragments/41434.significant.rst @@ -1,6 +1,15 @@ Experimental API is removed -**Breaking Change** - Experimental API is no longer available in Airflow. Users should transition to using the REST API as an alternative. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [x] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41440.significant.rst b/newsfragments/41440.significant.rst index 4f819bb4d8f99..c23b6edfb5efc 100644 --- a/newsfragments/41440.significant.rst +++ b/newsfragments/41440.significant.rst @@ -1,16 +1,28 @@ -Removed unused methods / properties in models/dag.py +Removed unused methods / properties in ``airflow/models/dag.py`` -Methods removed: - * date_range - * is_fixed_time_schedule - * next_dagrun_after_date - * get_run_dates - * normalize_schedule - * full_filepath - * concurrency - * filepath - * concurrency_reached - * normalized_schedule_interval - * latest_execution_date - * set_dag_runs_state - * bulk_sync_to_db +* Methods removed + + * ``date_range`` + * ``is_fixed_time_schedule`` + * ``next_dagrun_after_date`` + * ``get_run_dates`` + * ``normalize_schedule`` + * ``full_filepath`` + * ``concurrency`` + * ``filepath`` + * ``concurrency_reached`` + * ``normalized_schedule_interval`` + * ``latest_execution_date`` + * ``set_dag_runs_state`` + * ``bulk_sync_to_db`` + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41453.significant.rst b/newsfragments/41453.significant.rst index fc7269f9280ca..8ca0de3df3202 100644 --- a/newsfragments/41453.significant.rst +++ b/newsfragments/41453.significant.rst @@ -9,3 +9,23 @@ presentation purposes. Since the DAG object no longer has the ``schedule_interval`` attribute, OpenLineage facets that contain the ``dag`` key produced on Airflow 3.0 or later will also no longer contain the field.
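For DAG authors, the practical counterpart of the ``schedule_interval``/``timetable`` removal is passing ``schedule`` instead; a minimal before/after sketch (the dag id and interval are illustrative):

.. code-block:: python

    from datetime import timedelta

    from airflow.models.dag import DAG

    # Before (removed in 3.0): DAG(dag_id="nightly", schedule_interval=timedelta(days=1))
    # After: ``schedule`` accepts cron strings, timedelta objects and timetables alike.
    dag = DAG(dag_id="nightly", schedule=timedelta(days=1))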
+ +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [x] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] argument ``schedule_interval`` in ``DAG`` + * [x] argument ``timetable`` in ``DAG`` diff --git a/newsfragments/41496.significant.rst b/newsfragments/41496.significant.rst index aecb35e723665..e19f1df46c5a7 100644 --- a/newsfragments/41496.significant.rst +++ b/newsfragments/41496.significant.rst @@ -1,5 +1,26 @@ -Removed deprecated methods in airflow/utils/dates.py +Removed deprecated methods in ``airflow/utils/dates.py`` -Methods removed: - * date_range - * days_ago (Use ``pendulum.today('UTC').add(days=-N, ...)``) +* Methods removed + + * ``date_range`` + * ``days_ago`` (Use ``pendulum.today('UTC').add(days=-N, ...)``) + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.utils.dates.date_range`` + * [x] ``airflow.utils.dates.days_ago`` → ``pendulum.today("UTC").add(days=-N, ...)`` diff --git a/newsfragments/41520.significant.rst b/newsfragments/41520.significant.rst index 6f5c605da59a5..bfd220fa4446f 100644 --- a/newsfragments/41520.significant.rst +++ b/newsfragments/41520.significant.rst @@ -1,5 +1,26 @@ -Removed deprecated methods in airflow/utils/helpers.py +Removed deprecated methods in ``airflow/utils/helpers.py`` - Methods removed: - * chain (Use ``airflow.models.baseoperator.chain``) - * cross_downstream (Use ``airflow.models.baseoperator.cross_downstream``) +* Methods removed: + + * ``chain`` (Use ``airflow.models.baseoperator.chain``) + * ``cross_downstream`` (Use ``airflow.models.baseoperator.cross_downstream``) + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.utils.helpers.chain`` → ``airflow.models.baseoperator.chain`` + * [x] ``airflow.utils.helpers.cross_downstream`` → ``airflow.models.baseoperator.cross_downstream`` diff --git a/newsfragments/41533.significant.rst b/newsfragments/41533.significant.rst index 65f805186f79e..0f898f21ad332 100644 --- a/newsfragments/41533.significant.rst +++ b/newsfragments/41533.significant.rst @@ -1,6 +1,5 @@ -**Breaking Change** - The ``load_connections`` function has been removed from the ``local_file_system``. + This function was previously deprecated in favor of ``load_connections_dict``. If your code still uses ``load_connections``, you should update it to use ``load_connections_dict`` @@ -24,3 +23,23 @@ Example update: ..
code-block:: python connection_by_conn_id = LocalFilesystemBackend().get_connection(conn_id="conn_id") + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.secrets.local_filesystem.load_connections`` → ``airflow.secrets.local_filesystem.load_connections_dict`` + * [x] ``airflow.secrets.local_filesystem.LocalFilesystemBackend.get_connection`` → ``airflow.secrets.local_filesystem.LocalFilesystemBackend.load_connections_dict`` diff --git a/newsfragments/41539.significant.rst b/newsfragments/41539.significant.rst index 31a497f4582c1..097dc6db75b05 100644 --- a/newsfragments/41539.significant.rst +++ b/newsfragments/41539.significant.rst @@ -1 +1,19 @@ Removed deprecated ``smtp_user`` and ``smtp_password`` configuration parameters from the ``smtp`` section. Please use the SMTP connection (``smtp_default``) instead. + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``smtp.smtp_user`` + * [x] ``smtp.smtp_password`` diff --git a/newsfragments/41550.significant.rst b/newsfragments/41550.significant.rst index 7589419f12275..6e4ff5827d297 100644 --- a/newsfragments/41550.significant.rst +++ b/newsfragments/41550.significant.rst @@ -1,3 +1,22 @@ Removed deprecated ``session_lifetime_days`` and ``force_log_out_after`` configuration parameters from ``webserver`` section. Please use ``session_lifetime_minutes``. Removed deprecated ``policy`` parameter from ``airflow_local_settings``. Please use ``task_policy``. + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``webserver.session_lifetime_days`` → ``webserver.session_lifetime_minutes`` + * [x] ``webserver.force_log_out_after`` → ``webserver.session_lifetime_minutes`` + * [x] ``policy`` → ``task_policy`` diff --git a/newsfragments/41552.significant.rst b/newsfragments/41552.significant.rst index 475b0e159474a..99d6881e4032c 100644 --- a/newsfragments/41552.significant.rst +++ b/newsfragments/41552.significant.rst @@ -1 +1,20 @@ Removed deprecated ``filename_template`` argument from ``airflow.utils.log.file_task_handler.FileTaskHandler``.
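A minimal sketch of constructing the handler after this removal, assuming the remaining ``base_log_folder`` argument keeps its name and the template now comes from the ``[logging] log_filename_template`` configuration:

.. code-block:: python

    from airflow.utils.log.file_task_handler import FileTaskHandler

    # Before (removed): FileTaskHandler("/var/log/airflow", filename_template="{{ ti.dag_id }}.log")
    # After: only the log folder is passed; the filename template is resolved from configuration.
    handler = FileTaskHandler(base_log_folder="/var/log/airflow")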
+ +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] argument ``filename_template`` in ``airflow.utils.log.file_task_handler.FileTaskHandler`` and its subclasses diff --git a/newsfragments/41564.significant.rst b/newsfragments/41564.significant.rst index 012fe7db92a8f..eed922f86c1f1 100644 --- a/newsfragments/41564.significant.rst +++ b/newsfragments/41564.significant.rst @@ -17,9 +17,9 @@ Move all time operators and sensors from airflow core to standard provider * AIR303 - * [ ] ``airflow.operators.datetime.*`` → ``airflow.providers.standard.time.operators.datetime.*`` - * [ ] ``airflow.operators.weekday.*`` → ``airflow.providers.standard.time.operators.weekday.*`` - * [ ] ``airflow.sensors.date_time.*`` → ``airflow.providers.standard.time.sensors.date_time.*`` - * [ ] ``airflow.sensors.time_sensor.*`` → ``airflow.providers.standard.time.sensors.time.*`` - * [ ] ``airflow.sensors.time_delta.*`` → ``airflow.providers.standard.time.sensors.time_delta.*`` - * [ ] ``airflow.sensors.weekday.*`` → ``airflow.providers.standard.time.sensors.weekday.*`` + * [x] ``airflow.operators.datetime.*`` → ``airflow.providers.standard.time.operators.datetime.*`` + * [x] ``airflow.operators.weekday.*`` → ``airflow.providers.standard.time.operators.weekday.*`` + * [x] ``airflow.sensors.date_time.*`` → ``airflow.providers.standard.time.sensors.date_time.*`` + * [x] ``airflow.sensors.time_sensor.*`` → ``airflow.providers.standard.time.sensors.time.*`` + * [x] ``airflow.sensors.time_delta.*`` → ``airflow.providers.standard.time.sensors.time_delta.*`` + * [x] ``airflow.sensors.weekday.*`` → ``airflow.providers.standard.time.sensors.weekday.*`` diff --git a/newsfragments/41579.significant.rst b/newsfragments/41579.significant.rst index 1aae04e2be13e..d554b5b85303a 100644 --- a/newsfragments/41579.significant.rst +++ b/newsfragments/41579.significant.rst @@ -1 +1,20 @@ Removed deprecated ``apply_defaults`` function from ``airflow/utils/decorators.py``. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.utils.decorators.apply_defaults`` (auto applied) diff --git a/newsfragments/41609.significant.rst b/newsfragments/41609.significant.rst index dff7b26a55596..b691aaea7d188 100644 --- a/newsfragments/41609.significant.rst +++ b/newsfragments/41609.significant.rst @@ -1 +1,18 @@ Removed deprecated ``dependency_detector`` parameter from ``scheduler``. + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``scheduler.dependency_detector`` diff --git a/newsfragments/41635.significant.rst b/newsfragments/41635.significant.rst index 0566d6074763f..da3a6e719f4de 100644 --- a/newsfragments/41635.significant.rst +++ b/newsfragments/41635.significant.rst @@ -1 +1,12 @@ Removed deprecated ``--ignore-depends-on-past`` cli option from task command. Please use ``--depends-on-past ignore``.
+ +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [x] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41642.significant.rst b/newsfragments/41642.significant.rst index 566061d0d893c..a0748da360d10 100644 --- a/newsfragments/41642.significant.rst +++ b/newsfragments/41642.significant.rst @@ -1,3 +1,23 @@ Removed deprecated secrets backend methods ``get_conn_uri`` and ``get_connections``. Please use ``get_conn_value`` and ``get_connection`` instead. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.secrets.base_secrets.BaseSecretsBackend.get_conn_uri`` → ``airflow.secrets.base_secrets.BaseSecretsBackend.get_conn_value`` + * [x] ``airflow.secrets.base_secrets.BaseSecretsBackend.get_connections`` → ``airflow.secrets.base_secrets.BaseSecretsBackend.get_connection`` diff --git a/newsfragments/41663.significant.rst b/newsfragments/41663.significant.rst index 169d7fb5c6732..2cfd9d3f2df27 100644 --- a/newsfragments/41663.significant.rst +++ b/newsfragments/41663.significant.rst @@ -1 +1,20 @@ Removed deprecated auth ``airflow.api.auth.backend.basic_auth`` from ``auth_backends``. Please use ``airflow.providers.fab.auth_manager.api.auth.backend.basic_auth`` instead. + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR303 + + * [x] ``airflow.api.auth.backend.basic_auth`` → ``airflow.providers.fab.auth_manager.api.auth.backend.basic_auth`` diff --git a/newsfragments/41693.significant.rst b/newsfragments/41693.significant.rst index 3479f53dea300..89fbe92567725 100644 --- a/newsfragments/41693.significant.rst +++ b/newsfragments/41693.significant.rst @@ -1 +1,21 @@ Removed deprecated auth ``airflow.api.auth.backend.kerberos_auth`` and ``airflow.auth.managers.fab.api.auth.backend.kerberos_auth`` from ``auth_backends``. Please use ``airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth`` instead. + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR303 + + * [x] ``airflow.api.auth.backend.kerberos_auth`` → ``airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth`` + * [x] ``airflow.auth.managers.fab.api.auth.backend.kerberos_auth`` → ``airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth`` diff --git a/newsfragments/41708.significant.rst b/newsfragments/41708.significant.rst index 0b994cf0ffa30..5f14dd77d1107 100644 --- a/newsfragments/41708.significant.rst +++ b/newsfragments/41708.significant.rst @@ -1 +1,21 @@ Removed deprecated auth manager ``airflow.auth.managers.fab.fab_auth_manager`` and ``airflow.auth.managers.fab.security_manager.override``. Please use ``airflow.providers.fab.auth_manager.security_manager.override`` instead. 
+ +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR303 + + * [x] ``airflow.auth.managers.fab.fab_auth_manager`` → ``airflow.providers.fab.auth_manager.security_manager.override`` + * [x] ``airflow.auth.managers.fab.security_manager.override`` → ``airflow.providers.fab.auth_manager.security_manager.override`` diff --git a/newsfragments/41733.significant.rst b/newsfragments/41733.significant.rst index b838586e47e04..dda6856aad8b5 100644 --- a/newsfragments/41733.significant.rst +++ b/newsfragments/41733.significant.rst @@ -1 +1,20 @@ Removed deprecated function ``get_connections()`` in ``airflow.hooks.base.BaseHook``. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.hooks.base.BaseHook.get_connections`` → ``airflow.hooks.base.BaseHook.get_connection`` diff --git a/newsfragments/41735.significant.rst b/newsfragments/41735.significant.rst index 5e6c717f0596f..751ea773bf761 100644 --- a/newsfragments/41735.significant.rst +++ b/newsfragments/41735.significant.rst @@ -1 +1,56 @@ Removed deprecated module ``airflow.kubernetes``. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR303 + + * [x] ``airflow.kubernetes.kubernetes_helper_functions.add_pod_suffix`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.add_pod_suffix`` + * [x] ``airflow.kubernetes.kubernetes_helper_functions.annotations_for_logging_task_metadata`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.annotations_for_logging_task_metadata`` + * [x] ``airflow.kubernetes.kubernetes_helper_functions.annotations_to_key`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.annotations_to_key`` + * [x] ``airflow.kubernetes.kubernetes_helper_functions.create_pod_id`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.create_pod_id`` + * [x] ``airflow.kubernetes.kubernetes_helper_functions.get_logs_task_metadata`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.get_logs_task_metadata`` + * [x] ``airflow.kubernetes.kubernetes_helper_functions.rand_str`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str`` + * [x] ``airflow.kubernetes.pod.Port`` → ``kubernetes.client.models.V1ContainerPort`` + * [x] ``airflow.kubernetes.pod.Resources`` → ``kubernetes.client.models.V1ResourceRequirements`` + * [x] ``airflow.kubernetes.pod_launcher.PodLauncher`` → ``airflow.providers.cncf.kubernetes.pod_launcher.PodLauncher`` + * [x] ``airflow.kubernetes.pod_launcher.PodStatus`` → ``airflow.providers.cncf.kubernetes.pod_launcher.PodStatus`` + * [x] ``airflow.kubernetes.pod_launcher_deprecated.PodLauncher`` → ``airflow.providers.cncf.kubernetes.pod_launcher_deprecated.PodLauncher`` + * [x] ``airflow.kubernetes.pod_launcher_deprecated.PodStatus`` → ``airflow.providers.cncf.kubernetes.pod_launcher_deprecated.PodStatus`` + * [x] ``airflow.kubernetes.pod_launcher_deprecated.get_kube_client`` →
``airflow.providers.cncf.kubernetes.kube_client.get_kube_client`` + * [x] ``airflow.kubernetes.pod_launcher_deprecated.PodDefaults`` → ``airflow.providers.cncf.kubernetes.pod_generator_deprecated.PodDefaults`` + * [x] ``airflow.kubernetes.pod_runtime_info_env.PodRuntimeInfoEnv`` → ``kubernetes.client.models.V1EnvVar`` + * [x] ``airflow.kubernetes.volume.Volume`` → ``kubernetes.client.models.V1Volume`` + * [x] ``airflow.kubernetes.volume_mount.VolumeMount`` → ``kubernetes.client.models.V1VolumeMount`` + * [x] ``airflow.kubernetes.k8s_model.K8SModel`` → ``airflow.providers.cncf.kubernetes.k8s_model.K8SModel`` + * [x] ``airflow.kubernetes.k8s_model.append_to_pod`` → ``airflow.providers.cncf.kubernetes.k8s_model.append_to_pod`` + * [x] ``airflow.kubernetes.kube_client._disable_verify_ssl`` → ``airflow.providers.cncf.kubernetes.kube_client._disable_verify_ssl`` + * [x] ``airflow.kubernetes.kube_client._enable_tcp_keepalive`` → ``airflow.providers.cncf.kubernetes.kube_client._enable_tcp_keepalive`` + * [x] ``airflow.kubernetes.kube_client.get_kube_client`` → ``airflow.providers.cncf.kubernetes.kube_client.get_kube_client`` + * [x] ``airflow.kubernetes.pod_generator.datetime_to_label_safe_datestring`` → ``airflow.providers.cncf.kubernetes.pod_generator.datetime_to_label_safe_datestring`` + * [x] ``airflow.kubernetes.pod_generator.extend_object_field`` → ``airflow.providers.cncf.kubernetes.pod_generator.extend_object_field`` + * [x] ``airflow.kubernetes.pod_generator.label_safe_datestring_to_datetime`` → ``airflow.providers.cncf.kubernetes.pod_generator.label_safe_datestring_to_datetime`` + * [x] ``airflow.kubernetes.pod_generator.make_safe_label_value`` → ``airflow.providers.cncf.kubernetes.pod_generator.make_safe_label_value`` + * [x] ``airflow.kubernetes.pod_generator.merge_objects`` → ``airflow.providers.cncf.kubernetes.pod_generator.merge_objects`` + * [x] ``airflow.kubernetes.pod_generator.PodGenerator`` → ``airflow.providers.cncf.kubernetes.pod_generator.PodGenerator`` + * [x] ``airflow.kubernetes.pod_generator.PodGeneratorDeprecated`` → ``airflow.providers.cncf.kubernetes.pod_generator.PodGenerator`` + * [x] ``airflow.kubernetes.pod_generator.PodDefaults`` → ``airflow.providers.cncf.kubernetes.pod_generator_deprecated.PodDefaults`` + * [x] ``airflow.kubernetes.pod_generator.add_pod_suffix`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.add_pod_suffix`` + * [x] ``airflow.kubernetes.pod_generator.rand_str`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str`` + * [x] ``airflow.kubernetes.pod_generator_deprecated.make_safe_label_value`` → ``airflow.providers.cncf.kubernetes.pod_generator_deprecated.make_safe_label_value`` + * [x] ``airflow.kubernetes.pod_generator_deprecated.PodDefaults`` → ``airflow.providers.cncf.kubernetes.pod_generator_deprecated.PodDefaults`` + * [x] ``airflow.kubernetes.pod_generator_deprecated.PodGenerator`` → ``airflow.providers.cncf.kubernetes.pod_generator_deprecated.PodGenerator`` + * [x] ``airflow.kubernetes.secret.Secret`` → ``airflow.providers.cncf.kubernetes.secret.Secret`` + * [x] ``airflow.kubernetes.secret.K8SModel`` → ``airflow.providers.cncf.kubernetes.k8s_model.K8SModel`` diff --git a/newsfragments/41736.significant.rst b/newsfragments/41736.significant.rst index 2c90979d80f06..00f356217b59c 100644 --- a/newsfragments/41736.significant.rst +++ b/newsfragments/41736.significant.rst @@ -2,6 +2,27 @@ Removed deprecated parameters from
core-operators. Parameters removed: -- airflow.operators.datetime.BranchDateTimeOperator: use_task_execution_date -- airflow.operators.trigger_dagrun.TriggerDagRunOperator: execution_date -- airflow.operators.weekday.BranchDayOfWeekOperator: use_task_execution_day +- ``airflow.operators.datetime.BranchDateTimeOperator``: ``use_task_execution_date`` +- ``airflow.operators.trigger_dagrun.TriggerDagRunOperator``: ``execution_date`` +- ``airflow.operators.weekday.BranchDayOfWeekOperator``: ``use_task_execution_day`` + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] argument ``execution_date`` in ``airflow.operators.trigger_dagrun.TriggerDagRunOperator`` + * [x] argument ``use_task_execution_date`` → ``use_task_logical_date`` in ``airflow.operators.datetime.BranchDateTimeOperator`` + * [x] argument ``use_task_execution_day`` → ``use_task_logical_date`` in ``airflow.operators.weekday.BranchDayOfWeekOperator`` diff --git a/newsfragments/41737.significant.rst b/newsfragments/41737.significant.rst index 55704581be9b2..1bf2ed6c13663 100644 --- a/newsfragments/41737.significant.rst +++ b/newsfragments/41737.significant.rst @@ -1 +1,20 @@ Removed deprecated ``TaskStateTrigger`` from ``airflow.triggers.external_task`` module. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.triggers.external_task.TaskStateTrigger`` diff --git a/newsfragments/41739.significant.rst b/newsfragments/41739.significant.rst index 51947218eeec5..fc92582ee6d2a 100644 --- a/newsfragments/41739.significant.rst +++ b/newsfragments/41739.significant.rst @@ -1,3 +1,14 @@ Removed backfill job command cli option ``ignore-first-depends-on-past``. Its value was always set to ``True``. There is no replacement cli option. Removed backfill job command cli option ``treat-dag-as-regex``. Please use ``treat-dag-id-as-regex`` instead. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [x] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41748.significant.rst b/newsfragments/41748.significant.rst index b44f3640462de..33546c3841995 100644 --- a/newsfragments/41748.significant.rst +++ b/newsfragments/41748.significant.rst @@ -1 +1,20 @@ Deprecated module ``airflow.hooks.dbapi`` removed. Please use ``airflow.providers.common.sql.hooks.sql`` instead. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR303 + + * [x] ``airflow.hooks.dbapi`` → ``airflow.providers.common.sql.hooks.sql`` diff --git a/newsfragments/41758.significant.rst b/newsfragments/41758.significant.rst index f8b86d88564a5..cf87332b94cfb 100644 --- a/newsfragments/41758.significant.rst +++ b/newsfragments/41758.significant.rst @@ -10,3 +10,28 @@ Removed deprecated functions and modules from ``airflow.www`` module. ``airflow.utils.log.secrets_masker.get_sensitive_variables_fields`` instead.
- Removed the method ``should_hide_value_for_key()`` from ``airflow.www.utils``: Please use ``airflow.utils.log.secrets_masker.should_hide_value_for_key`` instead. + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [x] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.www.auth.has_access`` → ``airflow.www.auth.has_access_*`` + * [x] ``airflow.www.utils.get_sensitive_variables_fields`` → ``airflow.utils.log.secrets_masker.get_sensitive_variables_fields`` + * [x] ``airflow.www.utils.should_hide_value_for_key`` → ``airflow.utils.log.secrets_masker.should_hide_value_for_key`` + + * AIR303 + + * [x] ``airflow.www.security.FabAirflowSecurityManagerOverride`` → ``airflow.providers.fab.auth_manager.security_manager.override.FabAirflowSecurityManagerOverride`` diff --git a/newsfragments/41761.significant.rst b/newsfragments/41761.significant.rst index c54f53dd51ed6..20b472783e142 100644 --- a/newsfragments/41761.significant.rst +++ b/newsfragments/41761.significant.rst @@ -5,3 +5,28 @@ Removed a set of deprecations in BaseOperator. - Support for trigger rule ``dummy`` removed. Please use ``always``. - Support for trigger rule ``none_failed_or_skipped`` removed. Please use ``none_failed_min_one_success``. - Support to load ``BaseOperatorLink`` via ``airflow.models.baseoperator`` module removed. +- Config ``operators.allow_illegal_arguments`` removed. + +* Types of change + + * [x] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``operators.allow_illegal_arguments`` + + * ruff + + * AIR302 + + * [x] ``airflow.utils.trigger_rule.TriggerRule.NONE_FAILED_OR_SKIPPED`` + * [x] argument ``task_concurrency`` → ``max_active_tis_per_dag`` in ``BaseOperator`` and its subclasses diff --git a/newsfragments/41762.significant.rst b/newsfragments/41762.significant.rst index 0a768dde2f983..a2d6271a247ed 100644 --- a/newsfragments/41762.significant.rst +++ b/newsfragments/41762.significant.rst @@ -5,3 +5,14 @@ Removed a set of deprecations in ``Connection`` from ``airflow.models``. - Removed utility method ``parse_netloc_to_hostname()``. - Removed utility method ``parse_from_uri()``. - Removed utility methods ``log_info()`` and ``debug_info()``. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41774.significant.rst b/newsfragments/41774.significant.rst index 2d6456fb14952..048e2948927c5 100644 --- a/newsfragments/41774.significant.rst +++ b/newsfragments/41774.significant.rst @@ -10,3 +10,14 @@ Removed a set of deprecations in ``DAG`` from ``airflow.models``. - Removed implicit support to call ``create_dagrun()`` without data interval. - Removed support for deprecated parameter ``concurrency`` in ``DagModel``. - Removed support for ``datetime`` in ``DagModel.calculate_dagrun_date_fields``. Use ``DataInterval``.
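A hedged sketch of the now-explicit data interval for ``create_dagrun`` (the run id, dates and surrounding keyword arguments are assumptions for illustration; only the explicit-interval requirement comes from the list above):

.. code-block:: python

    import pendulum
    from airflow.timetables.base import DataInterval
    from airflow.utils.state import DagRunState

    start = pendulum.datetime(2024, 1, 1, tz="UTC")
    interval = DataInterval(start=start, end=start.add(days=1))  # no longer inferred implicitly

    dag.create_dagrun(  # ``dag`` is assumed to be an existing DAG object
        run_id="manual__2024-01-01T00:00:00+00:00",
        state=DagRunState.QUEUED,
        data_interval=interval,
        # ...remaining keyword arguments unchanged; the point is the explicit interval
    )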
+ +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [x] Code interface changes diff --git a/newsfragments/41776.significant.rst b/newsfragments/41776.significant.rst index 62bc7986d2ba3..0f8f53648da11 100644 --- a/newsfragments/41776.significant.rst +++ b/newsfragments/41776.significant.rst @@ -3,3 +3,14 @@ Removed a set of deprecations in ``airflow.models.param``. - Removed deprecated direct access to DagParam as module. Please import from ``airflow.models.param``. - Ensure all param values are JSON serializable and raise a ``ParamValidationError`` if not. - Ensure parsed date and time values are RFC3339 compliant. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41778.significant.rst b/newsfragments/41778.significant.rst index a26f1af939878..017ec59d3a2c8 100644 --- a/newsfragments/41778.significant.rst +++ b/newsfragments/41778.significant.rst @@ -2,3 +2,14 @@ Removed a set of deprecations in ``airflow.models.dagrun``. - Removed deprecated method ``DagRun.get_run()``. Instead you should use standard SQLAlchemy DagRun model retrieval. - Removed deprecated method ``DagRun.get_log_filename_template()``. Please use ``get_log_template()`` instead. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41779.significant.rst b/newsfragments/41779.significant.rst index f25e0c9080a5a..0c2861a856891 100644 --- a/newsfragments/41779.significant.rst +++ b/newsfragments/41779.significant.rst @@ -1 +1,12 @@ Remove deprecated support for ``airflow.models.errors.ImportError`` which has been renamed to ``ParseImportError``. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41780.significant.rst b/newsfragments/41780.significant.rst index 2e7aed291cf0c..453c961d5af8e 100644 --- a/newsfragments/41780.significant.rst +++ b/newsfragments/41780.significant.rst @@ -1 +1,12 @@ Remove deprecated support for passing ``execution_date`` to ``airflow.models.skipmixin.SkipMixin.skip()``. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41784.significant.rst b/newsfragments/41784.significant.rst index d0d8a07cd0a72..86669b5c011af 100644 --- a/newsfragments/41784.significant.rst +++ b/newsfragments/41784.significant.rst @@ -10,3 +10,14 @@ Removed a set of deprecations in ``airflow.models.taskinstance``. - Removed deprecated property ``previous_start_date_success`` from ``TaskInstance``. Please use ``get_previous_start_date`` instead. - Removed deprecated function ``as_dict`` from ``SimpleTaskInstance``. Please use ``BaseSerialization.serialize`` instead. - Removed deprecated function ``from_dict`` from ``SimpleTaskInstance``. Please use ``BaseSerialization.deserialize`` instead.
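For the ``SimpleTaskInstance`` round trip, a minimal sketch of the replacement calls named above (``sti`` is assumed to be an existing ``SimpleTaskInstance``):

.. code-block:: python

    from airflow.serialization.serialized_objects import BaseSerialization

    # Before (removed): payload = sti.as_dict(); restored = SimpleTaskInstance.from_dict(payload)
    payload = BaseSerialization.serialize(sti)         # replaces SimpleTaskInstance.as_dict
    restored = BaseSerialization.deserialize(payload)  # replaces SimpleTaskInstance.from_dict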
+ +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41808.significant.rst b/newsfragments/41808.significant.rst index 91316bbd95959..38b66a9959588 100644 --- a/newsfragments/41808.significant.rst +++ b/newsfragments/41808.significant.rst @@ -6,3 +6,14 @@ Removed methods: - ``find_for_task_instance()`` Note: there are no replacements. Direct access to DB is discouraged and will not be possible in Airflow 3 for tasks. The public REST API is the future way to interact with Airflow. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [x] Code interface changes diff --git a/newsfragments/41857.significant.rst b/newsfragments/41857.significant.rst index f0b06f2811b1f..fd88c97bfa522 100644 --- a/newsfragments/41857.significant.rst +++ b/newsfragments/41857.significant.rst @@ -1,3 +1,12 @@ -**Breaking Change** - Airflow core now depends on Pydantic v2. If you have Pydantic v1 installed, please upgrade. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [x] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41910.significant.rst b/newsfragments/41910.significant.rst index 08b0cb9d30cc7..313291486f2e4 100644 --- a/newsfragments/41910.significant.rst +++ b/newsfragments/41910.significant.rst @@ -1 +1,20 @@ Removed deprecated method ``requires_access`` from module ``airflow.api_connexion.security``. Please use ``requires_access_*`` instead. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.api_connexion.security.requires_access`` → ``airflow.api_connexion.security.requires_access_*`` diff --git a/newsfragments/41964.significant.rst b/newsfragments/41964.significant.rst index 918587ae6ca21..f004b61636733 100644 --- a/newsfragments/41964.significant.rst +++ b/newsfragments/41964.significant.rst @@ -4,3 +4,14 @@ The format of the output with that flag can be expensive to generate and extreme ``airflow dag show`` is a better way to visualize the relationship of tasks in a DAG. ``DAG.tree_view`` and ``DAG.get_tree_view`` have also been removed. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [x] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/41975.significant.rst b/newsfragments/41975.significant.rst index 7931329dd6c23..1569035d713ed 100644 --- a/newsfragments/41975.significant.rst +++ b/newsfragments/41975.significant.rst @@ -1 +1,25 @@ Metrics basic deprecated validators (``AllowListValidator`` and ``BlockListValidator``) were removed in favor of pattern matching. Pattern matching validators (``PatternAllowListValidator`` and ``PatternBlockListValidator``) are enabled by default. Configuration parameter ``metrics_use_pattern_match`` was removed from the ``metrics`` section.
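A sketch of what pattern matching means for the default allow-list validator, assuming the validators keep the comma-separated constructor string and a boolean ``test`` method:

.. code-block:: python

    from airflow.metrics.validators import PatternAllowListValidator

    # Each comma-separated entry is treated as a regular expression, not a literal prefix.
    validator = PatternAllowListValidator("scheduler,^ti_")

    assert validator.test("scheduler.heartbeat")   # matches the "scheduler" pattern
    assert validator.test("ti_failures")           # matches ^ti_
    assert not validator.test("dagrun.duration")   # filtered out by the allow-list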
+ +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``metrics.metrics_use_pattern_match`` + + * ruff + + * AIR302 + + * [x] ``airflow.metrics.validators.AllowListValidator`` → ``airflow.metrics.validators.PatternAllowListValidator`` + * [x] ``airflow.metrics.validators.BlockListValidator`` → ``airflow.metrics.validators.PatternBlockListValidator`` diff --git a/newsfragments/42023.significant.rst b/newsfragments/42023.significant.rst index 9cd68cebd0d66..48e8f7344da53 100644 --- a/newsfragments/42023.significant.rst +++ b/newsfragments/42023.significant.rst @@ -1,9 +1,19 @@ -**Breaking Changes** +Rename ``Dataset`` as ``Asset`` in API endpoints -* Rename property run_type value ``dataset_triggered`` as ``asset_triggered`` in DAGRun endpoint +* list of changes -* Rename property ``dataset_expression`` as ``asset_expression`` in DAGDetail endpoint + * Rename property run_type value ``dataset_triggered`` as ``asset_triggered`` in DAGRun endpoint + * Rename property ``dataset_expression`` as ``asset_expression`` in DAGDetail endpoint + * Change the string ``dataset_triggered`` in RUN_ID_REGEX as ``asset_triggered`` which affects the valid run id that a user can provide + * Rename ``dataset`` as ``asset`` in all the database tables -* Change the string ``dataset_triggered`` in RUN_ID_REGEX as ``asset_triggered`` which affects the valid run id that an user can provide +* Types of change -* Rename ``dataset`` as ``asset`` in all the database tables + * [ ] Dag changes + * [ ] Config changes + * [x] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42042.significant.rst b/newsfragments/42042.significant.rst index 8a22f5d4db619..a743da5326c9d 100644 --- a/newsfragments/42042.significant.rst +++ b/newsfragments/42042.significant.rst @@ -1 +1,12 @@ Removed ``is_active`` property from ``BaseUser``. This property is no longer used. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [x] Code interface changes diff --git a/newsfragments/42054.significant.rst b/newsfragments/42054.significant.rst index aebf70757fa07..dbe243df3854e 100644 --- a/newsfragments/42054.significant.rst +++ b/newsfragments/42054.significant.rst @@ -2,3 +2,14 @@ Dataset and DatasetAlias are no longer hashable This means they can no longer be used as dict keys or put into a set. Dataset's equality logic is also tweaked slightly to consider the extra dict. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42060.significant.rst b/newsfragments/42060.significant.rst index 3f767d80eef7d..fc806729e0ec0 100644 --- a/newsfragments/42060.significant.rst +++ b/newsfragments/42060.significant.rst @@ -1 +1,21 @@ Removed deprecated configuration ``stalled_task_timeout`` from ``celery``, ``task_adoption_timeout`` from ``celery`` and ``worker_pods_pending_timeout`` from ``kubernetes_executor``. Please use ``task_queued_timeout`` from ``scheduler`` instead.
+ +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``celery.stalled_task_timeout`` → ``scheduler.task_queued_timeout`` + * [x] ``celery.task_adoption_timeout`` → ``scheduler.task_queued_timeout`` + * [x] ``kubernetes_executor.worker_pods_pending_timeout`` → ``scheduler.task_queued_timeout`` diff --git a/newsfragments/42088.significant.rst b/newsfragments/42088.significant.rst index fbff8dbd0a1e3..db2d65c3bd882 100644 --- a/newsfragments/42088.significant.rst +++ b/newsfragments/42088.significant.rst @@ -12,3 +12,31 @@ Removed deprecated metrics configuration. * Removed deprecated configuration ``statsd_datadog_tags`` from ``scheduler``. Please use ``statsd_datadog_tags`` from ``metrics`` instead. * Removed deprecated configuration ``statsd_datadog_metrics_tags`` from ``scheduler``. Please use ``statsd_datadog_metrics_tags`` from ``metrics`` instead. * Removed deprecated configuration ``statsd_custom_client_path`` from ``scheduler``. Please use ``statsd_custom_client_path`` from ``metrics`` instead. + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``metrics.statsd_allow_list`` → ``metrics.metrics_allow_list`` + * [x] ``metrics.statsd_block_list`` → ``metrics.metrics_block_list`` + * [x] ``scheduler.statsd_on`` → ``metrics.statsd_on`` + * [x] ``scheduler.statsd_host`` → ``metrics.statsd_host`` + * [x] ``scheduler.statsd_port`` → ``metrics.statsd_port`` + * [x] ``scheduler.statsd_prefix`` → ``metrics.statsd_prefix`` + * [x] ``scheduler.statsd_allow_list`` → ``metrics.statsd_allow_list`` + * [x] ``scheduler.stat_name_handler`` → ``metrics.stat_name_handler`` + * [x] ``scheduler.statsd_datadog_enabled`` → ``metrics.statsd_datadog_enabled`` + * [x] ``scheduler.statsd_datadog_tags`` → ``metrics.statsd_datadog_tags`` + * [x] ``scheduler.statsd_datadog_metrics_tags`` → ``metrics.statsd_datadog_metrics_tags`` + * [x] ``scheduler.statsd_custom_client_path`` → ``metrics.statsd_custom_client_path`` diff --git a/newsfragments/42100.significant.rst b/newsfragments/42100.significant.rst index c256d575a01bf..801626a21c158 100644 --- a/newsfragments/42100.significant.rst +++ b/newsfragments/42100.significant.rst @@ -19,3 +19,38 @@ Removed deprecated logging configuration. * Removed deprecated configuration ``log_processor_filename_template`` from ``core``. Please use ``log_processor_filename_template`` from ``logging`` instead. * Removed deprecated configuration ``dag_processor_manager_log_location`` from ``core``. Please use ``dag_processor_manager_log_location`` from ``logging`` instead. * Removed deprecated configuration ``task_log_reader`` from ``core``. Please use ``task_log_reader`` from ``logging`` instead.
+ +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``core.interleave_timestamp_parser`` → ``logging.interleave_timestamp_parser`` + * [x] ``core.base_log_folder`` → ``logging.base_log_folder`` + * [x] ``core.remote_logging`` → ``logging.remote_logging`` + * [x] ``core.remote_log_conn_id`` → ``logging.remote_log_conn_id`` + * [x] ``core.remote_base_log_folder`` → ``logging.remote_base_log_folder`` + * [x] ``core.encrypt_s3_logs`` → ``logging.encrypt_s3_logs`` + * [x] ``core.logging_level`` → ``logging.logging_level`` + * [x] ``core.fab_logging_level`` → ``logging.fab_logging_level`` + * [x] ``core.logging_config_class`` → ``logging.logging_config_class`` + * [x] ``core.colored_console_log`` → ``logging.colored_console_log`` + * [x] ``core.colored_log_format`` → ``logging.colored_log_format`` + * [x] ``core.colored_formatter_class`` → ``logging.colored_formatter_class`` + * [x] ``core.log_format`` → ``logging.log_format`` + * [x] ``core.simple_log_format`` → ``logging.simple_log_format`` + * [x] ``core.task_log_prefix_template`` → ``logging.task_log_prefix_template`` + * [x] ``core.log_filename_template`` → ``logging.log_filename_template`` + * [x] ``core.log_processor_filename_template`` → ``logging.log_processor_filename_template`` + * [x] ``core.dag_processor_manager_log_location`` → ``logging.dag_processor_manager_log_location`` + * [x] ``core.task_log_reader`` → ``logging.task_log_reader`` diff --git a/newsfragments/42126.significant.rst b/newsfragments/42126.significant.rst index b7fe76179be08..e1b6f431ac4fb 100644 --- a/newsfragments/42126.significant.rst +++ b/newsfragments/42126.significant.rst @@ -12,3 +12,32 @@ Removed deprecated database configuration. * Removed deprecated configuration ``sql_alchemy_connect_args`` from ``core``. Please use ``sql_alchemy_connect_args`` from ``database`` instead. * Removed deprecated configuration ``load_default_connections`` from ``core``. Please use ``load_default_connections`` from ``database`` instead. * Removed deprecated configuration ``max_db_retries`` from ``core``. Please use ``max_db_retries`` from ``database`` instead. 
+ +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``core.sql_alchemy_conn`` → ``database.sql_alchemy_conn`` + * [x] ``core.sql_engine_encoding`` → ``database.sql_engine_encoding`` + * [x] ``core.sql_engine_collation_for_ids`` → ``database.sql_engine_collation_for_ids`` + * [x] ``core.sql_alchemy_pool_enabled`` → ``database.sql_alchemy_pool_enabled`` + * [x] ``core.sql_alchemy_pool_size`` → ``database.sql_alchemy_pool_size`` + * [x] ``core.sql_alchemy_max_overflow`` → ``database.sql_alchemy_max_overflow`` + * [x] ``core.sql_alchemy_pool_recycle`` → ``database.sql_alchemy_pool_recycle`` + * [x] ``core.sql_alchemy_pool_pre_ping`` → ``database.sql_alchemy_pool_pre_ping`` + * [x] ``core.sql_alchemy_schema`` → ``database.sql_alchemy_schema`` + * [x] ``core.sql_alchemy_connect_args`` → ``database.sql_alchemy_connect_args`` + * [x] ``core.load_default_connections`` → ``database.load_default_connections`` + * [x] ``core.max_db_retries`` → ``database.max_db_retries`` diff --git a/newsfragments/42129.significant.rst b/newsfragments/42129.significant.rst index 06a5da7fc889a..406850a7792e6 100644 --- a/newsfragments/42129.significant.rst +++ b/newsfragments/42129.significant.rst @@ -15,3 +15,34 @@ Removed deprecated configuration. * Removed deprecated configuration ``auth_rate_limited`` from ``webserver``. Please use ``auth_rate_limited`` from ``fab`` instead. * Removed deprecated configuration ``auth_rate_limit`` from ``webserver``. Please use ``auth_rate_limit`` from ``fab`` instead. * Removed deprecated configuration section ``kubernetes``. Please use ``kubernetes_executor`` instead.
+ +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``core.worker_precheck`` → ``celery.worker_precheck`` + * [x] ``scheduler.max_threads`` → ``scheduler.parsing_processes`` + * [x] ``celery.default_queue`` → ``operators.default_queue`` + * [x] ``admin.hide_sensitive_variable_fields`` → ``core.hide_sensitive_var_conn_fields`` + * [x] ``admin.sensitive_variable_fields`` → ``core.sensitive_var_conn_names`` + * [x] ``core.non_pooled_task_slot_count`` → ``core.default_pool_task_slot_count`` + * [x] ``core.dag_concurrency`` → ``core.max_active_tasks_per_dag`` + * [x] ``api.access_control_allow_origin`` → ``api.access_control_allow_origins`` + * [x] ``api.auth_backend`` → ``api.auth_backends`` + * [x] ``scheduler.deactivate_stale_dags_interval`` → ``scheduler.parsing_cleanup_interval`` + * [x] ``kubernetes_executor.worker_pods_pending_timeout_check_interval`` → ``scheduler.task_queued_timeout_check_interval`` + * [x] ``webserver.update_fab_perms`` → ``fab.update_fab_perms`` + * [x] ``webserver.auth_rate_limited`` → ``fab.auth_rate_limited`` + * [x] ``webserver.auth_rate_limit`` → ``fab.auth_rate_limit`` + * [x] ``kubernetes`` → ``kubernetes_executor`` diff --git a/newsfragments/42137.significant.rst b/newsfragments/42137.significant.rst index 0e1848933a0ae..1055032dc2a64 100644 --- a/newsfragments/42137.significant.rst +++ b/newsfragments/42137.significant.rst @@ -1 +1,12 @@ Optional ``[saml]`` extra has been removed from Airflow core; instead, the Amazon provider now has ``saml`` as a required dependency. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [x] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42280.significant.rst b/newsfragments/42280.significant.rst index 00d80d259b123..35e37727cda6a 100644 --- a/newsfragments/42280.significant.rst +++ b/newsfragments/42280.significant.rst @@ -3,3 +3,14 @@ Removed deprecated REST API endpoints: * /api/v1/roles. Use /auth/fab/v1/roles instead * /api/v1/permissions. Use /auth/fab/v1/permissions instead * /api/v1/users. Use /auth/fab/v1/users instead + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [x] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42285.significant.rst b/newsfragments/42285.significant.rst index 8f8cfa0dee298..c7a545664b484 100644 --- a/newsfragments/42285.significant.rst +++ b/newsfragments/42285.significant.rst @@ -1 +1,27 @@ The SLA feature is removed in Airflow 3.0, to be replaced with Airflow Alerts in 3.1. + +* Types of change + + * [x] Dag changes + * [x] Config changes + * [x] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +..
List the migration rules needed for this change (see https://github.com/apache/airflow/issues/41641) + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``core.check_slas`` + + * ruff + + * AIR302 + + * [x] argument ``sla`` in ``BaseOperator`` and its subclasses + * [x] argument ``sla_miss_callback`` in ``DAG`` diff --git a/newsfragments/42343.significant.rst b/newsfragments/42343.significant.rst index d9e1ba6b1229b..7af4954e265cc 100644 --- a/newsfragments/42343.significant.rst +++ b/newsfragments/42343.significant.rst @@ -5,3 +5,14 @@ receives ``Dataset`` objects instead. A list of ``DatasetModel`` objects is created inside, and returned by the function. Also, the ``session`` argument is now keyword-only. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42404.significant.rst b/newsfragments/42404.significant.rst index 47546b76ffaed..c9d1212a3f204 100644 --- a/newsfragments/42404.significant.rst +++ b/newsfragments/42404.significant.rst @@ -4,3 +4,14 @@ The shift towards using ``run_id`` as the sole identifier for DAG runs eliminate - Removed ``logical_date`` arguments from public APIs and Python functions related to DAG run lookups. - ``run_id`` is now the exclusive identifier for DAG runs in these contexts. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [x] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42436.significant.rst b/newsfragments/42436.significant.rst index d9dbcfc4c9f5d..f3c0117e641a5 100644 --- a/newsfragments/42436.significant.rst +++ b/newsfragments/42436.significant.rst @@ -5,3 +5,14 @@ been changed to ``glob``, which better matches the ignore file behavior of many popular tools. To revert to the previous behavior, set the configuration to ``regexp``.
+ +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42548.significant.rst b/newsfragments/42548.significant.rst index 28d6795eebcc6..93a5a7db417e4 100644 --- a/newsfragments/42548.significant.rst +++ b/newsfragments/42548.significant.rst @@ -1 +1,12 @@ Remove is_backfill attribute from DagRun object + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42579.significant.rst b/newsfragments/42579.significant.rst index 8a43583e25766..fd386f83385ea 100644 --- a/newsfragments/42579.significant.rst +++ b/newsfragments/42579.significant.rst @@ -1,20 +1,32 @@ -**Breaking Change** +Rename ``Dataset`` endpoints as ``Asset`` endpoints -* Rename dataset endpoints as asset endpoints +* list of changes - * Rename ``/datasets`` as ``/assets`` - * Rename ``/datasets/{uri}`` as ``/assets/{uri}`` - * Rename ``/datasets/events`` as ``/assets/events`` - * Rename ``/datasets/queuedEvent/{uri}`` as ``/ui/next_run_assets/upstream`` - * Rename ``/dags/{dag_id}/dagRuns/{dag_run_id}/upstreamDatasetEvents`` as ``/ui/next_run_assets/upstream`` - * Rename ``/dags/{dag_id}/datasets/queuedEvent/{uri}`` as ``/ui/next_run_assets/upstream`` - * Rename ``/dags/{dag_id}/datasets/queuedEvent`` as ``/ui/next_run_assets/upstream`` - * Rename ``/ui/next_run_datasets/upstream`` as ``/ui/next_run_assets/upstream`` + * Rename dataset endpoints as asset endpoints + * Rename ``/datasets`` as ``/assets`` + * Rename ``/datasets/{uri}`` as ``/assets/{uri}`` + * Rename ``/datasets/events`` as ``/assets/events`` + * Rename ``/datasets/queuedEvent/{uri}`` as ``/ui/next_run_assets/upstream`` + * Rename ``/dags/{dag_id}/dagRuns/{dag_run_id}/upstreamDatasetEvents`` as ``/ui/next_run_assets/upstream`` + * Rename ``/dags/{dag_id}/datasets/queuedEvent/{uri}`` as ``/ui/next_run_assets/upstream`` + * Rename ``/dags/{dag_id}/datasets/queuedEvent`` as ``/ui/next_run_assets/upstream`` + * Rename ``/ui/next_run_datasets/upstream`` as ``/ui/next_run_assets/upstream`` -* Rename dataset schema as asset endpoints + * Rename dataset schema as asset endpoints - * Rename ``AssetCollection.datasets`` as ``AssetCollection.assets`` - * Rename ``AssetEventCollection.dataset_events`` as ``AssetEventCollection.asset_events`` - * Rename ``AssetEventCollectionSchema.dataset_events`` as ``AssetEventCollectionSchema.asset_events`` - * Rename ``CreateAssetEventSchema.dataset_uri`` as ``CreateAssetEventSchema.asset_uri`` + * Rename ``AssetCollection.datasets`` as ``AssetCollection.assets`` + * Rename ``AssetEventCollection.dataset_events`` as ``AssetEventCollection.asset_events`` + * Rename ``AssetEventCollectionSchema.dataset_events`` as ``AssetEventCollectionSchema.asset_events`` + * Rename ``CreateAssetEventSchema.dataset_uri`` as ``CreateAssetEventSchema.asset_uri`` + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [x] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42640.significant.rst b/newsfragments/42640.significant.rst index a6f09eba8b437..e5d0e81f3da11 100644 --- a/newsfragments/42640.significant.rst +++ b/newsfragments/42640.significant.rst @@ -1 +1,12 @@ 
Removed deprecated custom dag dependency detector. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42647.significant.rst b/newsfragments/42647.significant.rst index c4d53d431c91a..71bee3bb3fb9b 100644 --- a/newsfragments/42647.significant.rst +++ b/newsfragments/42647.significant.rst @@ -1,6 +1,5 @@ Removed deprecated aliases support for providers. - * Removed deprecated ``atlas`` alias support. Please use ``apache-atlas`` instead. * Removed deprecated ``aws`` alias support. Please use ``amazon`` instead. * Removed deprecated ``azure`` alias support. Please use ``microsoft-azure`` instead. @@ -18,3 +17,14 @@ Removed deprecated aliases support for providers. * Removed deprecated ``spark`` alias support. Please use ``apache-spark`` instead. * Removed deprecated ``webhdfs`` alias support. Please use ``apache-webhdfs`` instead. * Removed deprecated ``winrm`` alias support. Please use ``microsoft-winrm`` instead. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42658.significant.rst b/newsfragments/42658.significant.rst index a00702cf69fc6..84381da3f02a5 100644 --- a/newsfragments/42658.significant.rst +++ b/newsfragments/42658.significant.rst @@ -1 +1,12 @@ * Changing dag_id from flag (-d, --dag-id) to a positional argument in the 'dags list-runs' CLI command. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [x] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42660.significant.rst b/newsfragments/42660.significant.rst index cdedf64687471..924259e11151d 100644 --- a/newsfragments/42660.significant.rst +++ b/newsfragments/42660.significant.rst @@ -1 +1,12 @@ Deprecated field ``concurrency`` from ``DAGDetailSchema`` has been removed. Please use ``max_active_tasks`` from ``DAGDetailSchema`` instead. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [x] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42739.significant.rst b/newsfragments/42739.significant.rst index 542f761720b75..eb565c37e494a 100644 --- a/newsfragments/42739.significant.rst +++ b/newsfragments/42739.significant.rst @@ -1 +1,12 @@ Remove support for Python 3.8 as this version is not maintained within Python release schedule, see https://peps.python.org/pep-0596/. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [x] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42776.significant.rst b/newsfragments/42776.significant.rst index 5fad7dfe79b66..188bcd8315442 100644 --- a/newsfragments/42776.significant.rst +++ b/newsfragments/42776.significant.rst @@ -1 +1,12 @@ Removed deprecated ``Chainable`` type from ``BaseOperator``. 
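Since ``Chainable`` was only a typing alias, its removal does not change how dependencies are wired; ``chain`` and the ``>>`` operator keep working. A hedged sketch (the ``EmptyOperator`` import path is an assumption for illustration and may vary by version):

.. code-block:: python

    from airflow import DAG
    from airflow.models.baseoperator import chain
    from airflow.operators.empty import EmptyOperator  # assumed import path

    with DAG(dag_id="chain_demo", schedule=None):
        a = EmptyOperator(task_id="a")
        b = EmptyOperator(task_id="b")
        c = EmptyOperator(task_id="c")

        # Equivalent to a >> b >> c; no Chainable annotation was ever required.
        chain(a, b, c)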
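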
+ +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/42794.significant.rst b/newsfragments/42794.significant.rst index 2303d90e9107f..7113d0ced5094 100644 --- a/newsfragments/42794.significant.rst +++ b/newsfragments/42794.significant.rst @@ -17,6 +17,6 @@ Move filesystem, package_index, subprocess hooks to standard provider * AIR303 - * [ ] ``airflow.hooks.filesystem.*`` → ``airflow.providers.standard.hooks.filesystem.*`` - * [ ] ``airflow.hooks.package_index.*`` → ``airflow.providers.standard.hooks.package_index.*`` - * [ ] ``airflow.hooks.subprocess.*`` → ``airflow.providers.standard.hooks.subprocess.*`` + * [x] ``airflow.hooks.filesystem.*`` → ``airflow.providers.standard.hooks.filesystem.*`` + * [x] ``airflow.hooks.package_index.*`` → ``airflow.providers.standard.hooks.package_index.*`` + * [x] ``airflow.hooks.subprocess.*`` → ``airflow.providers.standard.hooks.subprocess.*`` diff --git a/newsfragments/42953.significant.rst b/newsfragments/42953.significant.rst index f138dde730340..eb7c229b9c3d5 100644 --- a/newsfragments/42953.significant.rst +++ b/newsfragments/42953.significant.rst @@ -2,3 +2,14 @@ Previously, this was evaluated across all runs of the dag. This behavior change was passed by lazy consensus. Vote thread: https://lists.apache.org/thread/9o84d3yn934m32gtlpokpwtbbmtxj47l. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/43067.significant.rst b/newsfragments/43067.significant.rst index d57fec6be9e72..65501c54a6bf5 100644 --- a/newsfragments/43067.significant.rst +++ b/newsfragments/43067.significant.rst @@ -2,3 +2,14 @@ Remove DAG.get_num_active_runs We don't need this function. There's already an almost-identical function on DagRun that we can use, namely DagRun.active_runs_of_dags. Also, make DagRun.active_runs_of_dags private. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/43073.significant.rst b/newsfragments/43073.significant.rst index 46bd71a6f0d80..9dd26a3bf175f 100644 --- a/newsfragments/43073.significant.rst +++ b/newsfragments/43073.significant.rst @@ -1 +1,12 @@ Rename ``DagRunTriggeredByType.DATASET`` as ``DagRunTriggeredByType.ASSET`` and all the name ``dataset`` in all the UI component to ``asset``. 
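The ``DagRunTriggeredByType`` rename only affects the enum member, not the surrounding API. A sketch of the before/after in user code (the ``airflow.utils.types`` import path is an assumption based on where this enum has historically lived):

.. code-block:: python

    from airflow.utils.types import DagRunTriggeredByType  # assumed location

    def describe_trigger(dag_run) -> str:
        # Airflow 2.x spelled this DagRunTriggeredByType.DATASET
        if dag_run.triggered_by == DagRunTriggeredByType.ASSET:
            return "triggered by an asset event"
        return f"triggered by {dag_run.triggered_by.value}"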
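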
+ +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/43096.significant.rst b/newsfragments/43096.significant.rst index b252e39916c03..1803b9ac041b7 100644 --- a/newsfragments/43096.significant.rst +++ b/newsfragments/43096.significant.rst @@ -1 +1,20 @@ Removed auth backend ``airflow.api.auth.backend.default`` + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR303 + + * [x] ``airflow.api.auth.backend.default`` → ``airflow.providers.fab.auth_manager.api.auth.backend.session`` diff --git a/newsfragments/43102.significant.rst b/newsfragments/43102.significant.rst index e626ba5a42c34..18e59e5ac22ac 100644 --- a/newsfragments/43102.significant.rst +++ b/newsfragments/43102.significant.rst @@ -16,3 +16,14 @@ After: .. code-block:: http://<host>:<port>/?param=item1&param=item2 + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [x] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/43183.significant.rst b/newsfragments/43183.significant.rst index e363824b6db5f..7f3e28aa0c5a0 100644 --- a/newsfragments/43183.significant.rst +++ b/newsfragments/43183.significant.rst @@ -3,3 +3,20 @@ Remove TaskContextLogger We introduced this as a way to inject messages into task logs from places other than the task execution context. We later realized that we were better off just using the Log table. + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``logging.enable_task_context_logger`` diff --git a/newsfragments/43289.significant.rst b/newsfragments/43289.significant.rst index 15063202640fb..aa6a51d89907f 100644 --- a/newsfragments/43289.significant.rst +++ b/newsfragments/43289.significant.rst @@ -2,3 +2,24 @@ Support for adding executors via Airflow Plugins is removed Executors should no longer be registered or imported via Airflow's plugin mechanism -- these types of classes are just treated as plain Python classes by Airflow, so there is no need to register them with Airflow. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [x] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] extension ``executors`` in ``airflow.plugins_manager.AirflowPlugin`` + * [x] extension ``operators`` in ``airflow.plugins_manager.AirflowPlugin`` + * [x] extension ``sensors`` in ``airflow.plugins_manager.AirflowPlugin`` diff --git a/newsfragments/43291.significant.rst b/newsfragments/43291.significant.rst index ec7cacf6f153b..227ccda5fdd14 100644 --- a/newsfragments/43291.significant.rst +++ b/newsfragments/43291.significant.rst @@ -14,3 +14,22 @@ You should instead import it as: ..
code-block:: python from my_plugin import MyHook + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [x] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] extension ``hooks`` in ``airflow.plugins_manager.AirflowPlugin`` diff --git a/newsfragments/43349.significant.rst b/newsfragments/43349.significant.rst index efc6240978e6d..23af4ec5697f8 100644 --- a/newsfragments/43349.significant.rst +++ b/newsfragments/43349.significant.rst @@ -3,3 +3,22 @@ Deprecated trigger rule ``TriggerRule.DUMMY`` removed **Breaking Change** The trigger rule ``TriggerRule.DUMMY`` was removed. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.utils.trigger_rule.TriggerRule.DUMMY`` diff --git a/newsfragments/43490.significant.rst b/newsfragments/43490.significant.rst index 6d0a0af8ec48b..7d17cbec439a1 100644 --- a/newsfragments/43490.significant.rst +++ b/newsfragments/43490.significant.rst @@ -2,3 +2,14 @@ The ``task_fail`` table has been removed from the Airflow database. This table was used to store task failures, but it was not used by any Airflow components. Use the REST API to get task failures instead (which gets it from the ``task_instance`` table) + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/43530.significant.rst b/newsfragments/43530.significant.rst index 6f6e89be0e5cc..0876bd92ef9d8 100644 --- a/newsfragments/43530.significant.rst +++ b/newsfragments/43530.significant.rst @@ -19,3 +19,29 @@ After: from airflow.configuration import conf value = conf.get("section", "key") + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.configuration.getboolean`` → ``airflow.configuration.conf.getboolean`` + * [x] ``airflow.configuration.getfloat`` → ``airflow.configuration.conf.getfloat`` + * [x] ``airflow.configuration.getint`` → ``airflow.configuration.conf.getint`` + * [x] ``airflow.configuration.has_option`` → ``airflow.configuration.conf.has_option`` + * [x] ``airflow.configuration.remove_option`` → ``airflow.configuration.conf.remove_option`` + * [x] ``airflow.configuration.as_dict`` → ``airflow.configuration.conf.as_dict`` + * [x] ``airflow.configuration.set`` → ``airflow.configuration.conf.set`` + * [x] ``airflow.configuration.get`` → ``airflow.configuration.conf.get`` diff --git a/newsfragments/43533.significant.rst b/newsfragments/43533.significant.rst index 7b84c2bf87ff9..c59f931005300 100644 --- a/newsfragments/43533.significant.rst +++ b/newsfragments/43533.significant.rst @@ -6,3 +6,25 @@ Following functions are removed: - ``round_time`` - ``scale_time_units`` - ``infer_time_unit`` + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface 
changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.utils.dates.parse_execution_date`` + * [x] ``airflow.utils.dates.round_time`` + * [x] ``airflow.utils.dates.scale_time_units`` + * [x] ``airflow.utils.dates.infer_time_unit`` diff --git a/newsfragments/43562.significant.rst b/newsfragments/43562.significant.rst index 4e50fa609da21..98b232213e05d 100644 --- a/newsfragments/43562.significant.rst +++ b/newsfragments/43562.significant.rst @@ -22,3 +22,23 @@ After: if sys.version_info >= (3, 6): # perform some action ... + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + + +* Migration rules needed + + * ruff + + * AIR302 + + * [x] ``airflow.PY\d\d`` diff --git a/newsfragments/43568.significant.rst b/newsfragments/43568.significant.rst index 450ca5b4a27f9..801e7739f661e 100644 --- a/newsfragments/43568.significant.rst +++ b/newsfragments/43568.significant.rst @@ -1 +1,12 @@ Remove ``virtualenv`` extra as PythonVirtualenvOperator has been moved to the standard provider and switched to use the built-in venv package. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [x] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/43608.significant.rst b/newsfragments/43608.significant.rst index edb1c7ddddae2..8082b0725d3f4 100644 --- a/newsfragments/43608.significant.rst +++ b/newsfragments/43608.significant.rst @@ -17,6 +17,6 @@ Move Airflow core triggers to standard provider * AIR303 - * [ ] ``airflow.triggers.external_task.*`` → ``airflow.providers.standard.triggers.external_task.*`` - * [ ] ``airflow.triggers.file.*`` → ``airflow.providers.standard.triggers.file.*`` - * [ ] ``airflow.triggers.temporal.*`` → ``airflow.providers.standard.triggers.temporal.*`` + * [x] ``airflow.triggers.external_task.*`` → ``airflow.providers.standard.triggers.external_task.*`` + * [x] ``airflow.triggers.file.*`` → ``airflow.providers.standard.triggers.file.*`` + * [x] ``airflow.triggers.temporal.*`` → ``airflow.providers.standard.triggers.temporal.*`` diff --git a/newsfragments/43611.significant.rst b/newsfragments/43611.significant.rst index e25fb2a5bba4b..2f6e51fd78de4 100644 --- a/newsfragments/43611.significant.rst +++ b/newsfragments/43611.significant.rst @@ -4,3 +4,14 @@ Some database engines are limited to 32-bit integer values. As some users reported weights rolling over to negative values, we decided to cap the value to the 32-bit integer range. Even though Python internally supports smaller or larger 64-bit values, ``priority_weight`` is capped and only stores values from -2147483648 to 2147483647. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/43612.significant.rst b/newsfragments/43612.significant.rst index 8c0ec597c1176..1b37228e11510 100644 --- a/newsfragments/43612.significant.rst +++ b/newsfragments/43612.significant.rst @@ -6,3 +6,14 @@ AIRFLOW__STANDARD__VENV_INSTALL_METHOD option. The possible values are: - ``auto``: Automatically select, use ``uv`` if available, otherwise use ``pip``. - ``pip``: Use pip to install the virtual environment. - ``uv``: Use uv to install the virtual environment.
Must be available in environment PATH. + +* Types of change + + * [x] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/43774.significant.rst b/newsfragments/43774.significant.rst index b716e1fc83f94..7b38908417033 100644 --- a/newsfragments/43774.significant.rst +++ b/newsfragments/43774.significant.rst @@ -7,16 +7,19 @@ For easier change in the future, this function now takes only keyword arguments. * Types of change - * [ ] DAG changes + * [ ] Dag changes * [ ] Config changes * [ ] API changes * [ ] CLI changes * [x] Behaviour changes * [ ] Plugin changes - * [ ] Dependency change - -.. List the migration rules needed for this change (see https://github.com/apache/airflow/issues/41641) + * [ ] Dependency changes + * [ ] Code interface changes * Migrations rules needed - * Calling ``HookLineageCollector.create_asset`` with positional argument should raise an error + * ruff + + * AIR302 + + * [ ] Calling ``HookLineageCollector.create_asset`` with positional argument should raise an error diff --git a/newsfragments/43890.significant.rst b/newsfragments/43890.significant.rst index b7ca80ba686a1..9bf17e35a0890 100644 --- a/newsfragments/43890.significant.rst +++ b/newsfragments/43890.significant.rst @@ -17,4 +17,4 @@ Move filesystem sensor to standard provider * AIR303 - * [ ] ``airflow.sensors.filesystem.FileSensor`` → ``airflow.providers.standard.sensors.filesystem.FileSensor`` + * [x] ``airflow.sensors.filesystem.FileSensor`` → ``airflow.providers.standard.sensors.filesystem.FileSensor`` diff --git a/newsfragments/43902.significant.rst b/newsfragments/43902.significant.rst index f51eafb8d0a94..58de47893b816 100644 --- a/newsfragments/43902.significant.rst +++ b/newsfragments/43902.significant.rst @@ -4,3 +4,32 @@ The shift towards ``logical_date`` helps move away from the limitations of ``exe - Renamed columns and function references to ``logical_date``. - Removed ``execution_date``, ``next_ds``, ``next_ds_nodash``, ``next_execution_date``, ``prev_ds``, ``prev_ds_nodash``, ``prev_execution_date``, ``prev_execution_date_success``, ``tomorrow_ds``, ``yesterday_ds`` and ``yesterday_ds_nodash`` from Airflow ``context``. + +* Types of change + + * [x] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ruff + + * AIR302 + + * [ ] context key ``execution_date`` + * [ ] context key ``next_ds`` + * [ ] context key ``next_ds_nodash`` + * [ ] context key ``next_execution_date`` + * [ ] context key ``prev_ds`` + * [ ] context key ``prev_ds_nodash`` + * [ ] context key ``prev_execution_date`` + * [ ] context key ``prev_execution_date_success`` + * [ ] context key ``tomorrow_ds`` + * [ ] context key ``yesterday_ds`` + * [ ] context key ``yesterday_ds_nodash`` diff --git a/newsfragments/43915.significant.rst b/newsfragments/43915.significant.rst index efcc48f116b0e..51b0f53d7216d 100644 --- a/newsfragments/43915.significant.rst +++ b/newsfragments/43915.significant.rst @@ -2,3 +2,20 @@ Configuration ``[core] strict_dataset_uri_validation`` is removed Asset URI with a defined scheme will now always be validated strictly, raising a hard error on validation failure.
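A hedged sketch of the user-visible effect of the stricter URI validation (the exact exception type is an assumption; ``ValueError`` is shown, and ``airflow://`` is used as an example of a scheme that fails validation):

.. code-block:: python

    from airflow.sdk.definitions.asset import Asset

    # A well-formed scheme validates as before.
    orders = Asset(name="orders", uri="s3://bucket/orders")

    # A URI that fails strict validation now raises instead of merely warning.
    try:
        Asset(name="oops", uri="airflow://reserved-scheme")
    except ValueError as err:
        print(f"rejected: {err}")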
+ +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``core.strict_dataset_uri_validation`` diff --git a/newsfragments/43943.significant.rst b/newsfragments/43943.significant.rst index 590caf792d219..dba684d1135c9 100644 --- a/newsfragments/43943.significant.rst +++ b/newsfragments/43943.significant.rst @@ -3,3 +3,20 @@ Remove the ``traces`` ``otel_task_log_event`` event config option and feature This was sending the task logs from the scheduler and would be a huge scheduling performance hit (blocking all scheduling while it was fetching logs to attach to the trace) + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``traces.otel_task_log_event`` diff --git a/newsfragments/43949.significant.rst b/newsfragments/43949.significant.rst index 2d1cf53797e5b..745c10d455def 100644 --- a/newsfragments/43949.significant.rst +++ b/newsfragments/43949.significant.rst @@ -3,3 +3,14 @@ The ``--clear-only`` option of ``airflow dags reserialize`` command is now remov The ``--clear-only`` option was added to clear the serialized DAGs without reserializing them. This option has been removed as it is no longer needed. We have implemented DAG versioning and can no longer delete serialized DAGs without going through the ``airflow db-clean`` command. This command is now only for reserializing DAGs. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [x] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/43975.significant.rst b/newsfragments/43975.significant.rst index 6d116ac1eedfc..2c437ef3a4e37 100644 --- a/newsfragments/43975.significant.rst +++ b/newsfragments/43975.significant.rst @@ -6,3 +6,20 @@ This is done to standardize all timer and timing metrics to milliseconds across Airflow 2.11 introduced the ``timer_unit_consistency`` setting in the ``metrics`` section of the configuration file. The default value was ``False``, which meant that the timer and timing metrics were logged in seconds. This was done to maintain backwards compatibility with the previous versions of Airflow.
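Since all timers are now emitted in milliseconds, consumers that previously assumed seconds need a one-time unit conversion. A small sketch of that arithmetic (the helper function is hypothetical, not part of Airflow):

.. code-block:: python

    def timer_value_ms(raw: float, emits_milliseconds: bool = True) -> float:
        """Normalize a timer sample to milliseconds.

        Airflow 3 always emits milliseconds; 2.x deployments without
        ``timer_unit_consistency`` enabled emitted seconds.
        """
        return raw if emits_milliseconds else raw * 1000.0

    print(timer_value_ms(0.25, emits_milliseconds=False))  # 250.0
    print(timer_value_ms(250.0))                           # 250.0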
+ +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``metrics.timer_unit_consistency`` diff --git a/newsfragments/44053.significant.rst b/newsfragments/44053.significant.rst index 1e80fbe1e4250..7b3054394d87d 100644 --- a/newsfragments/44053.significant.rst +++ b/newsfragments/44053.significant.rst @@ -17,4 +17,4 @@ Move ``TriggerDagRunOperator`` to standard provider * AIR303 - * [ ] ``airflow.operators.trigger_dagrun import TriggerDagRunOperator`` → ``airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunOperator`` + * [x] ``airflow.operators.trigger_dagrun.TriggerDagRunOperator`` → ``airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunOperator`` diff --git a/newsfragments/44080.significant.rst b/newsfragments/44080.significant.rst index 040e27fbf1108..3aeca5191e4b6 100644 --- a/newsfragments/44080.significant.rst +++ b/newsfragments/44080.significant.rst @@ -1,3 +1,14 @@ PostgreSQL 12 is no longer supported PostgreSQL 12 is no longer being supported by the PostgreSQL community. You must upgrade to PostgreSQL 13+ to use this version of Airflow. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [x] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/44288.significant.rst b/newsfragments/44288.significant.rst index 90d41dd6147c9..e1a38ac27cd39 100644 --- a/newsfragments/44288.significant.rst +++ b/newsfragments/44288.significant.rst @@ -17,5 +17,5 @@ * AIR303 - * [ ] ``airflow.sensors.external_task.ExternalTaskMarker`` → ``airflow.providers.standard.sensors.external_task.ExternalTaskMarker`` - * [ ] ``airflow.sensors.external_task.ExternalTaskSensor`` → ``airflow.providers.standard.sensors.external_task.ExternalTaskSensor`` + * [x] ``airflow.sensors.external_task.ExternalTaskMarker`` → ``airflow.providers.standard.sensors.external_task.ExternalTaskMarker`` + * [x] ``airflow.sensors.external_task.ExternalTaskSensor`` → ``airflow.providers.standard.sensors.external_task.ExternalTaskSensor`` diff --git a/newsfragments/44475.significant.rst b/newsfragments/44475.significant.rst index 691af8730bf53..bf5ec44939432 100644 --- a/newsfragments/44475.significant.rst +++ b/newsfragments/44475.significant.rst @@ -6,16 +6,21 @@ Remove ``TriggerRule.NONE_FAILED_OR_SKIPPED`` * Types of change - * [x] DAG changes + * [x] Dag changes * [ ] Config changes * [ ] API changes * [ ] CLI changes * [ ] Behaviour changes * [ ] Plugin changes - * [ ] Dependency change + * [ ] Dependency changes + * [ ] Code interface changes .. List the migration rules needed for this change (see https://github.com/apache/airflow/issues/41641) * Migrations rules needed - * Remove attribute ``TriggerRule.NONE_FAILED_OR_SKIPPED`` + * ruff + + * AIR302 + + * [x] ``TriggerRule.NONE_FAILED_OR_SKIPPED`` diff --git a/newsfragments/44533.significant.rst b/newsfragments/44533.significant.rst index 55619c244f5ef..5943741428d7f 100644 --- a/newsfragments/44533.significant.rst +++ b/newsfragments/44533.significant.rst @@ -3,3 +3,14 @@ During offline migration, ``DagRun.conf`` is cleared ..
Provide additional contextual information The ``conf`` column is changing from pickle to JSON; thus, the values in that column cannot be migrated during offline migrations. If you want to retain ``conf`` values for existing DagRuns, you must do a normal, non-offline, migration. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/44706.significant.rst b/newsfragments/44706.significant.rst index bd3efa6972588..6391000d4caf7 100644 --- a/newsfragments/44706.significant.rst +++ b/newsfragments/44706.significant.rst @@ -6,10 +6,11 @@ If you would like to create default connections use ``airflow connections create * Types of change - * [ ] DAG changes + * [ ] Dag changes * [ ] Config changes * [ ] API changes * [x] CLI changes * [ ] Behaviour changes * [ ] Plugin changes - * [ ] Dependency change + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/44820.significant.rst b/newsfragments/44820.significant.rst index 61b7c968ff333..c37be748c780c 100644 --- a/newsfragments/44820.significant.rst +++ b/newsfragments/44820.significant.rst @@ -25,14 +25,19 @@ Example: * Types of change - * [x] DAG changes + * [x] Dag changes * [ ] Config changes * [ ] API changes * [ ] CLI changes * [ ] Behaviour changes * [ ] Plugin changes * [ ] Dependency changes + * [ ] Code interface changes * Migration rules needed - * Remove context key ``conf`` + * ruff + + * AIR302 + + * [ ] context key ``conf`` diff --git a/newsfragments/45017.significant.rst b/newsfragments/45017.significant.rst index aef05e779e6e7..b1140fcf8d434 100644 --- a/newsfragments/45017.significant.rst +++ b/newsfragments/45017.significant.rst @@ -5,14 +5,19 @@ should be imported from ``airflow.providers.celery.executors.default_celery.DEFA * Types of change - * [ ] DAG changes - * [x] Config changes + * [x] Dag changes + * [ ] Config changes * [ ] API changes * [ ] CLI changes * [ ] Behaviour changes * [ ] Plugin changes - * [ ] Dependency change + * [ ] Dependency changes + * [ ] Code interface changes * Migration rules needed - * AIR303 rewrite ``airflow.config_templates.default_celery.DEFAULT_CELERY_CONFIG`` to ``airflow.providers.celery.executors.default_celery.DEFAULT_CELERY_CONFIG`` + * ruff + + * AIR303 + + * [x] ``airflow.config_templates.default_celery.DEFAULT_CELERY_CONFIG`` → ``airflow.providers.celery.executors.default_celery.DEFAULT_CELERY_CONFIG`` diff --git a/newsfragments/45327.significant.rst b/newsfragments/45327.significant.rst index db385eb7059df..1e7423cd2e6ea 100644 --- a/newsfragments/45327.significant.rst +++ b/newsfragments/45327.significant.rst @@ -18,6 +18,4 @@ Renamed DAG argument ``fail_stop`` to ``fail_fast`` across the codebase to align * AIR302 - * arguments in ``DAG`` - - * [ ] ``fail_stop`` → ``fail_fast`` + * [x] argument ``fail_stop`` → ``fail_fast`` in ``DAG`` diff --git a/newsfragments/45530.significant.rst b/newsfragments/45530.significant.rst index 7e2ae8e8ac6a5..5805dc3789e2b 100644 --- a/newsfragments/45530.significant.rst +++ b/newsfragments/45530.significant.rst @@ -10,3 +10,14 @@ tasks are executed. Teardown tasks are skipped if the setup was also skipped. As a side effect, this means that if the DAG contains teardown tasks, the manual marking of the DAG as "failed" or "success" will need to keep the DAG in running state to ensure that teardown tasks will be scheduled.
They would not be scheduled if the DAG is directly set to "failed" or "success". + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/45694.significant.rst b/newsfragments/45694.significant.rst index 39985f279ebef..222083bdf2e7e 100644 --- a/newsfragments/45694.significant.rst +++ b/newsfragments/45694.significant.rst @@ -34,13 +34,14 @@ After: * Types of change - * [x] DAG changes + * [x] Dag changes * [ ] Config changes * [ ] API changes * [ ] CLI changes * [ ] Behaviour changes * [ ] Plugin changes - * [ ] Dependency change + * [ ] Dependency changes + * [ ] Code interface changes * Migration rules needed @@ -48,4 +49,4 @@ After: * AIR302 - * [ ] ``airflow.utils.dag_parsing_context.get_parsing_context`` -> ``airflow.sdk.get_parsing_context`` + * [x] ``airflow.utils.dag_parsing_context.get_parsing_context`` -> ``airflow.sdk.get_parsing_context`` diff --git a/newsfragments/45722.significant.rst b/newsfragments/45722.significant.rst index 3e9068a1ac13d..aa7acda9e5e2f 100644 --- a/newsfragments/45722.significant.rst +++ b/newsfragments/45722.significant.rst @@ -2,14 +2,14 @@ Move airflow config ``scheduler.dag_dir_list_interval`` to ``dag_bundles.refresh * Types of change - * [ ] DAG changes + * [ ] Dag changes * [x] Config changes * [ ] API changes * [ ] CLI changes * [ ] Behaviour changes * [ ] Plugin changes - * [ ] Dependency change - * [ ] Code interface change + * [ ] Dependency changes + * [ ] Code interface changes * Migration rules needed diff --git a/newsfragments/45729.significant.rst b/newsfragments/45729.significant.rst index a21491c90ce22..5a722119cdf7d 100644 --- a/newsfragments/45729.significant.rst +++ b/newsfragments/45729.significant.rst @@ -1,3 +1,14 @@ Standalone DAG processor is now required The scheduler is no longer able to parse DAGs itself - it relies on the standalone DAG processor (introduced in Airflow 2.3) to do it instead. You can start one by running ``airflow dag-processor``. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [x] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/newsfragments/aip-72.significant.rst b/newsfragments/aip-72.significant.rst index 5133a1ac9c0c2..4ad2ab67f6335 100644 --- a/newsfragments/aip-72.significant.rst +++ b/newsfragments/aip-72.significant.rst @@ -42,12 +42,21 @@ As part of this change the following breaking changes have occurred: It is recommended that you replace such a custom operator with a deferrable sensor, a condition or another triggering mechanism.
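As a rough illustration of that recommendation, here is a minimal deferrable operator that waits without blocking a worker slot. The ``DateTimeTrigger`` import path follows the standard-provider move noted earlier in this diff, but treat the sketch as an assumption rather than the canonical migration:

.. code-block:: python

    from datetime import timedelta

    from airflow.models.baseoperator import BaseOperator
    from airflow.providers.standard.triggers.temporal import DateTimeTrigger
    from airflow.utils import timezone

    class WaitOneHour(BaseOperator):
        """A hypothetical replacement for a custom blocking 'wait' operator."""

        def execute(self, context):
            # Hand control to the triggerer instead of sleeping on a worker.
            self.defer(
                trigger=DateTimeTrigger(moment=timezone.utcnow() + timedelta(hours=1)),
                method_name="resume",
            )

        def resume(self, context, event=None):
            self.log.info("Wait elapsed; continuing.")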
+ * Types of change - * [ ] DAG changes - * [ ] Config changes + * [x] Dag changes + * [x] Config changes * [ ] API changes * [ ] CLI changes * [x] Behaviour changes * [ ] Plugin changes - * [ ] Dependency change + * [ ] Dependency changes + * [ ] Code interface changes + +* Migration rules needed + + * ``airflow config lint`` + + * [x] ``core.task_runner`` + * [x] ``core.enable_xcom_pickling`` diff --git a/newsfragments/aip-79.significant.rst b/newsfragments/aip-79.significant.rst index 8bcfde7f321a1..9ca9222333ed2 100644 --- a/newsfragments/aip-79.significant.rst +++ b/newsfragments/aip-79.significant.rst @@ -13,3 +13,14 @@ As part of this change the following breaking changes have occurred: - The property ``security_manager`` has been removed from the interface - The method ``filter_permitted_menu_items`` is now abstract and must be implemented + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [x] Code interface changes diff --git a/newsfragments/template.significant.rst b/newsfragments/template.significant.rst index 64dbe7ea4bd77..4877b0cbd1e19 100644 --- a/newsfragments/template.significant.rst +++ b/newsfragments/template.significant.rst @@ -3,20 +3,29 @@ .. Provide additional contextual information .. Check the type of change that applies to this change +.. Dag changes: requires users to change their dag code +.. Config changes: requires users to change their airflow config +.. API changes: requires users to change their Airflow REST API calls +.. CLI changes: requires users to change their Airflow CLI usage +.. Behaviour changes: the existing code won't break, but the behavior is different +.. Plugin changes: requires users to change their Airflow plugin implementation +.. Dependency changes: requires users to change their dependencies (e.g., Postgres 12) +.. Code interface changes: requires users to change other implementations (e.g., auth manager) * Types of change - * [ ] DAG changes + * [ ] Dag changes * [ ] Config changes * [ ] API changes * [ ] CLI changes * [ ] Behaviour changes * [ ] Plugin changes - * [ ] Dependency change + * [ ] Dependency changes + * [ ] Code interface changes .. List the migration rules needed for this change (see https://github.com/apache/airflow/issues/41641) -* [ ] Migration rules needed +* Migration rules needed .. e.g., ..
* Remove context key ``execution_date`` diff --git a/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py b/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py index 9cbebcf8df9ec..d89fbdf6edb15 100644 --- a/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py +++ b/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py @@ -74,6 +74,7 @@ def teardown_method(self): "airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor.get_task_log" ) @pytest.mark.parametrize("state", [TaskInstanceState.RUNNING, TaskInstanceState.SUCCESS]) + @pytest.mark.usefixtures("clean_executor_loader") def test__read_for_k8s_executor(self, mock_k8s_get_task_log, create_task_instance, state): """Test for k8s executor, the log is read from get_task_log method""" mock_k8s_get_task_log.return_value = ([], []) @@ -86,6 +87,7 @@ def test__read_for_k8s_executor(self, mock_k8s_get_task_log, create_task_instanc ) ti.state = state ti.triggerer_job = None + ti.executor = executor_name with conf_vars({("core", "executor"): executor_name}): reload(executor_loader) fth = FileTaskHandler("") @@ -105,11 +107,12 @@ def test__read_for_k8s_executor(self, mock_k8s_get_task_log, create_task_instanc pytest.param(k8s.V1Pod(metadata=k8s.V1ObjectMeta(name="pod-name-xxx")), "default"), ], ) - @patch.dict("os.environ", AIRFLOW__CORE__EXECUTOR="KubernetesExecutor") + @conf_vars({("core", "executor"): "KubernetesExecutor"}) @patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client") def test_read_from_k8s_under_multi_namespace_mode( self, mock_kube_client, pod_override, namespace_to_call ): + reload(executor_loader) mock_read_log = mock_kube_client.return_value.read_namespaced_pod_log mock_list_pod = mock_kube_client.return_value.list_namespaced_pod @@ -139,6 +142,7 @@ def task_callable(ti): ) ti = TaskInstance(task=task, run_id=dagrun.run_id) ti.try_number = 3 + ti.executor = "KubernetesExecutor" logger = ti.log ti.log.disabled = False @@ -147,6 +151,8 @@ def task_callable(ti): set_context(logger, ti) ti.run(ignore_ti_state=True) ti.state = TaskInstanceState.RUNNING + # clear executor_instances cache + file_handler.executor_instances = {} file_handler.read(ti, 2) # first we find pod name diff --git a/task_sdk/src/airflow/sdk/api/client.py b/task_sdk/src/airflow/sdk/api/client.py index b984669aa747d..443256e3a67ee 100644 --- a/task_sdk/src/airflow/sdk/api/client.py +++ b/task_sdk/src/airflow/sdk/api/client.py @@ -44,6 +44,7 @@ TIHeartbeatInfo, TIRescheduleStatePayload, TIRunContext, + TISuccessStatePayload, TITerminalStatePayload, ValidationError as RemoteValidationError, VariablePostBody, @@ -136,6 +137,11 @@ def finish(self, id: uuid.UUID, state: TerminalTIState, when: datetime): body = TITerminalStatePayload(end_date=when, state=TerminalTIState(state)) self.client.patch(f"task-instances/{id}/state", content=body.model_dump_json()) + def succeed(self, id: uuid.UUID, when: datetime, task_outlets, outlet_events): + """Tell the API server that this TI has succeeded.""" + body = TISuccessStatePayload(end_date=when, task_outlets=task_outlets, outlet_events=outlet_events) + self.client.patch(f"task-instances/{id}/state", content=body.model_dump_json()) + def heartbeat(self, id: uuid.UUID, pid: int): body = TIHeartbeatInfo(pid=pid, hostname=get_hostname()) self.client.put(f"task-instances/{id}/heartbeat", content=body.model_dump_json()) diff --git a/task_sdk/src/airflow/sdk/api/datamodels/_generated.py b/task_sdk/src/airflow/sdk/api/datamodels/_generated.py 
index d91ecf841d9bd..3383e61d1c395 100644 --- a/task_sdk/src/airflow/sdk/api/datamodels/_generated.py +++ b/task_sdk/src/airflow/sdk/api/datamodels/_generated.py @@ -29,6 +29,20 @@ from pydantic import BaseModel, ConfigDict, Field +class AssetProfile(BaseModel): + """ + Profile of an Asset. + + Asset will have name, uri and asset_type defined. + AssetNameRef will have name and asset_type defined. + AssetUriRef will have uri and asset_type defined. + """ + + name: Annotated[str | None, Field(title="Name")] = None + uri: Annotated[str | None, Field(title="Uri")] = None + asset_type: Annotated[str, Field(title="Asset Type")] + + class AssetResponse(BaseModel): """ Asset schema for responses with fields that are needed for Runtime. @@ -134,6 +148,17 @@ class TIRescheduleStatePayload(BaseModel): end_date: Annotated[datetime, Field(title="End Date")] +class TISuccessStatePayload(BaseModel): + """ + Schema for updating TaskInstance to success state. + """ + + state: Annotated[Literal["success"] | None, Field(title="State")] = "success" + end_date: Annotated[datetime, Field(title="End Date")] + task_outlets: Annotated[list[AssetProfile] | None, Field(title="Task Outlets")] = None + outlet_events: Annotated[list | None, Field(title="Outlet Events")] = None + + class TITargetStatePayload(BaseModel): """ Schema for updating TaskInstance to a target state, excluding terminal and running states. @@ -243,7 +268,7 @@ class TIRunContext(BaseModel): class TITerminalStatePayload(BaseModel): """ - Schema for updating TaskInstance to a terminal state (e.g., SUCCESS or FAILED). + Schema for updating TaskInstance to a terminal state except SUCCESS state. """ state: TerminalTIState diff --git a/task_sdk/src/airflow/sdk/execution_time/comms.py b/task_sdk/src/airflow/sdk/execution_time/comms.py index 007e3fe10fe01..3ab8addc8bbf8 100644 --- a/task_sdk/src/airflow/sdk/execution_time/comms.py +++ b/task_sdk/src/airflow/sdk/execution_time/comms.py @@ -60,6 +60,7 @@ TIDeferredStatePayload, TIRescheduleStatePayload, TIRunContext, + TISuccessStatePayload, VariableResponse, XComResponse, ) @@ -191,11 +192,22 @@ class TaskState(BaseModel): - anything else = FAILED """ - state: TerminalTIState + state: Literal[ + TerminalTIState.FAILED, + TerminalTIState.SKIPPED, + TerminalTIState.REMOVED, + TerminalTIState.FAIL_WITHOUT_RETRY, + ] end_date: datetime | None = None type: Literal["TaskState"] = "TaskState" +class SucceedTask(TISuccessStatePayload): + """Update a task's state to success. Includes task_outlets and outlet_events for registering asset events.""" + + type: Literal["SucceedTask"] = "SucceedTask" + + class DeferTask(TIDeferredStatePayload): """Update a task instance state to deferred.""" @@ -292,6 +304,7 @@ class GetPrevSuccessfulDagRun(BaseModel): ToSupervisor = Annotated[ Union[ + SucceedTask, DeferTask, GetAssetByName, GetAssetByUri, diff --git a/task_sdk/src/airflow/sdk/execution_time/supervisor.py b/task_sdk/src/airflow/sdk/execution_time/supervisor.py index 45da306722f2b..569855016cfe6 100644 --- a/task_sdk/src/airflow/sdk/execution_time/supervisor.py +++ b/task_sdk/src/airflow/sdk/execution_time/supervisor.py @@ -76,6 +76,7 @@ SetRenderedFields, SetXCom, StartupDetails, + SucceedTask, TaskState, ToSupervisor, VariableResult, @@ -104,7 +105,11 @@ # These are the task instance states that require some additional information to transition into. # "Directly" here means that the PATCH API calls to transition into these states are # made from _handle_request() itself and don't have to come all the way to wait(). 
-STATES_SENT_DIRECTLY = [IntermediateTIState.DEFERRED, IntermediateTIState.UP_FOR_RESCHEDULE] +STATES_SENT_DIRECTLY = [ + IntermediateTIState.DEFERRED, + IntermediateTIState.UP_FOR_RESCHEDULE, + TerminalTIState.SUCCESS, +] @overload @@ -762,6 +767,14 @@ def _handle_request(self, msg: ToSupervisor, log: FilteringBoundLogger): if isinstance(msg, TaskState): self._terminal_state = msg.state self._task_end_time_monotonic = time.monotonic() + elif isinstance(msg, SucceedTask): + self._terminal_state = msg.state + self.client.task_instances.succeed( + id=self.id, + when=msg.end_date, + task_outlets=msg.task_outlets, + outlet_events=msg.outlet_events, + ) elif isinstance(msg, GetConnection): conn = self.client.connections.get(msg.conn_id) if isinstance(conn, ConnectionResponse): diff --git a/task_sdk/src/airflow/sdk/execution_time/task_runner.py b/task_sdk/src/airflow/sdk/execution_time/task_runner.py index c35a79e13c368..c2d2c51b630be 100644 --- a/task_sdk/src/airflow/sdk/execution_time/task_runner.py +++ b/task_sdk/src/airflow/sdk/execution_time/task_runner.py @@ -33,8 +33,9 @@ from pydantic import BaseModel, ConfigDict, Field, JsonValue, TypeAdapter from airflow.dag_processing.bundles.manager import DagBundlesManager -from airflow.sdk.api.datamodels._generated import TaskInstance, TerminalTIState, TIRunContext +from airflow.sdk.api.datamodels._generated import AssetProfile, TaskInstance, TerminalTIState, TIRunContext from airflow.sdk.definitions._internal.dag_parsing_context import _airflow_parsing_context_manager +from airflow.sdk.definitions.asset import Asset, AssetAlias, AssetNameRef, AssetUriRef from airflow.sdk.definitions.baseoperator import BaseOperator from airflow.sdk.execution_time.comms import ( DeferTask, @@ -43,6 +44,7 @@ SetRenderedFields, SetXCom, StartupDetails, + SucceedTask, TaskState, ToSupervisor, ToTask, @@ -446,6 +448,36 @@ def _get_rendered_fields(task: BaseOperator) -> dict[str, JsonValue]: return {field: serialize_template_field(getattr(task, field), field) for field in task.template_fields} +def _process_outlets(context: Context, outlets: list[AssetProfile]): + added_alias_to_task_outlet = False + task_outlets: list[AssetProfile] = [] + outlet_events: list[Any] = [] + events = context["outlet_events"] + + for obj in outlets or []: + # Lineage can have other types of objects besides assets + asset_type = type(obj).__name__ + if isinstance(obj, Asset): + task_outlets.append(AssetProfile(name=obj.name, uri=obj.uri, asset_type=asset_type)) + outlet_events.append(attrs.asdict(events[obj])) # type: ignore + elif isinstance(obj, AssetNameRef): + task_outlets.append(AssetProfile(name=obj.name, asset_type=asset_type)) + # Send all events, filtering can be done in API server. + outlet_events.append(attrs.asdict(events)) # type: ignore + elif isinstance(obj, AssetUriRef): + task_outlets.append(AssetProfile(uri=obj.uri, asset_type=asset_type)) + # Send all events, filtering can be done in API server. 
+ outlet_events.append(attrs.asdict(events)) # type: ignore + elif isinstance(obj, AssetAlias): + if not added_alias_to_task_outlet: + task_outlets.append(AssetProfile(asset_type=asset_type)) + added_alias_to_task_outlet = True + for asset_alias_event in events[obj].asset_alias_events: + outlet_events.append(attrs.asdict(asset_alias_event)) + + return task_outlets, outlet_events + + def run(ti: RuntimeTaskInstance, log: Logger): """Run the task in this process.""" from airflow.exceptions import ( @@ -477,12 +509,18 @@ def run(ti: RuntimeTaskInstance, log: Logger): _push_xcom_if_needed(result, ti) + task_outlets, outlet_events = _process_outlets(context, ti.task.outlets) + # TODO: Get things from _execute_task_with_callbacks # - Clearing XCom # - Update RTIF # - Pre Execute # etc - msg = TaskState(state=TerminalTIState.SUCCESS, end_date=datetime.now(tz=timezone.utc)) + msg = SucceedTask( + end_date=datetime.now(tz=timezone.utc), + task_outlets=task_outlets, + outlet_events=outlet_events, + ) except TaskDeferred as defer: # TODO: Should we use structlog.bind_contextvars here for dag_id, task_id & run_id? log.info("Pausing task as DEFERRED. ", dag_id=ti.dag_id, task_id=ti.task_id, run_id=ti.run_id) diff --git a/task_sdk/tests/execution_time/test_supervisor.py b/task_sdk/tests/execution_time/test_supervisor.py index f2fcca8a2ab2c..ed921ed216ac1 100644 --- a/task_sdk/tests/execution_time/test_supervisor.py +++ b/task_sdk/tests/execution_time/test_supervisor.py @@ -55,6 +55,7 @@ RescheduleTask, SetRenderedFields, SetXCom, + SucceedTask, TaskState, VariableResult, XComResult, @@ -978,6 +979,20 @@ def watched_subprocess(self, mocker): AssetResult(name="asset", uri="s3://bucket/obj", group="asset"), id="get_asset_by_uri", ), + pytest.param( + SucceedTask(end_date=timezone.parse("2024-10-31T12:00:00Z")), + b"", + "task_instances.succeed", + (), + { + "id": TI_ID, + "outlet_events": None, + "task_outlets": None, + "when": timezone.parse("2024-10-31T12:00:00Z"), + }, + "", + id="succeed_task", + ), pytest.param( GetPrevSuccessfulDagRun(ti_id=TI_ID), ( @@ -1002,6 +1017,7 @@ def test_handle_requests( self, watched_subprocess, mocker, + time_machine, message, expected_buffer, client_attr_path, @@ -1030,6 +1046,7 @@ def test_handle_requests( next(generator) msg = message.model_dump_json().encode() + b"\n" generator.send(msg) + time_machine.move_to(timezone.datetime(2024, 10, 31), tick=False) # Verify the correct client method was called if client_attr_path: diff --git a/task_sdk/tests/execution_time/test_task_runner.py b/task_sdk/tests/execution_time/test_task_runner.py index 17c0029844e56..e95d8cdf36baf 100644 --- a/task_sdk/tests/execution_time/test_task_runner.py +++ b/task_sdk/tests/execution_time/test_task_runner.py @@ -37,7 +37,8 @@ AirflowTaskTerminated, ) from airflow.sdk import DAG, BaseOperator, Connection, get_current_context -from airflow.sdk.api.datamodels._generated import TaskInstance, TerminalTIState +from airflow.sdk.api.datamodels._generated import AssetProfile, TaskInstance, TerminalTIState +from airflow.sdk.definitions.asset import Asset, AssetAlias from airflow.sdk.definitions.variable import Variable from airflow.sdk.execution_time.comms import ( BundleInfo, @@ -49,6 +50,7 @@ PrevSuccessfulDagRunResult, SetRenderedFields, StartupDetails, + SucceedTask, TaskState, VariableResult, XComResult, @@ -172,7 +174,8 @@ def test_run_basic(time_machine, create_runtime_ti, spy_agency, mock_supervisor_ assert ti.task._lock_for_execution mock_supervisor_comms.send_request.assert_called_once_with( 
- msg=TaskState(state=TerminalTIState.SUCCESS, end_date=instant), log=mock.ANY + msg=SucceedTask(state=TerminalTIState.SUCCESS, end_date=instant, task_outlets=[], outlet_events=[]), + log=mock.ANY, ) @@ -443,7 +446,12 @@ def execute(self, context): log=mock.ANY, ), mock.call.send_request( - msg=TaskState(end_date=instant, state=TerminalTIState.SUCCESS), + msg=SucceedTask( + end_date=instant, + state=TerminalTIState.SUCCESS, + task_outlets=[], + outlet_events=[], + ), log=mock.ANY, ), ] @@ -498,7 +506,8 @@ def execute(self, context): # Ensure the task is Successful mock_supervisor_comms.send_request.assert_called_once_with( - msg=TaskState(state=TerminalTIState.SUCCESS, end_date=instant), log=mock.ANY + msg=SucceedTask(state=TerminalTIState.SUCCESS, end_date=instant, task_outlets=[], outlet_events=[]), + log=mock.ANY, ) @@ -588,6 +597,60 @@ def test_dag_parsing_context(make_ti_context, mock_supervisor_comms, monkeypatch assert ti.task.dag.task_dict.keys() == {"visible_task", "conditional_task"} +@pytest.mark.parametrize( + ["task_outlets", "expected_msg"], + [ + pytest.param( + [Asset(name="s3://bucket/my-task", uri="s3://bucket/my-task")], + SucceedTask( + state="success", + end_date=timezone.datetime(2024, 12, 3, 10, 0), + task_outlets=[ + AssetProfile(name="s3://bucket/my-task", uri="s3://bucket/my-task", asset_type="Asset") + ], + outlet_events=[ + { + "key": {"name": "s3://bucket/my-task", "uri": "s3://bucket/my-task"}, + "extra": {}, + "asset_alias_events": [], + } + ], + ), + id="asset", + ), + pytest.param( + [AssetAlias(name="example-alias", group="asset")], + SucceedTask( + state="success", + end_date=timezone.datetime(2024, 12, 3, 10, 0), + task_outlets=[AssetProfile(asset_type="AssetAlias")], + outlet_events=[], + ), + id="asset-alias", + ), + ], +) +def test_run_with_asset_outlets( + time_machine, create_runtime_ti, mock_supervisor_comms, task_outlets, expected_msg +): + """Test running a basic task that contains asset outlets.""" + from airflow.providers.standard.operators.bash import BashOperator + + task = BashOperator( + outlets=task_outlets, + task_id="asset-outlet-task", + bash_command="echo 'hi'", + ) + + ti = create_runtime_ti(task=task, dag_id="dag_with_asset_outlet_task") + instant = timezone.datetime(2024, 12, 3, 10, 0) + time_machine.move_to(instant, tick=False) + + run(ti, log=mock.MagicMock()) + + mock_supervisor_comms.send_request.assert_any_call(msg=expected_msg, log=mock.ANY) + + class TestRuntimeTaskInstance: def test_get_context_without_ti_context_from_server(self, mocked_parse, make_ti_context): """Test get_template_context without ti_context_from_server.""" diff --git a/tests/api_fastapi/core_api/routes/public/test_assets.py b/tests/api_fastapi/core_api/routes/public/test_assets.py index 563fbac961986..a48c0da87fc7a 100644 --- a/tests/api_fastapi/core_api/routes/public/test_assets.py +++ b/tests/api_fastapi/core_api/routes/public/test_assets.py @@ -542,7 +542,10 @@ def test_should_respond_200(self, test_client, session): { "id": 1, "asset_id": 1, + "uri": "s3://bucket/key/1", "extra": {"foo": "bar"}, + "group": "asset", + "name": "simple1", "source_task_id": "source_task_id", "source_dag_id": "source_dag_id", "source_run_id": "source_run_id_1", @@ -564,6 +567,9 @@ def test_should_respond_200(self, test_client, session): { "id": 2, "asset_id": 2, + "uri": "s3://bucket/key/2", + "group": "asset", + "name": "simple2", "extra": {"foo": "bar"}, "source_task_id": "source_task_id", "source_dag_id": "source_dag_id", @@ -704,6 +710,9 @@ def 
test_should_mask_sensitive_extra(self, test_client, session): { "id": 1, "asset_id": 1, + "uri": "s3://bucket/key/1", + "group": "asset", + "name": "sensitive1", "extra": {"password": "***"}, "source_task_id": "source_task_id", "source_dag_id": "source_dag_id", @@ -726,6 +735,9 @@ def test_should_mask_sensitive_extra(self, test_client, session): { "id": 2, "asset_id": 2, + "uri": "s3://bucket/key/2", + "group": "asset", + "name": "sensitive2", "extra": {"password": "***"}, "source_task_id": "source_task_id", "source_dag_id": "source_dag_id", @@ -912,6 +924,9 @@ def test_should_respond_200(self, test_client, session): assert response.json() == { "id": mock.ANY, "asset_id": 1, + "uri": "s3://bucket/key/1", + "group": "asset", + "name": "simple1", "extra": {"foo": "bar", "from_rest_api": True}, "source_task_id": None, "source_dag_id": None, @@ -938,6 +953,9 @@ def test_should_mask_sensitive_extra(self, test_client, session): assert response.json() == { "id": mock.ANY, "asset_id": 1, + "uri": "s3://bucket/key/1", + "group": "asset", + "name": "simple1", "extra": {"password": "***", "from_rest_api": True}, "source_task_id": None, "source_dag_id": None, diff --git a/tests/api_fastapi/core_api/routes/public/test_dag_run.py b/tests/api_fastapi/core_api/routes/public/test_dag_run.py index fc171150534c2..dd32873b084c9 100644 --- a/tests/api_fastapi/core_api/routes/public/test_dag_run.py +++ b/tests/api_fastapi/core_api/routes/public/test_dag_run.py @@ -1016,8 +1016,11 @@ def test_should_respond_200(self, test_client, dag_maker, session): { "timestamp": from_datetime_to_zulu(event.timestamp), "asset_id": asset1_id, + "uri": "file:///da1", "extra": {}, "id": event.id, + "group": "asset", + "name": "ds1", "source_dag_id": ti.dag_id, "source_map_index": ti.map_index, "source_run_id": ti.run_id, diff --git a/tests/api_fastapi/execution_api/routes/test_task_instances.py b/tests/api_fastapi/execution_api/routes/test_task_instances.py index e3aef1505bc74..9ccd1b0d088a6 100644 --- a/tests/api_fastapi/execution_api/routes/test_task_instances.py +++ b/tests/api_fastapi/execution_api/routes/test_task_instances.py @@ -26,11 +26,12 @@ from sqlalchemy.exc import SQLAlchemyError from airflow.models import RenderedTaskInstanceFields, TaskReschedule, Trigger +from airflow.models.asset import AssetActive, AssetAliasModel, AssetEvent, AssetModel from airflow.models.taskinstance import TaskInstance from airflow.utils import timezone from airflow.utils.state import State, TaskInstanceState, TerminalTIState -from tests_common.test_utils.db import clear_db_runs, clear_rendered_ti_fields +from tests_common.test_utils.db import clear_db_assets, clear_db_runs, clear_rendered_ti_fields pytestmark = pytest.mark.db_test @@ -39,6 +40,19 @@ DEFAULT_END_DATE = timezone.parse("2024-10-31T12:00:00Z") +def _create_asset_aliases(session, num: int = 2) -> None: + asset_aliases = [ + AssetAliasModel( + id=i, + name=f"simple{i}", + group="alias", + ) + for i in range(1, 1 + num) + ] + session.add_all(asset_aliases) + session.commit() + + class TestTIRunState: def setup_method(self): clear_db_runs() @@ -267,6 +281,87 @@ def test_ti_update_state_to_terminal( assert ti.state == expected_state assert ti.end_date == end_date + @pytest.mark.parametrize( + ("task_outlets", "outlet_events"), + [ + ( + [{"name": "s3://bucket/my-task", "uri": "s3://bucket/my-task", "asset_type": "Asset"}], + [ + { + "key": {"name": "s3://bucket/my-task", "uri": "s3://bucket/my-task"}, + "extra": {}, + "asset_alias_events": [], + } + ], + ), + ( + [{"asset_type": 
"AssetAlias"}], + [ + { + "source_alias_name": "example-alias", + "dest_asset_key": {"name": "s3://bucket/my-task", "uri": "s3://bucket/my-task"}, + "extra": {}, + } + ], + ), + ], + ) + def test_ti_update_state_to_success_with_asset_events( + self, client, session, create_task_instance, task_outlets, outlet_events + ): + clear_db_assets() + clear_db_runs() + + asset = AssetModel( + id=1, + name="s3://bucket/my-task", + uri="s3://bucket/my-task", + group="asset", + extra={}, + ) + asset_active = AssetActive.for_asset(asset) + session.add_all([asset, asset_active]) + asset_type = task_outlets[0]["asset_type"] + if asset_type == "AssetAlias": + _create_asset_aliases(session, num=1) + asset_alias = session.query(AssetAliasModel).all() + assert len(asset_alias) == 1 + assert asset_alias == [AssetAliasModel(name="simple1")] + + ti = create_task_instance( + task_id="test_ti_update_state_to_success_with_asset_events", + start_date=DEFAULT_START_DATE, + state=State.RUNNING, + ) + session.commit() + + response = client.patch( + f"/execution/task-instances/{ti.id}/state", + json={ + "state": "success", + "end_date": DEFAULT_END_DATE.isoformat(), + "task_outlets": task_outlets, + "outlet_events": outlet_events, + }, + ) + + assert response.status_code == 204 + assert response.text == "" + session.expire_all() + + # check if asset was created properly + asset = session.query(AssetModel).all() + assert len(asset) == 1 + assert asset == [AssetModel(name="s3://bucket/my-task", uri="s3://bucket/my-task", extra={})] + + event = session.query(AssetEvent).all() + assert len(event) == 1 + assert event[0].asset_id == 1 + assert event[0].asset == AssetModel(name="s3://bucket/my-task", uri="s3://bucket/my-task", extra={}) + assert event[0].extra == {} + if asset_type == "AssetAlias": + assert event[0].source_aliases == [AssetAliasModel(name="example-alias")] + def test_ti_update_state_not_found(self, client, session): """ Test that a 404 error is returned when the Task Instance does not exist. @@ -319,13 +414,20 @@ def test_ti_update_state_database_error(self, client, session, create_task_insta "end_date": "2024-10-31T12:00:00Z", } - with mock.patch( - "airflow.api_fastapi.common.db.common.Session.execute", - side_effect=[ - mock.Mock(one=lambda: ("running", 1, 0)), # First call returns "queued" - SQLAlchemyError("Database error"), # Second call raises an error - ], + with ( + mock.patch( + "airflow.api_fastapi.common.db.common.Session.execute", + side_effect=[ + mock.Mock(one=lambda: ("running", 1, 0)), # First call returns "queued" + mock.Mock(one=lambda: ("running", 1, 0)), # Second call returns "queued" + SQLAlchemyError("Database error"), # Last call raises an error + ], + ), + mock.patch( + "airflow.models.taskinstance.TaskInstance.register_asset_changes_in_db", + ) as mock_register_asset_changes_in_db, ): + mock_register_asset_changes_in_db.return_value = None response = client.patch(f"/execution/task-instances/{ti.id}/state", json=payload) assert response.status_code == 500 assert response.json()["detail"] == "Database error occurred" diff --git a/tests/executors/test_executor_loader.py b/tests/executors/test_executor_loader.py index 87455bd841b3d..de6703954b10d 100644 --- a/tests/executors/test_executor_loader.py +++ b/tests/executors/test_executor_loader.py @@ -16,14 +16,13 @@ # under the License. 
 from __future__ import annotations

-from importlib import reload
 from unittest import mock

 import pytest

 from airflow.exceptions import AirflowConfigException
 from airflow.executors import executor_loader
-from airflow.executors.executor_loader import ConnectorSource, ExecutorLoader, ExecutorName
+from airflow.executors.executor_loader import ConnectorSource, ExecutorName
 from airflow.executors.local_executor import LocalExecutor
 from airflow.providers.amazon.aws.executors.ecs.ecs_executor import AwsEcsExecutor
 from airflow.providers.celery.executors.celery_executor import CeleryExecutor
@@ -35,24 +34,12 @@ class FakeExecutor:
     pass


+@pytest.mark.usefixtures("clean_executor_loader")
 class TestExecutorLoader:
-    def setup_method(self) -> None:
-        from airflow.executors import executor_loader
-
-        reload(executor_loader)
-        global ExecutorLoader
-        ExecutorLoader = executor_loader.ExecutorLoader  # type: ignore
-
-    def teardown_method(self) -> None:
-        from airflow.executors import executor_loader
-
-        reload(executor_loader)
-        ExecutorLoader.init_executors()
-
     def test_no_executor_configured(self):
         with conf_vars({("core", "executor"): None}):
             with pytest.raises(AirflowConfigException, match=r".*not found in config$"):
-                ExecutorLoader.get_default_executor()
+                executor_loader.ExecutorLoader.get_default_executor()

     @pytest.mark.parametrize(
         "executor_name",
         [
@@ -66,16 +53,18 @@ def test_no_executor_configured(self):
     )
     def test_should_support_executor_from_core(self, executor_name):
         with conf_vars({("core", "executor"): executor_name}):
-            executor = ExecutorLoader.get_default_executor()
+            executor = executor_loader.ExecutorLoader.get_default_executor()
             assert executor is not None
             assert executor_name == executor.__class__.__name__
             assert executor.name is not None
-            assert executor.name == ExecutorName(ExecutorLoader.executors[executor_name], alias=executor_name)
+            assert executor.name == ExecutorName(
+                executor_loader.ExecutorLoader.executors[executor_name], alias=executor_name
+            )
             assert executor.name.connector_source == ConnectorSource.CORE

     def test_should_support_custom_path(self):
         with conf_vars({("core", "executor"): "tests.executors.test_executor_loader.FakeExecutor"}):
-            executor = ExecutorLoader.get_default_executor()
+            executor = executor_loader.ExecutorLoader.get_default_executor()
             assert executor is not None
             assert executor.__class__.__name__ == "FakeExecutor"
             assert executor.name is not None
@@ -249,17 +238,17 @@ def test_get_hybrid_executors_from_config(
             "airflow.executors.executor_loader.ExecutorLoader._get_team_executor_configs",
             return_value=team_executor_config,
         ):
-            executors = ExecutorLoader._get_executor_names()
+            executors = executor_loader.ExecutorLoader._get_executor_names()
             assert executors == expected_executors_list

     def test_init_executors(self):
         with conf_vars({("core", "executor"): "CeleryExecutor"}):
-            executors = ExecutorLoader.init_executors()
-            executor_name = ExecutorLoader.get_default_executor_name()
+            executors = executor_loader.ExecutorLoader.init_executors()
+            executor_name = executor_loader.ExecutorLoader.get_default_executor_name()
             assert len(executors) == 1
             assert isinstance(executors[0], CeleryExecutor)
-            assert "CeleryExecutor" in ExecutorLoader.executors
-            assert ExecutorLoader.executors["CeleryExecutor"] == executor_name.module_path
+            assert "CeleryExecutor" in executor_loader.ExecutorLoader.executors
+            assert executor_loader.ExecutorLoader.executors["CeleryExecutor"] == executor_name.module_path

     @pytest.mark.parametrize(
         "executor_config",
@@ -276,7 +265,7 @@ def test_get_hybrid_executors_from_config_duplicates_should_fail(self, executor_
         with pytest.raises(
             AirflowConfigException, match=r".+Duplicate executors are not yet supported.+"
         ):
-            ExecutorLoader._get_executor_names()
+            executor_loader.ExecutorLoader._get_executor_names()
@@ -292,7 +281,7 @@ def test_get_hybrid_executors_from_config_duplicates_should_fail(self, executor_
     def test_get_hybrid_executors_from_config_core_executors_bad_config_format(self, executor_config):
         with conf_vars({("core", "executor"): executor_config}):
             with pytest.raises(AirflowConfigException):
-                ExecutorLoader._get_executor_names()
+                executor_loader.ExecutorLoader._get_executor_names()
@@ -308,7 +297,7 @@ def test_get_hybrid_executors_from_config_core_executors_bad_config_format(self,
     )
     def test_should_support_import_executor_from_core(self, executor_config, expected_value):
         with conf_vars({("core", "executor"): executor_config}):
-            executor, import_source = ExecutorLoader.import_default_executor_cls()
+            executor, import_source = executor_loader.ExecutorLoader.import_default_executor_cls()
             assert expected_value == executor.__name__
             assert import_source == ConnectorSource.CORE

@@ -322,26 +311,43 @@ def test_should_support_import_executor_from_core(self, executor_config, expecte
     )
     def test_should_support_import_custom_path(self, executor_config):
         with conf_vars({("core", "executor"): executor_config}):
-            executor, import_source = ExecutorLoader.import_default_executor_cls()
+            executor, import_source = executor_loader.ExecutorLoader.import_default_executor_cls()
             assert executor.__name__ == "FakeExecutor"
             assert import_source == ConnectorSource.CUSTOM_PATH

     def test_load_executor(self):
         with conf_vars({("core", "executor"): "LocalExecutor"}):
-            ExecutorLoader.init_executors()
-            assert isinstance(ExecutorLoader.load_executor("LocalExecutor"), LocalExecutor)
-            assert isinstance(ExecutorLoader.load_executor(executor_loader._executor_names[0]), LocalExecutor)
-            assert isinstance(ExecutorLoader.load_executor(None), LocalExecutor)
+            executor_loader.ExecutorLoader.init_executors()
+            assert isinstance(executor_loader.ExecutorLoader.load_executor("LocalExecutor"), LocalExecutor)
+            assert isinstance(
+                executor_loader.ExecutorLoader.load_executor(executor_loader._executor_names[0]),
+                LocalExecutor,
+            )
+            assert isinstance(executor_loader.ExecutorLoader.load_executor(None), LocalExecutor)

     def test_load_executor_alias(self):
         with conf_vars({("core", "executor"): "local_exec:airflow.executors.local_executor.LocalExecutor"}):
-            ExecutorLoader.init_executors()
-            assert isinstance(ExecutorLoader.load_executor("local_exec"), LocalExecutor)
+            executor_loader.ExecutorLoader.init_executors()
+            assert isinstance(executor_loader.ExecutorLoader.load_executor("local_exec"), LocalExecutor)
             assert isinstance(
-                ExecutorLoader.load_executor("airflow.executors.local_executor.LocalExecutor"),
+                executor_loader.ExecutorLoader.load_executor(
+                    "airflow.executors.local_executor.LocalExecutor"
+                ),
+                LocalExecutor,
+            )
+            assert isinstance(
+                executor_loader.ExecutorLoader.load_executor(executor_loader._executor_names[0]),
                 LocalExecutor,
             )
-            assert isinstance(ExecutorLoader.load_executor(executor_loader._executor_names[0]), LocalExecutor)
+
+    @mock.patch(
+        "airflow.executors.executor_loader.ExecutorLoader._get_executor_names",
+        wraps=executor_loader.ExecutorLoader._get_executor_names,
+    )
+    def test_call_load_executor_method_without_init_executors(self, mock_get_executor_names):
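+        # Without a prior init_executors() call, load_executor must resolve the
+        # configured executor names itself, so _get_executor_names is hit exactly once.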
+        with conf_vars({("core", "executor"): "LocalExecutor"}):
+            executor_loader.ExecutorLoader.load_executor("LocalExecutor")
+            mock_get_executor_names.assert_called_once()

     @mock.patch("airflow.providers.amazon.aws.executors.ecs.ecs_executor.AwsEcsExecutor", autospec=True)
     def test_load_custom_executor_with_classname(self, mock_executor):
@@ -353,15 +359,16 @@ def test_load_custom_executor_with_classname(self, mock_executor):
                 ): "my_alias:airflow.providers.amazon.aws.executors.ecs.ecs_executor.AwsEcsExecutor"
             }
         ):
-            ExecutorLoader.init_executors()
-            assert isinstance(ExecutorLoader.load_executor("my_alias"), AwsEcsExecutor)
-            assert isinstance(ExecutorLoader.load_executor("AwsEcsExecutor"), AwsEcsExecutor)
+            executor_loader.ExecutorLoader.init_executors()
+            assert isinstance(executor_loader.ExecutorLoader.load_executor("my_alias"), AwsEcsExecutor)
+            assert isinstance(executor_loader.ExecutorLoader.load_executor("AwsEcsExecutor"), AwsEcsExecutor)
             assert isinstance(
-                ExecutorLoader.load_executor(
+                executor_loader.ExecutorLoader.load_executor(
                     "airflow.providers.amazon.aws.executors.ecs.ecs_executor.AwsEcsExecutor"
                 ),
                 AwsEcsExecutor,
             )
             assert isinstance(
-                ExecutorLoader.load_executor(executor_loader._executor_names[0]), AwsEcsExecutor
+                executor_loader.ExecutorLoader.load_executor(executor_loader._executor_names[0]),
+                AwsEcsExecutor,
             )
diff --git a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py
index d982cf4b27107..7e6f1b2253e17 100644
--- a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py
+++ b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py
@@ -49,6 +49,7 @@ def side_effect(*args, **kwargs):
         yield m


+@pytest.mark.usefixtures("clean_executor_loader")
 class TestNotInReschedulePeriodDep:
     @pytest.fixture(autouse=True)
     def setup_test_cases(self, request, create_task_instance):
diff --git a/tests/utils/test_log_handlers.py b/tests/utils/test_log_handlers.py
index 454af48d66763..fda432e01d1af 100644
--- a/tests/utils/test_log_handlers.py
+++ b/tests/utils/test_log_handlers.py
@@ -33,7 +33,7 @@
 from requests.adapters import Response

 from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG
-from airflow.executors import executor_loader
+from airflow.executors import executor_constants, executor_loader
 from airflow.jobs.job import Job
 from airflow.jobs.triggerer_job_runner import TriggererJobRunner
 from airflow.models.dagrun import DagRun
@@ -187,6 +187,95 @@ def task_callable(ti):
         # Remove the generated tmp log file.
         os.remove(log_filename)

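+    # Each parametrized case pins the task instance to one executor (None means
+    # the configured default) and checks that the file task handler fetches task
+    # logs from exactly that executor.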
+    @pytest.mark.parametrize(
+        "executor_name",
+        [
+            (executor_constants.LOCAL_KUBERNETES_EXECUTOR),
+            (executor_constants.CELERY_KUBERNETES_EXECUTOR),
+            (executor_constants.KUBERNETES_EXECUTOR),
+            (None),
+        ],
+    )
+    @conf_vars(
+        {
+            ("core", "EXECUTOR"): ",".join(
+                [
+                    executor_constants.LOCAL_KUBERNETES_EXECUTOR,
+                    executor_constants.CELERY_KUBERNETES_EXECUTOR,
+                    executor_constants.KUBERNETES_EXECUTOR,
+                ]
+            ),
+        }
+    )
+    @patch(
+        "airflow.executors.executor_loader.ExecutorLoader.load_executor",
+        wraps=executor_loader.ExecutorLoader.load_executor,
+    )
+    @patch(
+        "airflow.executors.executor_loader.ExecutorLoader.get_default_executor",
+        wraps=executor_loader.ExecutorLoader.get_default_executor,
+    )
+    def test_file_task_handler_with_multiple_executors(
+        self,
+        mock_get_default_executor,
+        mock_load_executor,
+        executor_name,
+        create_task_instance,
+        clean_executor_loader,
+    ):
+        executors_mapping = executor_loader.ExecutorLoader.executors
+        default_executor_name = executor_loader.ExecutorLoader.get_default_executor_name()
+        path_to_executor_class: str
+        if executor_name is None:
+            path_to_executor_class = executors_mapping.get(default_executor_name.alias)
+        else:
+            path_to_executor_class = executors_mapping.get(executor_name)
+
+        with patch(f"{path_to_executor_class}.get_task_log", return_value=([], [])) as mock_get_task_log:
+            mock_get_task_log.return_value = ([], [])
+            ti = create_task_instance(
+                dag_id="dag_for_testing_multiple_executors",
+                task_id="task_for_testing_multiple_executors",
+                run_type=DagRunType.SCHEDULED,
+                logical_date=DEFAULT_DATE,
+            )
+            if executor_name is not None:
+                ti.executor = executor_name
+            ti.try_number = 1
+            ti.state = TaskInstanceState.RUNNING
+            logger = ti.log
+            ti.log.disabled = False
+
+            file_handler = next(
+                (handler for handler in logger.handlers if handler.name == FILE_TASK_HANDLER), None
+            )
+            assert file_handler is not None
+
+            set_context(logger, ti)
+            # clear executor_instances cache
+            file_handler.executor_instances = {}
+            assert file_handler.handler is not None
+            # We expect set_context to generate a file locally.
+            log_filename = file_handler.handler.baseFilename
+            assert os.path.isfile(log_filename)
+            assert log_filename.endswith("1.log"), log_filename
+
+            file_handler.flush()
+            file_handler.close()
+
+            assert hasattr(file_handler, "read")
+            file_handler.read(ti)
+            os.remove(log_filename)
+            mock_get_task_log.assert_called_once()
+
+            if executor_name is None:
+                mock_get_default_executor.assert_called_once()
+                # load_executor will be called inside the `ExecutorLoader.get_default_executor` method
+                mock_load_executor.assert_called_once_with(default_executor_name)
+            else:
+                mock_get_default_executor.assert_not_called()
+                mock_load_executor.assert_called_once_with(executor_name)
+
     def test_file_task_handler_running(self, dag_maker):
         def task_callable(ti):
             ti.log.info("test")
diff --git a/tests_common/pytest_plugin.py b/tests_common/pytest_plugin.py
index 1b68f039eaa17..969d0b2a61c8c 100644
--- a/tests_common/pytest_plugin.py
+++ b/tests_common/pytest_plugin.py
@@ -1567,6 +1567,19 @@ def clean_dags_and_dagruns():
         clear_db_runs()


+@pytest.fixture
+def clean_executor_loader():
+    """Clean the executor_loader state, as it stores global variables in the module, causing side effects for some tests."""
+    from airflow.executors.executor_loader import ExecutorLoader
+
+    from tests_common.test_utils.executor_loader import clean_executor_loader_module
+
+    clean_executor_loader_module()
+    yield  # Test runs here
+    clean_executor_loader_module()
+    ExecutorLoader.init_executors()
+
+
 @pytest.fixture(scope="session")
 def app():
     from tests_common.test_utils.config import conf_vars
diff --git a/tests_common/test_utils/executor_loader.py b/tests_common/test_utils/executor_loader.py
new file mode 100644
index 0000000000000..f7dd98b726428
--- /dev/null
+++ b/tests_common/test_utils/executor_loader.py
@@ -0,0 +1,34 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import airflow.executors.executor_loader as executor_loader
+
+if TYPE_CHECKING:
+    from airflow.executors.executor_utils import ExecutorName
+
+
+def clean_executor_loader_module():
+    """Clean the executor_loader state, as it stores global variables in the module, causing side effects for some tests."""
+    executor_loader._alias_to_executors: dict[str, ExecutorName] = {}
+    executor_loader._module_to_executors: dict[str, ExecutorName] = {}
+    executor_loader._team_id_to_executors: dict[str | None, ExecutorName] = {}
+    executor_loader._classname_to_executors: dict[str, ExecutorName] = {}
+    executor_loader._executor_names: list[ExecutorName] = []
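+    # NOTE: this leaves the loader with no executors registered; callers such as
+    # the `clean_executor_loader` fixture in tests_common/pytest_plugin.py repopulate
+    # the caches afterwards via `ExecutorLoader.init_executors()`.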