From 63ff22f4038f34354dc5807036d1bf10653c2ecd Mon Sep 17 00:00:00 2001 From: Jens Scheffler <95105677+jscheffl@users.noreply.github.com> Date: Tue, 8 Oct 2024 12:00:11 +0200 Subject: [PATCH] Drop python3.8 support core and providers (#42766) * Drop Python 3.8 support in core Add newsfragment * Drop Python 3.8 support in provider packages --- .github/actions/breeze/action.yml | 3 - .github/workflows/build-images.yml | 18 +- .github/workflows/check-providers.yml | 5 +- .pre-commit-config.yaml | 7 - .readthedocs.yml | 2 +- Dockerfile | 2 +- Dockerfile.ci | 4 +- INSTALL | 12 +- README.md | 8 +- airflow/cli/commands/connection_command.py | 2 +- airflow/compat/functools.py | 33 -- airflow/configuration.py | 4 +- airflow/io/__init__.py | 2 +- airflow/models/taskinstance.py | 2 +- airflow/operators/python.py | 2 +- .../MANAGING_PROVIDERS_LIFECYCLE.rst | 2 +- .../amazon/aws/transfers/sql_to_s3.py | 9 +- airflow/providers/amazon/aws/utils/mixins.py | 3 +- airflow/providers/cloudant/provider.yaml | 3 +- .../kubernetes/kubernetes_helper_functions.py | 2 +- airflow/providers/common/io/xcom/backend.py | 21 +- airflow/providers/openlineage/conf.py | 2 +- airflow/serialization/serialized_objects.py | 2 +- airflow/serialization/serializers/timezone.py | 16 +- airflow/utils/log/secrets_masker.py | 3 +- airflow/utils/platform.py | 3 +- airflow/www/forms.py | 2 +- airflow/www/views.py | 5 +- clients/python/README.md | 2 +- clients/python/pyproject.toml | 5 +- clients/python/test_python_client.py | 2 +- constraints/README.md | 6 +- .../03_contributors_quick_start.rst | 49 ++- contributing-docs/05_pull_requests.rst | 2 +- contributing-docs/07_local_virtualenv.rst | 14 +- contributing-docs/08_static_code_checks.rst | 4 +- .../12_airflow_dependencies_and_extras.rst | 8 +- .../testing/docker_compose_tests.rst | 8 +- contributing-docs/testing/k8s_tests.rst | 84 ++--- contributing-docs/testing/unit_tests.rst | 12 +- dev/README_RELEASE_AIRFLOW.md | 8 +- dev/README_RELEASE_PROVIDER_PACKAGES.md | 2 +- dev/breeze/README.md | 2 +- dev/breeze/doc/01_installation.rst | 6 +- dev/breeze/doc/03_developer_tasks.rst | 10 +- dev/breeze/doc/06_managing_docker_images.rst | 4 +- dev/breeze/doc/10_advanced_breeze_topics.rst | 6 +- ...002-implement-standalone-python-command.md | 2 +- dev/breeze/doc/ci/02_images.md | 63 ++-- dev/breeze/doc/ci/04_selective_checks.md | 12 +- dev/breeze/doc/ci/08_running_ci_locally.md | 8 +- dev/breeze/doc/images/output-commands.svg | 4 +- .../doc/images/output_ci-image_build.svg | 6 +- .../doc/images/output_ci-image_build.txt | 2 +- .../doc/images/output_ci-image_pull.svg | 64 ++-- .../doc/images/output_ci-image_pull.txt | 2 +- .../doc/images/output_ci-image_verify.svg | 6 +- .../doc/images/output_ci-image_verify.txt | 2 +- .../doc/images/output_k8s_build-k8s-image.svg | 6 +- .../doc/images/output_k8s_build-k8s-image.txt | 2 +- .../images/output_k8s_configure-cluster.svg | 60 ++-- .../images/output_k8s_configure-cluster.txt | 2 +- .../doc/images/output_k8s_create-cluster.svg | 6 +- .../doc/images/output_k8s_create-cluster.txt | 2 +- .../doc/images/output_k8s_delete-cluster.svg | 34 +- .../doc/images/output_k8s_delete-cluster.txt | 2 +- .../doc/images/output_k8s_deploy-airflow.svg | 6 +- .../doc/images/output_k8s_deploy-airflow.txt | 2 +- dev/breeze/doc/images/output_k8s_k9s.svg | 32 +- dev/breeze/doc/images/output_k8s_k9s.txt | 2 +- dev/breeze/doc/images/output_k8s_logs.svg | 34 +- dev/breeze/doc/images/output_k8s_logs.txt | 2 +- .../images/output_k8s_run-complete-tests.svg | 70 ++-- 
.../images/output_k8s_run-complete-tests.txt | 2 +- dev/breeze/doc/images/output_k8s_shell.svg | 42 ++- dev/breeze/doc/images/output_k8s_shell.txt | 2 +- dev/breeze/doc/images/output_k8s_status.svg | 4 +- dev/breeze/doc/images/output_k8s_status.txt | 2 +- dev/breeze/doc/images/output_k8s_tests.svg | 70 ++-- dev/breeze/doc/images/output_k8s_tests.txt | 2 +- .../images/output_k8s_upload-k8s-image.svg | 58 ++-- .../images/output_k8s_upload-k8s-image.txt | 2 +- .../doc/images/output_prod-image_build.svg | 6 +- .../doc/images/output_prod-image_build.txt | 2 +- .../doc/images/output_prod-image_pull.svg | 64 ++-- .../doc/images/output_prod-image_pull.txt | 2 +- .../doc/images/output_prod-image_verify.svg | 6 +- .../doc/images/output_prod-image_verify.txt | 2 +- ...elease-management_generate-constraints.svg | 6 +- ...elease-management_generate-constraints.txt | 2 +- ...e-management_install-provider-packages.svg | 126 ++++--- ...e-management_install-provider-packages.txt | 2 +- ...release-management_release-prod-images.svg | 2 +- ...release-management_release-prod-images.txt | 2 +- ...se-management_verify-provider-packages.svg | 132 ++++---- ...se-management_verify-provider-packages.txt | 2 +- ...put_sbom_export-dependency-information.svg | 4 +- ...put_sbom_export-dependency-information.txt | 2 +- dev/breeze/doc/images/output_setup_config.svg | 4 +- dev/breeze/doc/images/output_setup_config.txt | 2 +- dev/breeze/doc/images/output_shell.svg | 4 +- dev/breeze/doc/images/output_shell.txt | 2 +- .../doc/images/output_start-airflow.svg | 4 +- .../doc/images/output_start-airflow.txt | 2 +- .../doc/images/output_static-checks.svg | 26 +- .../doc/images/output_static-checks.txt | 2 +- .../doc/images/output_testing_db-tests.svg | 4 +- .../doc/images/output_testing_db-tests.txt | 2 +- .../output_testing_docker-compose-tests.svg | 4 +- .../output_testing_docker-compose-tests.txt | 2 +- .../output_testing_integration-tests.svg | 4 +- .../output_testing_integration-tests.txt | 2 +- .../images/output_testing_non-db-tests.svg | 4 +- .../images/output_testing_non-db-tests.txt | 2 +- .../doc/images/output_testing_tests.svg | 4 +- .../doc/images/output_testing_tests.txt | 2 +- dev/breeze/pyproject.toml | 3 +- .../airflow_breeze/commands/ci_commands.py | 3 +- .../commands/developer_commands.py | 2 +- .../commands/minor_release_command.py | 2 +- .../commands/release_candidate_command.py | 2 +- .../commands/release_management_commands.py | 3 +- .../src/airflow_breeze/global_constants.py | 28 +- .../params/common_build_params.py | 2 +- .../src/airflow_breeze/pre_commit_ids.py | 1 - .../templates/pyproject_TEMPLATE.toml.jinja2 | 2 +- .../src/airflow_breeze/utils/backtracking.py | 2 +- .../src/airflow_breeze/utils/black_utils.py | 4 +- dev/breeze/src/airflow_breeze/utils/cdxgen.py | 3 +- .../src/airflow_breeze/utils/coertions.py | 2 +- .../src/airflow_breeze/utils/console.py | 6 +- .../utils/custom_param_types.py | 3 +- .../src/airflow_breeze/utils/packages.py | 5 +- .../src/airflow_breeze/utils/parallel.py | 3 +- .../src/airflow_breeze/utils/path_utils.py | 4 +- .../src/airflow_breeze/utils/run_utils.py | 7 +- .../airflow_breeze/utils/selective_checks.py | 8 +- .../airflow_breeze/utils/virtualenv_utils.py | 2 +- dev/breeze/tests/test_cache.py | 4 +- dev/breeze/tests/test_packages.py | 4 +- dev/breeze/tests/test_run_test_args.py | 6 +- dev/breeze/tests/test_selective_checks.py | 315 +++++++++--------- dev/breeze/tests/test_shell_params.py | 8 +- dev/check_files.py | 2 +- dev/refresh_images.sh | 4 +- 
dev/retag_docker_images.py | 2 +- docker_tests/constants.py | 2 +- docker_tests/docker_utils.py | 4 +- .../executors/general.rst | 6 +- .../installing-from-pypi.rst | 2 +- .../modules_management.rst | 26 +- docs/apache-airflow/extra-packages-ref.rst | 2 +- .../installation/installing-from-pypi.rst | 24 +- .../installation/prerequisites.rst | 2 +- .../installation/supported-versions.rst | 4 +- docs/apache-airflow/start.rst | 4 +- docs/docker-stack/README.md | 8 +- docs/docker-stack/build-arg-ref.rst | 2 +- docs/docker-stack/build.rst | 20 +- .../customizing/add-build-essential-custom.sh | 2 +- .../customizing/custom-sources.sh | 2 +- .../github-different-repository.sh | 2 +- .../customizing/github-main.sh | 2 +- .../customizing/github-v2-2-test.sh | 2 +- .../customizing/pypi-dev-runtime-deps.sh | 2 +- .../customizing/pypi-extras-and-deps.sh | 2 +- .../customizing/pypi-selected-version.sh | 2 +- .../restricted/restricted_environments.sh | 10 +- docs/docker-stack/entrypoint.rst | 14 +- docs/docker-stack/index.rst | 8 +- generated/PYPI_README.md | 6 +- generated/provider_dependencies.json | 1 - hatch_build.py | 2 +- .../test_kubernetes_pod_operator.py | 2 +- newsfragments/42739.significant.rst | 1 + pyproject.toml | 13 +- scripts/ci/docker-compose/devcontainer.env | 4 +- scripts/ci/docker-compose/devcontainer.yml | 2 +- scripts/ci/kubernetes/k8s_requirements.txt | 2 +- .../ci/pre_commit/common_precommit_utils.py | 6 +- .../ci/pre_commit/compat_cache_on_methods.py | 69 ---- scripts/ci/pre_commit/mypy.py | 2 +- scripts/ci/pre_commit/mypy_folder.py | 2 +- scripts/ci/pre_commit/sync_init_decorator.py | 11 +- .../pre_commit/update_build_dependencies.py | 4 +- scripts/docker/entrypoint_ci.sh | 2 +- .../install_airflow_and_providers.py | 2 +- .../in_container/run_generate_constraints.py | 4 +- .../providers/cloudant/hooks/test_cloudant.py | 2 +- .../google/cloud/operators/test_mlengine.py | 2 +- .../google/cloud/triggers/test_mlengine.py | 2 +- .../example_kubernetes_decorator.py | 4 +- tests/www/views/test_views.py | 4 +- 193 files changed, 998 insertions(+), 1215 deletions(-) delete mode 100644 airflow/compat/functools.py create mode 100644 newsfragments/42739.significant.rst delete mode 100755 scripts/ci/pre_commit/compat_cache_on_methods.py diff --git a/.github/actions/breeze/action.yml b/.github/actions/breeze/action.yml index 164914c3d525b..69ebcc7c66e6e 100644 --- a/.github/actions/breeze/action.yml +++ b/.github/actions/breeze/action.yml @@ -21,9 +21,6 @@ description: 'Sets up Python and Breeze' inputs: python-version: description: 'Python version to use' - # Version of Python used for reproducibility of the packages built - # Python 3.8 tarfile produces different tarballs than Python 3.9+ tarfile that's why we are forcing - # Python 3.9 for all release preparation commands to make sure that the tarballs are reproducible default: "3.9" outputs: host-python-version: diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index abf966faede02..6c6d55d75045e 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -16,7 +16,7 @@ # under the License. 
# --- -name: "Build Images" +name: Build Images run-name: > Build images for ${{ github.event.pull_request.title }} ${{ github.event.pull_request._links.html.href }} on: # yamllint disable-line rule:truthy @@ -54,7 +54,7 @@ concurrency: jobs: build-info: timeout-minutes: 10 - name: "Build Info" + name: Build Info # At build-info stage we do not yet have outputs so we need to hard-code the runs-on to public runners runs-on: ["ubuntu-22.04"] env: @@ -89,7 +89,7 @@ jobs: }}" if: github.repository == 'apache/airflow' steps: - - name: "Cleanup repo" + - name: Cleanup repo shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: Discover PR merge commit @@ -154,13 +154,13 @@ jobs: # COMPOSITE ACTIONS. WE CAN RUN ANYTHING THAT IS IN THE TARGET BRANCH AND THERE IS NO RISK THAT # CODE WILL BE RUN FROM THE PR. #################################################################################################### - - name: "Cleanup docker" + - name: Cleanup docker run: ./scripts/ci/cleanup_docker.sh - - name: "Setup python" + - name: Setup python uses: actions/setup-python@v5 with: - python-version: 3.8 - - name: "Install Breeze" + python-version: "3.9" + - name: Install Breeze uses: ./.github/actions/breeze #################################################################################################### # WE RUN SELECTIVE CHECKS HERE USING THE TARGET COMMIT AND ITS PARENT TO BE ABLE TO COMPARE THEM @@ -212,7 +212,7 @@ jobs: docker-cache: ${{ needs.build-info.outputs.docker-cache }} generate-constraints: - name: "Generate constraints" + name: Generate constraints needs: [build-info, build-ci-images] uses: ./.github/workflows/generate-constraints.yml with: @@ -247,7 +247,7 @@ jobs: push-image: "true" use-uv: "true" image-tag: ${{ needs.build-info.outputs.image-tag }} - platform: "linux/amd64" + platform: linux/amd64 python-versions: ${{ needs.build-info.outputs.python-versions }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} branch: ${{ needs.build-info.outputs.default-branch }} diff --git a/.github/workflows/check-providers.yml b/.github/workflows/check-providers.yml index b394f7927329a..f5ff95b73c2f7 100644 --- a/.github/workflows/check-providers.yml +++ b/.github/workflows/check-providers.yml @@ -108,10 +108,9 @@ jobs: run: > breeze release-management generate-issue-content-providers --only-available-in-dist --disable-progress - - name: > - Remove incompatible Python ${{ matrix.python-version }} provider packages + - name: Remove Python 3.9-incompatible provider packages run: | - echo "Removing Python 3.8-incompatible provider: cloudant" + echo "Removing Python 3.9-incompatible provider: cloudant" rm -vf dist/apache_airflow_providers_cloudant* - name: "Generate source constraints from CI image" shell: bash diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2ad8c6a959cec..e33038432b541 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1066,13 +1066,6 @@ repos: files: \.py$ exclude: ^.*/.*_vendor/ additional_dependencies: ['rich>=12.4.4'] - - id: check-compat-cache-on-methods - name: Check that compat cache do not use on class methods - entry: ./scripts/ci/pre_commit/compat_cache_on_methods.py - language: python - pass_filenames: true - files: ^airflow/.*\.py$ - exclude: ^.*/.*_vendor/ - id: check-code-deprecations name: Check deprecations categories in decorators entry: ./scripts/ci/pre_commit/check_deprecations.py diff --git a/.readthedocs.yml b/.readthedocs.yml index 
aa16e3a8e3d57..4aa28ee78b036 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -20,7 +20,7 @@ formats: [] sphinx: configuration: docs/rtd-deprecation/conf.py python: - version: "3.8" + version: "3.9" install: - method: pip path: . diff --git a/Dockerfile b/Dockerfile index cfb894ac87d22..4525a717728a8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -47,7 +47,7 @@ ARG AIRFLOW_USER_HOME_DIR=/home/airflow # latest released version here ARG AIRFLOW_VERSION="2.10.2" -ARG PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" +ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" ARG AIRFLOW_PIP_VERSION=24.2 ARG AIRFLOW_UV_VERSION=0.4.17 diff --git a/Dockerfile.ci b/Dockerfile.ci index f7b7bb4172025..304d2a4a2d46d 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -16,7 +16,7 @@ # # WARNING: THIS DOCKERFILE IS NOT INTENDED FOR PRODUCTION USE OR DEPLOYMENT. # -ARG PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" +ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" ############################################################################################## # This is the script image where we keep all inlined bash scripts needed in other segments @@ -810,7 +810,7 @@ chmod 1777 /tmp AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd) -PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.8} +PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.9} export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}} diff --git a/INSTALL b/INSTALL index 5ccabe2ff3270..6583d9de44206 100644 --- a/INSTALL +++ b/INSTALL @@ -141,9 +141,7 @@ This is what it shows currently: ┏━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ ┃ Name ┃ Type ┃ Description ┃ ┡━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ -│ default │ virtual │ Default environment with Python 3.8 for maximum compatibility │ -├─────────────┼─────────┼───────────────────────────────────────────────────────────────┤ -│ airflow-38 │ virtual │ Environment with Python 3.8. No devel installed. │ +│ default │ virtual │ Default environment with Python 3.9 for maximum compatibility │ ├─────────────┼─────────┼───────────────────────────────────────────────────────────────┤ │ airflow-39 │ virtual │ Environment with Python 3.9. No devel installed. │ ├─────────────┼─────────┼───────────────────────────────────────────────────────────────┤ @@ -154,7 +152,7 @@ This is what it shows currently: │ airflow-312 │ virtual │ Environment with Python 3.12. No devel installed │ └─────────────┴─────────┴───────────────────────────────────────────────────────────────┘ -The default env (if you have not used one explicitly) is `default` and it is a Python 3.8 +The default env (if you have not used one explicitly) is `default` and it is a Python 3.9 virtualenv for maximum compatibility with `devel` extra installed - this devel extra contains the minimum set of dependencies and tools that should be used during unit testing of core Airflow and running all `airflow` CLI commands - without support for providers or databases. @@ -228,15 +226,15 @@ to avoid "works-for-me" syndrome, where you use different versions of dependenci that are used in main CI tests and by other contributors. There are different constraint files for different Python versions. 
For example, this command will install -all basic devel requirements and requirements of Google provider as last successfully tested for Python 3.8: +all basic devel requirements and requirements of Google provider as last successfully tested for Python 3.9: pip install -e ".[devel,google]"" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt" Using the 'constraints-no-providers' constraint files, you can upgrade Airflow without paying attention to the provider's dependencies. This allows you to keep installed provider dependencies and install the latest supported ones using pure Airflow core. pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt" Airflow extras ============== diff --git a/README.md b/README.md index 3cd6416e93405..0419ae0456070 100644 --- a/README.md +++ b/README.md @@ -99,7 +99,7 @@ Apache Airflow is tested with: | | Main version (dev) | Stable version (2.10.2) | |------------|----------------------------|----------------------------| -| Python | 3.8, 3.9, 3.10, 3.11, 3.12 | 3.8, 3.9, 3.10, 3.11, 3.12 | +| Python | 3.9, 3.10, 3.11, 3.12 | 3.8, 3.9, 3.10, 3.11, 3.12 | | Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) | | Kubernetes | 1.28, 1.29, 1.30, 1.31 | 1.27, 1.28, 1.29, 1.30 | | PostgreSQL | 12, 13, 14, 15, 16, 17 | 12, 13, 14, 15, 16 | @@ -178,14 +178,14 @@ them to the appropriate format and workflow that your tool requires. ```bash pip install 'apache-airflow==2.10.2' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.9.txt" ``` 2. Installing with extras (i.e., postgres, google) ```bash pip install 'apache-airflow[postgres,google]==2.10.2' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.9.txt" ``` For information on installing provider packages, check @@ -313,7 +313,7 @@ They are based on the official release schedule of Python and Kubernetes, nicely 1. We drop support for Python and Kubernetes versions when they reach EOL. Except for Kubernetes, a version stays supported by Airflow if two major cloud providers still provide support for it. We drop support for those EOL versions in main right after EOL date, and it is effectively removed when we release - the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.8 it + the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.9 it means that we will drop support in main right after 27.06.2023, and the first MAJOR or MINOR version of Airflow released after will not have it. 
diff --git a/airflow/cli/commands/connection_command.py b/airflow/cli/commands/connection_command.py index f68830c490bc8..aace3f9c9aede 100644 --- a/airflow/cli/commands/connection_command.py +++ b/airflow/cli/commands/connection_command.py @@ -21,6 +21,7 @@ import json import os import warnings +from functools import cache from pathlib import Path from typing import Any from urllib.parse import urlsplit, urlunsplit @@ -30,7 +31,6 @@ from airflow.cli.simple_table import AirflowConsole from airflow.cli.utils import is_stdout, print_export_output -from airflow.compat.functools import cache from airflow.configuration import conf from airflow.exceptions import AirflowNotFoundException from airflow.hooks.base import BaseHook diff --git a/airflow/compat/functools.py b/airflow/compat/functools.py deleted file mode 100644 index 7b521f9f9da2e..0000000000000 --- a/airflow/compat/functools.py +++ /dev/null @@ -1,33 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import sys - -if sys.version_info >= (3, 9): - from functools import cache -else: - from functools import lru_cache - - cache = lru_cache(maxsize=None) - -# We need to keep it around, in case it was used in the code of old providers, but since we are -# Python 3.8+ we can directly import the functools one -from functools import cached_property # type: ignore - -__all__ = ["cache", "cached_property"] diff --git a/airflow/configuration.py b/airflow/configuration.py index f50e19268380b..81dc18365392e 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -1790,9 +1790,7 @@ def load_providers_configuration(self): ) self._default_values = create_default_config_parser(self.configuration_description) # sensitive_config_values needs to be refreshed here. This is a cached_property, so we can delete - # the cached values, and it will be refreshed on next access. This has been an implementation - # detail in Python 3.8 but as of Python 3.9 it is documented behaviour. - # See https://docs.python.org/3/library/functools.html#functools.cached_property + # the cached values, and it will be refreshed on next access. 
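With Python 3.9 as the minimum version, the ``airflow.compat.functools`` shim deleted above is no longer needed: ``functools.cache`` and ``functools.cached_property`` come straight from the standard library, and a ``cached_property`` can be invalidated by deleting the attribute, which is what the ``configuration.py`` change around this point relies on. A minimal sketch of both patterns (class and function names here are illustrative only, not part of the patch):

    from functools import cache, cached_property


    @cache  # stdlib memoization, previously imported from airflow.compat.functools
    def expensive_lookup(key: str) -> str:
        return key.upper()


    class Config:
        @cached_property
        def sensitive_values(self) -> set[str]:
            # computed once on first access, then stored on the instance
            return {"password", "secret"}

        def reload(self) -> None:
            # deleting the attribute drops the cached value; next access recomputes it
            try:
                del self.sensitive_values
            except AttributeError:
                pass  # property was never accessed, nothing cached yet


    cfg = Config()
    assert cfg.sensitive_values == {"password", "secret"}
    cfg.reload()  # cache cleared, recomputed on next access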
try: del self.sensitive_config_values except AttributeError: diff --git a/airflow/io/__init__.py b/airflow/io/__init__.py index 9996a77717ae0..49f2711c3c6cd 100644 --- a/airflow/io/__init__.py +++ b/airflow/io/__init__.py @@ -18,6 +18,7 @@ import inspect import logging +from functools import cache from typing import ( TYPE_CHECKING, Callable, @@ -26,7 +27,6 @@ from fsspec.implementations.local import LocalFileSystem -from airflow.compat.functools import cache from airflow.providers_manager import ProvidersManager from airflow.stats import Stats from airflow.utils.module_loading import import_string diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 31f7b6535a9c2..e75ad83923bf2 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -31,6 +31,7 @@ from contextlib import nullcontext from datetime import timedelta from enum import Enum +from functools import cache from typing import TYPE_CHECKING, Any, Callable, Collection, Generator, Iterable, Mapping, Tuple from urllib.parse import quote @@ -69,7 +70,6 @@ from airflow.api_internal.internal_api_call import InternalApiConfig, internal_api_call from airflow.assets import Asset, AssetAlias from airflow.assets.manager import asset_manager -from airflow.compat.functools import cache from airflow.configuration import conf from airflow.exceptions import ( AirflowException, diff --git a/airflow/operators/python.py b/airflow/operators/python.py index a4788caedf438..b032b45ed3e6e 100644 --- a/airflow/operators/python.py +++ b/airflow/operators/python.py @@ -30,13 +30,13 @@ import warnings from abc import ABCMeta, abstractmethod from collections.abc import Container +from functools import cache from pathlib import Path from tempfile import TemporaryDirectory from typing import TYPE_CHECKING, Any, Callable, Collection, Iterable, Mapping, NamedTuple, Sequence import lazy_object_proxy -from airflow.compat.functools import cache from airflow.exceptions import ( AirflowConfigException, AirflowException, diff --git a/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst b/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst index 48980f2153cd0..3d3e95c28b172 100644 --- a/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst +++ b/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst @@ -454,7 +454,7 @@ If you have pre-commit installed, pre-commit will be run automatically on commit manually after commit, you can run it via ``breeze static-checks --last-commit`` some of the tests might fail because suspension of the provider might cause changes in the dependencies, so if you see errors about missing dependencies imports, non-usable classes etc., you will need to build the CI image locally -via ``breeze build-image --python 3.8 --upgrade-to-newer-dependencies`` after the first pre-commit run +via ``breeze build-image --python 3.9 --upgrade-to-newer-dependencies`` after the first pre-commit run and then run the static checks again. 
If you want to be absolutely sure to run all static checks you can always do this via diff --git a/airflow/providers/amazon/aws/transfers/sql_to_s3.py b/airflow/providers/amazon/aws/transfers/sql_to_s3.py index 65e40797a59b1..19bc7f016b186 100644 --- a/airflow/providers/amazon/aws/transfers/sql_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/sql_to_s3.py @@ -223,12 +223,9 @@ def _partition_dataframe(self, df: pd.DataFrame) -> Iterable[tuple[str, pd.DataF for group_label in (grouped_df := df.groupby(**self.groupby_kwargs)).groups: yield ( cast(str, group_label), - cast( - "pd.DataFrame", - grouped_df.get_group(group_label) - .drop(random_column_name, axis=1, errors="ignore") - .reset_index(drop=True), - ), + grouped_df.get_group(group_label) + .drop(random_column_name, axis=1, errors="ignore") + .reset_index(drop=True), ) def _get_hook(self) -> DbApiHook: diff --git a/airflow/providers/amazon/aws/utils/mixins.py b/airflow/providers/amazon/aws/utils/mixins.py index 9dbbde914874c..2fb80bcb1517c 100644 --- a/airflow/providers/amazon/aws/utils/mixins.py +++ b/airflow/providers/amazon/aws/utils/mixins.py @@ -27,12 +27,11 @@ from __future__ import annotations -from functools import cached_property +from functools import cache, cached_property from typing import Any, Generic, NamedTuple, TypeVar from typing_extensions import final -from airflow.compat.functools import cache from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook AwsHookType = TypeVar("AwsHookType", bound=AwsGenericHook) diff --git a/airflow/providers/cloudant/provider.yaml b/airflow/providers/cloudant/provider.yaml index 09857936b877b..a2de5b4335468 100644 --- a/airflow/providers/cloudant/provider.yaml +++ b/airflow/providers/cloudant/provider.yaml @@ -51,10 +51,9 @@ dependencies: excluded-python-versions: # ibmcloudant transitively brings in urllib3 2.x, but the snowflake provider has a dependency that pins - # urllib3 to 1.x on Python 3.8 and 3.9; thus we exclude those Python versions from taking the update + # urllib3 to 1.x on Python 3.9; thus we exclude those Python versions from taking the update # to ibmcloudant. 
# See #21004, #41555, and https://github.com/snowflakedb/snowflake-connector-python/issues/2016 - - "3.8" - "3.9" integrations: diff --git a/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py b/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py index d337caf14c113..c603f8a178b37 100644 --- a/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py +++ b/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py @@ -19,6 +19,7 @@ import logging import secrets import string +from functools import cache from typing import TYPE_CHECKING import pendulum @@ -26,7 +27,6 @@ from kubernetes.client.rest import ApiException from slugify import slugify -from airflow.compat.functools import cache from airflow.configuration import conf from airflow.exceptions import AirflowProviderDeprecationWarning diff --git a/airflow/providers/common/io/xcom/backend.py b/airflow/providers/common/io/xcom/backend.py index af55baa4c0628..256b503181e0e 100644 --- a/airflow/providers/common/io/xcom/backend.py +++ b/airflow/providers/common/io/xcom/backend.py @@ -19,12 +19,13 @@ import contextlib import json import uuid +from functools import cache +from pathlib import Path from typing import TYPE_CHECKING, Any, TypeVar from urllib.parse import urlsplit import fsspec.utils -from airflow.compat.functools import cache from airflow.configuration import conf from airflow.io.path import ObjectStoragePath from airflow.models.xcom import BaseXCom @@ -40,22 +41,6 @@ SECTION = "common.io" -def _is_relative_to(o: ObjectStoragePath, other: ObjectStoragePath) -> bool: - """ - Return whether or not this path is relative to the other path. - - This is a port of the pathlib.Path.is_relative_to method. It is not available in python 3.8. - """ - if hasattr(o, "is_relative_to"): - return o.is_relative_to(other) - - try: - o.relative_to(other) - return True - except ValueError: - return False - - def _get_compression_suffix(compression: str) -> str: """ Return the compression suffix for the given compression. 
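The ``_is_relative_to`` helper removed above was only a backport of ``PurePath.is_relative_to``, which the standard library provides from Python 3.9 onwards, so the call site in the next hunk switches to the stdlib method directly. A small sketch of the behaviour being relied on, using made-up paths purely for illustration:

    from pathlib import PurePosixPath

    base = PurePosixPath("/opt/airflow/xcom")
    inside = PurePosixPath("/opt/airflow/xcom/dag_id/run_id/value.json")
    outside = PurePosixPath("/tmp/other")

    # PurePath.is_relative_to() exists from Python 3.9 onwards
    assert inside.is_relative_to(base)
    assert not outside.is_relative_to(base)

    # the removed helper emulated the same check with relative_to() for Python 3.8
    try:
        outside.relative_to(base)
        is_rel = True
    except ValueError:
        is_rel = False
    assert is_rel is False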
@@ -111,7 +96,7 @@ def _get_full_path(data: str) -> ObjectStoragePath: raise TypeError(f"Not a valid url: {data}") from None if url.scheme: - if not _is_relative_to(ObjectStoragePath(data), p): + if not Path.is_relative_to(ObjectStoragePath(data), p): raise ValueError(f"Invalid key: {data}") return p / data.replace(str(p), "", 1).lstrip("/") diff --git a/airflow/providers/openlineage/conf.py b/airflow/providers/openlineage/conf.py index b0c763b280a46..9c0253bbab408 100644 --- a/airflow/providers/openlineage/conf.py +++ b/airflow/providers/openlineage/conf.py @@ -35,7 +35,7 @@ def decorator(func): cache = decorator else: - from airflow.compat.functools import cache + from functools import cache from airflow.configuration import conf _CONFIG_SECTION = "openlineage" diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py index a4801b767acc5..9f180c2a5deac 100644 --- a/airflow/serialization/serialized_objects.py +++ b/airflow/serialization/serialized_objects.py @@ -24,6 +24,7 @@ import inspect import logging import weakref +from functools import cache from inspect import signature from textwrap import dedent from typing import TYPE_CHECKING, Any, Collection, Iterable, Mapping, NamedTuple, Union, cast @@ -43,7 +44,6 @@ _AssetAliasCondition, ) from airflow.callbacks.callback_requests import DagCallbackRequest, TaskCallbackRequest -from airflow.compat.functools import cache from airflow.exceptions import AirflowException, SerializationError, TaskDeferred from airflow.jobs.job import Job from airflow.models import Trigger diff --git a/airflow/serialization/serializers/timezone.py b/airflow/serialization/serializers/timezone.py index a1f40e67c6972..3d2a29ea08bc4 100644 --- a/airflow/serialization/serializers/timezone.py +++ b/airflow/serialization/serializers/timezone.py @@ -18,7 +18,6 @@ from __future__ import annotations import datetime -import sys from typing import TYPE_CHECKING, Any, cast from airflow.utils.module_loading import qualname @@ -30,15 +29,9 @@ serializers = [ "pendulum.tz.timezone.FixedTimezone", "pendulum.tz.timezone.Timezone", + "zoneinfo.ZoneInfo", ] -PY39 = sys.version_info >= (3, 9) - -if PY39: - serializers.append("zoneinfo.ZoneInfo") -else: - serializers.append("backports.zoneinfo.ZoneInfo") - deserializers = serializers __version__ = 1 @@ -83,11 +76,8 @@ def deserialize(classname: str, version: int, data: object) -> Any: if version > __version__: raise TypeError(f"serialized {version} of {classname} > {__version__}") - if "zoneinfo.ZoneInfo" in classname: - try: - from zoneinfo import ZoneInfo - except ImportError: - from backports.zoneinfo import ZoneInfo + if classname == "backports.zoneinfo.ZoneInfo" and isinstance(data, str): + from zoneinfo import ZoneInfo return ZoneInfo(data) diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py index 13c93d992fffa..4f9604aced7f4 100644 --- a/airflow/utils/log/secrets_masker.py +++ b/airflow/utils/log/secrets_masker.py @@ -22,7 +22,7 @@ import logging import sys from enum import Enum -from functools import cached_property +from functools import cache, cached_property from typing import ( TYPE_CHECKING, Any, @@ -42,7 +42,6 @@ import re2 from airflow import settings -from airflow.compat.functools import cache if TYPE_CHECKING: from kubernetes.client import V1EnvVar diff --git a/airflow/utils/platform.py b/airflow/utils/platform.py index 7945e2b945382..74f56a0ab037d 100644 --- a/airflow/utils/platform.py +++ b/airflow/utils/platform.py @@ -24,8 +24,7 @@ 
import pkgutil import platform import sys - -from airflow.compat.functools import cache +from functools import cache IS_WINDOWS = platform.system() == "Windows" diff --git a/airflow/www/forms.py b/airflow/www/forms.py index 5b746ab633c06..a3b1d5262db20 100644 --- a/airflow/www/forms.py +++ b/airflow/www/forms.py @@ -20,6 +20,7 @@ import datetime import json import operator +from functools import cache from typing import Iterator import pendulum @@ -36,7 +37,6 @@ from wtforms.fields import Field, IntegerField, PasswordField, SelectField, StringField, TextAreaField from wtforms.validators import InputRequired, Optional -from airflow.compat.functools import cache from airflow.configuration import conf from airflow.providers_manager import ProvidersManager from airflow.utils.types import DagRunType diff --git a/airflow/www/views.py b/airflow/www/views.py index 47c548d5e7667..7782da955c92e 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -32,7 +32,7 @@ import warnings from bisect import insort_left from collections import defaultdict -from functools import cached_property +from functools import cache, cached_property from json import JSONDecodeError from pathlib import Path from typing import TYPE_CHECKING, Any, Collection, Iterator, Mapping, MutableMapping, Sequence @@ -89,7 +89,6 @@ ) from airflow.assets import Asset, AssetAlias from airflow.auth.managers.models.resource_details import AccessView, DagAccessEntity, DagDetails -from airflow.compat.functools import cache from airflow.configuration import AIRFLOW_CONFIG, conf from airflow.exceptions import ( AirflowConfigException, @@ -178,7 +177,7 @@ def sanitize_args(args: dict[str, Any]) -> dict[str, Any]: return {key: value for key, value in args.items() if not key.startswith("_")} -# Following the release of https://github.com/python/cpython/issues/102153 in Python 3.8.17 and 3.9.17 on +# Following the release of https://github.com/python/cpython/issues/102153 in Python 3.9.17 on # June 6, 2023, we are adding extra sanitization of the urls passed to get_safe_url method to make it works # the same way regardless if the user uses latest Python patchlevel versions or not. This also follows # a recommended solution by the Python core team. diff --git a/clients/python/README.md b/clients/python/README.md index e1427fce92d46..7bba821e43e62 100644 --- a/clients/python/README.md +++ b/clients/python/README.md @@ -248,7 +248,7 @@ For more information, please visit [https://airflow.apache.org](https://airflow. ## Requirements. 
-Python >=3.8 +Python >=3.9 ## Installation & Usage diff --git a/clients/python/pyproject.toml b/clients/python/pyproject.toml index 1a5ccdc9e2b63..1584744249646 100644 --- a/clients/python/pyproject.toml +++ b/clients/python/pyproject.toml @@ -25,7 +25,7 @@ dynamic = ["version"] description = "Apache Airflow API (Stable)" readme = "README.md" license-files.globs = ["LICENSE", "NOTICE"] -requires-python = "~=3.8" +requires-python = "~=3.9" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] @@ -42,7 +42,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -74,7 +73,7 @@ run-coverage = "pytest test" run = "run-coverage --no-cov" [[tool.hatch.envs.test.matrix]] -python = ["3.8", "3.9", "3.10", "3.11"] +python = ["3.9", "3.10", "3.11"] [tool.hatch.version] path = "./version.txt" diff --git a/clients/python/test_python_client.py b/clients/python/test_python_client.py index d4d3f98efd834..5d0accdc019ff 100644 --- a/clients/python/test_python_client.py +++ b/clients/python/test_python_client.py @@ -17,7 +17,7 @@ # # PEP 723 compliant inline script metadata (not yet widely supported) # /// script -# requires-python = ">=3.8" +# requires-python = ">=3.9" # dependencies = [ # "apache-airflow-client", # "rich", diff --git a/constraints/README.md b/constraints/README.md index 791450d1bd7c9..9d02755dc5cd6 100644 --- a/constraints/README.md +++ b/constraints/README.md @@ -29,12 +29,12 @@ This allows you to iterate on dependencies without having to run `--upgrade-to-n Typical workflow in this case is: * download and copy the constraint file to the folder (for example via -[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt) +[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt) * modify the constraint file in "constraints" folder * build the image using this command ```bash -breeze ci-image build --python 3.8 --airflow-constraints-location constraints/constraints-3.8txt +breeze ci-image build --python 3.9 --airflow-constraints-location constraints/constraints-3.9.txt ``` You can continue iterating and updating the constraint file (and rebuilding the image) @@ -46,7 +46,7 @@ pip freeze | sort | \ grep -v "apache_airflow" | \ grep -v "apache-airflow==" | \ grep -v "@" | \ - grep -v "/opt/airflow" > /opt/airflow/constraints/constraints-3.8.txt + grep -v "/opt/airflow" > /opt/airflow/constraints/constraints-3.9.txt ``` If you are working with others on updating the dependencies, you can also commit the constraint diff --git a/contributing-docs/03_contributors_quick_start.rst b/contributing-docs/03_contributors_quick_start.rst index 8f7ead6deacc4..bddecc6f1e486 100644 --- a/contributing-docs/03_contributors_quick_start.rst +++ b/contributing-docs/03_contributors_quick_start.rst @@ -256,7 +256,7 @@ Setting up Breeze .. code-block:: bash - breeze --python 3.8 --backend postgres + breeze --python 3.9 --backend postgres .. note:: If you encounter an error like "docker.credentials.errors.InitializationError: @@ -313,7 +313,7 @@ Using Breeze ------------ 1. 
Starting breeze environment using ``breeze start-airflow`` starts Breeze environment with last configuration run( - In this case python and backend will be picked up from last execution ``breeze --python 3.8 --backend postgres``) + In this case python and backend will be picked up from last execution ``breeze --python 3.9 --backend postgres``) It also automatically starts webserver, backend and scheduler. It drops you in tmux with scheduler in bottom left and webserver in bottom right. Use ``[Ctrl + B] and Arrow keys`` to navigate. @@ -324,9 +324,9 @@ Using Breeze Use CI image. Branch name: main - Docker image: ghcr.io/apache/airflow/main/ci/python3.8:latest + Docker image: ghcr.io/apache/airflow/main/ci/python3.9:latest Airflow source version: 2.4.0.dev0 - Python version: 3.8 + Python version: 3.9 Backend: mysql 5.7 @@ -365,7 +365,7 @@ Using Breeze .. code-block:: bash - breeze --python 3.8 --backend postgres + breeze --python 3.9 --backend postgres 2. Open tmux @@ -601,34 +601,27 @@ All Tests are inside ./tests directory. root@63528318c8b1:/opt/airflow# pytest tests/utils/test_dates.py ============================================================= test session starts ============================================================== - platform linux -- Python 3.8.16, pytest-7.2.1, pluggy-1.0.0 -- /usr/local/bin/python + platform linux -- Python 3.9.20, pytest-8.3.3, pluggy-1.5.0 -- /usr/local/bin/python cachedir: .pytest_cache - rootdir: /opt/airflow, configfile: pytest.ini - plugins: timeouts-1.2.1, capture-warnings-0.0.4, cov-4.0.0, requests-mock-1.10.0, rerunfailures-11.1.1, anyio-3.6.2, instafail-0.4.2, time-machine-2.9.0, asyncio-0.20.3, httpx-0.21.3, xdist-3.2.0 - asyncio: mode=strict + rootdir: /opt/airflow + configfile: pyproject.toml + plugins: anyio-4.6.0, time-machine-2.15.0, icdiff-0.9, rerunfailures-14.0, instafail-0.5.0, custom-exit-code-0.3.0, xdist-3.6.1, mock-3.14.0, cov-5.0.0, asyncio-0.24.0, requests-mock-1.12.1, timeouts-1.2.1 + asyncio: mode=strict, default_loop_scope=None setup timeout: 0.0s, execution timeout: 0.0s, teardown timeout: 0.0s - collected 12 items - - tests/utils/test_dates.py::TestDates::test_days_ago PASSED [ 8%] - tests/utils/test_dates.py::TestDates::test_parse_execution_date PASSED [ 16%] - tests/utils/test_dates.py::TestDates::test_round_time PASSED [ 25%] - tests/utils/test_dates.py::TestDates::test_infer_time_unit PASSED [ 33%] - tests/utils/test_dates.py::TestDates::test_scale_time_units PASSED [ 41%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_no_delta PASSED [ 50%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_end_date_before_start_date PASSED [ 58%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_both_end_date_and_num_given PASSED [ 66%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_invalid_delta PASSED [ 75%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_positive_num_given PASSED [ 83%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_negative_num_given PASSED [ 91%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_delta_cron_presets PASSED [100%] - - ============================================================== 12 passed in 0.24s ============================================================== + collected 4 items + + tests/utils/test_dates.py::TestDates::test_parse_execution_date PASSED [ 25%] + tests/utils/test_dates.py::TestDates::test_round_time PASSED [ 50%] + tests/utils/test_dates.py::TestDates::test_infer_time_unit PASSED [ 75%] + 
tests/utils/test_dates.py::TestDates::test_scale_time_units PASSED [100%] + + ================================================================== 4 passed in 3.30s =================================================================== - Running All the test with Breeze by specifying required python version, backend, backend version .. code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.8 --db-reset testing tests --test-type All + breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type All - Running specific type of test @@ -638,7 +631,7 @@ All Tests are inside ./tests directory. .. code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.8 --db-reset testing tests --test-type Core + breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type Core - Running Integration test for specific test type @@ -647,7 +640,7 @@ All Tests are inside ./tests directory. .. code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.8 --db-reset testing tests --test-type All --integration mongo + breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type All --integration mongo - For more information on Testing visit : |09_testing.rst| diff --git a/contributing-docs/05_pull_requests.rst b/contributing-docs/05_pull_requests.rst index ea9300f9c643f..1e14167943497 100644 --- a/contributing-docs/05_pull_requests.rst +++ b/contributing-docs/05_pull_requests.rst @@ -92,7 +92,7 @@ these guidelines: you can push your code to PR and see results of the tests in the CI. - You can use any supported python version to run the tests, but the best is to check - if it works for the oldest supported version (Python 3.8 currently). In rare cases + if it works for the oldest supported version (Python 3.9 currently). In rare cases tests might fail with the oldest version when you use features that are available in newer Python versions. For that purpose we have ``airflow.compat`` package where we keep back-ported useful features from newer versions. diff --git a/contributing-docs/07_local_virtualenv.rst b/contributing-docs/07_local_virtualenv.rst index 8439eb2ab2089..2c92edee97a7b 100644 --- a/contributing-docs/07_local_virtualenv.rst +++ b/contributing-docs/07_local_virtualenv.rst @@ -37,7 +37,7 @@ Required Software Packages Use system-level package managers like yum, apt-get for Linux, or Homebrew for macOS to install required software packages: -* Python (One of: 3.8, 3.9, 3.10, 3.11, 3.12) +* Python (One of: 3.9, 3.10, 3.11, 3.12) * MySQL 5.7+ * libxml * helm (only for helm chart tests) @@ -187,9 +187,7 @@ This is what it shows currently: +-------------+---------+---------------------------------------------------------------+ | Name | Type | Description | +=============+=========+===============================================================+ -| default | virtual | Default environment with Python 3.8 for maximum compatibility | -+-------------+---------+---------------------------------------------------------------+ -| airflow-38 | virtual | Environment with Python 3.8. No devel installed. | +| default | virtual | Default environment with Python 3.9 for maximum compatibility | +-------------+---------+---------------------------------------------------------------+ | airflow-39 | virtual | Environment with Python 3.9. No devel installed. 
| +-------------+---------+---------------------------------------------------------------+ @@ -200,7 +198,7 @@ This is what it shows currently: | airflow-312 | virtual | Environment with Python 3.12. No devel installed | +-------------+---------+---------------------------------------------------------------+ -The default env (if you have not used one explicitly) is ``default`` and it is a Python 3.8 +The default env (if you have not used one explicitly) is ``default`` and it is a Python 3.9 virtualenv for maximum compatibility. You can install devel set of dependencies with it by running: @@ -381,12 +379,12 @@ to avoid "works-for-me" syndrome, where you use different version of dependencie that are used in main, CI tests and by other contributors. There are different constraint files for different python versions. For example this command will install -all basic devel requirements and requirements of google provider as last successfully tested for Python 3.8: +all basic devel requirements and requirements of google provider as last successfully tested for Python 3.9: .. code:: bash pip install -e ".[devel,google]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" Make sure to use latest main for such installation, those constraints are "development constraints" and they are refreshed several times a day to make sure they are up to date with the latest changes in the main branch. @@ -403,7 +401,7 @@ and install to latest supported ones by pure airflow core. .. code:: bash pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt" These are examples of the development options available with the local virtualenv in your IDE: diff --git a/contributing-docs/08_static_code_checks.rst b/contributing-docs/08_static_code_checks.rst index d50b9db3e607f..422a9f027e1ed 100644 --- a/contributing-docs/08_static_code_checks.rst +++ b/contributing-docs/08_static_code_checks.rst @@ -40,7 +40,7 @@ use. So, you can be sure your modifications will also work for CI if they pass pre-commit hooks. We have integrated the fantastic `pre-commit `__ framework -in our development workflow. To install and use it, you need at least Python 3.8 locally. +in our development workflow. To install and use it, you need at least Python 3.9 locally. Installing pre-commit hooks --------------------------- @@ -152,8 +152,6 @@ require Breeze Docker image to be built locally. 
+-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-common-compat-used-for-openlineage | Check common.compat is used for OL deprecated classes | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ -| check-compat-cache-on-methods | Check that compat cache do not use on class methods | | -+-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-core-deprecation-classes | Verify usage of Airflow deprecation classes in core | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-daysago-import-from-utils | days_ago imported from airflow.utils.dates | | diff --git a/contributing-docs/12_airflow_dependencies_and_extras.rst b/contributing-docs/12_airflow_dependencies_and_extras.rst index 16d2f32ee172d..8bfbdb630c9f7 100644 --- a/contributing-docs/12_airflow_dependencies_and_extras.rst +++ b/contributing-docs/12_airflow_dependencies_and_extras.rst @@ -86,7 +86,7 @@ from the PyPI package: .. code-block:: bash pip install "apache-airflow[google,amazon,async]==2.2.5" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.2.5/constraints-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.2.5/constraints-3.9.txt" The last one can be used to install Airflow in "minimal" mode - i.e when bare Airflow is installed without extras. @@ -98,7 +98,7 @@ requirements). .. code-block:: bash pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" This also works with extras - for example: @@ -106,7 +106,7 @@ This also works with extras - for example: .. code-block:: bash pip install ".[ssh]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" There are different set of fixed constraint files for different python major/minor versions and you should @@ -118,7 +118,7 @@ using ``constraints-no-providers`` constraint files as well. .. code-block:: bash pip install . --upgrade \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt" The ``constraints-.txt`` and ``constraints-no-providers-.txt`` diff --git a/contributing-docs/testing/docker_compose_tests.rst b/contributing-docs/testing/docker_compose_tests.rst index 94864b4137de8..921a3cafb193b 100644 --- a/contributing-docs/testing/docker_compose_tests.rst +++ b/contributing-docs/testing/docker_compose_tests.rst @@ -48,7 +48,7 @@ Running complete test with breeze: .. 
code-block:: bash - breeze prod-image build --python 3.8 + breeze prod-image build --python 3.9 breeze testing docker-compose-tests In case the test fails, it will dump the logs from the running containers to the console and it @@ -65,8 +65,8 @@ to see the output of the test as it happens (it can be also set via The test can be also run manually with ``pytest docker_tests/test_docker_compose_quick_start.py`` command, provided that you have a local airflow venv with ``dev`` extra set and the ``DOCKER_IMAGE`` environment variable is set to the image you want to test. The variable defaults -to ``ghcr.io/apache/airflow/main/prod/python3.8:latest`` which is built by default -when you run ``breeze prod-image build --python 3.8``. also the switches ``--skip-docker-compose-deletion`` +to ``ghcr.io/apache/airflow/main/prod/python3.9:latest`` which is built by default +when you run ``breeze prod-image build --python 3.9``. also the switches ``--skip-docker-compose-deletion`` and ``--wait-for-containers-timeout`` can only be passed via environment variables. If you want to debug the deployment using ``docker compose`` commands after ``SKIP_DOCKER_COMPOSE_DELETION`` @@ -87,7 +87,7 @@ the prod image build command above. .. code-block:: bash - export AIRFLOW_IMAGE_NAME=ghcr.io/apache/airflow/main/prod/python3.8:latest + export AIRFLOW_IMAGE_NAME=ghcr.io/apache/airflow/main/prod/python3.9:latest and follow the instructions in the `Running Airflow in Docker `_ diff --git a/contributing-docs/testing/k8s_tests.rst b/contributing-docs/testing/k8s_tests.rst index a4a6f67da0e2c..a9ba3151fe9bb 100644 --- a/contributing-docs/testing/k8s_tests.rst +++ b/contributing-docs/testing/k8s_tests.rst @@ -47,7 +47,7 @@ per each combination of Python and Kubernetes version. This is used during CI wh tests against those different clusters - even in parallel. The cluster name follows the pattern ``airflow-python-X.Y-vA.B.C`` where X.Y is a major/minor Python version -and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.8-v1.24.0`` +and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.9-v1.24.0`` Most of the commands can be executed in parallel for multiple images/clusters by adding ``--run-in-parallel`` to create clusters or deploy airflow. Similarly checking for status, dumping logs and deleting clusters @@ -215,7 +215,7 @@ Should result in KinD creating the K8S cluster. .. code-block:: text - Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kindconfig.yaml: + Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kindconfig.yaml: # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -251,7 +251,7 @@ Should result in KinD creating the K8S cluster. - Creating cluster "airflow-python-3.8-v1.24.2" ... + Creating cluster "airflow-python-3.9-v1.24.2" ... ✓ Ensuring node image (kindest/node:v1.24.2) 🖼 ✓ Preparing nodes 📦 📦 ✓ Writing configuration 📜 @@ -259,10 +259,10 @@ Should result in KinD creating the K8S cluster. ✓ Installing CNI 🔌 ✓ Installing StorageClass 💾 ✓ Joining worker nodes 🚜 - Set kubectl context to "kind-airflow-python-3.8-v1.24.2" + Set kubectl context to "kind-airflow-python-3.9-v1.24.2" You can now use your cluster with: - kubectl cluster-info --context kind-airflow-python-3.8-v1.24.2 + kubectl cluster-info --context kind-airflow-python-3.9-v1.24.2 Not sure what to do next? 
😅 Check out https://kind.sigs.k8s.io/docs/user/quick-start/ @@ -270,9 +270,9 @@ Should result in KinD creating the K8S cluster. Connecting to localhost:18150. Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.8 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.9 --kubernetes-version v1.24.2` to (re)deploy airflow - KinD cluster airflow-python-3.8-v1.24.2 created! + KinD cluster airflow-python-3.9-v1.24.2 created! NEXT STEP: You might now configure your cluster by: @@ -286,20 +286,20 @@ Should result in KinD creating the K8S cluster. .. code-block:: text - Configuring airflow-python-3.8-v1.24.2 to be ready for Airflow deployment - Deleting K8S namespaces for kind-airflow-python-3.8-v1.24.2 + Configuring airflow-python-3.9-v1.24.2 to be ready for Airflow deployment + Deleting K8S namespaces for kind-airflow-python-3.9-v1.24.2 Error from server (NotFound): namespaces "airflow" not found Error from server (NotFound): namespaces "test-namespace" not found Creating namespaces namespace/airflow created namespace/test-namespace created - Created K8S namespaces for cluster kind-airflow-python-3.8-v1.24.2 + Created K8S namespaces for cluster kind-airflow-python-3.9-v1.24.2 - Deploying test resources for cluster kind-airflow-python-3.8-v1.24.2 + Deploying test resources for cluster kind-airflow-python-3.9-v1.24.2 persistentvolume/test-volume created persistentvolumeclaim/test-volume created service/airflow-webserver-node-port created - Deployed test resources for cluster kind-airflow-python-3.8-v1.24.2 + Deployed test resources for cluster kind-airflow-python-3.9-v1.24.2 NEXT STEP: You might now build your k8s image by: @@ -317,45 +317,45 @@ Should show the status of current KinD cluster. .. code-block:: text ======================================================================================================================== - Cluster: airflow-python-3.8-v1.24.2 + Cluster: airflow-python-3.9-v1.24.2 - * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kubeconfig - * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kindconfig.yaml + * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kubeconfig + * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kindconfig.yaml - Cluster info: airflow-python-3.8-v1.24.2 + Cluster info: airflow-python-3.9-v1.24.2 Kubernetes control plane is running at https://127.0.0.1:48366 CoreDNS is running at https://127.0.0.1:48366/api/v1/namespaces/kube-system/services/kube-dns:dns/proxy To further debug and diagnose cluster problems, use 'kubectl cluster-info dump'. 
- Storage class for airflow-python-3.8-v1.24.2 + Storage class for airflow-python-3.9-v1.24.2 NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE standard (default) rancher.io/local-path Delete WaitForFirstConsumer false 83s - Running pods for airflow-python-3.8-v1.24.2 + Running pods for airflow-python-3.9-v1.24.2 NAME READY STATUS RESTARTS AGE coredns-6d4b75cb6d-rwp9d 1/1 Running 0 71s coredns-6d4b75cb6d-vqnrc 1/1 Running 0 71s - etcd-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s + etcd-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s kindnet-ckc8l 1/1 Running 0 69s kindnet-qqt8k 1/1 Running 0 71s - kube-apiserver-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s - kube-controller-manager-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s + kube-apiserver-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s + kube-controller-manager-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s kube-proxy-6g7hn 1/1 Running 0 69s kube-proxy-dwfvp 1/1 Running 0 71s - kube-scheduler-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s + kube-scheduler-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s KinD Cluster API server URL: http://localhost:48366 Connecting to localhost:18150. Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.8 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.9 --kubernetes-version v1.24.2` to (re)deploy airflow - Cluster healthy: airflow-python-3.8-v1.24.2 + Cluster healthy: airflow-python-3.9-v1.24.2 5. Build the image base on PROD Airflow image. You need to build the PROD image first (the command will guide you if you did not) either by running the build separately or passing ``--rebuild-base-image`` @@ -373,15 +373,15 @@ Should show the status of current KinD cluster. .. code-block:: text - Building the K8S image for Python 3.8 using airflow base image: ghcr.io/apache/airflow/main/prod/python3.8:latest + Building the K8S image for Python 3.9 using airflow base image: ghcr.io/apache/airflow/main/prod/python3.9:latest [+] Building 0.1s (8/8) FINISHED => [internal] load build definition from Dockerfile 0.0s => => transferring dockerfile: 301B 0.0s => [internal] load .dockerignore 0.0s => => transferring context: 35B 0.0s - => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.8:latest 0.0s - => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.8:latest 0.0s + => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.9:latest 0.0s + => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.9:latest 0.0s => [internal] load build context 0.0s => => transferring context: 3.00kB 0.0s => CACHED [2/3] COPY airflow/example_dags/ /opt/airflow/dags/ 0.0s @@ -389,7 +389,7 @@ Should show the status of current KinD cluster. => exporting to image 0.0s => => exporting layers 0.0s => => writing image sha256:c0bdd363c549c3b0731b8e8ce34153d081f239ee2b582355b7b3ffd5394c40bb 0.0s - => => naming to ghcr.io/apache/airflow/main/prod/python3.8-kubernetes:latest + => => naming to ghcr.io/apache/airflow/main/prod/python3.9-kubernetes:latest NEXT STEP: You might now upload your k8s image by: @@ -409,9 +409,9 @@ Should show the status of current KinD cluster. 
Good version of kubectl installed: 1.25.0 in /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin Good version of helm installed: 3.9.2 in /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin Stable repo is already added - Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.8-kubernetes to cluster airflow-python-3.8-v1.24.2 - Image: "ghcr.io/apache/airflow/main/prod/python3.8-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.8-v1.24.2-worker", loading... - Image: "ghcr.io/apache/airflow/main/prod/python3.8-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.8-v1.24.2-control-plane", loading... + Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.9-kubernetes to cluster airflow-python-3.9-v1.24.2 + Image: "ghcr.io/apache/airflow/main/prod/python3.9-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.9-v1.24.2-worker", loading... + Image: "ghcr.io/apache/airflow/main/prod/python3.9-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.9-v1.24.2-control-plane", loading... NEXT STEP: You might now deploy airflow by: @@ -426,8 +426,8 @@ Should show the status of current KinD cluster. .. code-block:: text - Deploying Airflow for cluster airflow-python-3.8-v1.24.2 - Deploying kind-airflow-python-3.8-v1.24.2 with airflow Helm Chart. + Deploying Airflow for cluster airflow-python-3.9-v1.24.2 + Deploying kind-airflow-python-3.9-v1.24.2 with airflow Helm Chart. Copied chart sources to /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart Deploying Airflow from /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart NAME: airflow @@ -469,12 +469,12 @@ Should show the status of current KinD cluster. Information on how to set a static webserver secret key can be found here: https://airflow.apache.org/docs/helm-chart/stable/production-guide.html#webserver-secret-key - Deployed kind-airflow-python-3.8-v1.24.2 with airflow Helm Chart. + Deployed kind-airflow-python-3.9-v1.24.2 with airflow Helm Chart. - Airflow for Python 3.8 and K8S version v1.24.2 has been successfully deployed. + Airflow for Python 3.9 and K8S version v1.24.2 has been successfully deployed. - The KinD cluster name: airflow-python-3.8-v1.24.2 - The kubectl cluster name: kind-airflow-python-3.8-v1.24.2. + The KinD cluster name: airflow-python-3.9-v1.24.2 + The kubectl cluster name: kind-airflow-python-3.9-v1.24.2. KinD Cluster API server URL: http://localhost:48366 @@ -508,7 +508,7 @@ The virtualenv required will be created automatically when the scripts are run. .. code-block:: text - Running tests with kind-airflow-python-3.8-v1.24.2 cluster. + Running tests with kind-airflow-python-3.9-v1.24.2 cluster. Command to run: pytest kubernetes_tests ========================================================================================= test session starts ========================================================================================== platform darwin -- Python 3.9.9, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 -- /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin/python @@ -537,7 +537,7 @@ Once you enter the environment, you receive this information: Entering interactive k8s shell. 
- (kind-airflow-python-3.8-v1.24.2:KubernetesExecutor)> + (kind-airflow-python-3.9-v1.24.2:KubernetesExecutor)> In a separate terminal you can open the k9s CLI: @@ -647,9 +647,9 @@ Kind has also useful commands to inspect your running cluster: .. code-block:: text - Deleting KinD cluster airflow-python-3.8-v1.24.2! - Deleting cluster "airflow-python-3.8-v1.24.2" ... - KinD cluster airflow-python-3.8-v1.24.2 deleted! + Deleting KinD cluster airflow-python-3.9-v1.24.2! + Deleting cluster "airflow-python-3.9-v1.24.2" ... + KinD cluster airflow-python-3.9-v1.24.2 deleted! Running complete k8s tests diff --git a/contributing-docs/testing/unit_tests.rst b/contributing-docs/testing/unit_tests.rst index cd0b9ab00bf44..dff3835a02fba 100644 --- a/contributing-docs/testing/unit_tests.rst +++ b/contributing-docs/testing/unit_tests.rst @@ -209,7 +209,7 @@ rerun in Breeze as you will (``-n auto`` will parallelize tests using ``pytest-x .. code-block:: bash - breeze shell --backend none --python 3.8 + breeze shell --backend none --python 3.9 > pytest tests --skip-db-tests -n auto @@ -251,7 +251,7 @@ You can also run DB tests with ``breeze`` dockerized environment. You can choose ``--backend`` flag. The default is ``sqlite`` but you can also use others such as ``postgres`` or ``mysql``. You can also select backend version and Python version to use. You can specify the ``test-type`` to run - breeze will list the test types you can run with ``--help`` and provide auto-complete for them. Example -below runs the ``Core`` tests with ``postgres`` backend and ``3.8`` Python version: +below runs the ``Core`` tests with ``postgres`` backend and ``3.9`` Python version: We have a dedicated, opinionated ``breeze testing db-tests`` command as well that runs DB tests (it is also used in CI to run the DB tests, where you do not have to specify extra flags for @@ -286,7 +286,7 @@ either by package/module/test or by test type - whatever ``pytest`` supports. .. code-block:: bash - breeze shell --backend postgres --python 3.8 + breeze shell --backend postgres --python 3.9 > pytest tests --run-db-tests-only As explained before, you cannot run DB tests in parallel using ``pytest-xdist`` plugin, but ``breeze`` has @@ -296,7 +296,7 @@ you use ``breeze testing db-tests`` command): .. code-block:: bash - breeze testing tests --run-db-tests-only --backend postgres --python 3.8 --run-in-parallel + breeze testing tests --run-db-tests-only --backend postgres --python 3.9 --run-in-parallel Examples of marking test as DB test ................................... @@ -1133,7 +1133,7 @@ directly to the container. .. code-block:: bash - breeze ci-image build --python 3.8 + breeze ci-image build --python 3.9 2. Enter breeze environment by selecting the appropriate airflow version and choosing ``providers-and-tests`` option for ``--mount-sources`` flag. @@ -1241,7 +1241,7 @@ Herr id how to reproduce it. .. code-block:: bash - breeze ci-image build --python 3.8 + breeze ci-image build --python 3.9 2. 
Build providers from latest sources: diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index e0e014cfcb13d..1f56def27646c 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -682,7 +682,7 @@ Optionally it can be followed with constraints ```shell script pip install apache-airflow==rc \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-/constraints-3.8.txt"` + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-/constraints-3.9.txt"` ``` Note that the constraints contain python version that you are installing it with. @@ -694,7 +694,7 @@ There is also an easy way of installation with Breeze if you have the latest sou Running the following command will use tmux inside breeze, create `admin` user and run Webserver & Scheduler: ```shell script -breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.8 --backend postgres +breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.9 --backend postgres ``` You can also choose different executors and extras to install when you are installing airflow this way. For @@ -702,7 +702,7 @@ example in order to run Airflow with CeleryExecutor and install celery, google a Airflow 2.7.0, you need to have celery provider installed to run Airflow with CeleryExecutor) you can run: ```shell script -breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.8 --backend postgres \ +breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.9 --backend postgres \ --executor CeleryExecutor --airflow-extras "celery,google,amazon" ``` @@ -838,7 +838,7 @@ the older branches, you should set the "skip" field to true. ## Verify production images ```shell script -for PYTHON in 3.8 3.9 3.10 3.11 3.12 +for PYTHON in 3.9 3.10 3.11 3.12 do docker pull apache/airflow:${VERSION}-python${PYTHON} breeze prod-image verify --image-name apache/airflow:${VERSION}-python${PYTHON} diff --git a/dev/README_RELEASE_PROVIDER_PACKAGES.md b/dev/README_RELEASE_PROVIDER_PACKAGES.md index 67d1bfa30fc07..dbb785200f74b 100644 --- a/dev/README_RELEASE_PROVIDER_PACKAGES.md +++ b/dev/README_RELEASE_PROVIDER_PACKAGES.md @@ -1031,7 +1031,7 @@ pip install apache-airflow-providers-==rc ### Installing with Breeze ```shell -breeze start-airflow --use-airflow-version 2.2.4 --python 3.8 --backend postgres \ +breeze start-airflow --use-airflow-version 2.2.4 --python 3.9 --backend postgres \ --load-example-dags --load-default-connections ``` diff --git a/dev/breeze/README.md b/dev/breeze/README.md index 15c0b66f57f46..d28cfd5353a2c 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -66,6 +66,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. --------------------------------------------------------------------------------------------------------- -Package config hash: 8e382ff46231b261a569886a45480104eb5436434d2845c3eb011ee9dd4da3c2fa33f561eaa36f2245a29c8719ae2e86d7ffec39463c46e0b3b4bde56a27abe6 +Package config hash: 2ae1201c56227b6fcb599f020360a906100a80b32ed3a0d4927c8721e738afee3867f9ed567fd75ec9f368933c3a94c1336f8ab068f7892ed1ebe6244ccf20fe --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/doc/01_installation.rst b/dev/breeze/doc/01_installation.rst index 7107be900d97b..1c7ad0ee62838 100644 --- a/dev/breeze/doc/01_installation.rst +++ b/dev/breeze/doc/01_installation.rst @@ -313,7 +313,7 @@ that Breeze works on .. 
warning:: Upgrading from earlier Python version - If you used Breeze with Python 3.7 and when running it, it will complain that it needs Python 3.8. In this + If you used Breeze with Python 3.8 and when running it, it will complain that it needs Python 3.9. In this case you should force-reinstall Breeze with ``pipx``: .. code-block:: bash @@ -342,14 +342,14 @@ that Breeze works on .. code-block:: bash - pipx reinstall --python /Users/airflow/.pyenv/versions/3.8.16/bin/python apache-airflow-breeze + pipx reinstall --python /Users/airflow/.pyenv/versions/3.9.16/bin/python apache-airflow-breeze Or you can uninstall breeze and install it with a specific python version: .. code-block:: bash pipx uninstall apache-airflow-breeze - pipx install -e ./dev/breeze --python /Users/airflow/.pyenv/versions/3.8.16/bin/python + pipx install -e ./dev/breeze --python /Users/airflow/.pyenv/versions/3.9.16/bin/python Running Breeze for the first time diff --git a/dev/breeze/doc/03_developer_tasks.rst b/dev/breeze/doc/03_developer_tasks.rst index 76f43606837e8..87bb2713b93fa 100644 --- a/dev/breeze/doc/03_developer_tasks.rst +++ b/dev/breeze/doc/03_developer_tasks.rst @@ -34,12 +34,12 @@ You can use additional ``breeze`` flags to choose your environment. You can spec version to use, and backend (the meta-data database). Thanks to that, with Breeze, you can recreate the same environments as we have in matrix builds in the CI. See next chapter for backend selection. -For example, you can choose to run Python 3.8 tests with MySQL as backend and with mysql version 8 +For example, you can choose to run Python 3.9 tests with MySQL as backend and with mysql version 8 as follows: .. code-block:: bash - breeze --python 3.8 --backend mysql --mysql-version 8.0 + breeze --python 3.9 --backend mysql --mysql-version 8.0 .. note:: Note for Windows WSL2 users @@ -55,7 +55,7 @@ Try adding ``--builder=default`` to your command. For example: .. code-block:: bash - breeze --builder=default --python 3.8 --backend mysql --mysql-version 8.0 + breeze --builder=default --python 3.9 --backend mysql --mysql-version 8.0 The choices you make are persisted in the ``./.build/`` cache directory so that next time when you use the ``breeze`` script, it could use the values that were used previously. This way you do not have to specify @@ -331,7 +331,7 @@ When you are starting airflow from local sources, www asset compilation is autom .. code-block:: bash - breeze --python 3.8 --backend mysql start-airflow + breeze --python 3.9 --backend mysql start-airflow You can also use it to start different executor. @@ -344,7 +344,7 @@ You can also use it to start any released version of Airflow from ``PyPI`` with .. code-block:: bash - breeze start-airflow --python 3.8 --backend mysql --use-airflow-version 2.7.0 + breeze start-airflow --python 3.9 --backend mysql --use-airflow-version 2.7.0 When you are installing version from PyPI, it's also possible to specify extras that should be used when installing Airflow - you can provide several extras separated by coma - for example to install diff --git a/dev/breeze/doc/06_managing_docker_images.rst b/dev/breeze/doc/06_managing_docker_images.rst index 294f1540f3667..bb4c4f9e06f62 100644 --- a/dev/breeze/doc/06_managing_docker_images.rst +++ b/dev/breeze/doc/06_managing_docker_images.rst @@ -140,10 +140,10 @@ suffix and they need to also be paired with corresponding runtime dependency add .. 
code-block:: bash - breeze prod-image build --python 3.8 --additional-dev-deps "libasound2-dev" \ + breeze prod-image build --python 3.9 --additional-dev-deps "libasound2-dev" \ --additional-runtime-apt-deps "libasound2" -Same as above but uses python 3.8. +Same as above but uses python 3.9. Building PROD image ................... diff --git a/dev/breeze/doc/10_advanced_breeze_topics.rst b/dev/breeze/doc/10_advanced_breeze_topics.rst index ac5421f85aa9a..a4f9384863009 100644 --- a/dev/breeze/doc/10_advanced_breeze_topics.rst +++ b/dev/breeze/doc/10_advanced_breeze_topics.rst @@ -33,8 +33,8 @@ For testing, you can create your own virtual environment, or use the one that `` already installed breeze following the recommended ``pipx install -e ./dev/breeze`` command. For local virtualenv, you can use ``pyenv`` or any other virtualenv wrapper. For example with ``pyenv``, -you can use ``pyenv virtualenv 3.8.6 airflow-breeze`` to create virtualenv called ``airflow-breeze`` -with Python 3.8.6. Then you can use ``pyenv activate airflow-breeze`` to activate it and install breeze +you can use ``pyenv virtualenv 3.9.6 airflow-breeze`` to create virtualenv called ``airflow-breeze`` +with Python 3.9.6. Then you can use ``pyenv activate airflow-breeze`` to activate it and install breeze in editable mode with ``pip install -e ./dev/breeze``. For ``pipx`` virtualenv, you can use the virtualenv that ``pipx`` created for you. You can find the name @@ -56,7 +56,7 @@ make sure to follow these steps: this will bypass the check we run in Breeze to see if there are new requirements to install for it See example configuration for PyCharm which has run/debug configuration for -``breeze sbom generate-providers-requirements --provider-id sqlite --python 3.8`` +``breeze sbom generate-providers-requirements --provider-id sqlite --python 3.9`` .. raw:: html diff --git a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md index 37eebcf3e15d1..ddd005fd92dde 100644 --- a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md +++ b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md @@ -138,7 +138,7 @@ There are a few properties of Breeze/CI scripts that should be maintained though run a command and get everything done with the least number of prerequisites * The prerequisites for Breeze and CI are: - * Python 3.8+ (Python 3.8 end of life is October 2024) + * Python 3.9+ (Python 3.9 end of life is October 2025) * Docker (23.0+) * Docker Compose (2.16.0+) * No other tools and CLI commands should be needed diff --git a/dev/breeze/doc/ci/02_images.md b/dev/breeze/doc/ci/02_images.md index 6dfa8f350f828..8c699e43b5dbb 100644 --- a/dev/breeze/doc/ci/02_images.md +++ b/dev/breeze/doc/ci/02_images.md @@ -129,17 +129,17 @@ The images are built with default extras - different extras for CI and production image and you can change the extras via the `--airflow-extras` parameters and add new ones with `--additional-airflow-extras`. 
-For example if you want to build Python 3.8 version of production image +For example if you want to build Python 3.9 version of production image with "all" extras installed you should run this command: ``` bash -breeze prod-image build --python 3.8 --airflow-extras "all" +breeze prod-image build --python 3.9 --airflow-extras "all" ``` If you just want to add new extras you can add them like that: ``` bash -breeze prod-image build --python 3.8 --additional-airflow-extras "all" +breeze prod-image build --python 3.9 --additional-airflow-extras "all" ``` The command that builds the CI image is optimized to minimize the time @@ -160,7 +160,7 @@ You can also build production images from PIP packages via providing `--install-airflow-version` parameter to Breeze: ``` bash -breeze prod-image build --python 3.8 --additional-airflow-extras=trino --install-airflow-version=2.0.0 +breeze prod-image build --python 3.9 --additional-airflow-extras=trino --install-airflow-version=2.0.0 ``` This will build the image using command similar to: @@ -168,7 +168,7 @@ This will build the image using command similar to: ``` bash pip install \ apache-airflow[async,amazon,celery,cncf.kubernetes,docker,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv]==2.0.0 \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.9.txt" ``` > [!NOTE] @@ -199,7 +199,7 @@ HEAD of development for constraints): ``` bash pip install "https://github.com/apache/airflow/archive/.tar.gz#egg=apache-airflow" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt" ``` You can also skip installing airflow and install it from locally @@ -207,7 +207,7 @@ provided files by using `--install-packages-from-context` parameter to Breeze: ``` bash -breeze prod-image build --python 3.8 --additional-airflow-extras=trino --install-packages-from-context +breeze prod-image build --python 3.9 --additional-airflow-extras=trino --install-packages-from-context ``` In this case you airflow and all packages (.whl files) should be placed @@ -241,20 +241,20 @@ flags: `registry` (default), `local`, or `disabled` flags when you run Breeze commands. For example: ``` bash -breeze ci-image build --python 3.8 --docker-cache local +breeze ci-image build --python 3.9 --docker-cache local ``` Will build the CI image using local build cache (note that it will take quite a long time the first time you run it). ``` bash -breeze prod-image build --python 3.8 --docker-cache registry +breeze prod-image build --python 3.9 --docker-cache registry ``` Will build the production image with cache used from registry. ``` bash -breeze prod-image build --python 3.8 --docker-cache disabled +breeze prod-image build --python 3.9 --docker-cache disabled ``` Will build the production image from the scratch. @@ -336,12 +336,12 @@ faster. It is enough to pass `--image-tag` and the registry and Breeze will download and execute commands using the same image that was used during the CI tests. 
-For example this command will run the same Python 3.8 image as was used +For example this command will run the same Python 3.9 image as was used in build identified with 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e commit SHA with enabled rabbitmq integration. ``` bash -breeze --image-tag 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e --python 3.8 --integration rabbitmq +breeze --image-tag 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e --python 3.9 --integration rabbitmq ``` You can see more details and examples in[Breeze](../README.rst) @@ -361,7 +361,7 @@ you can build the image in the Here just a few examples are presented which should give you general understanding of what you can customize. -This builds the production image in version 3.8 with additional airflow +This builds the production image in version 3.9 with additional airflow extras from 2.0.0 PyPI package and additional apt dev and runtime dependencies. @@ -373,7 +373,7 @@ plugin installed. ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \ --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \ --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \ @@ -384,7 +384,7 @@ the same image can be built using `breeze` (it supports auto-completion of the options): ``` bash -breeze ci-image build --python 3.8 --additional-airflow-extras=jdbc --additional-python-deps="pandas" \ +breeze ci-image build --python 3.9 --additional-airflow-extras=jdbc --additional-python-deps="pandas" \ --additional-dev-apt-deps="gcc g++" ``` @@ -398,7 +398,7 @@ comment](https://github.com/apache/airflow/issues/8605#issuecomment-690065621): ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack" \ --build-arg ADDITIONAL_PYTHON_DEPS="apache-airflow-providers-odbc \ @@ -423,8 +423,8 @@ can be used for CI images: | Build argument | Default value | Description | |-----------------------------------|-------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `PYTHON_BASE_IMAGE` | `python:3.8-slim-bookworm` | Base Python image | -| `PYTHON_MAJOR_MINOR_VERSION` | `3.8` | major/minor version of Python (should match base image) | +| `PYTHON_BASE_IMAGE` | `python:3.9-slim-bookworm` | Base Python image | +| `PYTHON_MAJOR_MINOR_VERSION` | `3.9` | major/minor version of Python (should match base image) | | `DEPENDENCIES_EPOCH_NUMBER` | `2` | increasing this number will reinstall all apt dependencies | | `ADDITIONAL_PIP_INSTALL_FLAGS` | | additional `pip` flags passed to the installation commands (except when reinstalling `pip` itself) | | `PIP_NO_CACHE_DIR` | `true` | if true, then no pip cache will be stored | @@ -455,59 +455,59 @@ can be used for CI images: Here are some examples of how CI images can built manually. CI is always built from local sources. -This builds the CI image in version 3.8 with default extras ("all"). +This builds the CI image in version 3.9 with default extras ("all"). ``` bash DOCKER_BUILDKIT=1 docker build . 
-f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" --tag my-image:0.0.1 + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.8 with "gcp" extra only. +This builds the CI image in version 3.9 with "gcp" extra only. ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg AIRFLOW_EXTRAS=gcp --tag my-image:0.0.1 ``` -This builds the CI image in version 3.8 with "apache-beam" extra added. +This builds the CI image in version 3.9 with "apache-beam" extra added. ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="apache-beam" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.8 with "mssql" additional package +This builds the CI image in version 3.9 with "mssql" additional package added. ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg ADDITIONAL_PYTHON_DEPS="mssql" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.8 with "gcc" and "g++" additional +This builds the CI image in version 3.9 with "gcc" and "g++" additional apt dev dependencies added. ``` DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.8 with "jdbc" extra and +This builds the CI image in version 3.9 with "jdbc" extra and "default-jre-headless" additional apt runtime dependencies added. ``` DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg AIRFLOW_EXTRAS=jdbc \ --tag my-image:0.0.1 ``` @@ -573,8 +573,7 @@ percent-encoded when you access them via UI (/ = %2F) | PROD image | airflow/\/prod/python\:\ | faster to build or pull. Production image optimized for size. | - \ might be either "main" or "v2-\*-test" -- \ - Python version (Major + Minor).Should be one of \["3.8", - "3.9", "3.10", "3.11", "3.12" \]. +- \ - Python version (Major + Minor).Should be one of \["3.9", "3.10", "3.11", "3.12" \]. - \ - full-length SHA of commit either from the tip of the branch (for pushes/schedule) or commit from the tip of the branch used for the PR. diff --git a/dev/breeze/doc/ci/04_selective_checks.md b/dev/breeze/doc/ci/04_selective_checks.md index 23131ec893948..e5894b0296875 100644 --- a/dev/breeze/doc/ci/04_selective_checks.md +++ b/dev/breeze/doc/ci/04_selective_checks.md @@ -169,8 +169,8 @@ Github Actions to pass the list of parameters to a command to execute | Output | Meaning of the output | Example value | List as string | |----------------------------------------|------------------------------------------------------------------------------------------------------|-------------------------------------------|----------------| | affected-providers-list-as-string | List of providers affected when they are selectively affected. 
| airbyte http | * | -| all-python-versions | List of all python versions there are available in the form of JSON array | ['3.8', '3.9', '3.10'] | | -| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.8 3.9 3.10 | * | +| all-python-versions | List of all python versions there are available in the form of JSON array | ['3.9', '3.10'] | | +| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.9 3.10 | * | | all-versions | If set to true, then all python, k8s, DB versions are used for tests. | false | | | basic-checks-only | Whether to run all static checks ("false") or only basic set of static checks ("true") | false | | | build_system_changed_in_pyproject_toml | When builds system dependencies changed in pyproject.toml changed in the PR. | false | | @@ -184,7 +184,7 @@ Github Actions to pass the list of parameters to a command to execute | default-kubernetes-version | Which Kubernetes version to use as default | v1.25.2 | | | default-mysql-version | Which MySQL version to use as default | 5.7 | | | default-postgres-version | Which Postgres version to use as default | 10 | | -| default-python-version | Which Python version to use as default | 3.8 | | +| default-python-version | Which Python version to use as default | 3.9 | | | docker-cache | Which cache should be used for images ("registry", "local" , "disabled") | registry | | | docs-build | Whether to build documentation ("true"/"false") | true | | | docs-list-as-string | What filter to apply to docs building - based on which documentation packages should be built | apache-airflow helm-chart google | | @@ -200,7 +200,7 @@ Github Actions to pass the list of parameters to a command to execute | is-self-hosted-runner | Whether the runner is self-hosted | false | | | is-vm-runner | Whether the runner uses VM to run | true | | | kind-version | Which Kind version to use for tests | v0.16.0 | | -| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.8-v1.25.2 3.9-v1.26.4 | * | +| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.9-v1.25.2 3.9-v1.26.4 | * | | kubernetes-versions | All Kubernetes versions to use for tests as JSON array | ['v1.25.2'] | | | kubernetes-versions-list-as-string | All Kubernetes versions to use for tests as space-separated string | v1.25.2 | * | | mypy-folders | List of folders to be considered for mypy | [] | | @@ -219,8 +219,8 @@ Github Actions to pass the list of parameters to a command to execute | prod-image-build | Whether PROD image build is needed | true | | | providers-compatibility-checks | List of dicts: (python_version, airflow_version, removed_providers) for compatibility checks | [] | | | pyproject-toml-changed | When pyproject.toml changed in the PR. 
| false | | -| python-versions | List of python versions to use for that build | ['3.8'] | * | -| python-versions-list-as-string | Which versions of MySQL to use for tests as space-separated string | 3.8 | * | +| python-versions | List of python versions to use for that build | ['3.9'] | * | +| python-versions-list-as-string | Which versions of MySQL to use for tests as space-separated string | 3.9 | * | | run-amazon-tests | Whether Amazon tests should be run ("true"/"false") | true | | | run-kubernetes-tests | Whether Kubernetes tests should be run ("true"/"false") | true | | | run-tests | Whether unit tests should be run ("true"/"false") | true | | diff --git a/dev/breeze/doc/ci/08_running_ci_locally.md b/dev/breeze/doc/ci/08_running_ci_locally.md index 6e1cbb0917536..cc9c89954df3e 100644 --- a/dev/breeze/doc/ci/08_running_ci_locally.md +++ b/dev/breeze/doc/ci/08_running_ci_locally.md @@ -72,19 +72,19 @@ For example knowing that the CI job was for commit `cd27124534b46c9688a1d89e75fcd137ab5137e3`: ``` bash -docker pull ghcr.io/apache/airflow/main/ci/python3.8:cd27124534b46c9688a1d89e75fcd137ab5137e3 +docker pull ghcr.io/apache/airflow/main/ci/python3.9:cd27124534b46c9688a1d89e75fcd137ab5137e3 -docker run -it ghcr.io/apache/airflow/main/ci/python3.8:cd27124534b46c9688a1d89e75fcd137ab5137e3 +docker run -it ghcr.io/apache/airflow/main/ci/python3.9:cd27124534b46c9688a1d89e75fcd137ab5137e3 ``` But you usually need to pass more variables and complex setup if you want to connect to a database or enable some integrations. Therefore it is easiest to use [Breeze](../README.rst) for that. For -example if you need to reproduce a MySQL environment in python 3.8 +example if you need to reproduce a MySQL environment in python 3.9 environment you can run: ``` bash -breeze --image-tag cd27124534b46c9688a1d89e75fcd137ab5137e3 --python 3.8 --backend mysql +breeze --image-tag cd27124534b46c9688a1d89e75fcd137ab5137e3 --python 3.9 --backend mysql ``` You will be dropped into a shell with the exact version that was used diff --git a/dev/breeze/doc/images/output-commands.svg b/dev/breeze/doc/images/output-commands.svg index 78c753526e449..f80a72a9fbc54 100644 --- a/dev/breeze/doc/images/output-commands.svg +++ b/dev/breeze/doc/images/output-commands.svg @@ -302,8 +302,8 @@ ╭─ Execution mode ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ --python-pPython major/minor version used in Airflow image for images. -(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)                           -[default: 3.8]                                               +(>3.9< | 3.10 | 3.11 | 3.12)                                 +[default: 3.9]                                               --integrationIntegration(s) to enable when running (can be more than one).                        (all | all-testable | cassandra | celery | drill | kafka | kerberos | mongo | mssql  | openlineage | otel | pinot | qdrant | redis | statsd | trino | ydb)                diff --git a/dev/breeze/doc/images/output_ci-image_build.svg b/dev/breeze/doc/images/output_ci-image_build.svg index 131b618e403ce..6dd856c3dc8db 100644 --- a/dev/breeze/doc/images/output_ci-image_build.svg +++ b/dev/breeze/doc/images/output_ci-image_build.svg @@ -352,8 +352,8 @@ ╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ --python-pPython major/minor version used in Airflow image for images. 
-(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)                           -[default: 3.8]                                               +(>3.9< | 3.10 | 3.11 | 3.12)                                 +[default: 3.9]                                               --upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. --upgrade-on-failure/--no-upgrade-on-failureWhen set, attempt to run upgrade to newer dependencies when        regular build fails. It is set to False by default on CI and True  @@ -380,7 +380,7 @@ (INTEGER RANGE)                                                             [default: 4; 1<=x<=8]                                                       --python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.8 3.9 3.10 3.11 3.12]                                              +[default: 3.9 3.10 3.11 3.12]                                                  --run-in-parallelRun the operation in parallel on all or selected subset of parameters. --skip-cleanupSkip cleanup of temporary files created during parallel run. ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_ci-image_build.txt b/dev/breeze/doc/images/output_ci-image_build.txt index b56ee7c68abb5..905847bedc783 100644 --- a/dev/breeze/doc/images/output_ci-image_build.txt +++ b/dev/breeze/doc/images/output_ci-image_build.txt @@ -1 +1 @@ -d64cda52af48f5c2dc704fac8a738d49 +686950c27e41fa50cf22abb8c25e496a diff --git a/dev/breeze/doc/images/output_ci-image_pull.svg b/dev/breeze/doc/images/output_ci-image_pull.svg index b0ebd7a1ea5f1..a784b4c9a2389 100644 --- a/dev/breeze/doc/images/output_ci-image_pull.svg +++ b/dev/breeze/doc/images/output_ci-image_pull.svg @@ -1,4 +1,4 @@ - + ╭─ Pull image flags ───────────────────────────────────────────────────────────────────────────────────────────────────╮ --image-tag-tTag of the image which is used to pull the image.(TEXT)[default: latest] ---python-pPython major/minor version used in Airflow image for images. -(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)                           -[default: 3.8]                                               ---verifyVerify image. ---wait-for-imageWait until image is available. ---tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful when you build -or pull image with --image-tag.                                                                -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Parallel running ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of parameters. ---parallelismMaximum number of processes to use while running the operation in parallel. -(INTEGER RANGE)                                                             -[default: 4; 1<=x<=8]                                                       ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.8 3.9 3.10 3.11 3.12]                                              ---skip-cleanupSkip cleanup of temporary files created during parallel run. ---debug-resourcesWhether to show resource information while running in parallel. ---include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Github authentication ──────────────────────────────────────────────────────────────────────────────────────────────╮ ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---github-tokenThe token used to authenticate to GitHub.(TEXT) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12) +[default: 3.9]                                               +--verifyVerify image. +--wait-for-imageWait until image is available. +--tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful when you build +or pull image with --image-tag.                                                                +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Parallel running ───────────────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of parameters. +--parallelismMaximum number of processes to use while running the operation in parallel. +(INTEGER RANGE)                                                             +[default: 4; 1<=x<=8]                                                       +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.9 3.10 3.11 3.12]                                                  +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Github authentication ──────────────────────────────────────────────────────────────────────────────────────────────╮ +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--github-tokenThe token used to authenticate to GitHub.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_ci-image_pull.txt b/dev/breeze/doc/images/output_ci-image_pull.txt index c916be1bf34eb..aac2c46f1ad44 100644 --- a/dev/breeze/doc/images/output_ci-image_pull.txt +++ b/dev/breeze/doc/images/output_ci-image_pull.txt @@ -1 +1 @@ -3c950cd0e358661163e52c9f3726aee8 +eadf5c7b6661cf557c2ab5b38e462481 diff --git a/dev/breeze/doc/images/output_ci-image_verify.svg b/dev/breeze/doc/images/output_ci-image_verify.svg index 61a73a5df55a4..23acf81511254 100644 --- a/dev/breeze/doc/images/output_ci-image_verify.svg +++ b/dev/breeze/doc/images/output_ci-image_verify.svg @@ -157,8 +157,8 @@ ╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ --image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) ---python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10 | 3.11 | 3.12) -[default: 3.8]                                               +--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12) +[default: 3.9]                                               --image-tag-tTag of the image when verifying it.(TEXT)[default: latest] --pullPull image is missing before attempting to verify it. ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ @@ -168,7 +168,7 @@ (INTEGER RANGE)                                                             [default: 4; 1<=x<=8]                                                       --python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.8 3.9 3.10 3.11 3.12]                                              +[default: 3.9 3.10 3.11 3.12]                                                  --skip-cleanupSkip cleanup of temporary files created during parallel run. --debug-resourcesWhether to show resource information while running in parallel. --include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). diff --git a/dev/breeze/doc/images/output_ci-image_verify.txt b/dev/breeze/doc/images/output_ci-image_verify.txt index bad4f7d8a8bae..3d2f72ffa6857 100644 --- a/dev/breeze/doc/images/output_ci-image_verify.txt +++ b/dev/breeze/doc/images/output_ci-image_verify.txt @@ -1 +1 @@ -ec059842b7c4c41db33a6362a361b0ef +324f4d4949eef209e991d890ddf2ff4f diff --git a/dev/breeze/doc/images/output_k8s_build-k8s-image.svg b/dev/breeze/doc/images/output_k8s_build-k8s-image.svg index 8708f57a34e41..eff84069b0f97 100644 --- a/dev/breeze/doc/images/output_k8s_build-k8s-image.svg +++ b/dev/breeze/doc/images/output_k8s_build-k8s-image.svg @@ -157,8 +157,8 @@ ╭─ Build image flags ──────────────────────────────────────────────────────────────────────────────────────────────────╮ --python-pPython major/minor version used in Airflow image for images. -(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)                           -[default: 3.8]                                               +(>3.9< | 3.10 | 3.11 | 3.12)                                 +[default: 3.9]                                               --image-tag-tImage tag used to build K8S image from.(TEXT)[default: latest] --rebuild-base-imageRebuilds base Airflow image before building K8S image. 
--copy-local-sources/--no-copy-local-sourcesCopy local sources to the image.[default: copy-local-sources] @@ -171,7 +171,7 @@ (INTEGER RANGE)                                                             [default: 4; 1<=x<=8]                                                       --python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.8 3.9 3.10 3.11 3.12]                                              +[default: 3.9 3.10 3.11 3.12]                                                  --skip-cleanupSkip cleanup of temporary files created during parallel run. --debug-resourcesWhether to show resource information while running in parallel. --include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). diff --git a/dev/breeze/doc/images/output_k8s_build-k8s-image.txt b/dev/breeze/doc/images/output_k8s_build-k8s-image.txt index 0afa131e5c16b..dd6b1b79cdf6c 100644 --- a/dev/breeze/doc/images/output_k8s_build-k8s-image.txt +++ b/dev/breeze/doc/images/output_k8s_build-k8s-image.txt @@ -1 +1 @@ -f9669ae229dfd2954ae7bf6f66bb92bf +7e4553f3179fe40e5c05bfc556cb2355 diff --git a/dev/breeze/doc/images/output_k8s_configure-cluster.svg b/dev/breeze/doc/images/output_k8s_configure-cluster.svg index 2ecb3db7ecb11..b46a5fd1175a5 100644 --- a/dev/breeze/doc/images/output_k8s_configure-cluster.svg +++ b/dev/breeze/doc/images/output_k8s_configure-cluster.svg @@ -1,4 +1,4 @@ - + parallel). ╭─ Configure cluster flags ────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)                           -[default: 3.8]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. -(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)             -[default: v1.28.13]                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of parameters. ---parallelismMaximum number of processes to use while running the operation in parallel for cluster  -operations.                                                                             -(INTEGER RANGE)                                                                         -[default: 2; 1<=x<=4]                                                                   ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.8 3.9 3.10 3.11 3.12]                                              ---kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) -[default: v1.28.13 v1.29.8 v1.30.4 v1.31.0]                    ---skip-cleanupSkip cleanup of temporary files created during parallel run. ---debug-resourcesWhether to show resource information while running in parallel. ---include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12) +[default: 3.9]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)             +[default: v1.28.13]                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of parameters. +--parallelismMaximum number of processes to use while running the operation in parallel for cluster  +operations.                                                                             +(INTEGER RANGE)                                                                         +[default: 2; 1<=x<=4]                                                                   +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.9 3.10 3.11 3.12]                                                  +--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) +[default: v1.28.13 v1.29.8 v1.30.4 v1.31.0]                    +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
diff --git a/dev/breeze/doc/images/output_k8s_configure-cluster.txt b/dev/breeze/doc/images/output_k8s_configure-cluster.txt
index ac398e9fbce58..7ed7f0e99146a 100644
--- a/dev/breeze/doc/images/output_k8s_configure-cluster.txt
+++ b/dev/breeze/doc/images/output_k8s_configure-cluster.txt
@@ -1 +1 @@
-c175cc4375923707f1aa91074df42c37
+6e0e5ed30fcde18abe30f7dab1c4f0b6
diff --git a/dev/breeze/doc/images/output_k8s_create-cluster.svg b/dev/breeze/doc/images/output_k8s_create-cluster.svg
index 6d3f18d341a20..04176b416c166 100644
--- a/dev/breeze/doc/images/output_k8s_create-cluster.svg
+++ b/dev/breeze/doc/images/output_k8s_create-cluster.svg
@@ -160,8 +160,8 @@
 ╭─ K8S cluster creation flags ─────────────────────────────────────────────────────────────────────────────────────────╮
 --python  -p  Python major/minor version used in Airflow image for images.
-(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)
-[default: 3.8]
+(>3.9< | 3.10 | 3.11 | 3.12)
+[default: 3.9]
 --kubernetes-version  Kubernetes version used to create the KinD cluster of.
 (>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)
 [default: v1.28.13]
@@ -174,7 +174,7 @@
 (INTEGER RANGE)
 [default: 2; 1<=x<=4]
 --python-versions  Space separated list of python versions used for build with multiple versions. (TEXT)
-[default: 3.8 3.9 3.10 3.11 3.12]
+[default: 3.9 3.10 3.11 3.12]
 --kubernetes-versions  Kubernetes versions used to run in parallel (space separated). (TEXT)
 [default: v1.28.13 v1.29.8 v1.30.4 v1.31.0]
 --skip-cleanup  Skip cleanup of temporary files created during parallel run.
diff --git a/dev/breeze/doc/images/output_k8s_create-cluster.txt b/dev/breeze/doc/images/output_k8s_create-cluster.txt
index 29557703705c2..e9e52f4a391e9 100644
--- a/dev/breeze/doc/images/output_k8s_create-cluster.txt
+++ b/dev/breeze/doc/images/output_k8s_create-cluster.txt
@@ -1 +1 @@
-98956f766fe2b98109626b909082e4f3
+254a3805eeb108125d078abecf9226fb
diff --git a/dev/breeze/doc/images/output_k8s_delete-cluster.svg b/dev/breeze/doc/images/output_k8s_delete-cluster.svg
index 83efc5b324a8e..ebb3878309bb5 100644
--- a/dev/breeze/doc/images/output_k8s_delete-cluster.svg
+++ b/dev/breeze/doc/images/output_k8s_delete-cluster.svg
@@ -1,4 +1,4 @@
-
+
 Delete the current KinD Cluster (optionally all clusters).
 ╭─ K8S cluster delete flags ───────────────────────────────────────────────────────────────────────────────────────────╮
---python  -p  Python major/minor version used in Airflow image for images.
-(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)
-[default: 3.8]
---kubernetes-version  Kubernetes version used to create the KinD cluster of.
-(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)
-[default: v1.28.13]
---all  Apply it to all created clusters
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
-╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮
---verbose  -v  Print verbose information about performed steps.
---dry-run  -D  If dry-run is set, commands are only printed, not executed.
---help  -h  Show this message and exit.
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+--python  -p  Python major/minor version used in Airflow image for images. (>3.9< | 3.10 | 3.11 | 3.12)
+[default: 3.9]
+--kubernetes-version  Kubernetes version used to create the KinD cluster of.
+(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)
+[default: v1.28.13]
+--all  Apply it to all created clusters
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮
+--verbose  -v  Print verbose information about performed steps.
+--dry-run  -D  If dry-run is set, commands are only printed, not executed.
+--help  -h  Show this message and exit.
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
diff --git a/dev/breeze/doc/images/output_k8s_delete-cluster.txt b/dev/breeze/doc/images/output_k8s_delete-cluster.txt
index 9057ac44c262f..3c0998b1f3cde 100644
--- a/dev/breeze/doc/images/output_k8s_delete-cluster.txt
+++ b/dev/breeze/doc/images/output_k8s_delete-cluster.txt
@@ -1 +1 @@
-ab4aacbb44ee638a91f4ff225cb1dbfa
+11cd0c89e39c35786fb33d00fbb927fc
diff --git a/dev/breeze/doc/images/output_k8s_deploy-airflow.svg b/dev/breeze/doc/images/output_k8s_deploy-airflow.svg
index 034b87a27dacf..1ad641587467d 100644
--- a/dev/breeze/doc/images/output_k8s_deploy-airflow.svg
+++ b/dev/breeze/doc/images/output_k8s_deploy-airflow.svg
@@ -181,8 +181,8 @@
 ╭─ Airflow deploy flags ───────────────────────────────────────────────────────────────────────────────────────────────╮
 --python  -p  Python major/minor version used in Airflow image for images.
-(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)
-[default: 3.8]
+(>3.9< | 3.10 | 3.11 | 3.12)
+[default: 3.9]
 --kubernetes-version  Kubernetes version used to create the KinD cluster of.
 (>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)
 [default: v1.28.13]
@@ -202,7 +202,7 @@
 (INTEGER RANGE)
 [default: 2; 1<=x<=4]
 --python-versions  Space separated list of python versions used for build with multiple versions. (TEXT)
-[default: 3.8 3.9 3.10 3.11 3.12]
+[default: 3.9 3.10 3.11 3.12]
 --kubernetes-versions  Kubernetes versions used to run in parallel (space separated). (TEXT)
 [default: v1.28.13 v1.29.8 v1.30.4 v1.31.0]
 --skip-cleanup  Skip cleanup of temporary files created during parallel run.
diff --git a/dev/breeze/doc/images/output_k8s_deploy-airflow.txt b/dev/breeze/doc/images/output_k8s_deploy-airflow.txt
index 8029f4f3e1ac6..207f911525643 100644
--- a/dev/breeze/doc/images/output_k8s_deploy-airflow.txt
+++ b/dev/breeze/doc/images/output_k8s_deploy-airflow.txt
@@ -1 +1 @@
-640037e62b7ed467a7a8134046444da2
+027d1bd159cdc426dd4a64051bf2dd8d
diff --git a/dev/breeze/doc/images/output_k8s_k9s.svg b/dev/breeze/doc/images/output_k8s_k9s.svg
index ed98b978fdcbd..55a6d16fbb127 100644
--- a/dev/breeze/doc/images/output_k8s_k9s.svg
+++ b/dev/breeze/doc/images/output_k8s_k9s.svg
@@ -1,4 +1,4 @@
-
+
 ╭─ K8S k9s flags ──────────────────────────────────────────────────────────────────────────────────────────────────────╮
 --use-docker  Use Docker to start k8s executor (otherwise k9s from PATH is used and only run with docker if not found on PATH).
---python  -p  Python major/minor version used in Airflow image for images.
-(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)
-[default: 3.8]
---kubernetes-version  Kubernetes version used to create the KinD cluster of.
-(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)
-[default: v1.28.13]
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
-╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮
---verbose  -v  Print verbose information about performed steps.
---dry-run  -D  If dry-run is set, commands are only printed, not executed.
---help  -h  Show this message and exit.
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+--python  -p  Python major/minor version used in Airflow image for images. (>3.9< | 3.10 | 3.11 | 3.12)
+[default: 3.9]
+--kubernetes-version  Kubernetes version used to create the KinD cluster of.
+(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)
+[default: v1.28.13]
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮
+--verbose  -v  Print verbose information about performed steps.
+--dry-run  -D  If dry-run is set, commands are only printed, not executed.
+--help  -h  Show this message and exit.
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
diff --git a/dev/breeze/doc/images/output_k8s_k9s.txt b/dev/breeze/doc/images/output_k8s_k9s.txt
index a6d65c87c5166..41cb876d4b85f 100644
--- a/dev/breeze/doc/images/output_k8s_k9s.txt
+++ b/dev/breeze/doc/images/output_k8s_k9s.txt
@@ -1 +1 @@
-52c49a84f6fa6752bbdda1a47765d117
+4330a248804808ca67646fd429521ea4
diff --git a/dev/breeze/doc/images/output_k8s_logs.svg b/dev/breeze/doc/images/output_k8s_logs.svg
index fc2daaab41a44..706b4b621c177 100644
--- a/dev/breeze/doc/images/output_k8s_logs.svg
+++ b/dev/breeze/doc/images/output_k8s_logs.svg
@@ -1,4 +1,4 @@
-
+