diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8229ddcbb0b5e..b7b6c6cd91ab4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -160,6 +160,7 @@ jobs: default-constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }} docs-filter: ${{ steps.selective-checks.outputs.docs-filter }} skip-pre-commits: ${{ steps.selective-checks.outputs.skip-pre-commits }} + debug-resources: ${{ steps.selective-checks.outputs.debug-resources }} source-head-repo: ${{ steps.source-run-info.outputs.source-head-repo }} pull-request-labels: ${{ steps.source-run-info.outputs.pr-labels }} in-workflow-build: ${{ steps.source-run-info.outputs.in-workflow-build }} @@ -1059,10 +1060,11 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: "Test Offline SQL generation" run: ./scripts/ci/testing/run_offline_sql_test.sh - name: "Tests: ${{needs.build-info.outputs.test-types}}" - run: ./scripts/ci/testing/ci_run_airflow_testing.sh + run: breeze testing tests --run-in-parallel env: PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + DEBUG_RESOURCES: ${{ needs.build-info.outputs.debug-resources }} - name: "Upload airflow logs" uses: actions/upload-artifact@v3 if: failure() @@ -1130,10 +1132,11 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: "Test downgrade" run: ./scripts/ci/testing/run_downgrade_test.sh - name: "Tests: ${{needs.build-info.outputs.test-types}}" - run: ./scripts/ci/testing/ci_run_airflow_testing.sh + run: breeze testing tests --run-in-parallel env: PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + DEBUG_RESOURCES: ${{ needs.build-info.outputs.debug-resources }} - name: "Upload airflow logs" uses: actions/upload-artifact@v3 if: failure() @@ -1201,10 +1204,11 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: "Test downgrade" run: ./scripts/ci/testing/run_downgrade_test.sh - name: "Tests: ${{needs.build-info.outputs.test-types}}" - run: ./scripts/ci/testing/ci_run_airflow_testing.sh + run: breeze testing tests --run-in-parallel env: PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + DEBUG_RESOURCES: ${{ needs.build-info.outputs.debug-resources }} - name: "Upload airflow logs" uses: actions/upload-artifact@v3 if: failure() @@ -1270,10 +1274,11 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: "Test downgrade" run: ./scripts/ci/testing/run_downgrade_test.sh - name: "Tests: ${{needs.build-info.outputs.test-types}}" - run: ./scripts/ci/testing/ci_run_airflow_testing.sh + run: breeze testing tests --run-in-parallel env: PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + DEBUG_RESOURCES: ${{ needs.build-info.outputs.debug-resources }} - name: "Upload airflow logs" uses: actions/upload-artifact@v3 if: failure() @@ -1309,7 +1314,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" RUNS_ON: ${{ needs.build-info.outputs.runs-on }} MYSQL_VERSION: ${{needs.build-info.outputs.default-mysql-version}} POSTGRES_VERSION: ${{needs.build-info.outputs.default-postgres-version}} - TEST_TYPES: "Quarantined" PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.default-python-version }} if: needs.build-info.outputs.run-tests == 'true' steps: @@ -1345,9 +1349,12 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" env: IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Tests: Quarantined" - run: 
./scripts/ci/testing/ci_run_quarantined_tests.sh + run: breeze testing tests --run-in-parallel || true env: PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" + IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + TEST_TYPES: "Quarantined" + DEBUG_RESOURCES: ${{ needs.build-info.outputs.debug-resources }} - name: "Upload Quarantine test results" uses: actions/upload-artifact@v3 if: always() diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9fe392842676e..ca72f6eed5c7e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -798,16 +798,17 @@ repos: files: newsfragments/.*\.rst entry: ./scripts/ci/pre_commit/pre_commit_newsfragments.py pass_filenames: true + # We sometimes won't have newsfragments in the repo, so always run it so `check-hooks-apply` passes + # This is fast, so not too much downside + always_run: true - id: update-breeze-cmd-output name: Update output of breeze commands in BREEZE.rst entry: ./scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py language: python files: ^BREEZE\.rst$|^dev/breeze/.*$|^\.pre-commit-config\.yaml$ + require_serial: true pass_filenames: false additional_dependencies: ['rich>=12.4.4', 'rich-click>=1.5'] - # We sometimes won't have newsfragments in the repo, so always run it so `check-hooks-apply` passes - # This is fast, so not too much downside - always_run: true - id: check-example-dags-urls name: Check that example dags url include provider versions entry: ./scripts/ci/pre_commit/pre_commit_update_example_dags_paths.py diff --git a/BREEZE.rst b/BREEZE.rst index a688c333f6023..d4f621d28ddd7 100644 --- a/BREEZE.rst +++ b/BREEZE.rst @@ -689,8 +689,24 @@ API, Providers. This how our CI runs them - running each group in parallel to ot replicate this behaviour. Another interesting use of the ``breeze testing tests`` command is that you can easily specify sub-set of the -tests for Providers. ``breeze testing tests --test-type "Providers[airbyte,http]`` for example will only run -tests for airbyte and http providers. +tests for Providers. + +For example, this will only run provider tests for the airbyte and http providers: + +.. code-block:: bash + + breeze testing tests --test-type "Providers[airbyte,http]" + +You can also run tests in parallel with the ``--run-in-parallel`` flag - by default it will run all test types +in parallel, but you can specify the test types that you want to run as a space-separated list +passed to the ``--test-types`` flag. + +For example, this will run API and WWW tests in parallel: + +.. code-block:: bash + + breeze testing tests --test-types "API WWW" --run-in-parallel + Here is the detailed set of options for the ``breeze testing tests`` command. @@ -747,7 +763,6 @@ You can: * Enter the interactive kubernetes test environment with ``breeze k8s shell`` and ``breeze k8s k9s`` command * Run multi-cluster-operations ``breeze k8s list-all-clusters`` and ``breeze k8s delete-all-clusters`` commands as well as running complete tests in parallel - via ``breeze k8s run-complete-tests`` and export logs from all clusters to a temp directory via ``breeze k8s dump-logs`` command This is described in detail in `Testing Kubernetes `_. diff --git a/CI.rst b/CI.rst index ccabd58463a96..b155ccc370559 100644 --- a/CI.rst +++ b/CI.rst @@ -33,7 +33,7 @@ environments we use. Most of our CI jobs are written as bash scripts which are e the CI jobs. And we have a number of variables determine build behaviour.
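As a rough local equivalent of the "Tests: Quarantined" step above - a minimal sketch only, assuming breeze picks up ``TEST_TYPES`` and ``DEBUG_RESOURCES`` from the environment in the same way the workflow step provides them:

.. code-block:: bash

    # Sketch: replicate the quarantined-tests CI step on a development machine.
    # The variables mirror the env block of the workflow step above; reading them
    # from the environment outside CI is an assumption, not a documented contract.
    export TEST_TYPES="Quarantined"
    export DEBUG_RESOURCES="true"
    breeze testing tests --run-in-parallel || true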
You can also take a look at the `CI Sequence Diagrams `_ for more graphical overview -of how Airlfow's CI works. +of how Airflow CI works. GitHub Actions runs ------------------- @@ -91,183 +91,63 @@ and cache is separately kept for different platform. The ``latest`` images of CI and PROD are ``amd64`` only images for CI, because there is no very easy way to push multiplatform images without merging the manifests and it is not really needed nor used for cache. -Locally replicating CI failures -------------------------------- -The main goal of the CI philosophy we have that no matter how complex the test and integration -infrastructure, as a developer you should be able to reproduce and re-run any of the failed checks -locally. One part of it are pre-commit checks, that allow you to run the same static checks in CI -and locally, but another part is the CI environment which is replicated locally with Breeze. - -You can read more about Breeze in `BREEZE.rst `_ but in essence it is a script that allows -you to re-create CI environment in your local development instance and interact with it. In its basic -form, when you do development you can run all the same tests that will be run in CI - but locally, -before you submit them as PR. Another use case where Breeze is useful is when tests fail on CI. You can -take the full ``COMMIT_SHA`` of the failed build pass it as ``--image-tag`` parameter of Breeze and it will -download the very same version of image that was used in CI and run it locally. This way, you can very -easily reproduce any failed test that happens in CI - even if you do not check out the sources -connected with the run. - -You can read more about it in `BREEZE.rst `_ and `TESTING.rst `_ - -Difference between local runs and GitHub Action workflows ---------------------------------------------------------- +Naming conventions for stored images +==================================== -Depending whether the scripts are run locally (most often via `Breeze `_) or whether they -are run in ``Build Images`` or ``Tests`` workflows they can take different values. +The images produced during the ``Build Images`` workflow of CI jobs are stored in the +`GitHub Container Registry `_ -You can use those variables when you try to reproduce the build locally. +The images are stored with both "latest" tag (for last main push image that passes all the tests as well +with the COMMIT_SHA id for images that were used in particular build. -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| Variable | Local | Build Images | Tests | Comment | -| | development | CI workflow | Workflow | | -+=========================================+=============+==============+============+=================================================+ -| Basic variables | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``PYTHON_MAJOR_MINOR_VERSION`` | | | | Major/Minor version of Python used. | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``DB_RESET`` | false | true | true | Determines whether database should be reset | -| | | | | at the container entry. By default locally | -| | | | | the database is not reset, which allows to | -| | | | | keep the database content between runs in | -| | | | | case of Postgres or MySQL. 
However, | -| | | | | it requires to perform manual init/reset | -| | | | | if you stop the environment. | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| Mount variables | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``MOUNT_SELECTED_LOCAL_SOURCES`` | true | false | false | Determines whether local sources are | -| | | | | mounted to inside the container. Useful for | -| | | | | local development, as changes you make | -| | | | | locally can be immediately tested in | -| | | | | the container. We mount only selected, | -| | | | | important folders. We do not mount the whole | -| | | | | project folder in order to avoid accidental | -| | | | | use of artifacts (such as ``egg-info`` | -| | | | | directories) generated locally on the | -| | | | | host during development. | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``MOUNT_ALL_LOCAL_SOURCES`` | false | false | false | Determines whether all local sources are | -| | | | | mounted to inside the container. Useful for | -| | | | | local development when you need to access .git | -| | | | | folders and other folders excluded when | -| | | | | ``MOUNT_SELECTED_LOCAL_SOURCES`` is true. | -| | | | | You might need to manually delete egg-info | -| | | | | folder when you enter breeze and the folder was | -| | | | | generated using different Python versions. | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| Force variables | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``ANSWER`` | | yes | yes | This variable determines if answer to questions | -| | | | | during the build process should be | -| | | | | automatically given. For local development, | -| | | | | the user is occasionally asked to provide | -| | | | | answers to questions such as - whether | -| | | | | the image should be rebuilt. By default | -| | | | | the user has to answer but in the CI | -| | | | | environment, we force "yes" answer. | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| Host variables | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``HOST_USER_ID`` | | | | User id of the host user. | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``HOST_GROUP_ID`` | | | | Group id of the host user. | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``HOST_OS`` | | linux | linux | OS of the Host (darwin/linux/windows). 
| -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| Git variables | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``COMMIT_SHA`` | | GITHUB_SHA | GITHUB_SHA | SHA of the commit of the build is run | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| Initialization | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``SKIP_ENVIRONMENT_INITIALIZATION`` | false\* | false\* | false\* | Skip initialization of test environment | -| | | | | | -| | | | | \* set to true in pre-commits | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``SKIP_SSH_SETUP`` | false\* | false\* | false\* | Skip setting up SSH server for tests. | -| | | | | | -| | | | | \* set to true in GitHub CodeSpaces | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| Verbosity variables | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``PRINT_INFO_FROM_SCRIPTS`` | true\* | true\* | true\* | Allows to print output to terminal from running | -| | | | | scripts. It prints some extra outputs if true | -| | | | | including what the commands do, results of some | -| | | | | operations, summary of variable values, exit | -| | | | | status from the scripts, outputs of failing | -| | | | | commands. If verbose is on it also prints the | -| | | | | commands executed by docker, kind, helm, | -| | | | | kubectl. Disabled in pre-commit checks. | -| | | | | | -| | | | | \* set to false in pre-commits | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``VERBOSE`` | false | true | true | Determines whether docker, helm, kind, | -| | | | | kubectl commands should be printed before | -| | | | | execution. This is useful to determine | -| | | | | what exact commands were executed for | -| | | | | debugging purpose as well as allows | -| | | | | to replicate those commands easily by | -| | | | | copy&pasting them from the output. | -| | | | | requires ``PRINT_INFO_FROM_SCRIPTS`` set to | -| | | | | true. | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``VERBOSE_COMMANDS`` | false | false | false | Determines whether every command | -| | | | | executed in bash should also be printed | -| | | | | before execution. This is a low-level | -| | | | | debugging feature of bash (set -x) and | -| | | | | it should only be used if you are lost | -| | | | | at where the script failed. 
| -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| Image build variables | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -| ``UPGRADE_TO_NEWER_DEPENDENCIES`` | false | false | false\* | Determines whether the build should | -| | | | | attempt to upgrade Python base image and all | -| | | | | PIP dependencies to latest ones matching | -| | | | | ``setup.py`` limits. This tries to replicate | -| | | | | the situation of "fresh" user who just installs | -| | | | | airflow and uses latest version of matching | -| | | | | dependencies. By default we are using a | -| | | | | tested set of dependency constraints | -| | | | | stored in separated "orphan" branches | -| | | | | of the airflow repository | -| | | | | ("constraints-main, "constraints-2-0") | -| | | | | but when this flag is set to anything but false | -| | | | | (for example random value), they are not used | -| | | | | used and "eager" upgrade strategy is used | -| | | | | when installing dependencies. We set it | -| | | | | to true in case of direct pushes (merges) | -| | | | | to main and scheduled builds so that | -| | | | | the constraints are tested. In those builds, | -| | | | | in case we determine that the tests pass | -| | | | | we automatically push latest set of | -| | | | | "tested" constraints to the repository. | -| | | | | | -| | | | | Setting the value to random value is best way | -| | | | | to assure that constraints are upgraded even if | -| | | | | there is no change to setup.py | -| | | | | | -| | | | | This way our constraints are automatically | -| | | | | tested and updated whenever new versions | -| | | | | of libraries are released. | -| | | | | | -| | | | | \* true in case of direct pushes and | -| | | | | scheduled builds | -+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +The image names follow the patterns (except the Python image, all the images are stored in +https://ghcr.io/ in ``apache`` organization. -Running CI Jobs locally -======================= +The packages are available under (CONTAINER_NAME is url-encoded name of the image). Note that "/" are +supported now in the ``ghcr.io`` as apart of the image name within ``apache`` organization, but they +have to be percent-encoded when you access them via UI (/ = %2F) -All our CI jobs are executed via ``breeze`` commands. You can replicate exactly what our CI is doing -by running the sequence of corresponding ``breeze`` command. Make sure however that you look at both: +``https://github.com/apache/airflow/pkgs/container/`` -* flags passed to ``breeze`` commands -* environment variables used when ``breeze`` command is run - this is useful when we want - to set a common flag for all ``breeze`` commands in the same job or even the whole workflow. For - example ``VERBOSE`` variable is set to ``true`` for all our workflows so that more detailed information - about internal commands executed in CI is printed. 
++--------------+----------------------------------------------------------+----------------------------------------------------------+ +| Image | Name:tag (both cases latest version and per-build) | Description | ++==============+==========================================================+==========================================================+ +| Python image | python:-slim-bullseye | Base Python image used by both production and CI image. | +| (DockerHub) | | Python maintainer release new versions of those image | +| | | with security fixes every few weeks in DockerHub. | ++--------------+----------------------------------------------------------+----------------------------------------------------------+ +| Airflow | airflow//python:-slim-bullseye | Version of python base image used in Airflow Builds | +| python base | | We keep the "latest" version only to mark last "good" | +| image | | python base that went through testing and was pushed. | ++--------------+----------------------------------------------------------+----------------------------------------------------------+ +| PROD Build | airflow//prod-build/python:latest | Production Build image - this is the "build" stage of | +| image | | production image. It contains build-essentials and all | +| | | necessary apt packages to build/install PIP packages. | +| | | We keep the "latest" version only to speed up builds. | ++--------------+----------------------------------------------------------+----------------------------------------------------------+ +| Manifest | airflow//ci-manifest/python:latest | CI manifest image - this is the image used to optimize | +| CI image | | pulls and builds for Breeze development environment | +| | | They store hash indicating whether the image will be | +| | | faster to build or pull. | +| | | We keep the "latest" version only to help breeze to | +| | | check if new image should be pulled. | ++--------------+----------------------------------------------------------+----------------------------------------------------------+ +| CI image | airflow//ci/python:latest | CI image - this is the image used for most of the tests. | +| | or | Contains all provider dependencies and tools useful | +| | airflow//ci/python: | For testing. This image is used in Breeze. | ++--------------+----------------------------------------------------------+----------------------------------------------------------+ +| | | faster to build or pull. | +| PROD image | airflow//prod/python:latest | Production image. This is the actual production image | +| | or | optimized for size. | +| | airflow//prod/python: | It contains only compiled libraries and minimal set of | +| | | dependencies to run Airflow. | ++--------------+----------------------------------------------------------+----------------------------------------------------------+ -In the output of the CI jobs, you will find both - the flags passed and environment variables set. +* might be either "main" or "v2-*-test" +* - Python version (Major + Minor).Should be one of ["3.7", "3.8", "3.9"]. +* - full-length SHA of commit either from the tip of the branch (for pushes/schedule) or + commit from the tip of the branch used for the PR. GitHub Registry Variables ========================= @@ -472,39 +352,34 @@ This workflow is a regular workflow that performs all checks of Airflow code. 
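As a concrete, hedged reading of the image naming table above, with the branch and Python version filled in per the bullet list that follows the table; ``<COMMIT_SHA>`` stays a placeholder for the full commit SHA of the build you want:

.. code-block:: bash

    # Illustrative only: pull the CI image for branch "main" and Python 3.7,
    # either the "latest" tag or the tag of a particular build.
    docker pull ghcr.io/apache/airflow/main/ci/python3.7:latest
    docker pull ghcr.io/apache/airflow/main/ci/python3.7:<COMMIT_SHA>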
+-----------------------------+----------------------------------------------------------+---------+----------+-----------+ | UI tests | React UI tests for new Airflow UI | Yes | Yes | Yes | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ -| WWW tests | React tests for current Airflow UI | Yes | Yes | Yes | -+-----------------------------+----------------------------------------------------------+---------+----------+-----------+ | Test image building | Tests if PROD image build examples work | Yes | Yes | Yes | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ -| CI Images | Waits for and verify CI Images (3) | Yes | Yes | Yes | +| CI Images | Waits for and verify CI Images (2) | Yes | Yes | Yes | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ | (Basic) Static checks | Performs static checks (full or basic) | Yes | Yes | Yes | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ | Build docs | Builds documentation | Yes | Yes | Yes | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ -| Tests | Run all the Pytest tests for Python code | Yes(2) | Yes | Yes | +| Tests | Run all the Pytest tests for Python code | Yes | Yes | Yes | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ | Tests provider packages | Tests if provider packages work | Yes | Yes | Yes | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ | Upload coverage | Uploads test coverage from all the tests | - | Yes | - | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ -| PROD Images | Waits for and verify PROD Images (3) | Yes | Yes | Yes | +| PROD Images | Waits for and verify PROD Images (2) | Yes | Yes | Yes | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ -| Tests Kubernetes | Run Kubernetes test | Yes(2) | Yes | Yes | +| Tests Kubernetes | Run Kubernetes test | Yes | Yes | Yes | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ -| Constraints | Upgrade constraints to latest ones (4) | - | Yes | Yes | +| Constraints | Upgrade constraints to latest ones (3) | - | Yes | Yes | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ -| Push cache & images | Pushes cache/images to GitHub Registry (4) | - | Yes | Yes | +| Push cache & images | Pushes cache/images to GitHub Registry (3) | - | Yes | Yes | +-----------------------------+----------------------------------------------------------+---------+----------+-----------+ ``(1)`` Scheduled jobs builds images from scratch - to test if everything works properly for clean builds -``(2)`` The tests are run when the Trigger Tests job determine that important files change (this allows -for example "no-code" changes to build much faster) - -``(3)`` The jobs wait for CI images to be available. +``(2)`` The jobs wait for CI images to be available. 
-``(4)`` PROD and CI cache & images are pushed as "latest" to GitHub Container registry and constraints are +``(3)`` PROD and CI cache & images are pushed as "latest" to GitHub Container registry and constraints are upgraded only if all tests are successful. The images are rebuilt in this step using constraints pushed in the previous step. @@ -529,65 +404,60 @@ For more information, see: Website endpoint: http://apache-airflow-docs.s3-website.eu-central-1.amazonaws.com/ -Naming conventions for stored images -==================================== -The images produced during the ``Build Images`` workflow of CI jobs are stored in the -`GitHub Container Registry `_ +Debugging CI Jobs in GitHub Actions +=================================== -The images are stored with both "latest" tag (for last main push image that passes all the tests as well -with the COMMIT_SHA id for images that were used in particular build. +The CI jobs are notoriously difficult to test, because you can only really see their results when you run them +in the CI environment, and the environment in which they run depends on who runs them (they might run either +on our self-hosted runners (with 64 GB RAM, 8 CPUs) or on the GitHub public runners (6 GB of RAM, 2 CPUs)), and +the results will vastly differ depending on which environment is used. We are utilizing parallelism to make +use of all the available CPU/memory, but sometimes you need to enable debugging and force certain environments. +An additional difficulty is that the ``Build Images`` workflow is of the ``pull-request-target`` type, which means that it +will always run using the ``main`` version - no matter what is in your Pull Request. -The image names follow the patterns (except the Python image, all the images are stored in -https://ghcr.io/ in ``apache`` organization. +There are several ways you can debug the CI jobs when you are a maintainer. -The packages are available under (CONTAINER_NAME is url-encoded name of the image). Note that "/" are -supported now in the ``ghcr.io`` as apart of the image name within ``apache`` organization, but they -have to be percent-encoded when you access them via UI (/ = %2F) +* When you want to test the build with all combinations of all Python versions, backends etc. on a regular PR, + add the ``full tests needed`` label to the PR. +* When you want to test a maintainer PR using public runners, add the ``public runners`` label to the PR +* When you want to see the resources used by the run, add the ``debug ci resources`` label to the PR +* When you want to test changes to breeze that include changes to how images are built, you should push + your PR to the ``apache`` repository, not to your fork. This will build the images as part of the ``CI`` workflow + rather than using the ``Build images`` workflow, and use the same breeze version for building the image and testing +* When you want to test changes to the ``build-images.yml`` workflow, you should push your branch as the ``main`` + branch in your local fork.
This will run changed ``build-images.yml`` workflow as it will be in ``main`` + branch of your fork -``https://github.com/apache/airflow/pkgs/container/`` +Replicating the CI Jobs locally +=============================== -+--------------+----------------------------------------------------------+----------------------------------------------------------+ -| Image | Name:tag (both cases latest version and per-build) | Description | -+==============+==========================================================+==========================================================+ -| Python image | python:-slim-bullseye | Base Python image used by both production and CI image. | -| (DockerHub) | | Python maintainer release new versions of those image | -| | | with security fixes every few weeks in DockerHub. | -+--------------+----------------------------------------------------------+----------------------------------------------------------+ -| Airflow | airflow//python:-slim-bullseye | Version of python base image used in Airflow Builds | -| python base | | We keep the "latest" version only to mark last "good" | -| image | | python base that went through testing and was pushed. | -+--------------+----------------------------------------------------------+----------------------------------------------------------+ -| PROD Build | airflow//prod-build/python:latest | Production Build image - this is the "build" stage of | -| image | | production image. It contains build-essentials and all | -| | | necessary apt packages to build/install PIP packages. | -| | | We keep the "latest" version only to speed up builds. | -+--------------+----------------------------------------------------------+----------------------------------------------------------+ -| Manifest | airflow//ci-manifest/python:latest | CI manifest image - this is the image used to optimize | -| CI image | | pulls and builds for Breeze development environment | -| | | They store hash indicating whether the image will be | -| | | faster to build or pull. | -| | | We keep the "latest" version only to help breeze to | -| | | check if new image should be pulled. | -+--------------+----------------------------------------------------------+----------------------------------------------------------+ -| CI image | airflow//ci/python:latest | CI image - this is the image used for most of the tests. | -| | or | Contains all provider dependencies and tools useful | -| | airflow//ci/python: | For testing. This image is used in Breeze. | -+--------------+----------------------------------------------------------+----------------------------------------------------------+ -| | | faster to build or pull. | -| PROD image | airflow//prod/python:latest | Production image. This is the actual production image | -| | or | optimized for size. | -| | airflow//prod/python: | It contains only compiled libraries and minimal set of | -| | | dependencies to run Airflow. | -+--------------+----------------------------------------------------------+----------------------------------------------------------+ +The main goal of the CI philosophy we have that no matter how complex the test and integration +infrastructure, as a developer you should be able to reproduce and re-run any of the failed checks +locally. One part of it are pre-commit checks, that allow you to run the same static checks in CI +and locally, but another part is the CI environment which is replicated locally with Breeze. 
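For the pre-commit half of that philosophy, a minimal sketch - the hook id is taken from the ``.pre-commit-config.yaml`` hunk earlier in this diff:

.. code-block:: bash

    # Run the same static check locally that CI runs; limiting to one hook id keeps it fast.
    pre-commit run update-breeze-cmd-output --all-files
    # Or run every configured hook, as the full static-checks job effectively does.
    pre-commit run --all-files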
-* might be either "main" or "v2-*-test" -* - Python version (Major + Minor).Should be one of ["3.7", "3.8", "3.9"]. -* - full-length SHA of commit either from the tip of the branch (for pushes/schedule) or - commit from the tip of the branch used for the PR. +You can read more about Breeze in `BREEZE.rst `_ but in essence it is a script that allows +you to re-create CI environment in your local development instance and interact with it. In its basic +form, when you do development you can run all the same tests that will be run in CI - but locally, +before you submit them as PR. Another use case where Breeze is useful is when tests fail on CI. You can +take the full ``COMMIT_SHA`` of the failed build pass it as ``--image-tag`` parameter of Breeze and it will +download the very same version of image that was used in CI and run it locally. This way, you can very +easily reproduce any failed test that happens in CI - even if you do not check out the sources +connected with the run. -Reproducing CI Runs locally -=========================== +All our CI jobs are executed via ``breeze`` commands. You can replicate exactly what our CI is doing +by running the sequence of corresponding ``breeze`` command. Make sure however that you look at both: + +* flags passed to ``breeze`` commands +* environment variables used when ``breeze`` command is run - this is useful when we want + to set a common flag for all ``breeze`` commands in the same job or even the whole workflow. For + example ``VERBOSE`` variable is set to ``true`` for all our workflows so that more detailed information + about internal commands executed in CI is printed. + +In the output of the CI jobs, you will find both - the flags passed and environment variables set. + +You can read more about it in `BREEZE.rst `_ and `TESTING.rst `_ Since we store images from every CI run, you should be able easily reproduce any of the CI tests problems locally. You can do it by pulling and using the right image and running it with the right docker command, @@ -614,11 +484,150 @@ this case, you do not need to checkout the sources that were used for that run - the image - but remember that any changes you make in those sources are lost when you leave the image as the sources are not mapped from your host machine. +Depending whether the scripts are run locally via `Breeze `_ or whether they +are run in ``Build Images`` or ``Tests`` workflows they can take different values. -Adding new Python versions to CI --------------------------------- +You can use those variables when you try to reproduce the build locally. + ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| Variable | Local | Build Images | CI | Comment | +| | development | workflow | Workflow | | ++=========================================+=============+==============+============+=================================================+ +| Basic variables | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``PYTHON_MAJOR_MINOR_VERSION`` | | | | Major/Minor version of Python used. | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``DB_RESET`` | false | true | true | Determines whether database should be reset | +| | | | | at the container entry. 
By default locally | +| | | | | the database is not reset, which allows to | +| | | | | keep the database content between runs in | +| | | | | case of Postgres or MySQL. However, | +| | | | | it requires to perform manual init/reset | +| | | | | if you stop the environment. | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| Mount variables | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``MOUNT_SELECTED_LOCAL_SOURCES`` | true | false | false | Determines whether local sources are | +| | | | | mounted to inside the container. Useful for | +| | | | | local development, as changes you make | +| | | | | locally can be immediately tested in | +| | | | | the container. We mount only selected, | +| | | | | important folders. We do not mount the whole | +| | | | | project folder in order to avoid accidental | +| | | | | use of artifacts (such as ``egg-info`` | +| | | | | directories) generated locally on the | +| | | | | host during development. | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``MOUNT_ALL_LOCAL_SOURCES`` | false | false | false | Determines whether all local sources are | +| | | | | mounted to inside the container. Useful for | +| | | | | local development when you need to access .git | +| | | | | folders and other folders excluded when | +| | | | | ``MOUNT_SELECTED_LOCAL_SOURCES`` is true. | +| | | | | You might need to manually delete egg-info | +| | | | | folder when you enter breeze and the folder was | +| | | | | generated using different Python versions. | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| Force variables | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``ANSWER`` | | yes | yes | This variable determines if answer to questions | +| | | | | during the build process should be | +| | | | | automatically given. For local development, | +| | | | | the user is occasionally asked to provide | +| | | | | answers to questions such as - whether | +| | | | | the image should be rebuilt. By default | +| | | | | the user has to answer but in the CI | +| | | | | environment, we force "yes" answer. | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| Host variables | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``HOST_USER_ID`` | | | | User id of the host user. | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``HOST_GROUP_ID`` | | | | Group id of the host user. | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``HOST_OS`` | | linux | linux | OS of the Host (darwin/linux/windows). 
| ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| Git variables | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``COMMIT_SHA`` | | GITHUB_SHA | GITHUB_SHA | SHA of the commit of the build is run | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| Initialization | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``SKIP_ENVIRONMENT_INITIALIZATION`` | false\* | false\* | false\* | Skip initialization of test environment | +| | | | | | +| | | | | \* set to true in pre-commits | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``SKIP_SSH_SETUP`` | false\* | false\* | false\* | Skip setting up SSH server for tests. | +| | | | | | +| | | | | \* set to true in GitHub CodeSpaces | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| Verbosity variables | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``PRINT_INFO_FROM_SCRIPTS`` | true\* | true\* | true\* | Allows to print output to terminal from running | +| | | | | scripts. It prints some extra outputs if true | +| | | | | including what the commands do, results of some | +| | | | | operations, summary of variable values, exit | +| | | | | status from the scripts, outputs of failing | +| | | | | commands. If verbose is on it also prints the | +| | | | | commands executed by docker, kind, helm, | +| | | | | kubectl. Disabled in pre-commit checks. | +| | | | | | +| | | | | \* set to false in pre-commits | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``VERBOSE`` | false | true | true | Determines whether docker, helm, kind, | +| | | | | kubectl commands should be printed before | +| | | | | execution. This is useful to determine | +| | | | | what exact commands were executed for | +| | | | | debugging purpose as well as allows | +| | | | | to replicate those commands easily by | +| | | | | copy&pasting them from the output. | +| | | | | requires ``PRINT_INFO_FROM_SCRIPTS`` set to | +| | | | | true. | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``VERBOSE_COMMANDS`` | false | false | false | Determines whether every command | +| | | | | executed in bash should also be printed | +| | | | | before execution. This is a low-level | +| | | | | debugging feature of bash (set -x) and | +| | | | | it should only be used if you are lost | +| | | | | at where the script failed. 
| ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| Image build variables | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ +| ``UPGRADE_TO_NEWER_DEPENDENCIES`` | false | false | false\* | Determines whether the build should | +| | | | | attempt to upgrade Python base image and all | +| | | | | PIP dependencies to latest ones matching | +| | | | | ``setup.py`` limits. This tries to replicate | +| | | | | the situation of "fresh" user who just installs | +| | | | | airflow and uses latest version of matching | +| | | | | dependencies. By default we are using a | +| | | | | tested set of dependency constraints | +| | | | | stored in separated "orphan" branches | +| | | | | of the airflow repository | +| | | | | ("constraints-main, "constraints-2-0") | +| | | | | but when this flag is set to anything but false | +| | | | | (for example random value), they are not used | +| | | | | used and "eager" upgrade strategy is used | +| | | | | when installing dependencies. We set it | +| | | | | to true in case of direct pushes (merges) | +| | | | | to main and scheduled builds so that | +| | | | | the constraints are tested. In those builds, | +| | | | | in case we determine that the tests pass | +| | | | | we automatically push latest set of | +| | | | | "tested" constraints to the repository. | +| | | | | | +| | | | | Setting the value to random value is best way | +| | | | | to assure that constraints are upgraded even if | +| | | | | there is no change to setup.py | +| | | | | | +| | | | | This way our constraints are automatically | +| | | | | tested and updated whenever new versions | +| | | | | of libraries are released. | +| | | | | | +| | | | | \* true in case of direct pushes and | +| | | | | scheduled builds | ++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+ -In the ``main`` branch of development line we currently support Python 3.7, 3.8, 3.9, 3.10 +Adding new Python versions to CI +================================ In order to add a new version the following operations should be done (example uses Python 3.10) diff --git a/TESTING.rst b/TESTING.rst index ba9165f67123d..bacaff64c863d 100644 --- a/TESTING.rst +++ b/TESTING.rst @@ -202,15 +202,6 @@ You can also limit the set of providers you would like to run tests of breeze testing tests --test-type "Providers[airbyte,http]" -You can also write tests in "limited progress" mode (useful in the future to run CI). In this mode each -test just prints "percentage" summary of the run as single line and only dumps full output of the test -after it completes. - -.. code-block:: bash - - breeze testing tests --test-type Core --limit-progress-output - - Running Tests of a specified type from the Host ----------------------------------------------- @@ -554,12 +545,13 @@ test in parallel. This way we can decrease the time of running all tests in self Running full Airflow test suite in parallel =========================================== -If you run ``./scripts/ci/testing/ci_run_airflow_testing.sh`` tests run in parallel +If you run ``breeze testing tests --run-in-parallel`` tests run in parallel on your development machine - maxing out the number of parallel runs at the number of cores you have available in your Docker engine. 
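A sketch tying this to the ``--image-tag`` guidance from the CI.rst section above - ``IMAGE_TAG`` is the same variable the Tests workflow sets for this command, ``<COMMIT_SHA>`` stays a placeholder, and reading the variable from the environment outside CI is assumed rather than documented here:

.. code-block:: bash

    # Reproduce a failed parallel CI run locally against the exact image CI used.
    IMAGE_TAG="<COMMIT_SHA>" breeze testing tests --run-in-parallel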
-In case you do not have enough memory available to your Docker (~32 GB), the ``Integration`` test type -is always run sequentially - after all tests are completed (docker cleanup is performed in-between). +In case you do not have enough memory available to your Docker (8 GB), the ``Integration``, ``Provider`` +and ``Core`` test types are executed sequentially, with the docker setup cleaned in-between. This allows for massive speedup in full test execution. On 8 CPU machine with 16 cores and 64 GB memory and fast SSD disk, the whole suite of tests completes in about 5 minutes (!). Same suite of tests takes diff --git a/dev/breeze/src/airflow_breeze/commands/ci_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_commands.py index c5cd05c8b4577..111ee3e65f219 100644 --- a/dev/breeze/src/airflow_breeze/commands/ci_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/ci_commands.py @@ -60,7 +60,7 @@ ) from airflow_breeze.utils.find_newer_dependencies import find_newer_dependencies from airflow_breeze.utils.github_actions import get_ga_output -from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT +from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, MSSQL_TMP_DIR_NAME from airflow_breeze.utils.run_utils import run_command @@ -92,6 +92,11 @@ def free_space(verbose: bool, dry_run: bool, answer: str): ) run_command(["df", "-h"], verbose=verbose, dry_run=dry_run) run_command(["docker", "logout", "ghcr.io"], verbose=verbose, dry_run=dry_run, check=False) + run_command( + ["sudo", "rm", "-f", os.fspath(Path.home() / MSSQL_TMP_DIR_NAME)], + verbose=verbose, + dry_run=dry_run, + ) @ci_group.command(name="resource-check", help="Check if available docker resources are enough.") @@ -111,7 +116,7 @@ def resource_check(verbose: bool, dry_run: bool): HOME_DIR / ".azure", HOME_DIR / ".config/gcloud", HOME_DIR / ".docker", - AIRFLOW_SOURCES_ROOT, + HOME_DIR / MSSQL_TMP_DIR_NAME, ] diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py index 6cba47a6ef950..19fd65404db8e 100644 --- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py @@ -37,6 +37,7 @@ option_airflow_constraints_reference_build, option_answer, option_builder, + option_debug_resources, option_dev_apt_command, option_dev_apt_deps, option_docker_cache, @@ -123,6 +124,7 @@ def run_build_in_parallel( include_success_outputs: bool, parallelism: int, skip_cleanup: bool, + debug_resources: bool, dry_run: bool, verbose: bool, ) -> None: @@ -130,7 +132,10 @@ def run_build_in_parallel( with ci_group(f"Building for {python_version_list}"): all_params = [f"CI {image_params.python}" for image_params in image_params_list] with run_with_pool( - parallelism=parallelism, all_params=all_params, progress_matcher=DockerBuildxProgressMatcher() + parallelism=parallelism, + all_params=all_params, + debug_resources=debug_resources, + progress_matcher=DockerBuildxProgressMatcher(), ) as (pool, outputs): results = [ pool.apply_async( @@ -167,6 +172,7 @@ def start_building(params: BuildCiParams, dry_run: bool, verbose: bool): @option_run_in_parallel @option_parallelism @option_skip_cleanup +@option_debug_resources @option_include_success_outputs @option_python_versions @option_upgrade_to_newer_dependencies @@ -200,12 +206,13 @@ def build( run_in_parallel: bool, parallelism: int, skip_cleanup: bool, + debug_resources: bool, include_success_outputs, python_versions: str,
answer: str, **kwargs, ): - """Build CI image. Include building multiple images for all python versions (sequentially).""" + """Build CI image. Include building multiple images for all python versions.""" def run_build(ci_image_params: BuildCiParams) -> None: return_code, info = run_build_ci_image( @@ -237,6 +244,7 @@ def run_build(ci_image_params: BuildCiParams) -> None: include_success_outputs=include_success_outputs, parallelism=parallelism, skip_cleanup=skip_cleanup, + debug_resources=debug_resources, dry_run=dry_run, verbose=verbose, ) @@ -254,6 +262,7 @@ def run_build(ci_image_params: BuildCiParams) -> None: @option_run_in_parallel @option_parallelism @option_skip_cleanup +@option_debug_resources @option_include_success_outputs @option_python_versions @option_github_token @@ -273,6 +282,7 @@ def pull( github_token: str, parallelism: int, skip_cleanup: bool, + debug_resources: bool, include_success_outputs: bool, image_tag: str, wait_for_image: bool, @@ -297,6 +307,7 @@ def pull( dry_run=dry_run, parallelism=parallelism, skip_cleanup=skip_cleanup, + debug_resources=debug_resources, include_success_outputs=include_success_outputs, image_params_list=ci_image_params_list, python_version_list=python_version_list, diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands_config.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands_config.py index b04408058883e..c57b65e046fd5 100644 --- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands_config.py +++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands_config.py @@ -43,8 +43,9 @@ "options": [ "--run-in-parallel", "--parallelism", - "--skip-cleanup", "--python-versions", + "--skip-cleanup", + "--debug-resources", "--include-success-outputs", ], }, @@ -96,8 +97,9 @@ "options": [ "--run-in-parallel", "--parallelism", - "--skip-cleanup", "--python-versions", + "--skip-cleanup", + "--debug-resources", "--include-success-outputs", ], }, diff --git a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py index 69c1328cb7429..917317d4a45f6 100644 --- a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py @@ -32,12 +32,14 @@ from airflow_breeze.utils.ci_group import ci_group from airflow_breeze.utils.click_utils import BreezeGroup from airflow_breeze.utils.common_options import ( + option_debug_resources, option_dry_run, option_include_success_outputs, option_parallelism, option_python, option_python_versions, option_run_in_parallel, + option_skip_cleanup, option_verbose, ) from airflow_breeze.utils.console import Output, get_console @@ -94,7 +96,7 @@ def kubernetes_group(): option_image_tag = click.option( '-t', '--image-tag', - help='Image tag used to build K8S image from', + help='Image tag used to build K8S image from.', default='latest', show_default=True, envvar='IMAGE_TAG', @@ -147,8 +149,7 @@ def kubernetes_group(): K8S_CONFIGURE_CLUSTER_PROGRESS_REGEXP = r'.*airflow-python-[0-9.]+-v[0-9.].*' K8S_DEPLOY_PROGRESS_REGEXP = r'.*airflow-python-[0-9.]+-v[0-9.].*' K8S_TEST_PROGRESS_REGEXP = r'.*airflow-python-[0-9.]+-v[0-9.].*|^kubernetes_tests/.*' -PERCENT_K8S_TEST_PROGRESS_REGEXP = r'^kubernetes_tests/.*\[[ \d*%]*\].*' -K8S_SKIP_TRUNCATION_REGEXP = r'^kubernetes_tests/.*' +PERCENT_K8S_TEST_PROGRESS_REGEXP = r'^kubernetes_tests/.*\[[ \d%]*\].*' @kubernetes_group.command(name="setup-env", help="Setup shared Kubernetes virtual environment and tools.") @@ -251,9 +252,11 
@@ def _create_cluster( @option_kubernetes_version @option_run_in_parallel @option_parallelism +@option_skip_cleanup +@option_debug_resources +@option_include_success_outputs @option_kubernetes_versions @option_python_versions -@option_include_success_outputs @option_verbose @option_dry_run def create_cluster( @@ -261,10 +264,12 @@ def create_cluster( python: str, kubernetes_version: str, run_in_parallel: bool, + skip_cleanup: bool, + debug_resources: bool, + include_success_outputs: bool, parallelism: int, kubernetes_versions: str, python_versions: str, - include_success_outputs: bool, verbose: bool, dry_run: bool, ): @@ -282,8 +287,9 @@ def create_cluster( with run_with_pool( parallelism=parallelism, all_params=combo_titles, + debug_resources=debug_resources, progress_matcher=GenericRegexpProgressMatcher( - K8S_CLUSTER_CREATE_PROGRESS_REGEXP, lines_to_search=15 + regexp=K8S_CLUSTER_CREATE_PROGRESS_REGEXP, lines_to_search=15 ), ) as (pool, outputs): results = [ @@ -305,6 +311,7 @@ def create_cluster( results=results, success="All clusters created.", outputs=outputs, + skip_cleanup=skip_cleanup, include_success_outputs=include_success_outputs, ) else: @@ -632,8 +639,10 @@ def _upload_k8s_image( @option_rebuild_base_image @option_run_in_parallel @option_parallelism -@option_python_versions +@option_skip_cleanup +@option_debug_resources @option_include_success_outputs +@option_python_versions @option_verbose @option_dry_run def build_k8s_image( @@ -642,8 +651,10 @@ def build_k8s_image( rebuild_base_image: bool, run_in_parallel: bool, parallelism: int, - python_versions: str, + skip_cleanup: bool, + debug_resources: bool, include_success_outputs: bool, + python_versions: str, verbose: bool, dry_run: bool, ): @@ -657,6 +668,7 @@ def build_k8s_image( with run_with_pool( parallelism=parallelism, all_params=[f"Image {python}" for python in python_version_array], + debug_resources=debug_resources, progress_matcher=DockerBuildxProgressMatcher(), ) as (pool, outputs): results = [ @@ -677,6 +689,7 @@ def build_k8s_image( results=results, success="All K8S images built correctly.", outputs=outputs, + skip_cleanup=skip_cleanup, include_success_outputs=include_success_outputs, ) else: @@ -702,9 +715,11 @@ def build_k8s_image( @option_kubernetes_version @option_run_in_parallel @option_parallelism +@option_skip_cleanup +@option_debug_resources +@option_include_success_outputs @option_python_versions @option_kubernetes_versions -@option_include_success_outputs @option_verbose @option_dry_run def upload_k8s_image( @@ -712,9 +727,11 @@ def upload_k8s_image( kubernetes_version: str, run_in_parallel: bool, parallelism: int, + skip_cleanup: bool, + debug_resources: bool, + include_success_outputs: bool, python_versions: str, kubernetes_versions: str, - include_success_outputs: bool, verbose: bool, dry_run: bool, ): @@ -732,6 +749,7 @@ def upload_k8s_image( with run_with_pool( parallelism=parallelism, all_params=combo_titles, + debug_resources=debug_resources, progress_matcher=GenericRegexpProgressMatcher( regexp=K8S_UPLOAD_PROGRESS_REGEXP, lines_to_search=2 ), @@ -753,6 +771,7 @@ def upload_k8s_image( results=results, success="All K8S images uploaded correctly.", outputs=outputs, + skip_cleanup=skip_cleanup, include_success_outputs=include_success_outputs, ) else: @@ -899,9 +918,11 @@ def _configure_k8s_cluster( @option_kubernetes_version @option_run_in_parallel @option_parallelism +@option_skip_cleanup +@option_debug_resources +@option_include_success_outputs @option_python_versions @option_kubernetes_versions 
-@option_include_success_outputs @option_verbose @option_dry_run def configure_cluster( @@ -909,9 +930,11 @@ def configure_cluster( kubernetes_version: str, run_in_parallel: bool, parallelism: int, + skip_cleanup: bool, + debug_resources: bool, + include_success_outputs: bool, python_versions: str, kubernetes_versions: str, - include_success_outputs: bool, verbose: bool, dry_run: bool, ): @@ -929,6 +952,7 @@ def configure_cluster( with run_with_pool( parallelism=parallelism, all_params=combo_titles, + debug_resources=debug_resources, progress_matcher=GenericRegexpProgressMatcher( regexp=K8S_CONFIGURE_CLUSTER_PROGRESS_REGEXP, lines_to_search=10 ), @@ -950,6 +974,7 @@ def configure_cluster( results=results, success="All clusters configured correctly.", outputs=outputs, + skip_cleanup=skip_cleanup, include_success_outputs=include_success_outputs, ) else: @@ -1096,9 +1121,11 @@ def _deploy_airflow( @option_wait_time_in_seconds @option_run_in_parallel @option_parallelism +@option_skip_cleanup +@option_debug_resources +@option_include_success_outputs @option_python_versions @option_kubernetes_versions -@option_include_success_outputs @option_verbose @option_dry_run @click.argument('extra_options', nargs=-1, type=click.UNPROCESSED) @@ -1110,9 +1137,11 @@ def deploy_airflow( wait_time_in_seconds: int, run_in_parallel: bool, parallelism: int, + skip_cleanup: bool, + debug_resources: bool, + include_success_outputs: bool, python_versions: str, kubernetes_versions: str, - include_success_outputs: bool, verbose: bool, dry_run: bool, extra_options: tuple[str, ...] | None = None, @@ -1127,7 +1156,10 @@ def deploy_airflow( with run_with_pool( parallelism=parallelism, all_params=combo_titles, - progress_matcher=GenericRegexpProgressMatcher(K8S_DEPLOY_PROGRESS_REGEXP, lines_to_search=15), + debug_resources=debug_resources, + progress_matcher=GenericRegexpProgressMatcher( + regexp=K8S_DEPLOY_PROGRESS_REGEXP, lines_to_search=15 + ), ) as (pool, outputs): results = [ pool.apply_async( @@ -1150,6 +1182,7 @@ def deploy_airflow( results=results, success="All Airflow charts successfully deployed.", outputs=outputs, + skip_cleanup=skip_cleanup, include_success_outputs=include_success_outputs, ) else: @@ -1369,9 +1402,11 @@ def _run_tests( @option_force_venv_setup @option_run_in_parallel @option_parallelism +@option_skip_cleanup +@option_debug_resources +@option_include_success_outputs @option_python_versions @option_kubernetes_versions -@option_include_success_outputs @option_verbose @option_dry_run @click.argument('test_args', nargs=-1, type=click.Path()) @@ -1382,9 +1417,11 @@ def tests( force_venv_setup: bool, run_in_parallel: bool, parallelism: int, + skip_cleanup: bool, + debug_resources: bool, + include_success_outputs: bool, python_versions: str, kubernetes_versions: str, - include_success_outputs: bool, verbose: bool, dry_run: bool, test_args: tuple[str, ...], @@ -1427,11 +1464,11 @@ def tests( with run_with_pool( parallelism=parallelism, all_params=combo_titles, + debug_resources=debug_resources, progress_matcher=GenericRegexpProgressMatcher( - K8S_TEST_PROGRESS_REGEXP, - lines_to_search=15, + regexp=K8S_TEST_PROGRESS_REGEXP, regexp_for_joined_line=PERCENT_K8S_TEST_PROGRESS_REGEXP, - regexp_to_skip_truncation=K8S_SKIP_TRUNCATION_REGEXP, + lines_to_search=15, ), ) as (pool, outputs): results = [ @@ -1454,6 +1491,7 @@ def tests( success="All K8S tests successfully completed.", outputs=outputs, include_success_outputs=include_success_outputs, + skip_cleanup=skip_cleanup, ) else: result, _ = _run_tests( diff 
--git a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands_config.py b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands_config.py index b83c1f6ce422d..b440a4bef4658 100644 --- a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands_config.py +++ b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands_config.py @@ -62,6 +62,8 @@ "--parallelism", "--python-versions", "--kubernetes-versions", + "--skip-cleanup", + "--debug-resources", "--include-success-outputs", ], }, @@ -82,6 +84,8 @@ "--parallelism", "--python-versions", "--kubernetes-versions", + "--skip-cleanup", + "--debug-resources", "--include-success-outputs", ], }, @@ -102,6 +106,8 @@ "--parallelism", "--python-versions", "--kubernetes-versions", + "--skip-cleanup", + "--debug-resources", "--include-success-outputs", ], }, @@ -121,6 +127,8 @@ "--parallelism", "--python-versions", "--kubernetes-versions", + "--skip-cleanup", + "--debug-resources", "--include-success-outputs", ], }, @@ -143,6 +151,8 @@ "--parallelism", "--python-versions", "--kubernetes-versions", + "--skip-cleanup", + "--debug-resources", "--include-success-outputs", ], }, @@ -189,31 +199,12 @@ "--parallelism", "--python-versions", "--kubernetes-versions", + "--skip-cleanup", + "--debug-resources", "--include-success-outputs", ], }, ], - "breeze k8s run-complete-tests": [ - { - "name": "K8S setup & tests flags", - "options": [ - "--parallelism", - "--python-versions", - "--kubernetes-versions", - "--include-success-outputs", - "--executor", - "--force-venv-setup", - "--image-tag", - "--wait-time-in-seconds", - "--skip-rebuilding-base-image", - "--skip-recreating-namespaces", - "--skip-deploying-test-resources", - "--skip-recreating-clusters", - "--skip-deploying-airflow", - "--skip-deleting-clusters", - ], - } - ], "breeze k8s k9s": [ { "name": "K8S k9s flags", diff --git a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py index eef5f4add805c..32e7de2dd1565 100644 --- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py @@ -40,6 +40,7 @@ option_airflow_constraints_reference_build, option_answer, option_builder, + option_debug_resources, option_dev_apt_command, option_dev_apt_deps, option_docker_cache, @@ -98,6 +99,7 @@ def run_build_in_parallel( parallelism: int, include_success_outputs: bool, skip_cleanup: bool, + debug_resources: bool, dry_run: bool, verbose: bool, ) -> None: @@ -105,7 +107,10 @@ def run_build_in_parallel( with ci_group(f"Building for {python_version_list}"): all_params = [f"PROD {image_params.python}" for image_params in image_params_list] with run_with_pool( - parallelism=parallelism, all_params=all_params, progress_matcher=DockerBuildxProgressMatcher() + parallelism=parallelism, + all_params=all_params, + debug_resources=debug_resources, + progress_matcher=DockerBuildxProgressMatcher(), ) as (pool, outputs): results = [ pool.apply_async( @@ -154,6 +159,7 @@ def prod_image(): @option_run_in_parallel @option_parallelism @option_skip_cleanup +@option_debug_resources @option_include_success_outputs @option_python_versions @option_upgrade_to_newer_dependencies @@ -228,6 +234,7 @@ def build( run_in_parallel: bool, parallelism: int, skip_cleanup: bool, + debug_resources: bool, include_success_outputs: bool, python_versions: str, answer: str | None, @@ -263,6 +270,7 @@ def run_build(prod_image_params: BuildProdParams) -> None: 
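The ``run_build_in_parallel`` helper above follows the fan-out pattern this change applies everywhere: open a pool, submit one task per parameter set with ``apply_async``, then collect and validate the results. A stdlib-only sketch of that pattern under simplified assumptions - Breeze's ``run_with_pool`` / ``check_async_run_results`` layer progress monitoring, per-task output files and the new ``skip_cleanup`` / ``debug_resources`` handling on top:

.. code-block:: python

    from __future__ import annotations

    from multiprocessing.pool import Pool


    def build_one(python_version: str) -> tuple[int, str]:
        # Placeholder for the real per-version work (e.g. a docker buildx invocation).
        return 0, f"PROD {python_version}"


    if __name__ == "__main__":
        python_versions = ["3.7", "3.8", "3.9", "3.10"]
        with Pool(4) as pool:
            async_results = [pool.apply_async(build_one, args=(version,)) for version in python_versions]
            results = [result.get() for result in async_results]
        failed = [message for returncode, message in results if returncode != 0]
        print("failed builds:", failed or "none")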
python_version_list=python_version_list, parallelism=parallelism, skip_cleanup=skip_cleanup, + debug_resources=debug_resources, include_success_outputs=include_success_outputs, dry_run=dry_run, verbose=verbose, @@ -281,6 +289,7 @@ def run_build(prod_image_params: BuildProdParams) -> None: @option_run_in_parallel @option_parallelism @option_skip_cleanup +@option_debug_resources @option_include_success_outputs @option_python_versions @option_github_token @@ -297,6 +306,7 @@ def pull_prod_image( run_in_parallel: bool, parallelism: int, skip_cleanup: bool, + debug_resources: bool, include_success_outputs, python_versions: str, github_token: str, @@ -323,6 +333,7 @@ def pull_prod_image( dry_run=dry_run, parallelism=parallelism, skip_cleanup=skip_cleanup, + debug_resources=debug_resources, include_success_outputs=include_success_outputs, image_params_list=prod_image_params_list, python_version_list=python_version_list, diff --git a/dev/breeze/src/airflow_breeze/commands/production_image_commands_config.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands_config.py index d0d31c03e7f19..f9c1274ab6fb3 100644 --- a/dev/breeze/src/airflow_breeze/commands/production_image_commands_config.py +++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands_config.py @@ -43,8 +43,9 @@ "options": [ "--run-in-parallel", "--parallelism", - "--skip-cleanup", "--python-versions", + "--skip-cleanup", + "--debug-resources", "--include-success-outputs", ], }, @@ -116,6 +117,9 @@ "--run-in-parallel", "--parallelism", "--python-versions", + "--skip-cleanup", + "--debug-resources", + "--include-success-outputs", ], }, ], diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index 5f9d7a7c96ff0..acfd485016e40 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -44,6 +44,7 @@ option_airflow_constraints_reference, option_airflow_extras, option_answer, + option_debug_resources, option_dry_run, option_github_repository, option_image_tag_for_running, @@ -244,7 +245,7 @@ def prepare_provider_documentation( @click.option( '--package-list-file', type=click.File('rt'), - help='Read list of packages from text file (one package per line)', + help='Read list of packages from text file (one package per line).', ) @option_debug_release_management @argument_packages @@ -318,6 +319,7 @@ def run_generate_constraints_in_parallel( include_success_outputs: bool, parallelism: int, skip_cleanup: bool, + debug_resources: bool, dry_run: bool, verbose: bool, ): @@ -330,6 +332,7 @@ def run_generate_constraints_in_parallel( with run_with_pool( parallelism=parallelism, all_params=all_params, + debug_resources=debug_resources, progress_matcher=GenericRegexpProgressMatcher( regexp=CONSTRAINT_PROGRESS_MATCHER, lines_to_search=6 ), @@ -367,6 +370,7 @@ def run_generate_constraints_in_parallel( @option_run_in_parallel @option_parallelism @option_skip_cleanup +@option_debug_resources @option_python_versions @option_image_tag_for_running @option_answer @@ -380,6 +384,7 @@ def generate_constraints( run_in_parallel: bool, parallelism: int, skip_cleanup: bool, + debug_resources: bool, python_versions: str, image_tag: str | None, answer: str | None, @@ -431,6 +436,7 @@ def generate_constraints( shell_params_list=shell_params_list, parallelism=parallelism, skip_cleanup=skip_cleanup, + debug_resources=debug_resources, 
include_success_outputs=True, dry_run=dry_run, verbose=verbose, diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands_config.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands_config.py index ddd6bbf1d70e4..fb6bfbb5d5895 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands_config.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands_config.py @@ -80,8 +80,10 @@ "options": [ "--run-in-parallel", "--parallelism", - "--skip-cleanup", "--python-versions", + "--skip-cleanup", + "--debug-resources", + "--include-success-outputs", ], }, ], diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py index 2cd51b4dae9be..2f205e915bd61 100644 --- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py @@ -16,19 +16,18 @@ # under the License. from __future__ import annotations -import errno import os -import re -import shutil -import subprocess import sys -import tempfile -from threading import Event, Thread -from time import sleep +from datetime import datetime import click +from click import IntRange -from airflow_breeze.global_constants import ALLOWED_TEST_TYPE_CHOICES +from airflow_breeze.global_constants import ( + ALL_INTEGRATIONS, + ALLOWED_TEST_TYPE_CHOICES, + all_selective_test_types, +) from airflow_breeze.params.build_prod_params import BuildProdParams from airflow_breeze.params.shell_params import ShellParams from airflow_breeze.utils.ci_group import ci_group @@ -36,27 +35,41 @@ from airflow_breeze.utils.common_options import ( option_backend, option_db_reset, + option_debug_resources, option_dry_run, option_github_repository, option_image_name, option_image_tag_for_running, + option_include_success_outputs, option_integration, option_mount_sources, option_mssql_version, option_mysql_version, + option_parallelism, option_postgres_version, option_python, + option_run_in_parallel, + option_skip_cleanup, option_verbose, ) -from airflow_breeze.utils.console import get_console, message_type_from_return_code +from airflow_breeze.utils.console import Output, get_console from airflow_breeze.utils.custom_param_types import NotVerifiedBetterChoice from airflow_breeze.utils.docker_command_utils import ( DOCKER_COMPOSE_COMMAND, get_env_variables_for_docker_commands, perform_environment_checks, ) +from airflow_breeze.utils.parallel import ( + GenericRegexpProgressMatcher, + bytes2human, + check_async_run_results, + run_with_pool, +) +from airflow_breeze.utils.path_utils import FILES_DIR from airflow_breeze.utils.run_tests import run_docker_compose_tests -from airflow_breeze.utils.run_utils import RunCommandResult, run_command +from airflow_breeze.utils.run_utils import get_filesystem_type, run_command + +LOW_MEMORY_CONDITION = 8 * 1024 * 1024 * 1024 @click.group(cls=BreezeGroup, name='testing', help='Tools that developers can use to run tests') @@ -103,90 +116,213 @@ def docker_compose_tests( sys.exit(return_code) -class MonitoringThread(Thread): - """Thread class with a stop() method. 
The thread itself has to check - regularly for the stopped() condition.""" +TEST_PROGRESS_REGEXP = r'tests/.*|.*=====.*' +PERCENT_TEST_PROGRESS_REGEXP = r'^tests/.*\[[ \d%]*\].*' - def __init__(self, title: str, file_name: str): - super().__init__(target=self.peek_percent_at_last_lines_of_file, daemon=True) - self._stop_event = Event() - self.title = title - self.file_name = file_name - def peek_percent_at_last_lines_of_file(self) -> None: - max_line_length = 400 - matcher = re.compile(r"^.*\[([^\]]*)\]$") - while not self.stopped(): - if os.path.exists(self.file_name): - try: - with open(self.file_name, 'rb') as temp_f: - temp_f.seek(-(max_line_length * 2), os.SEEK_END) - tail = temp_f.read().decode() - try: - two_last_lines = tail.splitlines()[-2:] - previous_no_ansi_line = escape_ansi(two_last_lines[0]) - m = matcher.match(previous_no_ansi_line) - if m: - get_console().print(f"[info]{self.title}:[/] {m.group(1).strip()}") - print(f"\r{two_last_lines[0]}\r") - print(f"\r{two_last_lines[1]}\r") - except IndexError: - pass - except OSError as e: - if e.errno == errno.EINVAL: - pass - else: - raise - sleep(5) +def _run_test( + exec_shell_params: ShellParams, + extra_pytest_args: tuple, + db_reset: bool, + output: Output | None, + test_timeout: int, + dry_run: bool, + verbose: bool, +) -> tuple[int, str]: + env_variables = get_env_variables_for_docker_commands(exec_shell_params) + env_variables['RUN_TESTS'] = "true" + if test_timeout: + env_variables["TEST_TIMEOUT"] = str(test_timeout) + if db_reset: + env_variables["DB_RESET"] = "true" + perform_environment_checks(verbose=verbose) + env_variables["TEST_TYPE"] = exec_shell_params.test_type + if "[" in exec_shell_params.test_type and not exec_shell_params.test_type.startswith("Providers"): + get_console(output=output).print( + "[error]Only 'Providers' test type can specify actual tests with \\[\\][/]" + ) + sys.exit(1) + if exec_shell_params.integration: + integration = exec_shell_params.integration + if "trino" in integration and "kerberos" not in integration: + int_list = list(integration) + int_list.append("kerberos") + integration = tuple(int_list) + env_variables["LIST_OF_INTEGRATION_TESTS_TO_RUN"] = ' '.join(list(integration)) + project_name = _file_name_from_test_type(exec_shell_params.test_type) + down_cmd = [ + *DOCKER_COMPOSE_COMMAND, + "--project-name", + f'airflow-test-{project_name}', + 'down', + '--remove-orphans', + ] + run_command(down_cmd, verbose=verbose, dry_run=dry_run, env=env_variables, output=output, check=False) + run_cmd = [ + *DOCKER_COMPOSE_COMMAND, + "--project-name", + f'airflow-test-{project_name}', + 'run', + '-T', + '--service-ports', + '--rm', + 'airflow', + ] + run_cmd.extend(list(extra_pytest_args)) + try: + result = run_command( + run_cmd, verbose=verbose, dry_run=dry_run, env=env_variables, output=output, check=False + ) + if os.environ.get('CI') == "true" and result.returncode != 0: + ps_result = run_command( + ['docker', 'ps', '--all', '--format', '{{.Names}}'], + check=True, + capture_output=True, + text=True, + ) + container_ids = ps_result.stdout.splitlines() + get_console(output=output).print( + f"[info]Error {ps_result.returncode}. Dumping containers: {container_ids}." 
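The two module-level regexps introduced above drive the progress display for parallel test runs: ``TEST_PROGRESS_REGEXP`` recognises any pytest test or summary line, while ``PERCENT_TEST_PROGRESS_REGEXP`` picks out lines that carry the per-file percentage. A quick demonstration against illustrative pytest output lines:

.. code-block:: python

    import re

    TEST_PROGRESS_REGEXP = r'tests/.*|.*=====.*'
    PERCENT_TEST_PROGRESS_REGEXP = r'^tests/.*\[[ \d%]*\].*'

    progress_line = "tests/www/test_views.py::TestViews::test_index PASSED      [ 42%]"
    summary_line = "===== 618 passed, 2 skipped in 123.45s ====="

    assert re.match(TEST_PROGRESS_REGEXP, progress_line)
    assert re.match(TEST_PROGRESS_REGEXP, summary_line)
    assert re.match(PERCENT_TEST_PROGRESS_REGEXP, progress_line)
    assert not re.match(PERCENT_TEST_PROGRESS_REGEXP, summary_line)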
+ ) + date_str = datetime.now().strftime("%Y_%d_%m_%H_%M_%S") + for container_id in container_ids: + dump_path = FILES_DIR / f"container_logs_{container_id}_{date_str}.log" + get_console(output=output).print(f"[info]Dumping container {container_id} to {dump_path}") + with open(dump_path, "wt") as outfile: + run_command(["docker", "logs", container_id], check=False, stdout=outfile) + finally: + run_command( + [ + *DOCKER_COMPOSE_COMMAND, + "--project-name", + f'airflow-test-{project_name}', + 'rm', + '--stop', + '--force', + '-v', + ], + verbose=False, + dry_run=dry_run, + env=env_variables, + output=output, + check=False, + ) + return result.returncode, f"Test: {exec_shell_params.test_type}" - def stop(self): - self._stop_event.set() - def stopped(self): - return self._stop_event.is_set() +def _file_name_from_test_type(test_type): + return test_type.lower().replace("[", "_").replace("]", "").replace(",", "_")[:30] -def escape_ansi(line): - ansi_escape = re.compile(r'(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]') - return ansi_escape.sub('', line) +def _run_tests_in_pool( + tests_to_run: list[str], + parallelism: int, + exec_shell_params: ShellParams, + extra_pytest_args: tuple, + test_timeout: int, + db_reset: bool, + include_success_outputs: bool, + debug_resources: bool, + skip_cleanup: bool, + dry_run: bool, + verbose: bool, +): + with ci_group(f"Testing {' '.join(tests_to_run)}"): + all_params = [f"Test {test_type}" for test_type in tests_to_run] + with run_with_pool( + parallelism=parallelism, + all_params=all_params, + debug_resources=debug_resources, + progress_matcher=GenericRegexpProgressMatcher( + regexp=TEST_PROGRESS_REGEXP, + regexp_for_joined_line=PERCENT_TEST_PROGRESS_REGEXP, + lines_to_search=40, + ), + ) as (pool, outputs): + results = [ + pool.apply_async( + _run_test, + kwds={ + "exec_shell_params": exec_shell_params.clone_with_test( + test_type=test_type, + integration=ALL_INTEGRATIONS if test_type == "Integration" else (), + ), + "extra_pytest_args": extra_pytest_args, + "db_reset": db_reset, + "dry_run": dry_run, + "verbose": verbose, + "output": outputs[index], + "test_timeout": test_timeout, + }, + ) + for index, test_type in enumerate(tests_to_run) + ] + check_async_run_results( + results=results, + success=f"Tests {' '.join(tests_to_run)} completed successfully", + outputs=outputs, + include_success_outputs=include_success_outputs, + skip_cleanup=skip_cleanup, + ) -def run_with_progress( - cmd: list[str], - env_variables: dict[str, str], - test_type: str, - python: str, - backend: str, - verbose: bool, +def run_tests_in_parallel( + exec_shell_params: ShellParams, + test_types_list: list[str], + extra_pytest_args: tuple, + db_reset: bool, + test_timeout: int, + include_success_outputs: bool, + debug_resources: bool, + parallelism: int, + skip_cleanup: bool, dry_run: bool, - version: str | None = None, -) -> RunCommandResult: - backend_version = backend + (":" + version) if version else backend - title = f"Running tests: {test_type}, Python: {python}, Backend: {backend_version}" - try: - with tempfile.NamedTemporaryFile(mode='w+t', delete=False) as tf: - get_console().print(f"[info]Starting test = {title}[/]") - thread = MonitoringThread(title=title, file_name=tf.name) - thread.start() - try: - result = run_command( - cmd, - verbose=verbose, - dry_run=dry_run, - env=env_variables, - check=False, - stdout=tf, - stderr=subprocess.STDOUT, - ) - finally: - thread.stop() - thread.join() - with ci_group(f"Result of {title}", 
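A worked example of the ``_file_name_from_test_type`` helper above, which turns a test type into the suffix of the per-test-type compose project name (``airflow-test-<suffix>``) so that parallel runs get separate docker compose stacks:

.. code-block:: python

    def _file_name_from_test_type(test_type):
        return test_type.lower().replace("[", "_").replace("]", "").replace(",", "_")[:30]


    assert _file_name_from_test_type("API") == "api"
    assert _file_name_from_test_type("Providers[airbyte,http]") == "providers_airbyte_http"
    # The 30-character cap keeps long Providers[...] selections usable as project names.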
message_type=message_type_from_return_code(result.returncode)): - with open(tf.name) as f: - shutil.copyfileobj(f, sys.stdout) - finally: - os.unlink(f.name) - return result + verbose: bool, +) -> None: + import psutil + + memory_available = psutil.virtual_memory() + if memory_available.available < LOW_MEMORY_CONDITION and exec_shell_params.backend in ['mssql', 'mysql']: + # Run heavy tests sequentially + heavy_test_types = ["Core", "Integration", "Providers"] + if bool(set(heavy_test_types) & set(test_types_list)): + # some of those are requested + get_console().print( + f"[warning]Running {heavy_test_types} tests sequentially for {exec_shell_params.backend}" + f" backend due to low memory available: {bytes2human(memory_available.available)}" + ) + tests_to_run_sequentially = [] + for heavy_test_type in heavy_test_types: + for test_type in test_types_list: + if test_type.startswith(heavy_test_type): + test_types_list.remove(test_type) + tests_to_run_sequentially.append(test_type) + _run_tests_in_pool( + tests_to_run=tests_to_run_sequentially, + parallelism=1, + exec_shell_params=exec_shell_params, + extra_pytest_args=extra_pytest_args, + test_timeout=test_timeout, + db_reset=db_reset, + include_success_outputs=include_success_outputs, + debug_resources=debug_resources, + skip_cleanup=skip_cleanup, + dry_run=dry_run, + verbose=verbose, + ) + _run_tests_in_pool( + tests_to_run=test_types_list, + parallelism=parallelism, + exec_shell_params=exec_shell_params, + extra_pytest_args=extra_pytest_args, + test_timeout=test_timeout, + db_reset=db_reset, + include_success_outputs=include_success_outputs, + debug_resources=debug_resources, + skip_cleanup=skip_cleanup, + dry_run=dry_run, + verbose=verbose, + ) @testing.command( @@ -205,11 +341,6 @@ def run_with_progress( @option_mysql_version @option_mssql_version @option_integration -@click.option( - '--limit-progress-output', - help="Limit progress to percentage only and just show the summary when tests complete.", - is_flag=True, -) @option_image_tag_for_running @option_mount_sources @click.option( @@ -222,10 +353,23 @@ def run_with_progress( @click.option( "--test-timeout", help="Test timeout. 
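The ``run_tests_in_parallel`` function above decides, based on available memory, whether the heavy test types (Core, Integration, Providers) should be pulled out and run sequentially before the rest. A simplified sketch of that gate using ``psutil`` as the code above does - the real check additionally requires a MySQL or MsSQL backend:

.. code-block:: python

    import psutil

    LOW_MEMORY_CONDITION = 8 * 1024 * 1024 * 1024  # 8 GiB
    HEAVY_TEST_TYPES = ["Core", "Integration", "Providers"]

    requested = ["API", "Core", "WWW", "Providers[amazon]"]
    available = psutil.virtual_memory().available

    run_heavy_sequentially = available < LOW_MEMORY_CONDITION and any(
        test_type.startswith(heavy) for heavy in HEAVY_TEST_TYPES for test_type in requested
    )
    print(f"available memory: {available} bytes -> sequential heavy tests: {run_heavy_sequentially}")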
Set the pytest setup, execution and teardown timeouts to this value", - default="60", + default=60, + type=IntRange(min=0), show_default=True, ) @option_db_reset +@option_run_in_parallel +@option_parallelism +@option_skip_cleanup +@option_debug_resources +@option_include_success_outputs +@click.option( + "--test-types", + help="Space separated list of test types used for testing in parallel.", + default=" ".join(all_selective_test_types()), + show_default=True, + envvar="TEST_TYPES", +) @click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED) def tests( dry_run: bool, @@ -235,67 +379,61 @@ def tests( postgres_version: str, mysql_version: str, mssql_version: str, - limit_progress_output: bool, integration: tuple, extra_pytest_args: tuple, test_type: str, - test_timeout: str, + test_timeout: int, db_reset: bool, image_tag: str | None, + run_in_parallel: bool, + parallelism: int, + skip_cleanup: bool, + debug_resources: bool, + include_success_outputs: bool, + test_types: str, mount_sources: str, ): + docker_filesystem = get_filesystem_type('/var/lib/docker') + get_console().print(f"Docker filesystem: {docker_filesystem}") exec_shell_params = ShellParams( verbose=verbose, dry_run=dry_run, python=python, backend=backend, + integration=integration, postgres_version=postgres_version, mysql_version=mysql_version, mssql_version=mssql_version, image_tag=image_tag, mount_sources=mount_sources, + forward_ports=False, + test_type=test_type, ) - env_variables = get_env_variables_for_docker_commands(exec_shell_params) - env_variables['RUN_TESTS'] = "true" - if test_type: - env_variables["TEST_TYPE"] = test_type - if "[" in test_type and not test_type.startswith("Providers"): - get_console().print("[error]Only 'Providers' test type can specify actual tests with \\[\\][/]") - sys.exit(1) - if test_timeout: - env_variables["TEST_TIMEOUT"] = test_timeout - if integration: - if "trino" in integration: - integration = integration + ("kerberos",) - env_variables["LIST_OF_INTEGRATION_TESTS_TO_RUN"] = ' '.join(list(integration)) - if db_reset: - env_variables["DB_RESET"] = "true" - perform_environment_checks(verbose=verbose) - cmd = [*DOCKER_COMPOSE_COMMAND, 'run', '--service-ports', '--rm', 'airflow'] - cmd.extend(list(extra_pytest_args)) - version = ( - mssql_version - if backend == "mssql" - else mysql_version - if backend == "mysql" - else postgres_version - if backend == "postgres" - else "none" - ) - if limit_progress_output: - result = run_with_progress( - cmd=cmd, - env_variables=env_variables, - test_type=test_type, - python=python, - backend=backend, - version=version, - verbose=verbose, + if run_in_parallel: + run_tests_in_parallel( + exec_shell_params=exec_shell_params, + test_types_list=test_types.split(" "), + extra_pytest_args=extra_pytest_args, + db_reset=db_reset, + test_timeout=test_timeout, + include_success_outputs=include_success_outputs, + parallelism=parallelism, + skip_cleanup=skip_cleanup, + debug_resources=debug_resources, dry_run=dry_run, + verbose=verbose, ) else: - result = run_command(cmd, verbose=verbose, dry_run=dry_run, env=env_variables, check=False) - sys.exit(result.returncode) + returncode, _ = _run_test( + exec_shell_params=exec_shell_params, + extra_pytest_args=extra_pytest_args, + db_reset=db_reset, + output=None, + test_timeout=test_timeout, + dry_run=dry_run, + verbose=verbose, + ) + sys.exit(returncode) @testing.command( diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py 
b/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py index 18bef80dcf6ca..6ff622cc7491f 100644 --- a/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py +++ b/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py @@ -36,10 +36,20 @@ "--mssql-version", ], }, + { + "name": "Options for parallel test commands", + "options": [ + "--run-in-parallel", + "--parallelism", + "--test-types", + "--skip-cleanup", + "--debug-resources", + "--include-success-outputs", + ], + }, { "name": "Advanced flag for tests command", "options": [ - "--limit-progress-output", "--image-tag", "--mount-sources", ], diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index e67ee83c4bf4e..077cb604247b4 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -42,7 +42,7 @@ ALLOWED_BACKENDS = ['sqlite', 'mysql', 'postgres', 'mssql'] ALLOWED_PROD_BACKENDS = ['mysql', 'postgres', 'mssql'] DEFAULT_BACKEND = ALLOWED_BACKENDS[0] -ALLOWED_INTEGRATIONS = [ +ALL_INTEGRATIONS = [ 'cassandra', 'kerberos', 'mongo', @@ -52,6 +52,9 @@ 'redis', 'statsd', 'trino', +] +ALLOWED_INTEGRATIONS = [ + *ALL_INTEGRATIONS, 'all', ] ALLOWED_KUBERNETES_VERSIONS = ['v1.25.2', 'v1.24.6', 'v1.23.12', 'v1.22.15', 'v1.21.14'] @@ -91,13 +94,10 @@ class SelectiveUnitTestTypes(Enum): ALLOWED_TEST_TYPE_CHOICES = [ "All", - "Always", *all_selective_test_types(), "Helm", "Postgres", "MySQL", - "Integration", - "Other", "Quarantine", ] diff --git a/dev/breeze/src/airflow_breeze/params/shell_params.py b/dev/breeze/src/airflow_breeze/params/shell_params.py index fee24aebdf6bc..34afb4773f09c 100644 --- a/dev/breeze/src/airflow_breeze/params/shell_params.py +++ b/dev/breeze/src/airflow_breeze/params/shell_params.py @@ -17,6 +17,7 @@ from __future__ import annotations import os +from copy import deepcopy from dataclasses import dataclass from pathlib import Path @@ -39,9 +40,24 @@ get_airflow_version, ) from airflow_breeze.utils.console import get_console -from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, BUILD_CACHE_DIR, SCRIPTS_CI_DIR +from airflow_breeze.utils.path_utils import ( + AIRFLOW_SOURCES_ROOT, + BUILD_CACHE_DIR, + MSSQL_TMP_DIR_NAME, + SCRIPTS_CI_DIR, +) from airflow_breeze.utils.run_utils import get_filesystem_type, run_command +DOCKER_COMPOSE_DIR = SCRIPTS_CI_DIR / "docker-compose" + + +def add_mssql_compose_file(compose_file_list: list[Path]): + docker_filesystem = get_filesystem_type('/var/lib/docker') + if docker_filesystem == 'tmpfs': + compose_file_list.append(DOCKER_COMPOSE_DIR / "backend-mssql-tmpfs-volume.yml") + else: + compose_file_list.append(DOCKER_COMPOSE_DIR / "backend-mssql-docker-volume.yml") + @dataclass class ShellParams: @@ -63,6 +79,7 @@ class ShellParams: dry_run: bool = False extra_args: tuple = () force_build: bool = False + forward_ports: bool = True forward_credentials: str = "false" airflow_constraints_mode: str = ALLOWED_CONSTRAINTS_MODES_CI[0] github_actions: str = os.environ.get('GITHUB_ACTIONS', "false") @@ -87,11 +104,18 @@ class ShellParams: skip_environment_initialization: bool = False skip_constraints: bool = False start_airflow: str = "false" + test_type: str | None = None use_airflow_version: str | None = None use_packages_from_dist: bool = False verbose: bool = False version_suffix_for_pypi: str = "" + def clone_with_test(self, test_type: str, integration: tuple[str, ...]) -> ShellParams: + new_params = deepcopy(self) + 
new_params.test_type = test_type + new_params.integration = integration if test_type == "Integration" else () + return new_params + @property def airflow_version(self): return get_airflow_version() @@ -173,41 +197,28 @@ def print_badge_info(self): get_console().print(f'[info]Backend: {self.backend} {self.backend_version}[/]') get_console().print(f'[info]Airflow used at runtime: {self.use_airflow_version}[/]') - def get_backend_compose_files(self, backend: str): - backend_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-{backend}.yml" - backend_port_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-{backend}-port.yml" - return backend_docker_compose_file, backend_port_docker_compose_file + def get_backend_compose_files(self, backend: str) -> list[Path]: + backend_docker_compose_file = DOCKER_COMPOSE_DIR / f"backend-{backend}.yml" + if backend == 'sqlite' or not self.forward_ports: + return [backend_docker_compose_file] + return [backend_docker_compose_file, DOCKER_COMPOSE_DIR / f"backend-{backend}-port.yml"] @property - def compose_files(self): - compose_ci_file = [] - main_ci_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/base.yml" + def compose_file(self) -> str: + compose_file_list: list[Path] = [] + backend_files: list[Path] = [] if self.backend != "all": backend_files = self.get_backend_compose_files(self.backend) + if self.backend == 'mssql': + add_mssql_compose_file(compose_file_list) else: - backend_files = [] for backend in ALLOWED_BACKENDS: backend_files.extend(self.get_backend_compose_files(backend)) - compose_ci_file.append(f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-mssql-bind-volume.yml") - compose_ci_file.append(f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-mssql-docker-volume.yml") - local_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/local.yml" - local_all_sources_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/local-all-sources.yml" - files_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/files.yml" - remove_sources_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/remove-sources.yml" - mypy_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/mypy.yml" - forward_credentials_docker_compose_file = ( - f"{str(SCRIPTS_CI_DIR)}/docker-compose/forward-credentials.yml" - ) - # mssql based check have to be added - if self.backend == 'mssql': - docker_filesystem = get_filesystem_type('.') - if docker_filesystem == 'tmpfs': - compose_ci_file.append(f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-mssql-bind-volume.yml") - else: - compose_ci_file.append( - f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-mssql-docker-volume.yml" - ) - compose_ci_file.extend([main_ci_docker_compose_file, *backend_files, files_docker_compose_file]) + add_mssql_compose_file(compose_file_list) + + compose_file_list.append(DOCKER_COMPOSE_DIR / "base.yml") + compose_file_list.extend(backend_files) + compose_file_list.append(DOCKER_COMPOSE_DIR / "files.yml") if self.image_tag is not None and self.image_tag != "latest": get_console().print( @@ -221,26 +232,28 @@ def compose_files(self): f"from sources but from {self.use_airflow_version}[/]" ) self.mount_sources = MOUNT_REMOVE + if self.forward_ports: + compose_file_list.append(DOCKER_COMPOSE_DIR / "base-ports.yml") if self.mount_sources == MOUNT_SELECTED: - compose_ci_file.extend([local_docker_compose_file]) + compose_file_list.append(DOCKER_COMPOSE_DIR / "local.yml") elif self.mount_sources == MOUNT_ALL: - 
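The rewritten ``compose_file`` property above (formerly ``compose_files``) now collects ``Path`` objects and joins them with the platform path separator, which is how docker compose accepts multiple files in the ``COMPOSE_FILE`` variable. A simplified sketch of the idea, ignoring the mssql/tmpfs, mount-sources and integration branches handled by the real property:

.. code-block:: python

    import os
    from pathlib import Path

    DOCKER_COMPOSE_DIR = Path("scripts/ci/docker-compose")


    def compose_file(backend: str, forward_ports: bool) -> str:
        files = [DOCKER_COMPOSE_DIR / "base.yml", DOCKER_COMPOSE_DIR / f"backend-{backend}.yml"]
        if forward_ports and backend != "sqlite":
            files.append(DOCKER_COMPOSE_DIR / f"backend-{backend}-port.yml")
        files.append(DOCKER_COMPOSE_DIR / "files.yml")
        return os.pathsep.join(os.fspath(f) for f in files)


    # e.g. on Linux: base.yml:backend-postgres.yml:backend-postgres-port.yml:files.yml
    print(compose_file("postgres", forward_ports=True))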
compose_ci_file.extend([local_all_sources_docker_compose_file]) + compose_file_list.append(DOCKER_COMPOSE_DIR / "local-all-sources.yml") elif self.mount_sources == MOUNT_REMOVE: - compose_ci_file.extend([remove_sources_docker_compose_file]) + compose_file_list.append(DOCKER_COMPOSE_DIR / "remove-sources.yml") if self.forward_credentials: - compose_ci_file.append(forward_credentials_docker_compose_file) + compose_file_list.append(DOCKER_COMPOSE_DIR / "forward-credentials.yml") if self.use_airflow_version is not None: - compose_ci_file.append(remove_sources_docker_compose_file) + compose_file_list.append(DOCKER_COMPOSE_DIR / "remove-sources.yml") if self.include_mypy_volume: - compose_ci_file.append(mypy_docker_compose_file) + compose_file_list.append(DOCKER_COMPOSE_DIR / "mypy.yml") if "all" in self.integration: integrations = AVAILABLE_INTEGRATIONS else: integrations = self.integration if len(integrations) > 0: for integration in integrations: - compose_ci_file.append(f"{str(SCRIPTS_CI_DIR)}/docker-compose/integration-{integration}.yml") - return os.pathsep.join(compose_ci_file) + compose_file_list.append(DOCKER_COMPOSE_DIR / f"integration-{integration}.yml") + return os.pathsep.join([os.fspath(f) for f in compose_file_list]) @property def command_passed(self): @@ -248,3 +261,13 @@ def command_passed(self): if len(self.extra_args) > 0: cmd = str(self.extra_args[0]) return cmd + + @property + def mssql_data_volume(self) -> str: + docker_filesystem = get_filesystem_type("/var/lib/docker") + volume_name = f"tmp-mssql-volume-{self.test_type}" if self.test_type else "tmp-mssql-volume" + if docker_filesystem == "tmpfs": + return os.fspath(Path.home() / MSSQL_TMP_DIR_NAME / f"{volume_name}-{self.mssql_version}") + else: + # mssql_data_volume variable is only used in case of tmpfs + return "" diff --git a/dev/breeze/src/airflow_breeze/utils/common_options.py b/dev/breeze/src/airflow_breeze/utils/common_options.py index faec848dacd18..a42ac68ef2839 100644 --- a/dev/breeze/src/airflow_breeze/utils/common_options.py +++ b/dev/breeze/src/airflow_breeze/utils/common_options.py @@ -212,7 +212,7 @@ def _set_default_from_parent(ctx: click.core.Context, option: click.core.Option, option_image_tag_for_pulling = click.option( '-t', '--image-tag', - help='Tag of the image which is used to pull the image', + help='Tag of the image which is used to pull the image.', show_default=True, default="latest", envvar='IMAGE_TAG', @@ -220,7 +220,7 @@ def _set_default_from_parent(ctx: click.core.Context, option: click.core.Option, option_image_tag_for_building = click.option( '-t', '--image-tag', - help='Tag the image after building it', + help='Tag the image after building it.', show_default=True, default="latest", envvar='IMAGE_TAG', @@ -228,7 +228,7 @@ def _set_default_from_parent(ctx: click.core.Context, option: click.core.Option, option_image_tag_for_running = click.option( '-t', '--image-tag', - help='Tag of the image which is used to run the image (implies --mount-sources=skip)', + help='Tag of the image which is used to run the image (implies --mount-sources=skip).', show_default=True, default="latest", envvar='IMAGE_TAG', @@ -236,7 +236,7 @@ def _set_default_from_parent(ctx: click.core.Context, option: click.core.Option, option_image_tag_for_verifying = click.option( '-t', '--image-tag', - help='Tag of the image when verifying it', + help='Tag of the image when verifying it.', show_default=True, default="latest", envvar='IMAGE_TAG', @@ -446,18 +446,18 @@ def _set_default_from_parent(ctx: click.core.Context, 
option: click.core.Option, "--timezone", default="UTC", type=str, - help="Timezone to use during the check", + help="Timezone to use during the check.", ) option_updated_on_or_after = click.option( "--updated-on-or-after", type=str, - help="Date when the release was updated after", + help="Date when the release was updated after.", ) option_max_age = click.option( "--max-age", type=int, default=3, - help="Max age of the last release (used if no updated-on-or-after if specified)", + help="Max age of the last release (used if no updated-on-or-after if specified).", ) option_airflow_constraints_reference = click.option( "--airflow-constraints-reference", @@ -478,14 +478,14 @@ def _set_default_from_parent(ctx: click.core.Context, option: click.core.Option, type=BetterChoice(ALLOWED_CONSTRAINTS_MODES_CI), default=ALLOWED_CONSTRAINTS_MODES_CI[0], show_default=True, - help='Mode of constraints for CI image building', + help='Mode of constraints for CI image building.', ) option_airflow_constraints_mode_prod = click.option( '--airflow-constraints-mode', type=BetterChoice(ALLOWED_CONSTRAINTS_MODES_PROD), default=ALLOWED_CONSTRAINTS_MODES_PROD[0], show_default=True, - help='Mode of constraints for PROD image building', + help='Mode of constraints for PROD image building.', ) option_pull = click.option( '--pull', @@ -496,24 +496,24 @@ def _set_default_from_parent(ctx: click.core.Context, option: click.core.Option, option_python_image = click.option( '--python-image', help="If specified this is the base python image used to build the image. " - "Should be something like: python:VERSION-slim-bullseye", + "Should be something like: python:VERSION-slim-bullseye.", envvar='PYTHON_IMAGE', ) option_builder = click.option( '--builder', - help="Buildx builder used to perform `docker buildx build` commands", + help="Buildx builder used to perform `docker buildx build` commands.", envvar='BUILDER', default='default', ) option_include_success_outputs = click.option( '--include-success-outputs', - help="Whether to include outputs of successful parallel runs (by default they are not printed).", + help="Whether to include outputs of successful parallel runs (skipped by default).", is_flag=True, envvar='INCLUDE_SUCCESS_OUTPUTS', ) option_skip_cleanup = click.option( '--skip-cleanup', - help="Skip cleanup of temporary files created during parallel run", + help="Skip cleanup of temporary files created during parallel run.", is_flag=True, envvar='SKIP_CLEANUP', ) @@ -530,3 +530,9 @@ def _set_default_from_parent(ctx: click.core.Context, option: click.core.Option, envvar='MAX_TIME', callback=_set_default_from_parent, ) +option_debug_resources = click.option( + '--debug-resources', + is_flag=True, + help="Whether to show resource information while running in parallel.", + envvar='DEBUG_RESOURCES', +) diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py index 92e916ca415a5..53f3a3cb1e299 100644 --- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py @@ -30,7 +30,7 @@ from airflow_breeze.params.shell_params import ShellParams from airflow_breeze.utils.host_info_utils import get_host_group_id, get_host_os, get_host_user_id from airflow_breeze.utils.image import find_available_ci_image -from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, MSSQL_DATA_VOLUME +from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT try: from packaging import version @@ 
-588,7 +588,6 @@ def update_expected_environment_variables(env: dict[str, str]) -> None: set_value_to_default_if_not_set(env, 'LIST_OF_INTEGRATION_TESTS_TO_RUN', "") set_value_to_default_if_not_set(env, 'LOAD_DEFAULT_CONNECTIONS', "false") set_value_to_default_if_not_set(env, 'LOAD_EXAMPLES', "false") - set_value_to_default_if_not_set(env, 'MSSQL_DATA_VOLUME', str(MSSQL_DATA_VOLUME)) set_value_to_default_if_not_set(env, 'PACKAGE_FORMAT', ALLOWED_PACKAGE_FORMATS[0]) set_value_to_default_if_not_set(env, 'PRINT_INFO_FROM_SCRIPTS', "true") set_value_to_default_if_not_set(env, 'PYTHONDONTWRITEBYTECODE', "true") @@ -618,7 +617,7 @@ def update_expected_environment_variables(env: dict[str, str]) -> None: "AIRFLOW_VERSION": "airflow_version", "ANSWER": "answer", "BACKEND": "backend", - "COMPOSE_FILE": "compose_files", + "COMPOSE_FILE": "compose_file", "DB_RESET": 'db_reset', "DEV_MODE": 'dev_mode', "DEFAULT_CONSTRAINTS_BRANCH": "default_constraints_branch", @@ -629,6 +628,7 @@ def update_expected_environment_variables(env: dict[str, str]) -> None: "ISSUE_ID": "issue_id", "LOAD_DEFAULT_CONNECTIONS": "load_default_connections", "LOAD_EXAMPLES": "load_example_dags", + "MSSQL_DATA_VOLUME": "mssql_data_volume", "MSSQL_VERSION": "mssql_version", "MYSQL_VERSION": "mysql_version", "NUM_RUNS": "num_runs", diff --git a/dev/breeze/src/airflow_breeze/utils/image.py b/dev/breeze/src/airflow_breeze/utils/image.py index 4e00293a326af..15ed2b9c0860d 100644 --- a/dev/breeze/src/airflow_breeze/utils/image.py +++ b/dev/breeze/src/airflow_breeze/utils/image.py @@ -47,6 +47,7 @@ def run_pull_in_parallel( dry_run: bool, parallelism: int, skip_cleanup: bool, + debug_resources: bool, image_params_list: list[BuildCiParams] | list[BuildProdParams], python_version_list: list[str], verbose: bool, @@ -62,6 +63,7 @@ def run_pull_in_parallel( with run_with_pool( parallelism=parallelism, all_params=all_params, + debug_resources=debug_resources, progress_matcher=GenericRegexpProgressMatcher(DOCKER_PULL_PROGRESS_REGEXP, lines_to_search=15), ) as (pool, outputs): diff --git a/dev/breeze/src/airflow_breeze/utils/parallel.py b/dev/breeze/src/airflow_breeze/utils/parallel.py index 7e0cfee37286c..9383be51d7894 100644 --- a/dev/breeze/src/airflow_breeze/utils/parallel.py +++ b/dev/breeze/src/airflow_breeze/utils/parallel.py @@ -27,11 +27,15 @@ from multiprocessing.pool import ApplyResult, Pool from pathlib import Path from tempfile import NamedTemporaryFile -from threading import Event, Thread -from typing import Generator, NamedTuple +from threading import Thread +from typing import Any, Generator, NamedTuple + +from rich.table import Table from airflow_breeze.utils.console import MessageType, Output, get_console +MAX_LINE_LENGTH = 155 + def create_pool(parallelism: int) -> Pool: return Pool(parallelism) @@ -85,18 +89,12 @@ def get_last_lines_of_file(file_name: str, num_lines: int = 2) -> tuple[list[str return last_lines, last_lines_no_colors -class ProgressLines(NamedTuple): - lines: list[str] - skip_truncation: list[bool] - - class AbstractProgressInfoMatcher(metaclass=ABCMeta): @abstractmethod - def get_best_matching_lines(self, output: Output) -> ProgressLines | None: + def get_best_matching_lines(self, output: Output) -> list[str] | None: """ - Return best matching lines of the output. 
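The abstract ``get_best_matching_lines`` above now returns a plain ``list[str]`` instead of the removed ``ProgressLines`` tuple, and the buildx matcher that follows remembers its last good line per output file so a single matcher instance can serve many parallel outputs. A condensed, hypothetical re-statement of that behaviour (not the exact Breeze class):

.. code-block:: python

    from __future__ import annotations

    import re

    DOCKER_BUILDX_PROGRESS_MATCHER = re.compile(r'\s*#(\d*) ')


    class BuildxProgress:
        def __init__(self):
            self.last_lines: dict[str, str] = {}

        def update(self, file_name: str, recent_lines: list[str]) -> str | None:
            best_step, best_line = 0, None
            for line in recent_lines:
                match = DOCKER_BUILDX_PROGRESS_MATCHER.match(line)
                if match and match.group(1) and int(match.group(1)) > best_step:
                    best_step, best_line = int(match.group(1)), line
            if best_line is None:
                # Fall back to the last good line remembered for this particular output file.
                return self.last_lines.get(file_name)
            self.last_lines[file_name] = best_line
            return best_line


    progress = BuildxProgress()
    print(progress.update("build-3.7.log", ["#4 [2/6] COPY . .", "#7 [5/6] RUN pip install ."]))
    # -> '#7 [5/6] RUN pip install .'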
It also indicates if the lines potentially need truncation - :param output: file that should be analysed for the output - :return: tuple of array of lines to print and boolean indications whether the lines need truncation + Return best matching lines of the output. + :return: array of lines to print """ @@ -104,9 +102,9 @@ class DockerBuildxProgressMatcher(AbstractProgressInfoMatcher): DOCKER_BUILDX_PROGRESS_MATCHER = re.compile(r'\s*#(\d*) ') def __init__(self): - self.last_docker_build_line: str | None = None + self.last_docker_build_lines: dict[str, str] = {} - def get_best_matching_lines(self, output) -> ProgressLines | None: + def get_best_matching_lines(self, output: Output) -> list[str] | None: last_lines, last_lines_no_colors = get_last_lines_of_file(output.file_name, num_lines=5) best_progress: int = 0 best_line: str | None = None @@ -118,31 +116,39 @@ def get_best_matching_lines(self, output) -> ProgressLines | None: best_progress = docker_progress best_line = last_lines[index] if best_line is None: - best_line = self.last_docker_build_line + best_line = self.last_docker_build_lines.get(output.file_name) else: - self.last_docker_build_line = best_line + self.last_docker_build_lines[output.file_name] = best_line if best_line is None: return None - return ProgressLines(lines=[best_line], skip_truncation=[False]) + return [best_line] class GenericRegexpProgressMatcher(AbstractProgressInfoMatcher): + """ + Matches lines from the output based on regular expressions: + + :param regexp: regular expression matching lines that should be displayed + :param regexp_for_joined_line: optional regular expression for lines that might be shown together with the + following matching lines. Useful, when progress status is only visible in previous line, for + example when you have test output like that, you want to show both lines: + + test1 ....... [ 50%] + test2 ... 
+ """ + def __init__( self, regexp: str, lines_to_search: int, regexp_for_joined_line: str | None = None, - regexp_to_skip_truncation: str | None = None, ): - self.last_good_match: str | None = None + self.last_good_match: dict[str, str] = {} self.matcher = re.compile(regexp) self.lines_to_search = lines_to_search self.matcher_for_joined_line = re.compile(regexp_for_joined_line) if regexp_for_joined_line else None - self.matcher_to_skip_truncation = ( - re.compile(regexp_to_skip_truncation) if regexp_to_skip_truncation else None - ) - def get_best_matching_lines(self, output: Output) -> ProgressLines | None: + def get_best_matching_lines(self, output: Output) -> list[str] | None: last_lines, last_lines_no_colors = get_last_lines_of_file( output.file_name, num_lines=self.lines_to_search ) @@ -158,28 +164,82 @@ def get_best_matching_lines(self, output: Output) -> ProgressLines | None: if best_line is not None: if self.matcher_for_joined_line is not None and previous_line is not None: list_to_return: list[str] = [previous_line, best_line] - skip_truncation: list[bool] = [ - bool(self.matcher_to_skip_truncation.match(line)) - if self.matcher_to_skip_truncation - else False - for line in list_to_return - ] - return ProgressLines(lines=list_to_return, skip_truncation=skip_truncation) + return list_to_return else: - self.last_good_match = best_line - if self.last_good_match is None: + self.last_good_match[output.file_name] = best_line + last_match = self.last_good_match.get(output.file_name) + if last_match is None: return None - return ProgressLines( - lines=[self.last_good_match], - skip_truncation=[ - bool(self.matcher_to_skip_truncation.match(self.last_good_match)) - if self.matcher_to_skip_truncation - else False - ], - ) - - -DOCKER_PULL_PROGRESS_REGEXP = r'^[0-9a-f]+: .*|.*\[[ 0-9]+%].*|^Waiting' + return [last_match] + + +DOCKER_PULL_PROGRESS_REGEXP = r'^[0-9a-f]+: .*|.*\[[ \d%]*\].*|^Waiting' + + +def bytes2human(n): + symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') + prefix = {} + for i, s in enumerate(symbols): + prefix[s] = 1 << (i + 1) * 10 + for s in reversed(symbols): + if n >= prefix[s]: + value = float(n) / prefix[s] + return f'{value:.1f}{s}' + return f"{n}B" + + +def get_printable_value(key: str, value: Any) -> str: + if key == 'percent': + return f"{value} %" + if isinstance(value, (int, float)): + return bytes2human(value) + return str(value) + + +def get_single_tuple_array(title: str, t: NamedTuple) -> Table: + table = Table(title=title) + row = [] + for key, value in t._asdict().items(): + table.add_column(header=key, header_style="info") + row.append(get_printable_value(key, value)) + table.add_row(*row, style="magenta") + return table + + +def get_multi_tuple_array(title: str, tuples: list[tuple[NamedTuple, ...]]) -> Table: + table = Table(title=title) + first_tuple = tuples[0] + keys: list[str] = [] + for named_tuple in first_tuple: + keys.extend(named_tuple._asdict().keys()) + for key in keys: + table.add_column(header=key, header_style="info") + for t in tuples: + row = [] + for named_tuple in t: + for key, value in named_tuple._asdict().items(): + row.append(get_printable_value(key, value)) + table.add_row(*row, style="magenta") + return table + + +IGNORED_FSTYPES = [ + 'autofs', + 'bps', + 'cgroup', + 'cgroup2', + 'configfs', + 'debugfs', + 'devpts', + 'fusectl', + 'mqueue', + 'nsfs', + 'overlay', + 'proc', + 'pstore', + 'squashfs', + 'tracefs', +] class ParallelMonitor(Thread): @@ -188,29 +248,31 @@ def __init__( outputs: list[Output], initial_time_in_seconds: 
int = 2, time_in_seconds: int = 10, + debug_resources: bool = False, progress_matcher: AbstractProgressInfoMatcher | None = None, ): - super().__init__() + super().__init__(daemon=True) self.outputs = outputs self.initial_time_in_seconds = initial_time_in_seconds self.time_in_seconds = time_in_seconds - self.exit_event = Event() + self.debug_resources = debug_resources self.progress_matcher = progress_matcher self.start_time = datetime.datetime.utcnow() - self.last_custom_progress: ProgressLines | None = None + self.last_custom_progress: list[str] | None = None def print_single_progress(self, output: Output): if self.progress_matcher: - custom_progress: ProgressLines | None = self.progress_matcher.get_best_matching_lines(output) - custom_progress = self.last_custom_progress if custom_progress is None else custom_progress - if custom_progress is not None: + progress_lines: list[str] | None = self.progress_matcher.get_best_matching_lines(output) + progress_lines = self.last_custom_progress if progress_lines is None else progress_lines + if progress_lines is not None: first_line = True - for index, line in enumerate(custom_progress.lines): - if not custom_progress.skip_truncation[index]: - # Clear color just in case color reset is removed by textwrap.shorten - current_line = textwrap.shorten(custom_progress.lines[index], 155) + "\033[0;0m" + for index, line in enumerate(progress_lines): + if len(remove_ansi_colours(line)) > MAX_LINE_LENGTH: + # This is a bit cheating - the line will be much shorter in case it contains colors + # Also we need to clear color just in case color reset is removed by textwrap.shorten + current_line = textwrap.shorten(progress_lines[index], MAX_LINE_LENGTH) + "\033[0;0m" else: - current_line = custom_progress.lines[index] + current_line = progress_lines[index] if current_line: prefix = f"Progress: {output.title:<30}" if not first_line: @@ -220,25 +282,34 @@ def print_single_progress(self, output: Output): first_line = False else: size = os.path.getsize(output.file_name) if Path(output.file_name).exists() else 0 - print(f"Progress: {output.title:<30} {size:>153} bytes") + default_output = f"File: {output.file_name} Size: {size:>10} bytes" + get_console().print(f"Progress: {output.title[:30]:<30} {default_output:>161}") def print_summary(self): + import psutil + time_passed = datetime.datetime.utcnow() - self.start_time get_console().rule() for output in self.outputs: self.print_single_progress(output) get_console().rule(title=f"Time passed: {nice_timedelta(time_passed)}") - - def cancel(self): - get_console().print("[info]Finishing progress monitoring.") - self.exit_event.set() + if self.debug_resources: + get_console().print(get_single_tuple_array("Virtual memory", psutil.virtual_memory())) + disk_stats = [] + for partition in psutil.disk_partitions(all=True): + if partition.fstype not in IGNORED_FSTYPES: + try: + disk_stats.append((partition, psutil.disk_usage(partition.mountpoint))) + except Exception: + get_console().print(f"No disk usage info for {partition.mountpoint}") + get_console().print(get_multi_tuple_array("Disk usage", disk_stats)) def run(self): try: - self.exit_event.wait(self.initial_time_in_seconds) - while not self.exit_event.is_set(): + time.sleep(self.initial_time_in_seconds) + while True: self.print_summary() - self.exit_event.wait(self.time_in_seconds) + time.sleep(self.time_in_seconds) except Exception: get_console().print_exception(show_locals=True) @@ -336,6 +407,7 @@ def run_with_pool( all_params: list[str], initial_time_in_seconds: int = 
2, time_in_seconds: int = 10, + debug_resources: bool = False, progress_matcher: AbstractProgressInfoMatcher | None = None, ) -> Generator[tuple[Pool, list[Output]], None, None]: get_console().print(f"Running with parallelism: {parallelism}") @@ -345,10 +417,10 @@ def run_with_pool( outputs=outputs, initial_time_in_seconds=initial_time_in_seconds, time_in_seconds=time_in_seconds, + debug_resources=debug_resources, progress_matcher=progress_matcher, ) m.start() yield pool, outputs pool.close() pool.join() - m.cancel() diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py index d48bab04f683d..eee85b20de861 100644 --- a/dev/breeze/src/airflow_breeze/utils/path_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py @@ -240,7 +240,6 @@ def find_airflow_sources_root_to_operate_on() -> Path: DAGS_DIR = AIRFLOW_SOURCES_ROOT / 'dags' FILES_DIR = AIRFLOW_SOURCES_ROOT / 'files' HOOKS_DIR = AIRFLOW_SOURCES_ROOT / 'hooks' -MSSQL_DATA_VOLUME = AIRFLOW_SOURCES_ROOT / 'tmp_mssql_volume' KUBE_DIR = AIRFLOW_SOURCES_ROOT / ".kube" LOGS_DIR = AIRFLOW_SOURCES_ROOT / 'logs' DIST_DIR = AIRFLOW_SOURCES_ROOT / 'dist' @@ -250,6 +249,8 @@ def find_airflow_sources_root_to_operate_on() -> Path: OUTPUT_LOG = Path(CACHE_TMP_FILE_DIR.name, 'out.log') BREEZE_SOURCES_ROOT = AIRFLOW_SOURCES_ROOT / "dev" / "breeze" +MSSQL_TMP_DIR_NAME = ".tmp-mssql" + def create_volume_if_missing(volume_name: str): from airflow_breeze.utils.run_utils import run_command @@ -287,7 +288,6 @@ def create_directories_and_files() -> None: DAGS_DIR.mkdir(parents=True, exist_ok=True) FILES_DIR.mkdir(parents=True, exist_ok=True) HOOKS_DIR.mkdir(parents=True, exist_ok=True) - MSSQL_DATA_VOLUME.mkdir(parents=True, exist_ok=True) KUBE_DIR.mkdir(parents=True, exist_ok=True) LOGS_DIR.mkdir(parents=True, exist_ok=True) DIST_DIR.mkdir(parents=True, exist_ok=True) diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py index 72ec10114849e..760d08bab9ba8 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py @@ -226,7 +226,7 @@ def assert_pre_commit_installed(verbose: bool): sys.exit(1) -def get_filesystem_type(filepath): +def get_filesystem_type(filepath: str): """ Determine the type of filesystem used - we might want to use different parameters if tmpfs is used. 
:param filepath: path to check @@ -236,7 +236,7 @@ def get_filesystem_type(filepath): import psutil root_type = "unknown" - for part in psutil.disk_partitions(): + for part in psutil.disk_partitions(all=True): if part.mountpoint == '/': root_type = part.fstype continue diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 30d0785668f3c..343c079266861 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -56,6 +56,7 @@ from airflow_breeze.utils.console import get_console FULL_TESTS_NEEDED_LABEL = "full tests needed" +DEBUG_CI_RESOURCES_LABEL = "debug ci resources" class FileGroupForCi(Enum): @@ -578,3 +579,7 @@ def skip_pre_commits(self) -> str: @cached_property def cache_directive(self) -> str: return "disabled" if self._github_event == GithubEvents.SCHEDULE else "registry" + + @cached_property + def debug_resources(self) -> bool: + return DEBUG_CI_RESOURCES_LABEL in self._pr_labels diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index 446031b346e25..b4894663d8cc4 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -3,55 +3,55 @@ # This command should fix the conflict and regenerate help images that you have conflict with. main:7322af669ed8bab9cc53ff82372d7b6d build-docs:e49dcbf792c2b6f6c24e108fd4ca9fc2 -ci:find-newer-dependencies:00000f7afb289e36e8c573fcc654df44 +ci:find-newer-dependencies:8fa2b57f5f0523c928743b235ee3ab5a ci:fix-ownership:84902165a54467564fbdd3598fa273e2 ci:free-space:bb8e7ac63d12ab3ede272a898de2f527 ci:get-workflow-info:01ee34c33ad62fa5dc33e0ac8773223f ci:resource-check:0fb929ac3496dbbe97acfe99e35accd7 ci:selective-check:d4e3c250cd6f2b0040fbe6557fa423f6 -ci:31566cdcdde216086f559215223b2378 -ci-image:build:d39a25675e6b74af9bbb1fc2582aacc5 -ci-image:pull:8aca8679e6030ad0d6e59216af40c0b3 -ci-image:verify:a2daeaa820c0baca31da2737929b38b9 -ci-image:b1c7a3c6dfa72b127fac559dcfdbb0d3 +ci:6d3ac64938416e047a435ce42db6cd06 +ci-image:build:21c0b2bc553409e3719569a0ca7eaa64 +ci-image:pull:73266206a4d1908f389352366bacec7f +ci-image:verify:e582cf1b2c00716442b6409ca8641264 +ci-image:60b3db541f8dfa5464acded261800d90 cleanup:9bf46a1dfd9db4fe13a1c233ad1bb96b compile-www-assets:23675c1862d0968cbff6ab6f1d93d488 exec:89b81bc34d45b0fe6653a6db5482258c -k8s:build-k8s-image:0aa138f48c82fde2d12f76599803591c -k8s:configure-cluster:5f09751548eee2e0eb0165ae0654619d -k8s:create-cluster:1852d2718769417b76ee31f1f87f4d48 +k8s:build-k8s-image:490592aa0af35b4a035f5fcbd1d1eba4 +k8s:configure-cluster:19ab6733ffdc0048e357116d6fa6042f +k8s:create-cluster:43dd0394bb72ec9fffe6241209a65ad1 k8s:delete-cluster:3c1165914eeec4729c4e2f325853edfa -k8s:deploy-airflow:ab15a9ab07b16b7e460865746a636e49 +k8s:deploy-airflow:8440a191fc9f4d4a7a56111d7c9dd19d k8s:k9s:64be156ce5cea8f1286c074e590f57e7 k8s:logs:6e666b2318fd809724dc1f6a23661cb8 k8s:setup-env:9a04e2954ff6362eea189742b3829d70 k8s:shell:3e06c69cf16b174888fcc5d76b34dbef k8s:status:08c5dba9d6bd155ab6f537b1054d7f2b -k8s:tests:0f624a5b57f49e429c69ff04ba900772 -k8s:upload-k8s-image:588f09914bb05ec0967a7944daadbfb9 -k8s:5e3084e02eaaa288c6425d41e0881b4f -prod-image:build:b06e952a64e930a32f6ccec38439078f -prod-image:pull:1aef99d8fd0706c7666170949d7ba834 -prod-image:verify:c9242a00b49e0081f34a19420e8032f5 -prod-image:d86efbdba5c30524157a0febb3442942 -release-management:generate-constraints:48d2de2aa34bcbaacdef9ac92ec1bab7 
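Returning to the ``selective_checks.py`` hunk above: the new ``debug_resources`` output simply reflects whether the PR carries the ``debug ci resources`` label, and the ``--debug-resources`` option also reads it from the ``DEBUG_RESOURCES`` environment variable. A tiny sketch of that check with illustrative label values:

.. code-block:: python

    from __future__ import annotations

    DEBUG_CI_RESOURCES_LABEL = "debug ci resources"


    def debug_resources(pr_labels: tuple[str, ...]) -> bool:
        return DEBUG_CI_RESOURCES_LABEL in pr_labels


    assert debug_resources(("full tests needed", "debug ci resources"))
    assert not debug_resources(("area:providers",))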
+k8s:tests:bb3d973de830913fa6f4ca57aea3d310 +k8s:upload-k8s-image:6a7fe33c173f3a60579a689d299971eb +k8s:02e236d621a8c352ad9221b54a18946d +prod-image:build:26b5a3d800e8d59eb2c566acc77347c6 +prod-image:pull:ba0f383e5adbb1a8a295138ec7f88038 +prod-image:verify:4d40bd67bc559d6cab253dee34ce1856 +prod-image:8c910c18755af26531865ed2bccbd0fa +release-management:generate-constraints:d4acf4ba8a33fbf9579265aa2ac8f788 release-management:prepare-airflow-package:cff9d88ca313db10f3cc464c6798f6be release-management:prepare-provider-documentation:3ac547738c671e460523b5c01e295988 -release-management:prepare-provider-packages:89502ebc78a68f15f4fffaf118b9c68f +release-management:prepare-provider-packages:a3295f63458288fc3cce90b8d5cf17d9 release-management:release-prod-images:8858fe5a13989c7c65a79dc97a880928 release-management:verify-provider-packages:797e60067fc4611112527de808b5c1c1 -release-management:b633b97e7912daa45259555f3b407e63 +release-management:9557da68d168659412c2e63fbb181aad setup:autocomplete:9787fd9ac963e3c29e54ac53ec2b9565 setup:config:92653afc11889e1b78e3a2e38f41107f setup:regenerate-command-images:861bb8703c071508f342b79fd9a2c01a setup:self-upgrade:d02f70c7a230eae3463ceec2056b63fa setup:version:d11da4c17a23179830079b646160149c setup:09e876968e669155b4aae7423a19e7b2 -shell:4de9c18e581853f332767beddb95b425 -start-airflow:eef91445684e015f83d91d02f4f03ccc +shell:557ee58e7c70af052f5ea7a173b39137 +start-airflow:b089191910e9c3f2ffd9c0079cdf38c6 static-checks:425cd78507278494e345fb7648260c24 stop:8ebd8a42f1003495d37b884de5ac7ce6 -testing:docker-compose-tests:3e07be65e30219930d3c62a593dd8c6a -testing:helm-tests:403231f0a94b261f9c7aae8aea03ec50 -testing:tests:32deda30f3899e8ae6e241238f990d68 -testing:e747ece268ba502c106924eb2f46c550 +testing:docker-compose-tests:70c744105ff61025f25d93a2f1f427c2 +testing:helm-tests:35f7ecef86fd9c9dbad73f20ebd64496 +testing:tests:dbca53292ca6b3fbd068a62d6478d6d7 +testing:7abaa71b4c100242212ef2c0005c1a30 diff --git a/images/breeze/output-commands.svg b/images/breeze/output-commands.svg index 7482f1b6d7293..9986e417f32e9 100644 --- a/images/breeze/output-commands.svg +++ b/images/breeze/output-commands.svg @@ -35,8 +35,8 @@ .terminal-4235104576-r1 { fill: #c5c8c6;font-weight: bold } .terminal-4235104576-r2 { fill: #c5c8c6 } .terminal-4235104576-r3 { fill: #d0b344;font-weight: bold } -.terminal-4235104576-r4 { fill: #68a0b3;font-weight: bold } -.terminal-4235104576-r5 { fill: #868887 } +.terminal-4235104576-r4 { fill: #868887 } +.terminal-4235104576-r5 { fill: #68a0b3;font-weight: bold } .terminal-4235104576-r6 { fill: #98a84b;font-weight: bold } .terminal-4235104576-r7 { fill: #8d7b39 } @@ -190,50 +190,50 @@ -Usage: breeze [OPTIONSCOMMAND [ARGS]... +Usage: breeze [OPTIONS] COMMAND [ARGS]... -╭─ Basic flags ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] ---postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] ---mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] ---mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] ---integrationIntegration(s) to enable when running (can be more than one).                             
-(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino |    -all)                                                                                      ---forward-credentials-fForward local credentials to container when running. ---db-reset-dReset DB when entering the container. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---max-timeMaximum time that the command should take - if it takes longer, the command will fail. -(INTEGER RANGE)                                                                        ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Basic developer commands ───────────────────────────────────────────────────────────────────────────────────────────╮ -start-airflow     Enter breeze environment and starts all Airflow components in the tmux session. Compile assets   -if contents of www directory changed.                                                            -static-checks     Run static checks.                                                                               -build-docs        Build documentation in the container.                                                            -stop              Stop running breeze environment.                                                                 -shell             Enter breeze environment. this is the default command use when no other is selected.             -exec              Joins the interactive shell of running airflow container.                                        -compile-www-assetsCompiles www assets.                                                                             -cleanup           Cleans the cache of parameters, docker cache and optionally built CI/PROD images.                
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced command groups ────────────────────────────────────────────────────────────────────────────────────────────╮ -testing                Tools that developers can use to run tests                                                  -ci-image               Tools that developers can use to manually manage CI images                                  -k8s                    Tools that developers use to run Kubernetes tests                                           -prod-image             Tools that developers can use to manually manage PROD images                                -setup                  Tools that developers can use to configure Breeze                                           -release-management     Tools that release managers can use to prepare and manage Airflow releases                  -ci                     Tools that CI workflows use to cleanup/manage CI environment                                -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Basic flags ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] +--postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] +--mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] +--mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] +--integrationIntegration(s) to enable when running (can be more than one).                             +(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino |    +all)                                                                                      +--forward-credentials-fForward local credentials to container when running. +--db-reset-dReset DB when entering the container. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--max-timeMaximum time that the command should take - if it takes longer, the command will fail. +(INTEGER RANGE)                                                                        +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Basic developer commands ───────────────────────────────────────────────────────────────────────────────────────────╮ +start-airflow     Enter breeze environment and starts all Airflow components in the tmux session. Compile assets   +if contents of www directory changed.                                                            +static-checks     Run static checks.                                                                               
+build-docs        Build documentation in the container.                                                            +stop              Stop running breeze environment.                                                                 +shell             Enter breeze environment. this is the default command use when no other is selected.             +exec              Joins the interactive shell of running airflow container.                                        +compile-www-assetsCompiles www assets.                                                                             +cleanup           Cleans the cache of parameters, docker cache and optionally built CI/PROD images.                +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced command groups ────────────────────────────────────────────────────────────────────────────────────────────╮ +testing                Tools that developers can use to run tests                                                  +ci-image               Tools that developers can use to manually manage CI images                                  +k8s                    Tools that developers use to run Kubernetes tests                                           +prod-image             Tools that developers can use to manually manage PROD images                                +setup                  Tools that developers can use to configure Breeze                                           +release-management     Tools that release managers can use to prepare and manage Airflow releases                  +ci                     Tools that CI workflows use to cleanup/manage CI environment                                +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_build-docs.svg b/images/breeze/output_build-docs.svg index 31539771f3734..cbc98721cb0f5 100644 --- a/images/breeze/output_build-docs.svg +++ b/images/breeze/output_build-docs.svg @@ -35,8 +35,8 @@ .terminal-4255383784-r1 { fill: #c5c8c6;font-weight: bold } .terminal-4255383784-r2 { fill: #c5c8c6 } .terminal-4255383784-r3 { fill: #d0b344;font-weight: bold } -.terminal-4255383784-r4 { fill: #68a0b3;font-weight: bold } -.terminal-4255383784-r5 { fill: #868887 } +.terminal-4255383784-r4 { fill: #868887 } +.terminal-4255383784-r5 { fill: #68a0b3;font-weight: bold } .terminal-4255383784-r6 { fill: #98a84b;font-weight: bold } .terminal-4255383784-r7 { fill: #8d7b39 } @@ -232,64 +232,64 @@ -Usage: breeze build-docs [OPTIONS] +Usage: breeze build-docs [OPTIONS] Build documentation in the container. -╭─ Doc flags ──────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---docs-only-dOnly build documentation. ---spellcheck-only-sOnly run spell checking. ---clean-buildClean inventories of Inter-Sphinx documentation and generated APIs and sphinx artifacts       -before the build - useful for a clean build.                                                  ---for-productionBuilds documentation for official release i.e. all links point to stable version. Implies     ---clean-build ---package-filterList of packages to consider.                                                                 
-(apache-airflow | apache-airflow-providers-airbyte | apache-airflow-providers-alibaba |       -apache-airflow-providers-amazon | apache-airflow-providers-apache-beam |                      -apache-airflow-providers-apache-cassandra | apache-airflow-providers-apache-drill |           -apache-airflow-providers-apache-druid | apache-airflow-providers-apache-hdfs |                -apache-airflow-providers-apache-hive | apache-airflow-providers-apache-kylin |                -apache-airflow-providers-apache-livy | apache-airflow-providers-apache-pig |                  -apache-airflow-providers-apache-pinot | apache-airflow-providers-apache-spark |               -apache-airflow-providers-apache-sqoop | apache-airflow-providers-arangodb |                   -apache-airflow-providers-asana | apache-airflow-providers-atlassian-jira |                    -apache-airflow-providers-celery | apache-airflow-providers-cloudant |                         -apache-airflow-providers-cncf-kubernetes | apache-airflow-providers-common-sql |              -apache-airflow-providers-databricks | apache-airflow-providers-datadog |                      -apache-airflow-providers-dbt-cloud | apache-airflow-providers-dingding |                      -apache-airflow-providers-discord | apache-airflow-providers-docker |                          -apache-airflow-providers-elasticsearch | apache-airflow-providers-exasol |                    -apache-airflow-providers-facebook | apache-airflow-providers-ftp |                            -apache-airflow-providers-github | apache-airflow-providers-google |                           -apache-airflow-providers-grpc | apache-airflow-providers-hashicorp |                          -apache-airflow-providers-http | apache-airflow-providers-imap |                               -apache-airflow-providers-influxdb | apache-airflow-providers-jdbc |                           -apache-airflow-providers-jenkins | apache-airflow-providers-jira |                            -apache-airflow-providers-microsoft-azure | apache-airflow-providers-microsoft-mssql |         -apache-airflow-providers-microsoft-psrp | apache-airflow-providers-microsoft-winrm |          -apache-airflow-providers-mongo | apache-airflow-providers-mysql |                             -apache-airflow-providers-neo4j | apache-airflow-providers-odbc |                              -apache-airflow-providers-openfaas | apache-airflow-providers-opsgenie |                       -apache-airflow-providers-oracle | apache-airflow-providers-pagerduty |                        -apache-airflow-providers-papermill | apache-airflow-providers-plexus |                        -apache-airflow-providers-postgres | apache-airflow-providers-presto |                         -apache-airflow-providers-qubole | apache-airflow-providers-redis |                            -apache-airflow-providers-salesforce | apache-airflow-providers-samba |                        -apache-airflow-providers-segment | apache-airflow-providers-sendgrid |                        -apache-airflow-providers-sftp | apache-airflow-providers-singularity |                        -apache-airflow-providers-slack | apache-airflow-providers-snowflake |                         -apache-airflow-providers-sqlite | apache-airflow-providers-ssh |                              -apache-airflow-providers-tableau | apache-airflow-providers-tabular |                         -apache-airflow-providers-telegram | apache-airflow-providers-trino |                          -apache-airflow-providers-vertica | 
apache-airflow-providers-yandex |                          -apache-airflow-providers-zendesk | docker-stack | helm-chart)                                 -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Doc flags ──────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--docs-only-dOnly build documentation. +--spellcheck-only-sOnly run spell checking. +--clean-buildClean inventories of Inter-Sphinx documentation and generated APIs and sphinx artifacts       +before the build - useful for a clean build.                                                  +--for-productionBuilds documentation for official release i.e. all links point to stable version. Implies     +--clean-build +--package-filterList of packages to consider.                                                                 +(apache-airflow | apache-airflow-providers-airbyte | apache-airflow-providers-alibaba |       +apache-airflow-providers-amazon | apache-airflow-providers-apache-beam |                      +apache-airflow-providers-apache-cassandra | apache-airflow-providers-apache-drill |           +apache-airflow-providers-apache-druid | apache-airflow-providers-apache-hdfs |                +apache-airflow-providers-apache-hive | apache-airflow-providers-apache-kylin |                +apache-airflow-providers-apache-livy | apache-airflow-providers-apache-pig |                  +apache-airflow-providers-apache-pinot | apache-airflow-providers-apache-spark |               +apache-airflow-providers-apache-sqoop | apache-airflow-providers-arangodb |                   +apache-airflow-providers-asana | apache-airflow-providers-atlassian-jira |                    +apache-airflow-providers-celery | apache-airflow-providers-cloudant |                         +apache-airflow-providers-cncf-kubernetes | apache-airflow-providers-common-sql |              +apache-airflow-providers-databricks | apache-airflow-providers-datadog |                      +apache-airflow-providers-dbt-cloud | apache-airflow-providers-dingding |                      +apache-airflow-providers-discord | apache-airflow-providers-docker |                          +apache-airflow-providers-elasticsearch | apache-airflow-providers-exasol |                    +apache-airflow-providers-facebook | apache-airflow-providers-ftp |                            +apache-airflow-providers-github | apache-airflow-providers-google |                           +apache-airflow-providers-grpc | apache-airflow-providers-hashicorp |                          +apache-airflow-providers-http | apache-airflow-providers-imap |                               +apache-airflow-providers-influxdb | apache-airflow-providers-jdbc |                           +apache-airflow-providers-jenkins | apache-airflow-providers-jira |                            +apache-airflow-providers-microsoft-azure | apache-airflow-providers-microsoft-mssql |         
+apache-airflow-providers-microsoft-psrp | apache-airflow-providers-microsoft-winrm |          +apache-airflow-providers-mongo | apache-airflow-providers-mysql |                             +apache-airflow-providers-neo4j | apache-airflow-providers-odbc |                              +apache-airflow-providers-openfaas | apache-airflow-providers-opsgenie |                       +apache-airflow-providers-oracle | apache-airflow-providers-pagerduty |                        +apache-airflow-providers-papermill | apache-airflow-providers-plexus |                        +apache-airflow-providers-postgres | apache-airflow-providers-presto |                         +apache-airflow-providers-qubole | apache-airflow-providers-redis |                            +apache-airflow-providers-salesforce | apache-airflow-providers-samba |                        +apache-airflow-providers-segment | apache-airflow-providers-sendgrid |                        +apache-airflow-providers-sftp | apache-airflow-providers-singularity |                        +apache-airflow-providers-slack | apache-airflow-providers-snowflake |                         +apache-airflow-providers-sqlite | apache-airflow-providers-ssh |                              +apache-airflow-providers-tableau | apache-airflow-providers-tabular |                         +apache-airflow-providers-telegram | apache-airflow-providers-trino |                          +apache-airflow-providers-vertica | apache-airflow-providers-yandex |                          +apache-airflow-providers-zendesk | docker-stack | helm-chart)                                 +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. 
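As a quick illustration of the regenerated ``build-docs`` help above, invocations along these lines should work (the provider package name is only an example picked from the ``--package-filter`` list):

.. code-block:: bash

    # build documentation for a single provider package only
    breeze build-docs --package-filter apache-airflow-providers-http

    # run just the spell checker without building the docs
    breeze build-docs --spellcheck-only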
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_ci-image.svg b/images/breeze/output_ci-image.svg index dc47b2e574977..db16ef837f146 100644 --- a/images/breeze/output_ci-image.svg +++ b/images/breeze/output_ci-image.svg @@ -19,92 +19,92 @@ font-weight: 700; } - .terminal-1839360107-matrix { + .terminal-3949946042-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1839360107-title { + .terminal-3949946042-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1839360107-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-1839360107-r2 { fill: #c5c8c6 } -.terminal-1839360107-r3 { fill: #d0b344;font-weight: bold } -.terminal-1839360107-r4 { fill: #68a0b3;font-weight: bold } -.terminal-1839360107-r5 { fill: #868887 } -.terminal-1839360107-r6 { fill: #98a84b;font-weight: bold } + .terminal-3949946042-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-3949946042-r2 { fill: #c5c8c6 } +.terminal-3949946042-r3 { fill: #d0b344;font-weight: bold } +.terminal-3949946042-r4 { fill: #868887 } +.terminal-3949946042-r5 { fill: #68a0b3;font-weight: bold } +.terminal-3949946042-r6 { fill: #98a84b;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: ci-image + Command: ci-image - + - - -Usage: breeze ci-image [OPTIONSCOMMAND [ARGS]... - -Tools that developers can use to manually manage CI images - -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ CI Image tools ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ -build   Build CI image. Include building multiple images for all python versions (sequentially).                   -pull    Pull and optionally verify CI images - possibly in parallel for all Python versions.                       -verify  Verify CI image.                                                                                           -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze ci-image [OPTIONS] COMMAND [ARGS]... + +Tools that developers can use to manually manage CI images + +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ CI Image tools ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +build   Build CI image. Include building multiple images for all python versions.                                  +pull    Pull and optionally verify CI images - possibly in parallel for all Python versions.                       +verify  Verify CI image.                                                                                           
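For illustration, the two subcommands listed in the regenerated ``ci-image`` screen above are typically used like this (example invocations, not part of the change itself):

.. code-block:: bash

    # build the CI image for the default Python version
    breeze ci-image build

    # pull the CI image and verify it afterwards
    breeze ci-image pull --verify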
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_ci-image_build.svg b/images/breeze/output_ci-image_build.svg index 19ef82bbf6ee1..7af488fba364a 100644 --- a/images/breeze/output_ci-image_build.svg +++ b/images/breeze/output_ci-image_build.svg @@ -19,309 +19,309 @@ font-weight: 700; } - .terminal-1523357919-matrix { + .terminal-3550258889-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1523357919-title { + .terminal-3550258889-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1523357919-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-1523357919-r2 { fill: #c5c8c6 } -.terminal-1523357919-r3 { fill: #d0b344;font-weight: bold } -.terminal-1523357919-r4 { fill: #68a0b3;font-weight: bold } -.terminal-1523357919-r5 { fill: #868887 } -.terminal-1523357919-r6 { fill: #98a84b;font-weight: bold } -.terminal-1523357919-r7 { fill: #8d7b39 } + .terminal-3550258889-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-3550258889-r2 { fill: #c5c8c6 } +.terminal-3550258889-r3 { fill: #d0b344;font-weight: bold } +.terminal-3550258889-r4 { fill: #868887 } +.terminal-3550258889-r5 { fill: #68a0b3;font-weight: bold } +.terminal-3550258889-r6 { fill: #98a84b;font-weight: bold } +.terminal-3550258889-r7 { fill: #8d7b39 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: ci-image build + Command: ci-image build - + - - -Usage: breeze ci-image build [OPTIONS] - -Build CI image. Include building multiple images for all python versions (sequentially). - -╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.7< | 3.8 | 3.9 | 3.10)                                   -[default: 3.7]                                               ---upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. ---upgrade-on-failure-uWhen set, attempt to run upgrade to newer dependencies when regular build       -fails.                                                                          ---image-tag-tTag the image after building it(TEXT)[default: latest] ---tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful -when you build or pull image with --image-tag.                                  ---docker-cache-cCache option for image used during the build.(registry | local | disabled) -[default: registry]                           ---force-buildForce image build no matter if it is determined as needed. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of Python versions. ---parallelismMaximum number of processes to use while running the operation in parallel. 
-(INTEGER RANGE)                                                             -[default: 4; 1<=x<=8]                                                       ---skip-cleanupSkip cleanup of temporary files created during parallel run ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    ---include-success-outputsWhether to include outputs of successful parallel runs (by default they are not         -printed).                                                                               -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced options (for power users) ─────────────────────────────────────────────────────────────────────────────────╮ ---builderBuildx builder used to perform `docker buildx build` commands(TEXT) ---install-providers-from-sourcesInstall providers from sources when installing. ---airflow-constraints-modeMode of constraints for CI image building                               -(constraints-source-providers | constraints | constraints-no-providers) -[default: constraints-source-providers]                                 ---airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) ---python-imageIf specified this is the base python image used to build the image. Should be    -something like: python:VERSION-slim-bullseye                                     -(TEXT)                                                                           ---additional-python-depsAdditional python dependencies to use when building the images.(TEXT) ---additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) ---additional-pip-install-flagsAdditional flags added to `pip install` commands (except reinstalling `pip`      -itself).                                                                         -(TEXT)                                                                           ---additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) ---additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) ---additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) ---dev-apt-depsApt dev dependencies to use when building the images.(TEXT) ---dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ ---github-tokenThe token used to authenticate to GitHub.(TEXT) ---github-usernameThe user name used to authenticate to GitHub.(TEXT) ---platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) ---pushPush image after building it. ---empty-imagePrepare empty image tagged with the same name as the Airflow image. ---prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  -image).                                                                                    
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze ci-image build [OPTIONS] + +Build CI image. Include building multiple images for all python versions. + +╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. +(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. +--upgrade-on-failure-uWhen set, attempt to run upgrade to newer dependencies when regular build       +fails.                                                                          +--image-tag-tTag the image after building it.(TEXT)[default: latest] +--tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful +when you build or pull image with --image-tag.                                  +--docker-cache-cCache option for image used during the build.(registry | local | disabled) +[default: registry]                           +--force-buildForce image build no matter if it is determined as needed. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of Python versions. +--parallelismMaximum number of processes to use while running the operation in parallel. +(INTEGER RANGE)                                                             +[default: 4; 1<=x<=8]                                                       +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.7 3.8 3.9 3.10]                                                    +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced options (for power users) ─────────────────────────────────────────────────────────────────────────────────╮ +--builderBuildx builder used to perform `docker buildx build` commands.(TEXT) +--install-providers-from-sourcesInstall providers from sources when installing. +--airflow-constraints-modeMode of constraints for CI image building.                              
+(constraints-source-providers | constraints | constraints-no-providers) +[default: constraints-source-providers]                                 +--airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) +--python-imageIf specified this is the base python image used to build the image. Should be    +something like: python:VERSION-slim-bullseye.                                    +(TEXT)                                                                           +--additional-python-depsAdditional python dependencies to use when building the images.(TEXT) +--additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) +--additional-pip-install-flagsAdditional flags added to `pip install` commands (except reinstalling `pip`      +itself).                                                                         +(TEXT)                                                                           +--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) +--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) +--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) +--dev-apt-depsApt dev dependencies to use when building the images.(TEXT) +--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ +--github-tokenThe token used to authenticate to GitHub.(TEXT) +--github-usernameThe user name used to authenticate to GitHub.(TEXT) +--platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) +--pushPush image after building it. +--empty-imagePrepare empty image tagged with the same name as the Airflow image. +--prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  +image).                                                                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--help-hShow this message and exit. 
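To show how the new parallel flags in the regenerated ``ci-image build`` screen fit together, an invocation along these lines should exercise them (the Python versions are arbitrary examples from the supported list):

.. code-block:: bash

    # build CI images for two Python versions in parallel, showing resource
    # usage while the builds run and keeping output of successful builds
    breeze ci-image build --run-in-parallel --python-versions "3.7 3.10" \
        --debug-resources --include-success-outputs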
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_ci-image_pull.svg b/images/breeze/output_ci-image_pull.svg index 9195acf84b738..abcc06b9b39d5 100644 --- a/images/breeze/output_ci-image_pull.svg +++ b/images/breeze/output_ci-image_pull.svg @@ -19,169 +19,169 @@ font-weight: 700; } - .terminal-2117817669-matrix { + .terminal-1876190902-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2117817669-title { + .terminal-1876190902-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2117817669-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-2117817669-r2 { fill: #c5c8c6 } -.terminal-2117817669-r3 { fill: #d0b344;font-weight: bold } -.terminal-2117817669-r4 { fill: #68a0b3;font-weight: bold } -.terminal-2117817669-r5 { fill: #868887 } -.terminal-2117817669-r6 { fill: #98a84b;font-weight: bold } -.terminal-2117817669-r7 { fill: #8d7b39 } + .terminal-1876190902-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-1876190902-r2 { fill: #c5c8c6 } +.terminal-1876190902-r3 { fill: #d0b344;font-weight: bold } +.terminal-1876190902-r4 { fill: #868887 } +.terminal-1876190902-r5 { fill: #68a0b3;font-weight: bold } +.terminal-1876190902-r6 { fill: #98a84b;font-weight: bold } +.terminal-1876190902-r7 { fill: #8d7b39 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: ci-image pull + Command: ci-image pull - + - - -Usage: breeze ci-image pull [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Pull and optionally verify CI images - possibly in parallel for all Python versions. - -╭─ Pull image flags ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---image-tag-tTag of the image which is used to pull the image(TEXT)[default: latest] ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---github-tokenThe token used to authenticate to GitHub.(TEXT) ---verifyVerify image. ---wait-for-imageWait until image is available. ---tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful when you build -or pull image with --image-tag.                                                                -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Parallel running ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of Python versions. ---parallelismMaximum number of processes to use while running the operation in parallel. -(INTEGER RANGE)                                                             -[default: 4; 1<=x<=8]                                                       ---skip-cleanupSkip cleanup of temporary files created during parallel run ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    ---include-success-outputsWhether to include outputs of successful parallel runs (by default they are not         -printed).                                                                               
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze ci-image pull [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Pull and optionally verify CI images - possibly in parallel for all Python versions. + +╭─ Pull image flags ───────────────────────────────────────────────────────────────────────────────────────────────────╮ +--image-tag-tTag of the image which is used to pull the image.(TEXT)[default: latest] +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--github-tokenThe token used to authenticate to GitHub.(TEXT) +--verifyVerify image. +--wait-for-imageWait until image is available. +--tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful when you build +or pull image with --image-tag.                                                                +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Parallel running ───────────────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of Python versions. +--parallelismMaximum number of processes to use while running the operation in parallel. +(INTEGER RANGE)                                                             +[default: 4; 1<=x<=8]                                                       +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.7 3.8 3.9 3.10]                                                    +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. 
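Analogously, pulling and verifying CI images for all default Python versions in parallel should look roughly like this (illustrative only):

.. code-block:: bash

    breeze ci-image pull --run-in-parallel --verify --debug-resources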
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_ci-image_verify.svg b/images/breeze/output_ci-image_verify.svg index 01f80a1555c8d..3b7d4be25b787 100644 --- a/images/breeze/output_ci-image_verify.svg +++ b/images/breeze/output_ci-image_verify.svg @@ -19,113 +19,113 @@ font-weight: 700; } - .terminal-2731477099-matrix { + .terminal-4266854521-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2731477099-title { + .terminal-4266854521-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2731477099-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-2731477099-r2 { fill: #c5c8c6 } -.terminal-2731477099-r3 { fill: #d0b344;font-weight: bold } -.terminal-2731477099-r4 { fill: #868887 } -.terminal-2731477099-r5 { fill: #68a0b3;font-weight: bold } -.terminal-2731477099-r6 { fill: #98a84b;font-weight: bold } -.terminal-2731477099-r7 { fill: #8d7b39 } + .terminal-4266854521-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-4266854521-r2 { fill: #c5c8c6 } +.terminal-4266854521-r3 { fill: #d0b344;font-weight: bold } +.terminal-4266854521-r4 { fill: #868887 } +.terminal-4266854521-r5 { fill: #68a0b3;font-weight: bold } +.terminal-4266854521-r6 { fill: #98a84b;font-weight: bold } +.terminal-4266854521-r7 { fill: #8d7b39 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: ci-image verify + Command: ci-image verify - + - - -Usage: breeze ci-image verify [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Verify CI image. - -╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ ---image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---image-tag-tTag of the image when verifying it(TEXT)[default: latest] ---pullPull image is missing before attempting to verify it. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze ci-image verify [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Verify CI image. + +╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ +--image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--image-tag-tTag of the image when verifying it.(TEXT)[default: latest] +--pullPull image is missing before attempting to verify it. 
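A minimal example based on the verify flags above (the Python version is an arbitrary choice):

.. code-block:: bash

    # verify the image for one Python version, pulling it first if it is missing
    breeze ci-image verify --python 3.9 --pull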
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_ci_find-newer-dependencies.svg b/images/breeze/output_ci_find-newer-dependencies.svg index eb6232fd4dc1b..95bba2a0b9b54 100644 --- a/images/breeze/output_ci_find-newer-dependencies.svg +++ b/images/breeze/output_ci_find-newer-dependencies.svg @@ -19,121 +19,121 @@ font-weight: 700; } - .terminal-1503861839-matrix { + .terminal-1646402681-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1503861839-title { + .terminal-1646402681-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1503861839-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-1503861839-r2 { fill: #c5c8c6 } -.terminal-1503861839-r3 { fill: #d0b344;font-weight: bold } -.terminal-1503861839-r4 { fill: #868887 } -.terminal-1503861839-r5 { fill: #68a0b3;font-weight: bold } -.terminal-1503861839-r6 { fill: #98a84b;font-weight: bold } -.terminal-1503861839-r7 { fill: #8d7b39 } + .terminal-1646402681-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-1646402681-r2 { fill: #c5c8c6 } +.terminal-1646402681-r3 { fill: #d0b344;font-weight: bold } +.terminal-1646402681-r4 { fill: #868887 } +.terminal-1646402681-r5 { fill: #68a0b3;font-weight: bold } +.terminal-1646402681-r6 { fill: #98a84b;font-weight: bold } +.terminal-1646402681-r7 { fill: #8d7b39 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: ci find-newer-dependencies + Command: ci find-newer-dependencies - + - - -Usage: breeze ci find-newer-dependencies [OPTIONS] - -Finds which dependencies are being upgraded. - -╭─ Find newer dependencies flags ──────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.7< | 3.8 | 3.9 | 3.10)                                   -[default: 3.7]                                               ---timezoneTimezone to use during the check(TEXT) ---airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to     -specify constraints for the installed version and to find newer dependencies    -(TEXT)                                                                          ---updated-on-or-afterDate when the release was updated after(TEXT) ---max-ageMax age of the last release (used if no updated-on-or-after if specified) -(INTEGER)                                                                 -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---help-hShow this message and exit. 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze ci find-newer-dependencies [OPTIONS] + +Finds which dependencies are being upgraded. + +╭─ Find newer dependencies flags ──────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. +(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--timezoneTimezone to use during the check.(TEXT) +--airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to     +specify constraints for the installed version and to find newer dependencies    +(TEXT)                                                                          +--updated-on-or-afterDate when the release was updated after.(TEXT) +--max-ageMax age of the last release (used if no updated-on-or-after if specified). +(INTEGER)                                                                  +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_k8s.svg b/images/breeze/output_k8s.svg index 0a6e1a63d6596..ef8be3919de1e 100644 --- a/images/breeze/output_k8s.svg +++ b/images/breeze/output_k8s.svg @@ -35,8 +35,8 @@ .terminal-343177163-r1 { fill: #c5c8c6;font-weight: bold } .terminal-343177163-r2 { fill: #c5c8c6 } .terminal-343177163-r3 { fill: #d0b344;font-weight: bold } -.terminal-343177163-r4 { fill: #68a0b3;font-weight: bold } -.terminal-343177163-r5 { fill: #868887 } +.terminal-343177163-r4 { fill: #868887 } +.terminal-343177163-r5 { fill: #68a0b3;font-weight: bold } .terminal-343177163-r6 { fill: #98a84b;font-weight: bold } @@ -141,34 +141,34 @@ -Usage: breeze k8s [OPTIONSCOMMAND [ARGS]... +Usage: breeze k8s [OPTIONS] COMMAND [ARGS]... Tools that developers use to run Kubernetes tests -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ K8S cluster management commands ────────────────────────────────────────────────────────────────────────────────────╮ -setup-env        Setup shared Kubernetes virtual environment and tools.                                            -create-cluster   Create a KinD Cluster for Python and Kubernetes version specified (optionally create all clusters -in parallel).                                                                                     -build-k8s-image  Build k8s-ready airflow image (optionally all images in parallel).                                -upload-k8s-image Upload k8s-ready airflow image to the KinD cluster (optionally to all clusters in parallel)       -configure-clusterConfigures cluster for airflow deployment - creates namespaces and test resources (optionally for -all clusters in parallel).                                                                        
-deploy-airflow   Deploy airflow image to the current KinD cluster (or all clusters).                               -delete-cluster   Delete the current KinD Cluster (optionally all clusters).                                        -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ K8S inspection commands ────────────────────────────────────────────────────────────────────────────────────────────╮ -status  Check status of the current cluster and airflow deployed to it (optionally all clusters).                  -logs    Dump k8s logs to ${TMP_DIR}/kind_logs_<cluster_name> directory (optionally all clusters).                  -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ K8S testing commands ───────────────────────────────────────────────────────────────────────────────────────────────╮ -tests  Run tests against the current KinD cluster (optionally for all clusters in parallel).                       -shell  Run shell environment for the current KinD cluster.                                                         -k9s    Run k9s tool. You can pass any k9s args as extra args.                                                      -logs   Dump k8s logs to ${TMP_DIR}/kind_logs_<cluster_name> directory (optionally all clusters).                   -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ K8S cluster management commands ────────────────────────────────────────────────────────────────────────────────────╮ +setup-env        Setup shared Kubernetes virtual environment and tools.                                            +create-cluster   Create a KinD Cluster for Python and Kubernetes version specified (optionally create all clusters +in parallel).                                                                                     +build-k8s-image  Build k8s-ready airflow image (optionally all images in parallel).                                +upload-k8s-image Upload k8s-ready airflow image to the KinD cluster (optionally to all clusters in parallel)       +configure-clusterConfigures cluster for airflow deployment - creates namespaces and test resources (optionally for +all clusters in parallel).                                                                        +deploy-airflow   Deploy airflow image to the current KinD cluster (or all clusters).                               +delete-cluster   Delete the current KinD Cluster (optionally all clusters).                                        +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ K8S inspection commands ────────────────────────────────────────────────────────────────────────────────────────────╮ +status  Check status of the current cluster and airflow deployed to it (optionally all clusters).                  +logs    Dump k8s logs to ${TMP_DIR}/kind_logs_<cluster_name> directory (optionally all clusters).                  
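The command list in the regenerated ``k8s`` screen suggests a typical local flow; one possible ordering (a sketch, not a prescribed sequence) is:

.. code-block:: bash

    breeze k8s setup-env
    breeze k8s create-cluster
    breeze k8s build-k8s-image
    breeze k8s upload-k8s-image
    breeze k8s configure-cluster
    breeze k8s deploy-airflow
    breeze k8s tests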
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ K8S testing commands ───────────────────────────────────────────────────────────────────────────────────────────────╮ +tests  Run tests against the current KinD cluster (optionally for all clusters in parallel).                       +shell  Run shell environment for the current KinD cluster.                                                         +k9s    Run k9s tool. You can pass any k9s args as extra args.                                                      +logs   Dump k8s logs to ${TMP_DIR}/kind_logs_<cluster_name> directory (optionally all clusters).                   +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_k8s_build-k8s-image.svg b/images/breeze/output_k8s_build-k8s-image.svg index 6e3dba127fb82..d34d962cc977d 100644 --- a/images/breeze/output_k8s_build-k8s-image.svg +++ b/images/breeze/output_k8s_build-k8s-image.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + - Command: k8s build-k8s-image + Command: k8s build-k8s-image - + - - -Usage: breeze k8s build-k8s-image [OPTIONS] - -Build k8s-ready airflow image (optionally all images in parallel). - -╭─ Build image flags ──────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---rebuild-base-imageRebuilds base Airflow image before building K8S image. ---image-tag-tImage tag used to build K8S image from(TEXT)[default: latest] -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of Python versions. ---parallelismMaximum number of processes to use while running the operation in parallel. -(INTEGER RANGE)                                                             -[default: 4; 1<=x<=8]                                                       ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    ---include-success-outputsWhether to include outputs of successful parallel runs (by default they are not         -printed).                                                                               -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze k8s build-k8s-image [OPTIONS] + +Build k8s-ready airflow image (optionally all images in parallel). 
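For orientation, a minimal invocation sketch for the command documented above; it uses only flags listed in the
regenerated help, and the Python version list is an illustrative choice:

.. code-block:: bash

    # Build the K8S-ready Airflow image for two Python versions in parallel,
    # printing resource usage while the parallel builds run
    breeze k8s build-k8s-image --run-in-parallel --python-versions "3.8 3.9" --debug-resources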
diff --git a/images/breeze/output_k8s_configure-cluster.svg b/images/breeze/output_k8s_configure-cluster.svg
index 553053d1153ba..d20f0a6fa0102 100644
[regenerated terminal screenshot of `breeze k8s configure-cluster --help`: --skip-cleanup and --debug-resources are
 added next to the other parallel options, and --include-success-outputs is reworded to "(skipped by default)"]
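A sketch of how the command above might be used once clusters exist; the flags are taken from the regenerated help:

.. code-block:: bash

    # Create namespaces and test resources on all created KinD clusters in parallel,
    # keeping the output of successful runs
    breeze k8s configure-cluster --run-in-parallel --include-success-outputs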
diff --git a/images/breeze/output_k8s_create-cluster.svg b/images/breeze/output_k8s_create-cluster.svg
index 5c599cedf4e56..37cea958332ec 100644
[regenerated terminal screenshot of `breeze k8s create-cluster --help`: the "Parallel options" panel gains
 --skip-cleanup and --debug-resources, and --include-success-outputs is reworded to "(skipped by default)"]
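A minimal sketch for the cluster-creation command above; the Python and Kubernetes versions are illustrative picks
from the allowed values shown in the help:

.. code-block:: bash

    # Force-recreate a single KinD cluster for one Python / Kubernetes combination
    breeze k8s create-cluster --force --python 3.8 --kubernetes-version v1.24.6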
diff --git a/images/breeze/output_k8s_delete-cluster.svg b/images/breeze/output_k8s_delete-cluster.svg
index c8245ae54dc19..aa0ddab2db150 100644
[regenerated terminal screenshot of `breeze k8s delete-cluster --help`: colour classes reordered; help content
 (--python, --kubernetes-version, --all and the common options) unchanged]
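And the matching clean-up, sketched with the multi-cluster flag shown in the help above:

.. code-block:: bash

    # Tear down every KinD cluster created by breeze
    breeze k8s delete-cluster --all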
diff --git a/images/breeze/output_k8s_deploy-airflow.svg b/images/breeze/output_k8s_deploy-airflow.svg
index 7f425acfb43aa..ce808cb4a80ec 100644
[regenerated terminal screenshot of `breeze k8s deploy-airflow --help`: the "Parallel options" panel gains
 --skip-cleanup and --debug-resources, and --include-success-outputs is reworded to "(skipped by default)"]
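A hedged example of deploying with a non-default executor; the wait time is an arbitrary illustrative value within
the documented integer range:

.. code-block:: bash

    # Deploy Airflow into the current KinD cluster with CeleryExecutor
    # and wait up to 5 minutes for the webserver to come up
    breeze k8s deploy-airflow --executor CeleryExecutor --wait-time-in-seconds 300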
diff --git a/images/breeze/output_k8s_k9s.svg b/images/breeze/output_k8s_k9s.svg
index 3cff8d1fa03d1..54af3bb8c5822 100644
[regenerated terminal screenshot of `breeze k8s k9s --help`: colour classes reordered; help content unchanged]
diff --git a/images/breeze/output_k8s_logs.svg b/images/breeze/output_k8s_logs.svg
index 7b3f24b518ed5..6d03a62388dd8 100644
[regenerated terminal screenshot of `breeze k8s logs --help`: colour classes reordered; help content unchanged]
diff --git a/images/breeze/output_k8s_shell.svg b/images/breeze/output_k8s_shell.svg
index 367e69fff9e7c..72affc47906e3 100644
[regenerated terminal screenshot of `breeze k8s shell --help`: colour classes reordered; help content unchanged]
diff --git a/images/breeze/output_k8s_status.svg b/images/breeze/output_k8s_status.svg
index 53c28aef3d74b..894a5d2956ccc 100644
[regenerated terminal screenshot of `breeze k8s status --help`: colour classes reordered; help content unchanged]
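The inspection commands above can be combined when debugging, for example:

.. code-block:: bash

    # Check the Airflow deployment on all clusters, then dump their logs for inspection
    breeze k8s status --all
    breeze k8s logs --all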
diff --git a/images/breeze/output_k8s_tests.svg b/images/breeze/output_k8s_tests.svg
index a88e79a4036cf..8ab7a4149ddd4 100644
[regenerated terminal screenshot of `breeze k8s tests --help`: the "Parallel options" panel gains
 --skip-cleanup and --debug-resources, and --include-success-outputs is reworded to "(skipped by default)"]
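A minimal sketch for running the Kubernetes tests documented above, using only flags from the regenerated help:

.. code-block:: bash

    # Run the Kubernetes test suite against all clusters in parallel,
    # including the output of successful runs
    breeze k8s tests --run-in-parallel --include-success-outputs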
diff --git a/images/breeze/output_k8s_upload-k8s-image.svg b/images/breeze/output_k8s_upload-k8s-image.svg
index e9ab57193bd0e..c3310fee2b3b9 100644
[regenerated terminal screenshot of `breeze k8s upload-k8s-image --help`: the "Parallel options" panel gains
 --skip-cleanup and --debug-resources, and --include-success-outputs is reworded to "(skipped by default)"]
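A short sketch of the upload step, again limited to flags visible in the help above:

.. code-block:: bash

    # Upload the freshly built K8S image to all KinD clusters in parallel
    breeze k8s upload-k8s-image --run-in-parallel --debug-resources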
diff --git a/images/breeze/output_prod-image.svg b/images/breeze/output_prod-image.svg
index eb88437f63e2c..95374cf21d4b9 100644
[regenerated terminal screenshot of `breeze prod-image --help`: usage line fixed from "breeze prod-image [OPTIONSCOMMAND"
 to "breeze prod-image [OPTIONS] COMMAND"; the build / pull / verify command list is unchanged]
diff --git a/images/breeze/output_prod-image_build.svg b/images/breeze/output_prod-image_build.svg
index 370394e60dde2..e0c261ca4f58d 100644
[regenerated terminal screenshot of `breeze prod-image build --help`: the "Building images in parallel" panel gains
 --debug-resources, --include-success-outputs is reworded to "(skipped by default)", and several option descriptions
 (--image-tag, --builder, --airflow-constraints-mode) gain trailing periods]
Should be    +something like: python:VERSION-slim-bullseye.                                    +(TEXT)                                                                           +--additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) +--additional-pip-install-flagsAdditional flags added to `pip install` commands (except reinstalling `pip`      +itself).                                                                         +(TEXT)                                                                           +--additional-python-depsAdditional python dependencies to use when building the images.(TEXT) +--additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) +--additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) +--additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) +--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) +--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) +--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) +--runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) +--runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) +--dev-apt-depsApt dev dependencies to use when building the images.(TEXT) +--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Customization options (for specific customization needs) ───────────────────────────────────────────────────────────╮ +--install-packages-from-contextInstall wheels from local docker-context-files when building image.        +Implies --disable-airflow-repo-cache.                                      +--cleanup-contextClean up docker context files before running build (cannot be used         +together with --install-packages-from-context).                            +--disable-mysql-client-installationDo not install MySQL client. +--disable-mssql-client-installationDo not install MsSQl client. +--disable-postgres-client-installationDo not install Postgres client. +--disable-airflow-repo-cacheDisable cache from Airflow repository during building. +--install-airflow-referenceInstall Airflow using GitHub tag or branch.(TEXT) +--installation-methodInstall Airflow from: sources or PyPI.(. | apache-airflow) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ +--github-tokenThe token used to authenticate to GitHub.(TEXT) +--github-usernameThe user name used to authenticate to GitHub.(TEXT) +--platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) +--pushPush image after building it. +--empty-imagePrepare empty image tagged with the same name as the Airflow image. +--prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  +image).                                                                                    
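A minimal sketch of how the parallel-build flags shown in this regenerated help output can be combined (the Python versions here are only an illustration):

.. code-block:: bash

    # Build PROD images for two Python versions in parallel,
    # printing resource usage while the parallel builds run.
    breeze prod-image build --run-in-parallel --python-versions "3.7 3.8" --debug-resources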
diff --git a/images/breeze/output_prod-image_pull.svg b/images/breeze/output_prod-image_pull.svg
index c27907dfbd9a0..460793f165c54 100644
[regenerated terminal screenshot of `breeze prod-image pull` help: --skip-cleanup and --include-success-outputs moved from the common options into the "Parallel running" panel, which also gains --debug-resources; option descriptions gained consistent trailing punctuation]
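A hedged example combining the pull flags documented above, assuming the images were already published for the default Python versions:

.. code-block:: bash

    # Pull the latest PROD images for all default Python versions in parallel and verify them.
    breeze prod-image pull --image-tag latest --run-in-parallel --verify --debug-resources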
diff --git a/images/breeze/output_prod-image_verify.svg b/images/breeze/output_prod-image_verify.svg
index d695397242f8e..e193df088984b 100644
[regenerated terminal screenshot of `breeze prod-image verify` help: the --image-tag description now ends with a full stop; option content is otherwise unchanged]
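For illustration only, a verify invocation using the flags listed in that help output:

.. code-block:: bash

    # Verify a PROD image for Python 3.7, pulling it first if it is missing locally.
    breeze prod-image verify --python 3.7 --image-tag latest --pull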
diff --git a/images/breeze/output_release-management.svg b/images/breeze/output_release-management.svg
index 87988392239d0..66af753517d41 100644
[regenerated terminal screenshot of the `breeze release-management` command group: the usage line now renders as "Usage: breeze release-management [OPTIONS] COMMAND [ARGS]..." and the prepare-provider-documentation entry reads "Prepare CHANGELOG, README and COMMITS information for providers."]
diff --git a/images/breeze/output_release-management_generate-constraints.svg b/images/breeze/output_release-management_generate-constraints.svg
index 1dbba60df9fc5..5ec7bda74e351 100644
[regenerated terminal screenshot of `breeze release-management generate-constraints` help: the "Parallel running" panel now also documents --debug-resources, and option descriptions gained consistent trailing punctuation]
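A sketch of a constraints-generation run using the flags from that help output (the constraint mode shown is the documented default):

.. code-block:: bash

    # Generate constraint files for all default Python versions in parallel.
    breeze release-management generate-constraints \
        --airflow-constraints-mode constraints-source-providers \
        --run-in-parallel --debug-resources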
diff --git a/images/breeze/output_release-management_prepare-provider-packages.svg b/images/breeze/output_release-management_prepare-provider-packages.svg
index 9f457a0e4a246..4a861607114f5 100644
[regenerated terminal screenshot of `breeze release-management prepare-provider-packages` help: the --package-list-file description now ends with a full stop; the provider list and the remaining options are unchanged]
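An illustrative packaging run based on the options above (the provider ids and package format are chosen only as an example):

.. code-block:: bash

    # Build both wheel and sdist packages for two selected providers.
    breeze release-management prepare-provider-packages http google --package-format both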
diff --git a/images/breeze/output_setup.svg b/images/breeze/output_setup.svg
index 4ea9da3ed8a9e..40050418b0348 100644
[regenerated terminal screenshot of the `breeze setup` command group: the usage line now renders as "Usage: breeze setup [OPTIONS] COMMAND [ARGS]..."; the command list is unchanged]
diff --git a/images/breeze/output_setup_config.svg b/images/breeze/output_setup_config.svg
index 23fd34e205cc7..4d6c2dbba5af0 100644
[regenerated terminal screenshot of `breeze setup config` help: the highlighted choices now match the actual defaults (>sqlite< backend and Postgres >10<)]
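A hedged example of changing the stored Breeze defaults with the flags documented above:

.. code-block:: bash

    # Persist Postgres 13 as the default backend and turn off the cheatsheet.
    breeze setup config --backend postgres --postgres-version 13 --no-cheatsheet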
diff --git a/images/breeze/output_setup_regenerate-command-images.svg b/images/breeze/output_setup_regenerate-command-images.svg
index 2091185f16ac2..49ec3a23ff324 100644
[regenerated terminal screenshot of `breeze setup regenerate-command-images` help: the usage line rendering is fixed; the option content is unchanged]
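For illustration, regenerating only a subset of the command images (the --command flag may be repeated):

.. code-block:: bash

    # Regenerate selected command images instead of all of them.
    breeze setup regenerate-command-images --command prod-image:build --command prod-image:pull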
diff --git a/images/breeze/output_setup_self-upgrade.svg b/images/breeze/output_setup_self-upgrade.svg
index 40eac379bdc8e..5a188b25a48d9 100644
[regenerated terminal screenshot of `breeze setup self-upgrade` help: the usage line rendering is fixed; the option content is unchanged]
diff --git a/images/breeze/output_shell.svg b/images/breeze/output_shell.svg
index 322cd1bec2b7a..def029f80b4f6 100644
[regenerated terminal screenshot of `breeze shell` help: the highlighted defaults for --backend and --postgres-version are corrected to >sqlite< and >10<, and the --image-tag description now ends with a full stop]
+--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--max-timeMaximum time that the command should take - if it takes longer, the command will fail. +(INTEGER RANGE)                                                                        +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_start-airflow.svg b/images/breeze/output_start-airflow.svg index 591fc8bc77781..82317dd6ada45 100644 --- a/images/breeze/output_start-airflow.svg +++ b/images/breeze/output_start-airflow.svg @@ -19,277 +19,277 @@ font-weight: 700; } - .terminal-577517802-matrix { + .terminal-2829072632-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-577517802-title { + .terminal-2829072632-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-577517802-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-577517802-r2 { fill: #c5c8c6 } -.terminal-577517802-r3 { fill: #d0b344;font-weight: bold } -.terminal-577517802-r4 { fill: #868887 } -.terminal-577517802-r5 { fill: #68a0b3;font-weight: bold } -.terminal-577517802-r6 { fill: #98a84b;font-weight: bold } -.terminal-577517802-r7 { fill: #8d7b39 } + .terminal-2829072632-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-2829072632-r2 { fill: #c5c8c6 } +.terminal-2829072632-r3 { fill: #d0b344;font-weight: bold } +.terminal-2829072632-r4 { fill: #868887 } +.terminal-2829072632-r5 { fill: #68a0b3;font-weight: bold } +.terminal-2829072632-r6 { fill: #98a84b;font-weight: bold } +.terminal-2829072632-r7 { fill: #8d7b39 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: start-airflow + Command: start-airflow - + - - -Usage: breeze start-airflow [OPTIONS] [EXTRA_ARGS]... - -Enter breeze environment and starts all Airflow components in the tmux session. Compile assets if contents of www  -directory changed. - -╭─ Basic flags ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.7< | 3.8 | 3.9 | 3.10)                                   -[default: 3.7]                                               ---load-example-dags-eEnable configuration to load example DAGs when starting Airflow. ---load-default-connections-cEnable configuration to load default connections when starting Airflow. ---backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] ---platformPlatform for Airflow image.(linux/amd64 | linux/arm64) ---postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] ---mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] ---mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] ---integrationIntegration(s) to enable when running (can be more than one).                        
-(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino -| all)                                                                               ---forward-credentials-fForward local credentials to container when running. ---db-reset-dReset DB when entering the container. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Asset compilation options ──────────────────────────────────────────────────────────────────────────────────────────╮ ---skip-asset-compilationSkips compilation of assets when starting airflow even if the content of www changed     -(mutually exclusive with --dev-mode).                                                    ---dev-modeStarts webserver in dev mode (assets are always recompiled in this case when starting)   -(mutually exclusive with --skip-asset-compilation).                                      -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced flag for running ──────────────────────────────────────────────────────────────────────────────────────────╮ ---use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It can also be `none`,      -`wheel`, or `sdist` if Airflow should be removed, installed from wheel packages -or sdist packages available in dist folder respectively. Implies                ---mount-sources `remove`.                                                       -(none | wheel | sdist | <airflow_version>)                                      ---airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to     -specify constraints for the installed version and to find newer dependencies    -(TEXT)                                                                          ---airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT) ---use-packages-from-distInstall all found packages (--package-format determines type) from 'dist'       -folder when entering breeze.                                                    ---package-formatFormat of packages that should be installed from dist.(wheel | sdist) -[default: wheel]                                       ---force-buildForce image build no matter if it is determined as needed. ---image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip) -(TEXT)                                                                         -[default: latest]                                                              ---mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed       -(default = selected).                                                           -(selected | all | skip | remove)                                                -[default: selected]                                                             -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze start-airflow [OPTIONS] [EXTRA_ARGS]... + +Enter breeze environment and starts all Airflow components in the tmux session. Compile assets if contents of www  +directory changed. + +╭─ Basic flags ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. +(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--load-example-dags-eEnable configuration to load example DAGs when starting Airflow. +--load-default-connections-cEnable configuration to load default connections when starting Airflow. +--backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] +--platformPlatform for Airflow image.(linux/amd64 | linux/arm64) +--postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] +--mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] +--mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] +--integrationIntegration(s) to enable when running (can be more than one).                        +(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino +| all)                                                                               +--forward-credentials-fForward local credentials to container when running. +--db-reset-dReset DB when entering the container. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Asset compilation options ──────────────────────────────────────────────────────────────────────────────────────────╮ +--skip-asset-compilationSkips compilation of assets when starting airflow even if the content of www changed     +(mutually exclusive with --dev-mode).                                                    +--dev-modeStarts webserver in dev mode (assets are always recompiled in this case when starting)   +(mutually exclusive with --skip-asset-compilation).                                      +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced flag for running ──────────────────────────────────────────────────────────────────────────────────────────╮ +--use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It can also be `none`,      +`wheel`, or `sdist` if Airflow should be removed, installed from wheel packages +or sdist packages available in dist folder respectively. Implies                +--mount-sources `remove`.                                                       +(none | wheel | sdist | <airflow_version>)                                      +--airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to     +specify constraints for the installed version and to find newer dependencies    +(TEXT)                                                                          +--airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT) +--use-packages-from-distInstall all found packages (--package-format determines type) from 'dist'       +folder when entering breeze.                                                    
+--package-formatFormat of packages that should be installed from dist.(wheel | sdist) +[default: wheel]                                       +--force-buildForce image build no matter if it is determined as needed. +--image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip). +(TEXT)                                                                          +[default: latest]                                                               +--mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed       +(default = selected).                                                           +(selected | all | skip | remove)                                                +[default: selected]                                                             +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_testing_docker-compose-tests.svg b/images/breeze/output_testing_docker-compose-tests.svg index 60d37db5b111e..c521157a63cab 100644 --- a/images/breeze/output_testing_docker-compose-tests.svg +++ b/images/breeze/output_testing_docker-compose-tests.svg @@ -19,113 +19,113 @@ font-weight: 700; } - .terminal-2612856083-matrix { + .terminal-128845089-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2612856083-title { + .terminal-128845089-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2612856083-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-2612856083-r2 { fill: #c5c8c6 } -.terminal-2612856083-r3 { fill: #d0b344;font-weight: bold } -.terminal-2612856083-r4 { fill: #868887 } -.terminal-2612856083-r5 { fill: #68a0b3;font-weight: bold } -.terminal-2612856083-r6 { fill: #98a84b;font-weight: bold } -.terminal-2612856083-r7 { fill: #8d7b39 } + .terminal-128845089-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-128845089-r2 { fill: #c5c8c6 } +.terminal-128845089-r3 { fill: #d0b344;font-weight: bold } +.terminal-128845089-r4 { fill: #868887 } +.terminal-128845089-r5 { fill: #68a0b3;font-weight: bold } +.terminal-128845089-r6 { fill: #98a84b;font-weight: bold } +.terminal-128845089-r7 { fill: #8d7b39 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: testing docker-compose-tests + Command: testing docker-compose-tests - + - - -Usage: breeze testing docker-compose-tests [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Run docker-compose tests. 
- -╭─ Docker-compose tests flag ──────────────────────────────────────────────────────────────────────────────────────────╮ ---image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) ---image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip)(TEXT) -[default: latest]                                                              ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze testing docker-compose-tests [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Run docker-compose tests. + +╭─ Docker-compose tests flag ──────────────────────────────────────────────────────────────────────────────────────────╮ +--image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) +--image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip).(TEXT) +[default: latest]                                                               +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_testing_helm-tests.svg b/images/breeze/output_testing_helm-tests.svg index cc2295a244544..d2610033ea894 100644 --- a/images/breeze/output_testing_helm-tests.svg +++ b/images/breeze/output_testing_helm-tests.svg @@ -19,109 +19,109 @@ font-weight: 700; } - .terminal-3656301724-matrix { + .terminal-1160363178-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3656301724-title { + .terminal-1160363178-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3656301724-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-3656301724-r2 { fill: #c5c8c6 } -.terminal-3656301724-r3 { fill: #d0b344;font-weight: bold } -.terminal-3656301724-r4 { fill: #868887 } -.terminal-3656301724-r5 { fill: #68a0b3;font-weight: bold } -.terminal-3656301724-r6 { fill: #98a84b;font-weight: bold } -.terminal-3656301724-r7 { fill: #8d7b39 } + .terminal-1160363178-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-1160363178-r2 { fill: #c5c8c6 } +.terminal-1160363178-r3 { fill: #d0b344;font-weight: bold } +.terminal-1160363178-r4 { fill: #868887 } +.terminal-1160363178-r5 { fill: #68a0b3;font-weight: bold } +.terminal-1160363178-r6 { fill: #98a84b;font-weight: bold } +.terminal-1160363178-r7 { fill: #8d7b39 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: testing helm-tests + Command: testing helm-tests - + - - -Usage: breeze testing helm-tests [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Run Helm chart tests. - -╭─ Advanced flag for helms-tests command ──────────────────────────────────────────────────────────────────────────────╮ ---image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip)(TEXT) -[default: latest]                                                              ---mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed (default = selected). -(selected | all | skip | remove)                                                                -[default: selected]                                                                             -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze testing helm-tests [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Run Helm chart tests. + +╭─ Advanced flag for helms-tests command ──────────────────────────────────────────────────────────────────────────────╮ +--image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip).(TEXT) +[default: latest]                                                               +--mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed (default = selected). 
+(selected | all | skip | remove)                                                                +[default: selected]                                                                             +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_testing_tests.svg b/images/breeze/output_testing_tests.svg index e8c33d0cf094a..1b25d4efa6221 100644 --- a/images/breeze/output_testing_tests.svg +++ b/images/breeze/output_testing_tests.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - Command: testing tests + Command: testing tests - + - - -Usage: breeze testing tests [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Run the specified unit test targets. - -╭─ Basic flag for tests command ───────────────────────────────────────────────────────────────────────────────────────╮ ---integrationIntegration(s) to enable when running (can be more than one).                               -(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino | all) ---test-typeType of test to run. Note that with Providers, you can also specify which provider tests     -should be run - for example --test-type "Providers[airbyte,http]"                            -(All | Always | API | Always | CLI | Core | Integration | Other | Providers | WWW | Helm |   -Postgres | MySQL | Integration | Other | Quarantine)                                         ---test-timeoutTest timeout. Set the pytest setup, execution and teardown timeouts to this value(TEXT) -[default: 60]                                                                     ---db-reset-dReset DB when entering the container. ---backend-bDatabase backend to use.(sqlite | mysql | >postgres< | mssql)[default: sqlite] ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---postgres-version-PVersion of Postgres used.(10 | 11 | >12< | 13 | 14)[default: 10] ---mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] ---mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced flag for tests command ────────────────────────────────────────────────────────────────────────────────────╮ ---limit-progress-outputLimit progress to percentage only and just show the summary when tests complete. ---image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip)(TEXT) -[default: latest]                                                              ---mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed (default =    -selected).                                                                              
-(selected | all | skip | remove)                                                        -[default: selected]                                                                     -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze testing tests [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Run the specified unit test targets. + +╭─ Basic flag for tests command ───────────────────────────────────────────────────────────────────────────────────────╮ +--integrationIntegration(s) to enable when running (can be more than one).                               +(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino | all) +--test-typeType of test to run. Note that with Providers, you can also specify which provider tests     +should be run - for example --test-type "Providers[airbyte,http]"                            +(All | API | Always | CLI | Core | Integration | Other | Providers | WWW | Helm | Postgres | +MySQL | Quarantine)                                                                          +--test-timeoutTest timeout. Set the pytest setup, execution and teardown timeouts to this value +(INTEGER RANGE)                                                                   +[default: 60; x>=0]                                                               +--db-reset-dReset DB when entering the container. +--backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] +--mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] +--mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options for parallel test commands ─────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of Python versions. +--parallelismMaximum number of processes to use while running the operation in parallel. +(INTEGER RANGE)                                                             +[default: 4; 1<=x<=8]                                                       +--test-typesSpace separated list of test types used for testing in parallel.(TEXT) +[default: API Always CLI Core Integration Other Providers WWW]   +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced flag for tests command ────────────────────────────────────────────────────────────────────────────────────╮ +--image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip).(TEXT) +[default: latest]                                                               +--mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed (default = selected). +(selected | all | skip | remove)                                                                +[default: selected]                                                                             +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/scripts/ci/docker-compose/backend-mssql-docker-volume.yml b/scripts/ci/docker-compose/backend-mssql-docker-volume.yml index f18d6086c3f5c..dc2b43032e93a 100644 --- a/scripts/ci/docker-compose/backend-mssql-docker-volume.yml +++ b/scripts/ci/docker-compose/backend-mssql-docker-volume.yml @@ -20,5 +20,8 @@ services: mssql: volumes: - mssql-db-volume:/var/opt/mssql + mssqlsetup: + volumes: + - mssql-db-volume:/var/opt/mssql volumes: mssql-db-volume: diff --git a/scripts/ci/docker-compose/backend-mssql-bind-volume.yml b/scripts/ci/docker-compose/backend-mssql-tmpfs-volume.yml similarity index 66% rename from scripts/ci/docker-compose/backend-mssql-bind-volume.yml rename to scripts/ci/docker-compose/backend-mssql-tmpfs-volume.yml index fdf3a95b43413..261875c0a844f 100644 --- a/scripts/ci/docker-compose/backend-mssql-bind-volume.yml +++ b/scripts/ci/docker-compose/backend-mssql-tmpfs-volume.yml @@ -17,12 +17,15 @@ --- version: "3.7" services: + # In case of tmpfs backend for docker, mssql fails because TMPFS does not support + # O_DIRECT parameter for direct writing to the filesystem + # https://github.com/microsoft/mssql-docker/issues/13 + # so we need to mount an external volume for its db location + # the external db must allow for parallel testing so external volume is mapped + # to the data volume mssql: volumes: - # In case of tmpfs backend for docker, mssql fails because TMPFS does not support - # O_DIRECT parameter for direct writing to the filesystem - # https://github.com/microsoft/mssql-docker/issues/13 - # so we need to mount an external volume for its db location - # the external db must allow for parallel testing so external volume is mapped - # to the data volume + - ${MSSQL_DATA_VOLUME}:/var/opt/mssql + mssqlsetup: + volumes: - ${MSSQL_DATA_VOLUME}:/var/opt/mssql diff --git a/scripts/ci/docker-compose/backend-sqlite-port.yml b/scripts/ci/docker-compose/base-ports.yml similarity index 86% rename from scripts/ci/docker-compose/backend-sqlite-port.yml rename to scripts/ci/docker-compose/base-ports.yml index 806925877bf31..9a9be6270ed5d 100644 --- a/scripts/ci/docker-compose/backend-sqlite-port.yml +++ b/scripts/ci/docker-compose/base-ports.yml @@ -16,3 +16,9 @@ # under the License. 
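Editor's note: the regenerated help for the testing tests command above documents the new parallel-run flags (--run-in-parallel, --parallelism, --test-types, --skip-cleanup, --debug-resources, --include-success-outputs). A hypothetical invocation combining a few of them is sketched below for orientation; the chosen test types and values are examples only and are not part of this change.

# Example only: run two selected test types in parallel with a capped number
# of processes and resource debugging enabled. Values are placeholders.
breeze testing tests --run-in-parallel --test-types "Providers Other" \
    --parallelism 2 --debug-resources --include-success-outputs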
--- version: "3.7" +services: + airflow: + ports: + - "${SSH_PORT}:22" + - "${WEBSERVER_HOST_PORT}:8080" + - "${FLOWER_HOST_PORT}:5555" diff --git a/scripts/ci/docker-compose/base.yml b/scripts/ci/docker-compose/base.yml index 86426a86d6d40..8db997f50e30f 100644 --- a/scripts/ci/docker-compose/base.yml +++ b/scripts/ci/docker-compose/base.yml @@ -86,9 +86,5 @@ services: # Pass docker to inside of the container so that Kind and Moto tests can use it. - /var/run/docker.sock:/var/run/docker.sock - /dev/urandom:/dev/random # Required to get non-blocking entropy source - ports: - - "${SSH_PORT}:22" - - "${WEBSERVER_HOST_PORT}:8080" - - "${FLOWER_HOST_PORT}:5555" cap_add: - SYS_PTRACE diff --git a/scripts/ci/libraries/_all_libs.sh b/scripts/ci/libraries/_all_libs.sh index da893d5dcb613..5f64e4cd05b86 100755 --- a/scripts/ci/libraries/_all_libs.sh +++ b/scripts/ci/libraries/_all_libs.sh @@ -28,10 +28,6 @@ readonly SCRIPTS_CI_DIR . "${LIBRARIES_DIR}"/_traps.sh # shellcheck source=scripts/ci/libraries/_initialization.sh . "${LIBRARIES_DIR}"/_initialization.sh -# shellcheck source=scripts/ci/libraries/_parallel.sh -. "${LIBRARIES_DIR}"/_parallel.sh -# shellcheck source=scripts/ci/libraries/_docker_engine_resources.sh -. "${LIBRARIES_DIR}"/_docker_engine_resources.sh # shellcheck source=scripts/ci/libraries/_sanity_checks.sh . "${LIBRARIES_DIR}"/_sanity_checks.sh # shellcheck source=scripts/ci/libraries/_local_mounts.sh diff --git a/scripts/ci/libraries/_docker_engine_resources.sh b/scripts/ci/libraries/_docker_engine_resources.sh deleted file mode 100644 index c4740e19272db..0000000000000 --- a/scripts/ci/libraries/_docker_engine_resources.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
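Editor's note: the hunks above move the published ports out of base.yml into a separate base-ports.yml overlay, presumably so the port mappings can be layered in only when they are needed. A minimal sketch of merging the two files back together follows; the port numbers are placeholders and base.yml may expect further environment variables to be set.

# Merge the base compose file with the extracted ports overlay and print the
# resolved configuration. Port values below are placeholders.
SSH_PORT=12322 WEBSERVER_HOST_PORT=28080 FLOWER_HOST_PORT=25555 \
    docker-compose \
        -f scripts/ci/docker-compose/base.yml \
        -f scripts/ci/docker-compose/base-ports.yml \
        config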
- - -function docker_engine_resources::print_overall_stats() { - docker stats --all --no-stream - echo - docker run --rm --entrypoint /bin/sh "alpine:latest" -c "free -m" - echo - df -h \ - --exclude-type devtmpfs \ - --exclude-type overlay \ - --exclude-type squashfs \ - | grep -v " /run" \ - | grep -v " /sys" \ - | grep -v "/dev/shm" \ - || true -} - -function docker_engine_resources::get_available_cpus_in_docker() { - CPUS_AVAILABLE_FOR_DOCKER=$(docker run --rm "debian:bullseye-slim" grep -cE 'cpu[0-9]+' /proc/stat) - export CPUS_AVAILABLE_FOR_DOCKER -} - -function docker_engine_resources::get_available_memory_in_docker() { - MEMORY_AVAILABLE_FOR_DOCKER=$(docker run --rm --entrypoint /bin/bash "debian:bullseye-slim" -c 'echo $(($(getconf _PHYS_PAGES) * $(getconf PAGE_SIZE) / (1024 * 1024)))') - export MEMORY_AVAILABLE_FOR_DOCKER -} - -function docker_engine_resources::check_all_resources() { - docker_v run -t "${EXTRA_DOCKER_FLAGS[@]}" \ - --entrypoint "/bin/bash" \ - "${AIRFLOW_CI_IMAGE_WITH_TAG}" \ - -c "python /opt/airflow/scripts/in_container/run_resource_check.py" -} diff --git a/scripts/ci/libraries/_parallel.sh b/scripts/ci/libraries/_parallel.sh deleted file mode 100644 index a22a205937caf..0000000000000 --- a/scripts/ci/libraries/_parallel.sh +++ /dev/null @@ -1,249 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - - -# Require SEMAPHORE_NAME - - -function parallel::initialize_monitoring() { - PARALLEL_MONITORED_DIR="$(mktemp -d)" - export PARALLEL_MONITORED_DIR - - PARALLEL_TAIL_LENGTH=${PARALLEL_TAIL_LENGTH:=2} - export PARALLEL_TAIL_LENGTH -} - -function parallel::make_sure_gnu_parallel_is_installed() { - start_end::group_start "Making sure GNU Parallels is installed" - echo - echo "Making sure you have GNU parallel installed" - echo - echo "You might need to provide root password if you do not have it" - echo - (command -v parallel || apt install parallel || sudo apt install parallel || brew install parallel) >/dev/null - start_end::group_end "Making sure GNU Parallels is installed" -} - -function parallel::kill_stale_semaphore_locks() { - local pid - echo - echo "${COLOR_BLUE}Killing stale semaphore locks${COLOR_RESET}" - echo - for s in "${HOME}/.parallel/semaphores/id-${SEMAPHORE_NAME}/"*@* - do - pid="${s%%@*}" - if [[ ${pid} != "-*" ]]; then - kill -15 -- -"$(basename "${s%%@*}")" 2>/dev/null || true - rm -f "${s}" 2>/dev/null - fi - done - rm -rf "${HOME}/.parallel" -} - - -# Periodical loop to print summary of all the processes run by parallel -function parallel::monitor_loop() { - trap 'exit 0' TERM - echo - echo "Start monitoring of parallel execution in ${PARALLEL_MONITORED_DIR} directory." 
- echo - local progress_report_number=1 - local start_time - local end_time - # To continue supporting Bash v3 we can't use associative arrays - so use a - # normal array and just check if the value is in it -- it will only ever be - # a few items long so it won't be too expensive - declare -a finished_jobs=() - start_time=${SECONDS} - while true - do - echo - echo "${COLOR_YELLOW}########## Monitoring progress start: ${progress_report_number} ##########${COLOR_RESET}" - echo - if [[ ${PR_LABELS} == *debug-ci-resources* || ${GITHUB_EVENT_NAME} == "push" ]]; then - # Only print stats in `main` or when "debug-ci-resources" label is set on PR. - echo "${COLOR_BLUE}########### STATISTICS #################" - docker_engine_resources::print_overall_stats - echo "########### STATISTICS #################${COLOR_RESET}" - fi - for directory in "${PARALLEL_MONITORED_DIR}"/*/* - do - parallel_process=$(basename "${directory}") - if ( IFS=$'\x1F'; [[ "$IFS${finished_jobs[*]}$IFS" == *"$IFS${parallel_process}$IFS"* ]] ) ; then - # Already finished, so don't print anything - continue - fi - - echo "${COLOR_BLUE}### The last ${PARALLEL_TAIL_LENGTH} lines for ${parallel_process} process: ${directory}/stdout ###${COLOR_RESET}" - tail "-${PARALLEL_TAIL_LENGTH}" "${directory}/stdout" || true - echo - - if [[ -s "${directory}/status" ]]; then - finished_jobs+=("$parallel_process") - # The last line of output (which we've already shown) will be a line about the success/failure - # of this job - fi - - echo - - done - - end_time=${SECONDS} - echo "${COLOR_YELLOW}########## $((end_time - start_time)) seconds passed since start ##########${COLOR_RESET}" - sleep 15 - progress_report_number=$((progress_report_number + 1)) - done -} - -# Monitors progress of parallel execution and periodically summarizes stdout entries created by -# the parallel execution. Sets PAPARALLEL_MONITORED_DIR which should be be passed as --results -# parameter to GNU parallel execution. -function parallel::monitor_progress() { - echo "Parallel results are stored in: ${PARALLEL_MONITORED_DIR}" - parallel::monitor_loop 2>/dev/null & - - # shellcheck disable=SC2034 - PARALLEL_MONITORING_PID=$! 
- # shellcheck disable=SC2016 - traps::add_trap 'parallel::kill_monitor' EXIT -} - - -function parallel::kill_monitor() { - kill ${PARALLEL_MONITORING_PID} >/dev/null 2>&1 || true -} - -# Outputs logs for successful test type -# $1 test type -function parallel::output_log_for_successful_job(){ - local job=$1 - local log_dir="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}" - start_end::group_start "${COLOR_GREEN}Output for successful ${job}${COLOR_RESET}" - echo "${COLOR_GREEN}##### The ${job} succeeded ##### ${COLOR_RESET}" - echo - cat "${log_dir}"/stdout - echo - echo "${COLOR_GREEN}##### The ${job} succeeded ##### ${COLOR_RESET}" - echo - start_end::group_end -} - -# Outputs logs for failed test type -# $1 test type -function parallel::output_log_for_failed_job(){ - local job=$1 - local log_dir="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}" - start_end::group_start "${COLOR_RED}Output: for failed ${job}${COLOR_RESET}" - echo "${COLOR_RED}##### The ${job} failed ##### ${COLOR_RESET}" - echo - cat "${log_dir}"/stdout - echo - echo - echo "${COLOR_RED}##### The ${job} failed ##### ${COLOR_RESET}" - echo - start_end::group_end -} - -# Prints summary of jobs and returns status: -# 0 - all jobs succeeded (SKIPPED_FAILED_JOBS is not counted) -# >0 - number of failed jobs (except Quarantine) -function parallel::print_job_summary_and_return_status_code() { - local return_code="0" - local job - local status_file - for job_path in "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/"* - do - job="$(basename "${job_path}")" - status_file="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}/status" - if [[ -s "${status_file}" ]]; then - status=$(cat "${status_file}") - else - echo "${COLOR_RED}Missing ${status_file} file" - status="1" - fi - if [[ ${status} == "0" ]]; then - parallel::output_log_for_successful_job "${job}" - else - parallel::output_log_for_failed_job "${job}" - # SKIPPED_FAILED_JOB failure does not trigger whole test failure - if [[ ${SKIPPED_FAILED_JOB=} != "${job}" ]]; then - return_code=$((return_code + 1)) - fi - fi - done - return "${return_code}" -} - -function parallel::kill_all_running_docker_containers() { - echo - echo "${COLOR_BLUE}Kill all running docker containers${COLOR_RESET}" - echo - # shellcheck disable=SC2046 - docker kill $(docker ps -q) || true -} - -function parallel::system_prune_docker() { - echo - echo "${COLOR_BLUE}System-prune docker${COLOR_RESET}" - echo - docker_v system prune --force --volumes - echo -} - -# Cleans up runner before test execution. 
-# * Kills all running docker containers -# * System prune to clean all the temporary/unnamed images and left-over volumes -# * Print information about available space and memory -# * Kills stale semaphore locks -function parallel::cleanup_runner() { - start_end::group_start "Cleanup runner" - parallel::kill_all_running_docker_containers - parallel::system_prune_docker - docker_engine_resources::check_all_resources - docker_engine_resources::print_overall_stats - parallel::kill_stale_semaphore_locks - start_end::group_end -} - -function parallel::make_sure_python_versions_are_specified() { - if [[ -z "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING=}" ]]; then - echo - echo "${COLOR_RED}The CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING variable must be set and list python versions to use!${COLOR_RESET}" - echo - exit 1 - fi - echo - echo "${COLOR_BLUE}Running parallel builds for those Python versions: ${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING}${COLOR_RESET}" - echo -} - -function parallel::get_maximum_parallel_k8s_jobs() { - docker_engine_resources::get_available_cpus_in_docker - if [[ -n ${RUNS_ON=} && ${RUNS_ON} != *"self-hosted"* ]]; then - echo - echo "${COLOR_YELLOW}This is a GitHub Public runner - for now we are forcing max parallel K8S tests jobs to 1 for those${COLOR_RESET}" - echo - export MAX_PARALLEL_K8S_JOBS="1" - else - echo - echo "${COLOR_YELLOW}This is a Self-Hosted runner - forcing max parallel jobs to 5${COLOR_RESET}" - echo - export MAX_PARALLEL_K8S_JOBS="3" - fi - export MAX_PARALLEL_K8S_JOBS -} diff --git a/scripts/ci/libraries/_testing.sh b/scripts/ci/libraries/_testing.sh index e2d9f5f7cd6ed..a198f5d8fd49b 100644 --- a/scripts/ci/libraries/_testing.sh +++ b/scripts/ci/libraries/_testing.sh @@ -18,15 +18,6 @@ export MEMORY_REQUIRED_FOR_HEAVY_TEST_PARALLEL_RUN=33000 -function testing::skip_tests_if_requested(){ - if [[ -f ${BUILD_CACHE_DIR}/.skip_tests ]]; then - echo - echo "Skipping running tests !!!!!" 
- echo - exit - fi -} - function testing::get_docker_compose_local() { DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/files.yml") if [[ ${MOUNT_SELECTED_LOCAL_SOURCES} == "true" ]]; then @@ -50,70 +41,6 @@ function testing::get_docker_compose_local() { readonly DOCKER_COMPOSE_LOCAL } -function testing::get_maximum_parallel_test_jobs() { - docker_engine_resources::get_available_cpus_in_docker - if [[ -n ${RUNS_ON=} && ${RUNS_ON} != *"self-hosted"* ]]; then - echo - echo "${COLOR_YELLOW}This is a GitHub Public runner - for now we are forcing max parallel Quarantined tests jobs to 1 for those${COLOR_RESET}" - echo - export MAX_PARALLEL_QUARANTINED_TEST_JOBS="1" - else - if [[ ${MAX_PARALLEL_QUARANTINED_TEST_JOBS=} != "" ]]; then - echo - echo "${COLOR_YELLOW}Maximum parallel Quarantined test jobs forced via MAX_PARALLEL_QUARANTINED_TEST_JOBS = ${MAX_PARALLEL_QUARANTINED_TEST_JOBS}${COLOR_RESET}" - echo - else - MAX_PARALLEL_QUARANTINED_TEST_JOBS=${CPUS_AVAILABLE_FOR_DOCKER} - echo - echo "${COLOR_YELLOW}Maximum parallel Quarantined test jobs set to number of CPUs available for Docker = ${MAX_PARALLEL_QUARANTINED_TEST_JOBS}${COLOR_RESET}" - echo - fi - - fi - - if [[ ${MAX_PARALLEL_TEST_JOBS=} != "" ]]; then - echo - echo "${COLOR_YELLOW}Maximum parallel test jobs forced via MAX_PARALLEL_TEST_JOBS = ${MAX_PARALLEL_TEST_JOBS}${COLOR_RESET}" - echo - else - MAX_PARALLEL_TEST_JOBS=${CPUS_AVAILABLE_FOR_DOCKER} - echo - echo "${COLOR_YELLOW}Maximum parallel test jobs set to number of CPUs available for Docker = ${MAX_PARALLEL_TEST_JOBS}${COLOR_RESET}" - echo - fi - export MAX_PARALLEL_TEST_JOBS -} - -function testing::get_test_types_to_run() { - if [[ -n "${FORCE_TEST_TYPE=}" ]]; then - # Handle case where test type is forced from outside - export TEST_TYPES="${FORCE_TEST_TYPE}" - fi - - if [[ -z "${TEST_TYPES=}" ]]; then - TEST_TYPES="Core Providers API CLI Integration Other WWW" - echo - echo "Test types not specified. Adding all: ${TEST_TYPES}" - echo - fi - - if [[ -z "${FORCE_TEST_TYPE=}" ]]; then - # Add Postgres/MySQL special test types in case we are running several test types - if [[ ${BACKEND} == "postgres" && ${TEST_TYPES} != "Quarantined" ]]; then - TEST_TYPES="${TEST_TYPES} Postgres" - echo - echo "Added Postgres. Tests to run: ${TEST_TYPES}" - echo - fi - if [[ ${BACKEND} == "mysql" && ${TEST_TYPES} != "Quarantined" ]]; then - TEST_TYPES="${TEST_TYPES} MySQL" - echo - echo "Added MySQL. Tests to run: ${TEST_TYPES}" - echo - fi - fi - readonly TEST_TYPES -} function testing::dump_container_logs() { start_end::group_start "${COLOR_BLUE}Dumping container logs ${container}${COLOR_RESET}" @@ -149,7 +76,7 @@ function testing::setup_docker_compose_backend() { # This is a bit scary and we could get by making it group-writeable but the group would have # to be set to "root" (GID=0) for the volume to work and this cannot be accomplished without sudo chmod a+rwx "${MSSQL_DATA_VOLUME}" - backend_docker_compose+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/backend-mssql-bind-volume.yml") + backend_docker_compose+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/backend-mssql-tmpfs-volume.yml") # Runner user doesn't have blanket sudo access, but we can run docker as root. 
Go figure traps::add_trap "docker run -u 0 --rm -v ${MSSQL_DATA_VOLUME}:/mssql alpine sh -c 'rm -rvf -- /mssql/.* /mssql/*' || true" EXIT diff --git a/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py b/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py index 62a8ac13b1b29..5e85d12e8fe00 100755 --- a/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py +++ b/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py @@ -79,46 +79,12 @@ def is_regeneration_needed() -> bool: return return_code != 0 -def run_image_regeneration_in_breeze() -> int: - sys.path.insert(0, str(AIRFLOW_SOURCES_DIR / "dev" / "breeze" / "src")) - from airflow_breeze.utils.run_utils import run_command - - result = run_command(['breeze', 'version'], check=False, capture_output=True) - if result.returncode != 0: - run_command( - [ - sys.executable, - '-m', - 'pip', - 'install', - '-e', - os.fspath(AIRFLOW_SOURCES_DIR / "dev" / "breeze"), - ], - check=True, - capture_output=True, - ) - result = run_command(['breeze', 'setup', 'regenerate-command-images'], check=False) - return result.returncode - - if __name__ == '__main__': verify_all_commands_described_in_docs() - - run_generation = False - if FORCE: - run_generation = True + if is_regeneration_needed(): + console.print('\n[bright_blue]Some of the commands changed since last time images were generated.\n') console.print( - '[bright_blue]Force regenerating all images. It will be run in Breeze image for consistency.' + '\n[red]Image generation is needed. Please run this command:\n\n' + '[magenta]breeze setup regenerate-command-images\n' ) - elif is_regeneration_needed(): - run_generation = True - console.print('[yellow]Image generation is needed. It will be run in Breeze image for consistency.') - if run_generation: - return_code = run_image_regeneration_in_breeze() - if return_code != 0 and os.environ.get('CI'): - console.print( - "\n\n[yellow]Please run this command and commit resulting breeze images:[/]" - "\n\n `breeze setup regenerate-command-images`\n" - "\n\n[yellow]This will regenerate all the images in your commit!\n\n" - ) - sys.exit(return_code) + sys.exit(1) diff --git a/scripts/ci/testing/ci_run_airflow_testing.sh b/scripts/ci/testing/ci_run_airflow_testing.sh deleted file mode 100755 index fa5165b4704bb..0000000000000 --- a/scripts/ci/testing/ci_run_airflow_testing.sh +++ /dev/null @@ -1,153 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Enable automated tests execution -RUN_TESTS="true" -export RUN_TESTS - -SKIPPED_FAILED_JOB="Quarantined" -export SKIPPED_FAILED_JOB - -SEMAPHORE_NAME="tests" -export SEMAPHORE_NAME - -# shellcheck source=scripts/ci/libraries/_script_init.sh -. 
"$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" - -# Starts test types in parallel -# test_types_to_run - list of test types (it's not an array, it is space-separate list) -# ${@} - additional arguments to pass to test execution -function run_test_types_in_parallel() { - start_end::group_start "Monitoring tests: ${test_types_to_run}" - parallel::monitor_progress - mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}" - for TEST_TYPE in ${test_types_to_run} - do - export TEST_TYPE - mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${TEST_TYPE/\[*\]/}" - export JOB_LOG="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${TEST_TYPE/\[*\]/}/stdout" - export PARALLEL_JOB_STATUS="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${TEST_TYPE/\[*\]/}/status" - # Each test job will get SIGTERM followed by SIGTERM 200ms later and SIGKILL 200ms later after 45 mins - # shellcheck disable=SC2086 - parallel --ungroup --bg --semaphore --semaphorename "${SEMAPHORE_NAME}" \ - --jobs "${MAX_PARALLEL_TEST_JOBS}" --timeout 2700 \ - "$( dirname "${BASH_SOURCE[0]}" )/ci_run_single_airflow_test_in_docker.sh" "${@}" >"${JOB_LOG}" 2>&1 - done - parallel --semaphore --semaphorename "${SEMAPHORE_NAME}" --wait - parallel::kill_monitor - start_end::group_end -} - -# Runs all test types in parallel depending on the number of CPUs available -# We monitors their progress, display the progress and summarize the result when finished. -# -# In case there is not enough memory (MEMORY_REQUIRED_FOR_HEAVY_TEST_PARALLEL_RUN) available for -# the docker engine, the integration tests (which take a lot of memory for all the integrations) -# are run sequentially after all other tests were run in parallel. -# -# Input: -# * TEST_TYPES - contains all test types that should be executed -# * MEMORY_REQUIRED_FOR_HEAVY_TEST_PARALLEL_RUN - memory in bytes required to run integration tests -# in parallel to other tests -# -function run_all_test_types_in_parallel() { - parallel::cleanup_runner - docker_engine_resources::get_available_memory_in_docker - start_end::group_start "Determine how to run the tests" - echo - echo "${COLOR_YELLOW}Running maximum ${MAX_PARALLEL_TEST_JOBS} test types in parallel${COLOR_RESET}" - echo - local sequential_tests=() - # shellcheck disable=SC2153 - local test_types_to_run=${TEST_TYPES} - - if (( MEMORY_AVAILABLE_FOR_DOCKER < MEMORY_REQUIRED_FOR_HEAVY_TEST_PARALLEL_RUN )) ; then - # In case of Heavy tests - they need more resources (Memory) thus we only run them in - # parallel if we have more than 32 GB memory available. Otherwise we run them sequentially - # after cleaning up the memory and stopping all docker instances - echo "" - echo "${COLOR_YELLOW}There is not enough memory to run heavy test in parallel${COLOR_RESET}" - echo "${COLOR_YELLOW} Available memory: ${MEMORY_AVAILABLE_FOR_DOCKER}${COLOR_RESET}" - echo "${COLOR_YELLOW} Required memory: ${MEMORY_REQUIRED_FOR_HEAVY_TEST_PARALLEL_RUN}${COLOR_RESET}" - echo "" - echo "${COLOR_YELLOW}Heavy tests will be run sequentially after parallel tests including cleaning up docker between tests${COLOR_RESET}" - echo "" - if [[ ${test_types_to_run} == *"Integration"* ]]; then - test_types_to_run="${test_types_to_run//Integration/}" - if [[ ${BACKEND} == "mssql" ]]; then - # Also for mssql we skip Integration tests altogether on Public Runners. 
Mssql uses far - # too much memory and often shuts down and similarly as in case of Providers tests, - # there is no need to run them also for MsSQL engine as those integration tests - # are not really using any metadata-specific behaviour. - # Those tests will run in `main` anyway. - echo "${COLOR_YELLOW}Do not run integration tests for mssql in small systems due to memory issues.${COLOR_RESET}" - else - echo "${COLOR_YELLOW}Remove Integration from tests_types_to_run and add them to sequential tests due to low memory.${COLOR_RESET}" - sequential_tests+=("Integration") - fi - fi - if [[ ${BACKEND} == "mssql" || ${BACKEND} == "mysql" ]]; then - # For mssql/mysql - they take far more memory than postgres (or sqlite) - we skip the Provider - # tests altogether as they take too much memory even if run sequentially. - # Those tests will run in `main` anyway. - if [[ ${test_types_to_run} == *"Providers"* ]]; then - echo "${COLOR_YELLOW}Remove Providers from tests_types_to_run and skip running them altogether (mysql/mssql case).${COLOR_RESET}" - # shellcheck disable=SC2001 - test_types_to_run=$(echo "${test_types_to_run}" | sed 's/Providers[^ ]* *//') - fi - fi - fi - set +e - start_end::group_end - - parallel::initialize_monitoring - - # Run all tests that should run in parallel (from test_types_to_run variable) - run_test_types_in_parallel "${@}" - - # Check if sequential_tests contains any values since accessing an empty (and only initted) array throws an - # error in some versions of Bash 4 - if [[ ${sequential_tests[0]+"${sequential_tests[@]}"} ]] - then - # If needed run remaining tests sequentially - for sequential_test in "${sequential_tests[@]}"; do - parallel::cleanup_runner - test_types_to_run="${sequential_test}" - run_test_types_in_parallel "${@}" - done - fi - set -e - # This will exit with error code in case some of the non-Quarantined tests failed - parallel::print_job_summary_and_return_status_code -} - -testing::skip_tests_if_requested - -parallel::make_sure_gnu_parallel_is_installed - -testing::get_maximum_parallel_test_jobs - -testing::get_test_types_to_run - -testing::get_docker_compose_local - -# We don't include this in a group so that the failure output is always visible in CI -traps::add_trap 'python ./scripts/ci/testing/summarize_junit_failures.py files/test_result-*.xml' 'EXIT' -rm files/test_result-*.xml 2>/dev/null || true - -run_all_test_types_in_parallel "${@}" diff --git a/scripts/ci/testing/ci_run_quarantined_tests.sh b/scripts/ci/testing/ci_run_quarantined_tests.sh deleted file mode 100755 index 07c088f7444bd..0000000000000 --- a/scripts/ci/testing/ci_run_quarantined_tests.sh +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
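Editor's note: the removed ci_run_airflow_testing.sh above queued each test type through a GNU parallel semaphore with a concurrency cap and then waited for the whole group to finish. For readers unfamiliar with that pattern, a minimal self-contained sketch follows; the semaphore name and the placeholder sleep jobs are illustrative only.

# Queue three placeholder jobs under a named semaphore, allowing at most two
# to run at the same time, then block until all of them have finished.
for test_type in API CLI WWW; do
    echo "queueing ${test_type}"
    parallel --ungroup --bg --semaphore --semaphorename "demo-tests" --jobs 2 sleep 1
done
parallel --semaphore --semaphorename "demo-tests" --wait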
-set -euo pipefail
-
-# Enable automated tests execution
-RUN_TESTS="true"
-export RUN_TESTS
-
-SKIPPED_FAILED_JOB="Quarantined"
-export SKIPPED_FAILED_JOB
-
-SEMAPHORE_NAME="tests"
-export SEMAPHORE_NAME
-
-# shellcheck source=scripts/ci/libraries/_script_init.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
-
-initialization::set_output_color_variables
-
-BACKEND_TEST_TYPES=(mysql postgres sqlite)
-
-# Starts test types in parallel
-# test_types_to_run - list of test types (it's not an array, it is space-separate list)
-# ${@} - additional arguments to pass to test execution
-function run_quarantined_backend_tests_in_parallel() {
-    start_end::group_start "Determining how to run the tests"
-    echo
-    echo "${COLOR_YELLOW}Running maximum ${MAX_PARALLEL_QUARANTINED_TEST_JOBS} test types in parallel${COLOR_RESET}"
-    echo
-    start_end::group_end
-    start_end::group_start "Monitoring Quarantined tests : ${BACKEND_TEST_TYPES[*]}"
-    parallel::initialize_monitoring
-    parallel::monitor_progress
-    mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}"
-    TEST_TYPE="Quarantined"
-    export TEST_TYPE
-    for BACKEND in "${BACKEND_TEST_TYPES[@]}"
-    do
-        export BACKEND
-        mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${BACKEND}"
-        export JOB_LOG="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${BACKEND}/stdout"
-        export PARALLEL_JOB_STATUS="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${BACKEND}/status"
-        # Each test job will get SIGTERM followed by SIGTERM 200ms later and SIGKILL 200ms later after 25 mins
-        # shellcheck disable=SC2086
-        parallel --ungroup --bg --semaphore --semaphorename "${SEMAPHORE_NAME}" \
-            --jobs "${MAX_PARALLEL_QUARANTINED_TEST_JOBS}" --timeout 1500 \
-            "$( dirname "${BASH_SOURCE[0]}" )/ci_run_single_airflow_test_in_docker.sh" "${@}" >"${JOB_LOG}" 2>&1
-    done
-    parallel --semaphore --semaphorename "${SEMAPHORE_NAME}" --wait
-    parallel::kill_monitor
-    start_end::group_end
-}
-
-testing::skip_tests_if_requested
-
-parallel::make_sure_gnu_parallel_is_installed
-
-testing::get_maximum_parallel_test_jobs
-
-testing::get_docker_compose_local
-
-run_quarantined_backend_tests_in_parallel "${@}"
-
-set +e
-
-parallel::print_job_summary_and_return_status_code
-
-echo "Those are quarantined tests so failure of those does not fail the whole build!"
-echo "Please look above for the output of failed tests to fix them!"
-echo
diff --git a/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh b/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh
deleted file mode 100755
index f1afd2687f3e1..0000000000000
--- a/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Skip printing groups in CI
-PRINT_INFO_FROM_SCRIPTS="false"
-# shellcheck source=scripts/ci/libraries/_script_init.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
-
-PRINT_INFO_FROM_SCRIPTS="true"
-export PRINT_INFO_FROM_SCRIPTS
-
-DOCKER_COMPOSE_LOCAL=()
-INTEGRATIONS=()
-INTEGRATION_BREEZE_FLAGS=()
-
-function prepare_tests() {
-    DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/files.yml")
-    if [[ ${MOUNT_SELECTED_LOCAL_SOURCES} == "true" ]]; then
-        DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local.yml")
-    fi
-    if [[ ${MOUNT_ALL_LOCAL_SOURCES} == "true" ]]; then
-        DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local-all-sources.yml")
-    fi
-
-    if [[ ${GITHUB_ACTIONS=} == "true" ]]; then
-        DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/ga.yml")
-    fi
-
-    if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then
-        DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml")
-    fi
-
-    if [[ -n ${USE_AIRFLOW_VERSION=} ]]; then
-        DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml")
-    fi
-
-    readonly DOCKER_COMPOSE_LOCAL
-
-    if [[ ${TEST_TYPE:=} == "Integration" ]]; then
-        export ENABLED_INTEGRATIONS="${AVAILABLE_INTEGRATIONS}"
-        export LIST_OF_INTEGRATION_TESTS_TO_RUN="${AVAILABLE_INTEGRATIONS}"
-    else
-        export ENABLED_INTEGRATIONS=""
-        export LIST_OF_INTEGRATION_TESTS_TO_RUN=""
-    fi
-
-    for _INT in ${ENABLED_INTEGRATIONS}
-    do
-        INTEGRATIONS+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/integration-${_INT}.yml")
-        INTEGRATION_BREEZE_FLAGS+=("--integration" "${_INT}")
-    done
-
-    readonly INTEGRATIONS
-
-    echo "**********************************************************************************************"
-    echo
-    echo " TEST_TYPE: ${TEST_TYPE}, ENABLED INTEGRATIONS: ${ENABLED_INTEGRATIONS}"
-    echo
-    echo "**********************************************************************************************"
-}
-
-
-# Runs airflow testing in docker container
-# You need to set variable TEST_TYPE - test type to run
-# "${@}" - extra arguments to pass to docker command
-function run_airflow_testing_in_docker() {
-    set +u
-    set +e
-    local exit_code
-    echo
-    echo "Semaphore grabbed. Running tests for ${TEST_TYPE}"
-    echo
-
-    echo "Making sure docker-compose is down and remnants removed"
-    echo
-    docker-compose -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \
-        "${INTEGRATIONS[@]}" \
-        --project-name "airflow-${TEST_TYPE/\[*\]/}-${BACKEND}" \
-        down --remove-orphans \
-        --volumes --timeout 10
-    docker-compose --log-level INFO \
-        -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \
-        "${BACKEND_DOCKER_COMPOSE[@]}" \
-        "${INTEGRATIONS[@]}" \
-        "${DOCKER_COMPOSE_LOCAL[@]}" \
-        --project-name "airflow-${TEST_TYPE/\[*\]/}-${BACKEND}" \
-        run airflow "${@}"
-    exit_code=$?
-    docker ps
-    if [[ ${exit_code} != "0" && ${CI} == "true" ]]; then
-        docker ps --all
-        local container
-        for container in $(docker ps --all --format '{{.Names}}')
-        do
-            testing::dump_container_logs "${container}"
-        done
-    fi
-
-    docker-compose --log-level INFO -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \
-        "${INTEGRATIONS[@]}" \
-        --project-name "airflow-${TEST_TYPE/\[*\]/}-${BACKEND}" \
-        down --remove-orphans \
-        --volumes --timeout 10
-    set -u
-    set -e
-    if [[ ${exit_code} != "0" ]]; then
-        EXTRA_ARGS=""
-        if [[ ${BACKEND} == "postgres" ]]; then
-            EXTRA_ARGS="--postgres-version ${POSTGRES_VERSION} "
-        elif [[ ${BACKEND} == "mysql" ]]; then
-            EXTRA_ARGS="--mysql-version ${MYSQL_VERSION} "
-        fi
-        echo "${COLOR_RED}***********************************************************************************************${COLOR_RESET}"
-        echo "${COLOR_RED}*${COLOR_RESET}"
-        echo "${COLOR_RED}* ERROR! Some tests failed, unfortunately. Those might be transient errors,${COLOR_RESET}"
-        echo "${COLOR_RED}* but usually you have to fix something.${COLOR_RESET}"
-        echo "${COLOR_RED}* See the above log for details.${COLOR_RESET}"
-        echo "${COLOR_RED}*${COLOR_RESET}"
-        echo "${COLOR_RED}***********************************************************************************************${COLOR_RESET}"
-        echo
-        echo "${COLOR_BLUE}***********************************************************************************************${COLOR_RESET}"
-        echo "${COLOR_BLUE}Enter the same environment that was used for the tests:${COLOR_RESET}"
-        echo "${COLOR_YELLOW}breeze --image-tag ${IMAGE_TAG=} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} ${INTEGRATION_BREEZE_FLAGS[*]} shell${COLOR_RESET}"
-        echo "${COLOR_BLUE}Then you can run failed tests with:${COLOR_RESET}"
-        echo "${COLOR_YELLOW}pytest [TEST_NAME]${COLOR_RESET}"
-        echo "${COLOR_BLUE}Or you can run the tests: ${COLOR_RESET}"
-        echo "${COLOR_YELLOW}breeze --image-tag ${IMAGE_TAG=} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} ${INTEGRATION_BREEZE_FLAGS[*]} test${COLOR_RESET}"
-        echo "${COLOR_BLUE}***********************************************************************************************${COLOR_RESET}"
-
-
-        if [[ ${UPGRADE_TO_NEWER_DEPENDENCIES} != "false" ]]; then
-            local constraints_url="https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-source-providers-${PYTHON_MAJOR_MINOR_VERSION}.txt"
-            echo "${COLOR_BLUE}***********************************************************************************************${COLOR_RESET}"
-            echo "${COLOR_BLUE}*${COLOR_RESET}"
-            echo "${COLOR_BLUE}* In case you see unrelated test failures, it can be due to newer dependencies released.${COLOR_RESET}"
-            echo "${COLOR_BLUE}* This is either because it is 'main' branch or because this PR modifies dependencies (setup.* files).${COLOR_RESET}"
-            echo "${COLOR_BLUE}* Therefore 'eager-upgrade' is used to build the image, This means that this build can have newer dependencies than the 'tested' set of constraints,${COLOR_RESET}"
-            echo "${COLOR_BLUE}*${COLOR_RESET}"
-            echo "${COLOR_BLUE}* The tested constraints for that build are available at: ${constraints_url} ${COLOR_RESET}"
-            echo "${COLOR_BLUE}*${COLOR_RESET}"
-            echo "${COLOR_BLUE}* Please double check if the same failure is in other tests and in 'main' branch and check if the dependency differences causes the problem.${COLOR_RESET}"
-            echo "${COLOR_BLUE}* In case you identify the dependency, either fix the root cause or limit the dependency if it is too difficult to fix.${COLOR_RESET}"
-            echo "${COLOR_BLUE}*${COLOR_RESET}"
-            echo "${COLOR_BLUE}* The diff between fixed constraints and those used in this build is below.${COLOR_RESET}"
-            echo "${COLOR_BLUE}*${COLOR_RESET}"
-            echo "${COLOR_BLUE}***********************************************************************************************${COLOR_RESET}"
-            echo
-            curl "${constraints_url}" | grep -ve "^#" | diff --color=always - <( docker run --entrypoint /bin/bash "${AIRFLOW_CI_IMAGE_WITH_TAG}" -c 'pip freeze' \
-                | sort | grep -v "apache_airflow" | grep -v "@" | grep -v "/opt/airflow" | grep -ve "^#")
-            echo
-        fi
-    fi
-
-    echo ${exit_code} > "${PARALLEL_JOB_STATUS}"
-
-    if [[ ${exit_code} == 0 ]]; then
-        echo
-        echo "${COLOR_GREEN}Test type: ${TEST_TYPE} succeeded.${COLOR_RESET}"
-    else
-        echo
-        echo "${COLOR_RED}Test type: ${TEST_TYPE} failed.${COLOR_RESET}"
-    fi
-    return "${exit_code}"
-}
-
-prepare_tests
-
-testing::setup_docker_compose_backend "${TEST_TYPE}"
-run_airflow_testing_in_docker "${@}"
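
Note on the removed scripts: they fanned out one background job per test type (or per backend, for Quarantined
tests) using GNU parallel's semaphore mode - start jobs with "--bg --semaphore --semaphorename ... --jobs N",
then block with "--semaphore --semaphorename ... --wait". A minimal, standalone sketch of that pattern is below;
the MAX_JOBS value, the test-type list and the echo/sleep payload are illustrative placeholders, not taken from
this patch:

    #!/usr/bin/env bash
    # Sketch: run one background job per test type, capped at MAX_JOBS concurrent jobs.
    set -euo pipefail

    MAX_JOBS=4                   # placeholder; the removed scripts derived this from available CPU/memory
    SEMAPHORE_NAME="demo-tests"  # any name; it groups the jobs under one counting semaphore

    for TEST_TYPE in Core API WWW Providers; do
        # --bg returns immediately; --jobs caps how many jobs run at once under this semaphore name
        parallel --bg --semaphore --semaphorename "${SEMAPHORE_NAME}" --jobs "${MAX_JOBS}" \
            bash -c "echo running ${TEST_TYPE}; sleep 1"
    done

    # Block until every job started under this semaphore name has finished
    parallel --semaphore --semaphorename "${SEMAPHORE_NAME}" --wait

The same coordination is now handled by "breeze testing tests --run-in-parallel", which is why the CI jobs above
switch to that command.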