From 641ce142611d4a57bdf3ee1679c12d9f59f070a5 Mon Sep 17 00:00:00 2001
From: "Soichiro Taga (Future)" <52783301+future-taga@users.noreply.github.com>
Date: Fri, 10 Jun 2022 21:20:25 +0900
Subject: [PATCH 001/118] fix 2.3.2 release date. (#24370)

(cherry picked from commit 4daf51a2c388b41201a0a8095e0a97c27d6704c8)
---
 RELEASE_NOTES.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst
index 3ec086d249391..edc7de3df967b 100644
--- a/RELEASE_NOTES.rst
+++ b/RELEASE_NOTES.rst
@@ -21,7 +21,7 @@

 .. towncrier release notes start

-Airflow 2.3.2 (2021-06-04)
+Airflow 2.3.2 (2022-06-04)
 --------------------------

 No significant changes

From 2a7c1f842407f815d8abe0d2239e64f317307439 Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Sun, 19 Jun 2022 21:28:19 +0200
Subject: [PATCH 002/118] Update flask-appbuilder authlib/oauth dependency (#24516)

The dependency we use for flask-appbuilder OAuth authentication
(for GitHub/Google authentication) should follow the limits of the
current flask-appbuilder version.

We added authlib there, but FAB currently limits authlib to <= 1.0,
so we should follow FAB rather than maintain our own dependency here.

This was pointed out in
https://github.com/dpgaspar/Flask-AppBuilder/issues/1861

(cherry picked from commit 5674491dc8e8ed1685cdb4c04922cb72ad8ba9b4)
---
 setup.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/setup.py b/setup.py
index 713fda3c1199d..f2cce10ed150e 100644
--- a/setup.py
+++ b/setup.py
@@ -309,8 +309,8 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version
 facebook = [
     'facebook-business>=6.0.2',
 ]
-flask_appbuilder_authlib = [
-    'authlib',
+flask_appbuilder_oauth = [
+    'flask-appbuilder[oauth]',
 ]
 github = [
     'pygithub',
@@ -768,8 +768,8 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version
     'cncf.kubernetes': kubernetes,  # also has provider, but it extends the core with the KubernetesExecutor
     'dask': dask,
     'deprecated_api': deprecated_api,
-    'github_enterprise': flask_appbuilder_authlib,
-    'google_auth': flask_appbuilder_authlib,
+    'github_enterprise': flask_appbuilder_oauth,
+    'google_auth': flask_appbuilder_oauth,
     'kerberos': kerberos,
     'ldap': ldap,
     'leveldb': leveldb,

From 50657119da418366b91d2da180551b852be4b399 Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Sat, 18 Jun 2022 23:08:51 +0200
Subject: [PATCH 003/118] Get rid of TimedJSONWebSignatureSerializer (#24519)

TimedJSONWebSignatureSerializer has been deprecated in the
itsdangerous library, and its maintainers recommend using dedicated
JWT libraries instead.

https://github.com/pallets/itsdangerous/issues/129

Since we are going to move to FAB 4+ with #22397, where a newer
version of itsdangerous is used, we need to switch to another
library. We are already using PyJWT, so the choice is obvious.

In addition to switching, the following improvements were made:

* the use of JWT claims has been fixed to follow the JWT standard.
  We were using the "iat" claim wrongly. The JWT specification only
  expects that claim to be present and to be a valid UTC timestamp;
  it does not affect the maturity of the signature - a signature is
  still valid even if "iat" is in the future. Instead, the "nbf"
  ("not before") claim should be used to verify that the request is
  not coming from the future. We now require all claims to be present
  in the request.
* rather than using salt/signing_context, we switched to the standard
  JWT "audience" claim (same end result)

* we now have much better server-side diagnostics about why a request
  is forbidden - explicit error messages and exception details are
  printed in the server logs. This is secure: we do not leak the
  reason to the client, it is only available in the server logs, so
  there is no risk that an attacker could use it.

* the JWTSigner is use-agnostic. We should be able to use the same
  class for any other signatures (for example the Internal API from
  AIP-44) with just a different audience

* a short, 5-second default clock skew is allowed, to account for
  systems whose clocks are only "almost" synchronized

* more tests were added, using time freezing to test both expiry and
  immaturity of the request

This change is not a breaking one, because the JWT authentication
details are not "public API" - but in case someone reverse engineered
our claims and implemented their own log file retrieval, we should
mention the change in our changelog - therefore a newsfragment is
added.

(cherry picked from commit 1f8e4c955394b31462956501d9a6741b98892f3a)
---
 airflow/utils/jwt_signer.py            |  82 ++++++++++++++
 airflow/utils/log/file_task_handler.py |  18 +--
 airflow/utils/serve_logs.py            |  84 +++++++++-----
 newsfragments/24519.misc.rst           |   1 +
 tests/utils/test_serve_logs.py         | 147 ++++++++++++++++++++-----
 5 files changed, 272 insertions(+), 60 deletions(-)
 create mode 100644 airflow/utils/jwt_signer.py
 create mode 100644 newsfragments/24519.misc.rst

diff --git a/airflow/utils/jwt_signer.py b/airflow/utils/jwt_signer.py
new file mode 100644
index 0000000000000..941a3d05981ce
--- /dev/null
+++ b/airflow/utils/jwt_signer.py
@@ -0,0 +1,82 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from datetime import datetime, timedelta
+from typing import Any, Dict
+
+import jwt
+
+
+class JWTSigner:
+    """
+    Signs and verifies JWT Token. Used to authorise and verify requests.
+
+    :param secret_key: key used to sign the request
+    :param expiration_time_in_seconds: time after which the token becomes invalid (in seconds)
+    :param audience: audience that the request is expected to have
+    :param leeway_in_seconds: leeway that allows for a small clock skew between the two parties
+    :param algorithm: algorithm used for signing
+    """
+
+    def __init__(
+        self,
+        secret_key: str,
+        expiration_time_in_seconds: int,
+        audience: str,
+        leeway_in_seconds: int = 5,
+        algorithm: str = "HS512",
+    ):
+        self._secret_key = secret_key
+        self._expiration_time_in_seconds = expiration_time_in_seconds
+        self._audience = audience
+        self._leeway_in_seconds = leeway_in_seconds
+        self._algorithm = algorithm
+
+    def generate_signed_token(self, extra_payload: Dict[str, Any]) -> str:
+        """
+        Generate JWT with extra payload added.
+ :param extra_payload: extra payload that is added to the signed token + :return: signed token + """ + jwt_dict = { + "aud": self._audience, + "iat": datetime.utcnow(), + "nbf": datetime.utcnow(), + "exp": datetime.utcnow() + timedelta(seconds=self._expiration_time_in_seconds), + } + jwt_dict.update(extra_payload) + token = jwt.encode( + jwt_dict, + self._secret_key, + algorithm=self._algorithm, + ) + return token + + def verify_token(self, token: str) -> Dict[str, Any]: + payload = jwt.decode( + token, + self._secret_key, + leeway=timedelta(seconds=self._leeway_in_seconds), + algorithms=[self._algorithm], + options={ + "verify_signature": True, + "require_exp": True, + "require_iat": True, + "require_nbf": True, + }, + audience=self._audience, + ) + return payload diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py index e0561991b255f..321e125288766 100644 --- a/airflow/utils/log/file_task_handler.py +++ b/airflow/utils/log/file_task_handler.py @@ -22,11 +22,10 @@ from pathlib import Path from typing import TYPE_CHECKING, Optional, Tuple -from itsdangerous import TimedJSONWebSignatureSerializer - from airflow.configuration import AirflowConfigException, conf from airflow.utils.context import Context from airflow.utils.helpers import parse_template_string, render_template_to_string +from airflow.utils.jwt_signer import JWTSigner from airflow.utils.log.non_caching_file_handler import NonCachingFileHandler if TYPE_CHECKING: @@ -191,16 +190,17 @@ def _read(self, ti, try_number, metadata=None): except (AirflowConfigException, ValueError): pass - signer = TimedJSONWebSignatureSerializer( + signer = JWTSigner( secret_key=conf.get('webserver', 'secret_key'), - algorithm_name='HS512', - expires_in=conf.getint('webserver', 'log_request_clock_grace', fallback=30), - # This isn't really a "salt", more of a signing context - salt='task-instance-logs', + expiration_time_in_seconds=conf.getint( + 'webserver', 'log_request_clock_grace', fallback=30 + ), + audience="task-instance-logs", ) - response = httpx.get( - url, timeout=timeout, headers={'Authorization': signer.dumps(log_relative_path)} + url, + timeout=timeout, + headers={b'Authorization': signer.generate_signed_token({"filename": log_relative_path})}, ) response.encoding = "utf-8" diff --git a/airflow/utils/serve_logs.py b/airflow/utils/serve_logs.py index 50fdb47a024a6..e14162178b182 100644 --- a/airflow/utils/serve_logs.py +++ b/airflow/utils/serve_logs.py @@ -16,55 +16,89 @@ # under the License. 
"""Serve logs process""" +import logging import os -import time import gunicorn.app.base from flask import Flask, abort, request, send_from_directory -from itsdangerous import TimedJSONWebSignatureSerializer +from jwt.exceptions import ( + ExpiredSignatureError, + ImmatureSignatureError, + InvalidAudienceError, + InvalidIssuedAtError, + InvalidSignatureError, +) from setproctitle import setproctitle from airflow.configuration import conf +from airflow.utils.docs import get_docs_url +from airflow.utils.jwt_signer import JWTSigner + +logger = logging.getLogger(__name__) def create_app(): flask_app = Flask(__name__, static_folder=None) - max_request_age = conf.getint('webserver', 'log_request_clock_grace', fallback=30) + expiration_time_in_seconds = conf.getint('webserver', 'log_request_clock_grace', fallback=30) log_directory = os.path.expanduser(conf.get('logging', 'BASE_LOG_FOLDER')) - signer = TimedJSONWebSignatureSerializer( + signer = JWTSigner( secret_key=conf.get('webserver', 'secret_key'), - algorithm_name='HS512', - expires_in=max_request_age, - # This isn't really a "salt", more of a signing context - salt='task-instance-logs', + expiration_time_in_seconds=expiration_time_in_seconds, + audience="task-instance-logs", ) # Prevent direct access to the logs port @flask_app.before_request def validate_pre_signed_url(): try: - auth = request.headers['Authorization'] - - # We don't actually care about the payload, just that the signature - # was valid and the `exp` claim is correct - filename, headers = signer.loads(auth, return_header=True) - - issued_at = int(headers['iat']) - expires_at = int(headers['exp']) - except Exception: + auth = request.headers.get('Authorization') + if auth is None: + logger.warning("The Authorization header is missing: %s.", request.headers) + abort(403) + payload = signer.verify_token(auth) + token_filename = payload.get("filename") + request_filename = request.view_args['filename'] + if token_filename is None: + logger.warning("The payload does not contain 'filename' key: %s.", payload) + abort(403) + if token_filename != request_filename: + logger.warning( + "The payload log_relative_path key is different than the one in token:" + "Request path: %s. Token path: %s.", + request_filename, + token_filename, + ) + abort(403) + except InvalidAudienceError: + logger.warning("Invalid audience for the request", exc_info=True) abort(403) - - if filename != request.view_args['filename']: + except InvalidSignatureError: + logger.warning("The signature of the request was wrong", exc_info=True) abort(403) - - # Validate the `iat` and `exp` are within `max_request_age` of now. - now = int(time.time()) - if abs(now - issued_at) > max_request_age: + except ImmatureSignatureError: + logger.warning("The signature of the request was sent from the future", exc_info=True) abort(403) - if abs(now - expires_at) > max_request_age: + except ExpiredSignatureError: + logger.warning( + "The signature of the request has expired. Make sure that all components " + "in your system have synchronized clocks. " + "See more at %s", + get_docs_url("configurations-ref.html#secret-key"), + exc_info=True, + ) abort(403) - if issued_at > expires_at or expires_at - issued_at > max_request_age: + except InvalidIssuedAtError: + logger.warning( + "The request was issues in the future. Make sure that all components " + "in your system have synchronized clocks. 
" + "See more at %s", + get_docs_url("configurations-ref.html#secret-key"), + exc_info=True, + ) + abort(403) + except Exception: + logger.warning("Unknown error", exc_info=True) abort(403) @flask_app.route('/log/') diff --git a/newsfragments/24519.misc.rst b/newsfragments/24519.misc.rst new file mode 100644 index 0000000000000..799d9141d2a0a --- /dev/null +++ b/newsfragments/24519.misc.rst @@ -0,0 +1 @@ +The JWT claims in the request to retrieve logs have been standardized: we use "nbf" and "aud" claims for maturity and audience of the requests. Also "filename" payload field is used to keep log name. diff --git a/tests/utils/test_serve_logs.py b/tests/utils/test_serve_logs.py index 168a43a012787..f8d38817592b8 100644 --- a/tests/utils/test_serve_logs.py +++ b/tests/utils/test_serve_logs.py @@ -14,12 +14,15 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +import datetime from typing import TYPE_CHECKING +import jwt import pytest -from itsdangerous import TimedJSONWebSignatureSerializer +from freezegun import freeze_time from airflow.configuration import conf +from airflow.utils.jwt_signer import JWTSigner from airflow.utils.serve_logs import create_app from tests.test_utils.config import conf_vars @@ -47,12 +50,19 @@ def sample_log(tmpdir): @pytest.fixture def signer(): - return TimedJSONWebSignatureSerializer( + return JWTSigner( secret_key=conf.get('webserver', 'secret_key'), - algorithm_name='HS512', - expires_in=30, - # This isn't really a "salt", more of a signing context - salt='task-instance-logs', + expiration_time_in_seconds=30, + audience="task-instance-logs", + ) + + +@pytest.fixture +def different_audience(): + return JWTSigner( + secret_key=conf.get('webserver', 'secret_key'), + expiration_time_in_seconds=30, + audience="different-audience", ) @@ -62,49 +72,134 @@ def test_forbidden_no_auth(self, client: "FlaskClient"): assert 403 == client.get('/log/sample.log').status_code def test_should_serve_file(self, client: "FlaskClient", signer): + response = client.get( + '/log/sample.log', + headers={ + 'Authorization': signer.generate_signed_token({"filename": 'sample.log'}), + }, + ) + assert response.data.decode() == LOG_DATA + assert response.status_code == 200 + + def test_forbidden_different_logname(self, client: "FlaskClient", signer): + response = client.get( + '/log/sample.log', + headers={ + 'Authorization': signer.generate_signed_token({"filename": 'different.log'}), + }, + ) + assert response.status_code == 403 + + def test_forbidden_expired(self, client: "FlaskClient", signer): + with freeze_time("2010-01-14"): + token = signer.generate_signed_token({"filename": 'sample.log'}) + assert ( + client.get( + '/log/sample.log', + headers={ + 'Authorization': token, + }, + ).status_code + == 403 + ) + + def test_forbidden_future(self, client: "FlaskClient", signer): + with freeze_time(datetime.datetime.utcnow() + datetime.timedelta(seconds=3600)): + token = signer.generate_signed_token({"filename": 'sample.log'}) assert ( - LOG_DATA - == client.get( + client.get( '/log/sample.log', headers={ - 'Authorization': signer.dumps('sample.log'), + 'Authorization': token, }, - ).data.decode() + ).status_code + == 403 ) - def test_forbidden_too_long_validity(self, client: "FlaskClient", signer): - signer.expires_in = 3600 + def test_ok_with_short_future_skew(self, client: "FlaskClient", signer): + with freeze_time(datetime.datetime.utcnow() + datetime.timedelta(seconds=1)): + token = 
signer.generate_signed_token({"filename": 'sample.log'}) assert ( - 403 - == client.get( + client.get( '/log/sample.log', headers={ - 'Authorization': signer.dumps('sample.log'), + 'Authorization': token, }, ).status_code + == 200 ) - def test_forbidden_expired(self, client: "FlaskClient", signer): - # Fake the time we think we are - signer.now = lambda: 0 + def test_ok_with_short_past_skew(self, client: "FlaskClient", signer): + with freeze_time(datetime.datetime.utcnow() - datetime.timedelta(seconds=31)): + token = signer.generate_signed_token({"filename": 'sample.log'}) + assert ( + client.get( + '/log/sample.log', + headers={ + 'Authorization': token, + }, + ).status_code + == 200 + ) + + def test_forbidden_with_long_future_skew(self, client: "FlaskClient", signer): + with freeze_time(datetime.datetime.utcnow() + datetime.timedelta(seconds=10)): + token = signer.generate_signed_token({"filename": 'sample.log'}) + assert ( + client.get( + '/log/sample.log', + headers={ + 'Authorization': token, + }, + ).status_code + == 403 + ) + + def test_forbidden_with_long_past_skew(self, client: "FlaskClient", signer): + with freeze_time(datetime.datetime.utcnow() - datetime.timedelta(seconds=40)): + token = signer.generate_signed_token({"filename": 'sample.log'}) + assert ( + client.get( + '/log/sample.log', + headers={ + 'Authorization': token, + }, + ).status_code + == 403 + ) + + def test_wrong_audience(self, client: "FlaskClient", different_audience): assert ( - 403 - == client.get( + client.get( '/log/sample.log', headers={ - 'Authorization': signer.dumps('sample.log'), + 'Authorization': different_audience.generate_signed_token({"filename": 'sample.log'}), }, ).status_code + == 403 ) - def test_wrong_context(self, client: "FlaskClient", signer): - signer.salt = None + @pytest.mark.parametrize("claim_to_remove", ["iat", "exp", "nbf", "aud"]) + def test_missing_claims(self, claim_to_remove: str, client: "FlaskClient"): + jwt_dict = { + "aud": "task-instance-logs", + "iat": datetime.datetime.utcnow(), + "nbf": datetime.datetime.utcnow(), + "exp": datetime.datetime.utcnow() + datetime.timedelta(seconds=30), + } + del jwt_dict[claim_to_remove] + jwt_dict.update({"filename": 'sample.log'}) + token = jwt.encode( + jwt_dict, + conf.get('webserver', 'secret_key'), + algorithm="HS512", + ) assert ( - 403 - == client.get( + client.get( '/log/sample.log', headers={ - 'Authorization': signer.dumps('sample.log'), + 'Authorization': token, }, ).status_code + == 403 ) From df49aa06a6bad2010050cce13a9bd52ee705bb97 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sat, 4 Jun 2022 20:43:44 +0200 Subject: [PATCH 004/118] Update 'rich' to latest version across the board. (#24186) That Also includes regenerating the breeze output images. 
(cherry picked from commit 13c62b52cbe0bdcf3319a403ce061268661245b3) --- .pre-commit-config.yaml | 42 +- dev/breeze/README.md | 2 +- dev/breeze/setup.cfg | 2 +- images/breeze/output-build-docs.svg | 318 +++++++++--- images/breeze/output-build-image.svg | 333 ++++++++++--- images/breeze/output-build-prod-image.svg | 418 ++++++++++++---- images/breeze/output-cleanup.svg | 118 +++-- images/breeze/output-command-hash-export.svg | 86 +++- images/breeze/output-commands-hash.txt | 1 + images/breeze/output-commands.svg | 454 ++++++++++++++---- images/breeze/output-config.svg | 148 ++++-- images/breeze/output-docker-compose-tests.svg | 128 +++-- images/breeze/output-exec.svg | 91 ++-- .../breeze/output-find-newer-dependencies.svg | 133 +++-- images/breeze/output-fix-ownership.svg | 98 ++-- images/breeze/output-free-space.svg | 98 ++-- images/breeze/output-generate-constraints.svg | 188 ++++++-- .../breeze/output-prepare-airflow-package.svg | 123 +++-- .../output-prepare-provider-documentation.svg | 173 +++++-- .../output-prepare-provider-packages.svg | 183 +++++-- images/breeze/output-pull-image.svg | 183 +++++-- images/breeze/output-pull-prod-image.svg | 183 +++++-- images/breeze/output-release-prod-images.svg | 168 +++++-- images/breeze/output-resource-check.svg | 91 ++-- images/breeze/output-self-upgrade.svg | 101 ++-- images/breeze/output-setup-autocomplete.svg | 113 +++-- images/breeze/output-shell.svg | 264 +++++++--- images/breeze/output-start-airflow.svg | 274 ++++++++--- images/breeze/output-static-checks.svg | 285 ++++++++--- images/breeze/output-stop.svg | 106 ++-- images/breeze/output-tests.svg | 133 +++-- images/breeze/output-verify-image.svg | 128 +++-- images/breeze/output-verify-prod-image.svg | 128 +++-- .../output-verify-provider-packages.svg | 178 +++++-- images/breeze/output-version.svg | 86 +++- setup.cfg | 2 +- 36 files changed, 4140 insertions(+), 1420 deletions(-) create mode 100644 images/breeze/output-commands-hash.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8f4039f1e9050..22d9712590add 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -324,7 +324,7 @@ repos: files: ^setup\.cfg$|^setup\.py$ pass_filenames: false entry: ./scripts/ci/pre_commit/pre_commit_check_order_setup.py - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: check-extra-packages-references name: Checks setup extra packages description: Checks if all the libraries in setup.py are listed in extra-packages-ref.rst file @@ -332,7 +332,7 @@ repos: files: ^setup\.py$|^docs/apache-airflow/extra-packages-ref\.rst$ pass_filenames: false entry: ./scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] # This check might be removed when min-airflow-version in providers is 2.2 - id: check-airflow-2-1-compatibility name: Check that providers are 2.1 compatible. 
@@ -340,21 +340,21 @@ repos: language: python pass_filenames: true files: ^airflow/providers/.*\.py$ - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: update-breeze-file name: Update output of breeze commands in BREEZE.rst entry: ./scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py language: python files: ^BREEZE\.rst$|^dev/breeze/.*$ pass_filenames: false - additional_dependencies: ['rich>=12.4.1', 'rich-click'] + additional_dependencies: ['rich>=12.4.4', 'rich-click'] - id: update-local-yml-file name: Update mounts in the local yml file entry: ./scripts/ci/pre_commit/pre_commit_local_yml_mounts.py language: python files: ^dev/breeze/src/airflow_breeze/utils/docker_command_utils\.py$|^scripts/ci/docker_compose/local\.yml$ pass_filenames: false - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: update-setup-cfg-file name: Update setup.cfg file with all licenses entry: ./scripts/ci/pre_commit/pre_commit_setup_cfg_file.sh @@ -380,7 +380,7 @@ repos: language: python files: ^Dockerfile$ pass_filenames: false - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: update-supported-versions name: Updates supported versions in documentation entry: ./scripts/ci/pre_commit/pre_commit_supported_versions.py @@ -602,7 +602,7 @@ repos: - 'jsonschema>=3.2.0,<5.0.0' - 'tabulate==0.8.8' - 'jsonpath-ng==1.5.3' - - 'rich>=12.4.1' + - 'rich>=12.4.4' - id: check-pre-commit-information-consistent name: Update information re pre-commit hooks and verify ids and names entry: ./scripts/ci/pre_commit/pre_commit_check_pre_commit_hooks.py @@ -610,7 +610,7 @@ repos: - --max-length=64 language: python files: ^\.pre-commit-config\.yaml$|^scripts/ci/pre_commit/pre_commit_check_pre_commit_hook_names\.py$ - additional_dependencies: ['pyyaml', 'jinja2', 'black==22.3.0', 'tabulate', 'rich>=12.4.1'] + additional_dependencies: ['pyyaml', 'jinja2', 'black==22.3.0', 'tabulate', 'rich>=12.4.4'] require_serial: true pass_filenames: false - id: check-airflow-providers-have-extras @@ -620,7 +620,7 @@ repos: files: ^setup\.py$|^airflow/providers/.*\.py$ pass_filenames: false require_serial: true - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: update-breeze-readme-config-hash name: Update Breeze README.md with config files hash language: python @@ -635,7 +635,7 @@ repos: files: ^dev/breeze/.*$ pass_filenames: false require_serial: true - additional_dependencies: ['click', 'rich>=12.4.1'] + additional_dependencies: ['click', 'rich>=12.4.4'] - id: check-system-tests-present name: Check if system tests have required segments of code entry: ./scripts/ci/pre_commit/pre_commit_check_system_tests.py @@ -643,7 +643,7 @@ repos: files: ^tests/system/.*/example_[^/]*.py$ exclude: ^tests/system/providers/google/bigquery/example_bigquery_queries\.py$ pass_filenames: true - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: lint-markdown name: Run markdownlint description: Checks the style of Markdown files. 
@@ -734,7 +734,7 @@ repos: language: python pass_filenames: true files: ^\.github/workflows/.*\.yml$ - additional_dependencies: ['PyYAML', 'rich>=12.4.1'] + additional_dependencies: ['PyYAML', 'rich>=12.4.4'] - id: check-docstring-param-types name: Check that docstrings do not specify param types entry: ./scripts/ci/pre_commit/pre_commit_docstring_param_type.py @@ -742,7 +742,7 @@ repos: pass_filenames: true files: \.py$ exclude: ^airflow/_vendor/ - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: lint-chart-schema name: Lint chart/values.schema.json file entry: ./scripts/ci/pre_commit/pre_commit_chart_schema.py @@ -777,7 +777,7 @@ repos: entry: ./scripts/ci/pre_commit/pre_commit_mypy.py files: ^dev/.*\.py$ require_serial: true - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: run-mypy name: Run mypy for core language: python @@ -785,14 +785,14 @@ repos: files: \.py$ exclude: ^provider_packages|^docs|^airflow/_vendor/|^airflow/providers|^airflow/migrations|^dev require_serial: true - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: run-mypy name: Run mypy for providers language: python entry: ./scripts/ci/pre_commit/pre_commit_mypy.py --namespace-packages files: ^airflow/providers/.*\.py$ require_serial: true - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: run-mypy name: Run mypy for /docs/ folder language: python @@ -800,7 +800,7 @@ repos: files: ^docs/.*\.py$ exclude: ^docs/rtd-deprecation require_serial: true - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: run-flake8 name: Run flake8 language: python @@ -808,7 +808,7 @@ repos: files: \.py$ pass_filenames: true exclude: ^airflow/_vendor/ - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: lint-javascript name: ESLint against airflow/ui language: python @@ -816,7 +816,7 @@ repos: files: ^airflow/ui/ entry: ./scripts/ci/pre_commit/pre_commit_ui_lint.py pass_filenames: false - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: lint-javascript name: ESLint against current UI JavaScript files language: python @@ -824,12 +824,12 @@ repos: files: ^airflow/www/static/js/ entry: ./scripts/ci/pre_commit/pre_commit_www_lint.py pass_filenames: false - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] - id: update-migration-references name: Update migration ref doc language: python entry: ./scripts/ci/pre_commit/pre_commit_migration_reference.py pass_filenames: false files: ^airflow/migrations/versions/.*\.py$|^docs/apache-airflow/migrations-ref\.rst$ - additional_dependencies: ['rich>=12.4.1'] + additional_dependencies: ['rich>=12.4.4'] ## ONLY ADD PRE-COMMITS HERE THAT REQUIRE CI IMAGE diff --git a/dev/breeze/README.md b/dev/breeze/README.md index 78d69b4381658..7794f25e4e8eb 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -52,6 +52,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. 
---------------------------------------------------------------------------------------------------------

-Package config hash: 480e3d6ae41e7d82216d1d4196b5b44f65e0ff06a6e58f2a5b0c79c70c5912e8e4a42e99429cf3220718491f0ebde35a1a1d2663fd36cbd8d902a4d51f7ebeb1
+Package config hash: 40b9b6908905e94c93809cca70c68c632731242798dba9cbe62473e965cb4e5d44eaaa817c5ce9334397f3794a350bc00e3cf319631a25c461a935a389191e7b

---------------------------------------------------------------------------------------------------------

diff --git a/dev/breeze/setup.cfg b/dev/breeze/setup.cfg
index 14362ea492594..9c7154ce52f2a 100644
--- a/dev/breeze/setup.cfg
+++ b/dev/breeze/setup.cfg
@@ -63,7 +63,7 @@ install_requires =
     pytest-xdist
     pyyaml
     requests
-    rich>=12.4.1
+    rich>=12.4.4
     rich_click

 [options.packages.find]

[The remaining hunks of this patch regenerate the auto-generated SVG screenshots of breeze command help
output listed in the diffstat above (images/breeze/output-build-docs.svg, output-build-image.svg,
output-build-prod-image.svg and the other images/breeze/output-*.svg files) and add the new
images/breeze/output-commands-hash.txt file. The generated SVG markup is omitted here.]
+(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--install-airflow-version-VInstall version of Airflow from PyPI.(TEXT) +--upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. +--debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] +--image-tag-tTag added to the default naming conventions of Airflow CI/PROD images.(TEXT) +--tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful +when you build or pull image with --image-tag.                                  +--docker-cache-cCache option for image used during the build.(registry | local | disabled) +[default: registry]                           +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Building multiple images ───────────────────────────────────────────────────────────────────────────────────────────╮ +--build-multiple-imagesRun the operation sequentially on all or selected subset of Python versions. +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.7 3.8 3.9 3.10]                                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options for customizing images ─────────────────────────────────────────────────────────────────────────────────────╮ +--install-providers-from-sourcesInstall providers from sources when installing. +--airflow-extrasExtras to install by default.                                                    +(TEXT)                                                                           +[default:                                                                        +amazon,async,celery,cncf.kubernetes,dask,docker,elasticsearch,ftp,google,google… +--airflow-constraints-modeMode of constraints for PROD image building                             +(constraints | constraints-no-providers | constraints-source-providers) +[default: constraints]                                                  +--airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) +--additional-python-depsAdditional python dependencies to use when building the images.(TEXT) +--additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) +--additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) +--additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) +--additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) +--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) +--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) +--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) +--runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) +--runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) +--dev-apt-depsApt dev dependencies to use when building the images.(TEXT) +--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ 
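As a quick orientation for the regenerated screenshot above, an invocation using only flags that appear in that help output might look like the following sketch (illustrative only, not taken from the patch itself; the Python version, cache mode, and image tag are arbitrary placeholder values):

    # Build a production image for a selected Python version, using the local
    # Docker cache and a custom image tag (all values here are placeholders).
    breeze build-prod-image --python 3.9 --docker-cache local --image-tag my-prod-test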
diff --git a/images/breeze/output-cleanup.svg b/images/breeze/output-cleanup.svg
index 68395e65ebe26..9bd373b98be83 100644
--- a/images/breeze/output-cleanup.svg
+++ b/images/breeze/output-cleanup.svg
[regenerated SVG terminal screenshot "Command: cleanup": help output of `breeze cleanup`, which cleans the cache of parameters and the docker cache and, with --all, also removes currently downloaded Breeze images]
diff --git a/images/breeze/output-command-hash-export.svg b/images/breeze/output-command-hash-export.svg
index 03d562e242f45..789317d72494f 100644
--- a/images/breeze/output-command-hash-export.svg
+++ b/images/breeze/output-command-hash-export.svg
[regenerated SVG terminal screenshot "Command: command-hash-export": help output of `breeze command-hash-export`, which outputs a hash of all click commands to a file, or to stdout if `-` is used (useful to see if images should be regenerated)]
diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt
new file mode 100644
index 0000000000000..08e0c94ed2eef
--- /dev/null
+++ b/images/breeze/output-commands-hash.txt
@@ -0,0 +1 @@
+c227c66369d92c76c0e52939e3e4ecf1
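The new output-commands-hash.txt appears to correspond to what `command-hash-export` produces; a sketch of how it could be cross-checked locally, assuming a working breeze installation (illustrative only, not taken from the patch itself; the /tmp path is an arbitrary choice):

    # Export the current hash of all click commands to stdout (the `-` argument,
    # as described in the help output) and compare it with the committed file.
    breeze command-hash-export - > /tmp/breeze-commands-hash.txt
    diff /tmp/breeze-commands-hash.txt images/breeze/output-commands-hash.txt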
diff --git a/images/breeze/output-commands.svg b/images/breeze/output-commands.svg
index 4fe025fcef29d..be4ea5a514d12 100644
--- a/images/breeze/output-commands.svg
+++ b/images/breeze/output-commands.svg
[regenerated SVG terminal screenshot "Breeze commands": top-level `breeze` help output, listing the basic and advanced flags of the default (shell) command plus the Developer tools, Testing, Configuration & maintenance, CI Image tools, Production Image tools and Release management command groups]
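For orientation, the default (shell) command documented in that overview is entered by running `breeze` with just the basic flags; a minimal illustrative invocation using only flags shown in the regenerated output (not taken from the patch itself; the chosen values are arbitrary):

    # Enter the Breeze shell with a non-default Python version and database
    # backend, resetting the DB on entry (flags as listed in the help above).
    breeze --python 3.8 --backend postgres --db-reset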
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-config.svg b/images/breeze/output-config.svg index f3c2e8c23e987..b196ab3306b24 100644 --- a/images/breeze/output-config.svg +++ b/images/breeze/output-config.svg @@ -1,4 +1,4 @@ - + - Command: config - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: config + + + + + - + -                                                                                                                          - Usage: breeze config [OPTIONS]                                                                                          -                                                                                                                         - Show/update configuration (Python, Backend, Cheatsheet, ASCIIART).                                                      -                                                                                                                         -╭─ Config flags ───────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --python                      -p     Python major/minor version used in Airflow image for images.                    -                                       (>3.7< | 3.8 | 3.9 | 3.10)                                                      -                                       [default: 3.7]                                                                  -  --backend                     -b     Database backend to use. (>sqlite< | mysql | postgres | mssql)                  -                                       [default: sqlite]                                                               -  --cheatsheet/--no-cheatsheet  -C/-c  Enable/disable cheatsheet.                                                      -  --asciiart/--no-asciiart      -A/-a  Enable/disable ASCIIart.                                                        -  --colour/--no-colour          -B/-b  Enable/disable Colour mode (useful for colour blind-friendly communication).    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --postgres-version  -P  Version of Postgres used. (>10< | 11 | 12 | 13 | 14) [default: 10]                           -  --mysql-version     -M  Version of MySQL used. (>5.7< | 8) [default: 5.7]                                            -  --mssql-version     -S  Version of MsSQL used. (>2017-latest< | 2019-latest) [default: 2017-latest]                  -  --help              -h  Show this message and exit.                                                                  -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze config [OPTIONS] + +Show/update configuration (Python, Backend, Cheatsheet, ASCIIART). + +╭─ Config flags ───────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. 
+(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql) +[default: sqlite]        +--cheatsheet/--no-cheatsheet-C/-cEnable/disable cheatsheet. +--asciiart/--no-asciiart-A/-aEnable/disable ASCIIart. +--colour/--no-colour-B/-bEnable/disable Colour mode (useful for colour blind-friendly communication). +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] +--mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] +--mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-docker-compose-tests.svg b/images/breeze/output-docker-compose-tests.svg index 71d16ecda4421..4830ca1215289 100644 --- a/images/breeze/output-docker-compose-tests.svg +++ b/images/breeze/output-docker-compose-tests.svg @@ -1,4 +1,4 @@ - + - Command: docker-compose-tests - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: docker-compose-tests + + + + + - + -                                                                                                                          - Usage: breeze docker-compose-tests [OPTIONS] [EXTRA_PYTEST_ARGS]...                                                     -                                                                                                                         - Run docker-compose tests.                                                                                               -                                                                                                                         -╭─ Docker-compose tests flag ──────────────────────────────────────────────────────────────────────────────────────────╮ -  --image-name  -n  Name of the image to verify (overrides --python and --image-tag). (TEXT)                           -  --python      -p  Python major/minor version used in Airflow image for images. (>3.7< | 3.8 | 3.9 | 3.10)            -                    [default: 3.7]                                                                                     -  --image-tag   -t  Tag added to the default naming conventions of Airflow CI/PROD images. (TEXT)                      -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --verbose            -v  Print verbose information about performed steps.                                            -  --dry-run            -D  If dry-run is set, commands are only printed, not executed.                                 -  --github-repository  -g  GitHub repository used to pull, push run images. (TEXT) [default: apache/airflow]           -  --help               -h  Show this message and exit.                                                                 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze docker-compose-tests [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Run docker-compose tests. + +╭─ Docker-compose tests flag ──────────────────────────────────────────────────────────────────────────────────────────╮ +--image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--image-tag-tTag added to the default naming conventions of Airflow CI/PROD images.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-exec.svg b/images/breeze/output-exec.svg index b197eb3d63d1b..b8e69cd3addef 100644 --- a/images/breeze/output-exec.svg +++ b/images/breeze/output-exec.svg @@ -1,4 +1,4 @@ - + - Command: exec - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: exec + + + + + - + -                                                                                                                          - Usage: breeze exec [OPTIONS] [EXEC_ARGS]...                                                                             -                                                                                                                         - Joins the interactive shell of running airflow container                                                                -                                                                                                                         -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --verbose  -v  Print verbose information about performed steps.                                                      -  --dry-run  -D  If dry-run is set, commands are only printed, not executed.                                           -  --help     -h  Show this message and exit.                                                                           -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze exec [OPTIONS] [EXEC_ARGS]... + +Joins the interactive shell of running airflow container + +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-find-newer-dependencies.svg b/images/breeze/output-find-newer-dependencies.svg index ea60bb6ab5cc9..43fc632ead7ed 100644 --- a/images/breeze/output-find-newer-dependencies.svg +++ b/images/breeze/output-find-newer-dependencies.svg @@ -1,4 +1,4 @@ - + - Command: find-newer-dependencies - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: find-newer-dependencies + + + + + - + -                                                                                                                          - Usage: breeze find-newer-dependencies [OPTIONS]                                                                         -                                                                                                                         - Finds which dependencies are being upgraded.                                                                            -                                                                                                                         -╭─ Find newer dependencies flags ──────────────────────────────────────────────────────────────────────────────────────╮ -  --python               -p  Python major/minor version used in Airflow image for images. (>3.7< | 3.8 | 3.9 | 3.10)   -                             [default: 3.7]                                                                            -  --timezone                 Timezone to use during the check (TEXT)                                                   -  --updated-on-or-after      Date when the release was updated after (TEXT)                                            -  --max-age                  Max age of the last release (used if no updated-on-or-after if specified) (INTEGER)       -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --airflow-constraints-reference      Constraint reference to use. Useful with --use-airflow-version parameter to     -                                       specify constraints for the installed version and to find newer dependencies    -                                       (TEXT)                                                                          -  --help                           -h  Show this message and exit.                                                     -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze find-newer-dependencies [OPTIONS] + +Finds which dependencies are being upgraded. 
+ +╭─ Find newer dependencies flags ──────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--timezoneTimezone to use during the check(TEXT) +--updated-on-or-afterDate when the release was updated after(TEXT) +--max-ageMax age of the last release (used if no updated-on-or-after if specified)(INTEGER) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to     +specify constraints for the installed version and to find newer dependencies    +(TEXT)                                                                          +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-fix-ownership.svg b/images/breeze/output-fix-ownership.svg index a35f337ea38e0..d8fe2ea84f440 100644 --- a/images/breeze/output-fix-ownership.svg +++ b/images/breeze/output-fix-ownership.svg @@ -1,4 +1,4 @@ - + - Command: fix-ownership - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: fix-ownership + + + + + - + -                                                                                                                          - Usage: breeze fix-ownership [OPTIONS]                                                                                   -                                                                                                                         - Fix ownership of source files to be same as host user.                                                                  -                                                                                                                         -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --github-repository  -g  GitHub repository used to pull, push run images. (TEXT) [default: apache/airflow]           -  --verbose            -v  Print verbose information about performed steps.                                            -  --dry-run            -D  If dry-run is set, commands are only printed, not executed.                                 -  --help               -h  Show this message and exit.                                                                 -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze fix-ownership [OPTIONS] + +Fix ownership of source files to be same as host user. + +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. 
diff --git a/images/breeze/output-free-space.svg b/images/breeze/output-free-space.svg
index d0dd2934fe5fe..e0e4296201385 100644
--- a/images/breeze/output-free-space.svg
+++ b/images/breeze/output-free-space.svg
[re-rendered SVG help screenshot for "breeze free-space": free space for jobs run in CI; Options panel: --verbose/-v, --dry-run/-D, --answer/-a, --help/-h]
diff --git a/images/breeze/output-generate-constraints.svg b/images/breeze/output-generate-constraints.svg
index ac2afa05a16bb..ca049566070eb 100644
--- a/images/breeze/output-generate-constraints.svg
+++ b/images/breeze/output-generate-constraints.svg
[re-rendered SVG help screenshot for "breeze generate-constraints": generates pinned constraint files with all extras from setup.py in parallel; panels: "Generate constraints flags" (--image-tag/-t, --python/-p, --airflow-constraints-mode, --debug), "Parallel running" (--run-in-parallel, --parallelism, --python-versions), "Options" (--verbose/-v, --dry-run/-D, --github-repository/-g, --answer/-a, --help/-h)]
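An illustrative invocation of the command above; the Python version and constraints mode are example values chosen from the choices documented in that screen:

    breeze generate-constraints --python 3.9 --airflow-constraints-mode constraints-source-providers --run-in-parallel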
diff --git a/images/breeze/output-prepare-airflow-package.svg b/images/breeze/output-prepare-airflow-package.svg
index 2a3c147b511fb..996151f94cc6d 100644
--- a/images/breeze/output-prepare-airflow-package.svg
+++ b/images/breeze/output-prepare-airflow-package.svg
[re-rendered SVG help screenshot for "breeze prepare-airflow-package": prepare sdist/whl package of Airflow; panels: "Package flags" (--package-format, --version-suffix-for-pypi), "Options" (--verbose/-v, --dry-run/-D, --github-repository/-g, --debug, --help/-h)]
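A sketch of how the flags documented above might be combined (the rc1 suffix is the example given in the help text itself):

    breeze prepare-airflow-package --package-format both --version-suffix-for-pypi rc1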
diff --git a/images/breeze/output-prepare-provider-documentation.svg b/images/breeze/output-prepare-provider-documentation.svg
index 2e5369e5cece8..49e1dd54b6781 100644
--- a/images/breeze/output-prepare-provider-documentation.svg
+++ b/images/breeze/output-prepare-provider-documentation.svg
[re-rendered SVG help screenshot for "breeze prepare-provider-documentation": prepare CHANGELOG, README and COMMITS information for providers, taking an optional list of provider ids (airbyte through zendesk); panels: "Provider documentation preparation flags" (--debug), "Options" (--verbose/-v, --dry-run/-D, --github-repository/-g, --answer/-a, --help/-h)]
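For context, a plausible call for a couple of providers; the provider ids are taken from the list shown in the regenerated screen:

    breeze prepare-provider-documentation cncf.kubernetes google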
diff --git a/images/breeze/output-prepare-provider-packages.svg b/images/breeze/output-prepare-provider-packages.svg
index ec2a04598f2c7..1da53c7d566be 100644
--- a/images/breeze/output-prepare-provider-packages.svg
+++ b/images/breeze/output-prepare-provider-packages.svg
[re-rendered SVG help screenshot for "breeze prepare-provider-packages": prepare sdist/whl packages of Airflow Providers, taking the same optional list of provider ids; panels: "Package flags" (--package-format, --version-suffix-for-pypi, --package-list-file, --debug), "Options" (--verbose/-v, --dry-run/-D, --github-repository/-g, --help/-h)]
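Similarly, an illustrative invocation limited to flags and provider ids shown above (wheel is the documented default format):

    breeze prepare-provider-packages --package-format wheel amazon google postgres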
diff --git a/images/breeze/output-pull-image.svg b/images/breeze/output-pull-image.svg
index 42cf6b6689f44..e1cce7beebc1c 100644
--- a/images/breeze/output-pull-image.svg
+++ b/images/breeze/output-pull-image.svg
[re-rendered SVG help screenshot for "breeze pull-image": pull and optionally verify CI images, possibly in parallel for all Python versions; panels: "Pull image flags" (--image-tag/-t, --python/-p, --github-token, --verify-image, --wait-for-image, --tag-as-latest), "Parallel running" (--run-in-parallel, --parallelism, --python-versions), "Options" (--verbose/-v, --dry-run/-D, --github-repository/-g, --help/-h)]
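A representative pull-and-verify call using only flags from the panel above (the Python version is one of the documented choices):

    breeze pull-image --python 3.8 --verify-image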
diff --git a/images/breeze/output-pull-prod-image.svg b/images/breeze/output-pull-prod-image.svg
index 9a12580b3bea1..d6ee293d03b8a 100644
--- a/images/breeze/output-pull-prod-image.svg
+++ b/images/breeze/output-pull-prod-image.svg
[re-rendered SVG help screenshot for "breeze pull-prod-image": pull and optionally verify production images, possibly in parallel for all Python versions; same "Pull image flags", "Parallel running" and "Options" panels as pull-image]
diff --git a/images/breeze/output-release-prod-images.svg b/images/breeze/output-release-prod-images.svg
index 31f4da7b4d847..a3a392acc5583 100644
--- a/images/breeze/output-release-prod-images.svg
+++ b/images/breeze/output-release-prod-images.svg
[re-rendered SVG help screenshot for "breeze release-prod-images": release production images to DockerHub (needs DockerHub permissions); panels: "Release PROD IMAGE flags" (--airflow-version [required], --dockerhub-repo, --slim-images, --limit-python, --limit-platform, --skip-latest), "Options" (--verbose/-v, --dry-run/-D, --help/-h)]
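The screen above marks --airflow-version as required; an illustrative invocation, with version and platform values taken from the examples and choices in the help text:

    breeze release-prod-images --airflow-version 2.3.0 --limit-platform linux/amd64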
diff --git a/images/breeze/output-resource-check.svg b/images/breeze/output-resource-check.svg
index 310eaf9a1e707..8ec8aa32d7e7c 100644
--- a/images/breeze/output-resource-check.svg
+++ b/images/breeze/output-resource-check.svg
[re-rendered SVG help screenshot for "breeze resource-check": check whether available docker resources are enough; Options panel: --verbose/-v, --dry-run/-D, --help/-h]
diff --git a/images/breeze/output-self-upgrade.svg b/images/breeze/output-self-upgrade.svg
index 147e6581b2598..205b6f1042e64 100644
--- a/images/breeze/output-self-upgrade.svg
+++ b/images/breeze/output-self-upgrade.svg
[re-rendered SVG help screenshot for "breeze self-upgrade": self-upgrade Breeze; panels: "Self-upgrade flags" (--use-current-airflow-sources/-a, --force/-f), "Options" (--help/-h)]
diff --git a/images/breeze/output-setup-autocomplete.svg b/images/breeze/output-setup-autocomplete.svg
index b07eca2b16701..de7c17ab43ee5 100644
--- a/images/breeze/output-setup-autocomplete.svg
+++ b/images/breeze/output-setup-autocomplete.svg
[re-rendered SVG help screenshot for "breeze setup-autocomplete": enables autocompletion of breeze commands; panels: "Setup autocomplete flags" (--force/-f), "Options" (--answer/-a, --dry-run/-D, --verbose/-v, --help/-h)]
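Both maintenance commands documented above take only a handful of flags; illustrative calls using just the documented ones:

    breeze self-upgrade --use-current-airflow-sources
    breeze setup-autocomplete --force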
diff --git a/images/breeze/output-shell.svg b/images/breeze/output-shell.svg
index c4a63835bc624..f279eab2d729e 100644
--- a/images/breeze/output-shell.svg
+++ b/images/breeze/output-shell.svg
[re-rendered SVG help screenshot for "breeze shell [EXTRA_ARGS]...": enter the Breeze environment, the default command used when no other is selected; panels: "Basic flags" (--python/-p, --backend/-b, --postgres-version/-P, --mysql-version/-M, --mssql-version/-S, --integration, --forward-credentials/-f, --db-reset/-d), "Advanced flag for running" (--use-airflow-version, --airflow-extras, --use-packages-from-dist, --package-format, --force-build, --mount-sources, --debian-version), "Options" (--verbose/-v, --dry-run/-D, --github-repository/-g, --airflow-constraints-reference, --answer/-a, --help/-h)]
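A plausible way to enter the environment with a non-default backend, using only flags from the "Basic flags" panel above (backend and version values are from the documented choices):

    breeze shell --python 3.9 --backend postgres --postgres-version 13 --db-reset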
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced flag for running ──────────────────────────────────────────────────────────────────────────────────────────╮ -  --use-airflow-version       Use (reinstall at entry) Airflow version from PyPI. It can also be `none`, `wheel`, or   -                              `sdist` if Airflow should be removed, installed from wheel packages or sdist packages    -                              available in dist folder respectively. Implies --mount-sources `none`.                   -                              (none | wheel | sdist | <airflow_version>)                                               -  --airflow-extras            Airflow extras to install when --use-airflow-version is used (TEXT)                      -  --use-packages-from-dist    Install all found packages (--package-format determines type) from 'dist' folder when    -                              entering breeze.                                                                         -  --package-format            Format of packages that should be installed from dist. (wheel | sdist)                   -                              [default: wheel]                                                                         -  --force-build               Force image build no matter if it is determined as needed.                               -  --mount-sources             Choose scope of local sources should be mounted (default = selected).                    -                              (selected | all | none)                                                                  -                              [default: selected]                                                                      -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --dry-run                        -D  If dry-run is set, commands are only printed, not executed.                     -  --github-repository              -g  GitHub repository used to pull, push run images. (TEXT)                         -                                       [default: apache/airflow]                                                       -  --airflow-constraints-reference      Constraint reference to use. Useful with --use-airflow-version parameter to     -                                       specify constraints for the installed version and to find newer dependencies    -                                       (TEXT)                                                                          -  --answer                         -a  Force answer to questions. (y | n | q | yes | no | quit)                        -  --verbose                        -v  Print verbose information about performed steps.                                -  --help                           -h  Show this message and exit.                                                     -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze start-airflow [OPTIONS] [EXTRA_ARGS]... + +Enter breeze.py environment and starts all Airflow components in the tmux session. 
+ +╭─ Basic flags ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. +(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--load-example-dags-eEnable configuration to load example DAGs when starting Airflow. +--load-default-connections-cEnable configuration to load default connections when starting Airflow. +--backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] +--postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] +--mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] +--mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] +--integrationIntegration(s) to enable when running (can be more than one).                        +(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino +| all)                                                                               +--forward-credentials-fForward local credentials to container when running. +--db-reset-dReset DB when entering the container. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced flag for running ──────────────────────────────────────────────────────────────────────────────────────────╮ +--use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It can also be `none`, `wheel`, or   +`sdist` if Airflow should be removed, installed from wheel packages or sdist packages    +available in dist folder respectively. Implies --mount-sources `none`.                   +(none | wheel | sdist | <airflow_version>)                                               +--airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT) +--use-packages-from-distInstall all found packages (--package-format determines type) from 'dist' folder when    +entering breeze.                                                                         +--package-formatFormat of packages that should be installed from dist.(wheel | sdist)[default: wheel] +--force-buildForce image build no matter if it is determined as needed. +--mount-sourcesChoose scope of local sources should be mounted (default = selected). +(selected | all | none)                                               +[default: selected]                                                   +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT) +[default: apache/airflow]                        +--airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to     +specify constraints for the installed version and to find newer dependencies    +(TEXT)                                                                          +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-static-checks.svg b/images/breeze/output-static-checks.svg index e76f1a3f27ea3..edbc2fee71d47 100644 --- a/images/breeze/output-static-checks.svg +++ b/images/breeze/output-static-checks.svg @@ -1,4 +1,4 @@ - + - Command: static-checks - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: static-checks + + + + + - + -                                                                                                                          - Usage: breeze static-checks [OPTIONS] [PRECOMMIT_ARGS]...                                                               -                                                                                                                         - Run static checks.                                                                                                      -                                                                                                                         -╭─ Pre-commit flags ───────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --type                  -t  Type(s) of the static checks to run (multiple can be added).                             -                              (all | black | blacken-docs | check-airflow-2-1-compatibility |                          -                              check-airflow-config-yaml-consistent | check-airflow-providers-have-extras |             -                              check-apache-license-rat | check-base-operator-usage |                                   -                              check-boring-cyborg-configuration | check-breeze-top-dependencies-limited |              -                              check-builtin-literals | check-changelog-has-no-duplicates |                             -                              check-daysago-import-from-utils | check-docstring-param-types |                          -                              check-executables-have-shebangs | check-extra-packages-references | check-extras-order   -                              | check-for-inclusive-language | check-hooks-apply |                                     -                              check-incorrect-use-of-LoggingMixin | check-integrations-are-consistent |                -                              check-merge-conflict | check-newsfragments-are-valid |                                   -                              check-no-providers-in-core-examples | check-no-relative-imports |                        -                              check-persist-credentials-disabled-in-github-workflows |                                 -                              check-pre-commit-information-consistent | check-provide-create-sessions-imports |        -                              check-provider-yaml-valid | check-providers-init-file-missing |                          -                              check-providers-subpackages-init-file-exist | check-pydevd-left-in-code |                -                              check-revision-heads-map | check-safe-filter-usage-in-html | check-setup-order |         -                              
check-start-date-not-used-in-defaults | check-system-tests-present | check-xml |         -                              codespell | debug-statements | detect-private-key | doctoc | end-of-file-fixer |         -                              fix-encoding-pragma | flynt | forbid-tabs | identity | insert-license | isort |          -                              lint-chart-schema | lint-css | lint-dockerfile | lint-helm-chart | lint-javascript |     -                              lint-json-schema | lint-markdown | lint-openapi | mixed-line-ending |                    -                              pretty-format-json | pydocstyle | python-no-log-warn | pyupgrade | rst-backticks |       -                              run-flake8 | run-mypy | run-shellcheck | static-check-autoflake | trailing-whitespace    -                              | update-breeze-file | update-breeze-readme-config-hash | update-extras |                -                              update-in-the-wild-to-be-sorted | update-inlined-dockerfile-scripts |                    -                              update-local-yml-file | update-migration-references | update-providers-dependencies |    -                              update-setup-cfg-file | update-spelling-wordlist-to-be-sorted |                          -                              update-supported-versions | update-vendored-in-k8s-json-schema | update-version |        -                              yamllint | yesqa)                                                                        -  --file                  -f  List of files to run the checks on. (PATH)                                               -  --all-files             -a  Run checks on all files.                                                                 -  --show-diff-on-failure  -s  Show diff for files modified by the checks.                                              -  --last-commit           -c  Run checks for all files in last commit. Mutually exclusive with --commit-ref.           -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --commit-ref         -r  Run checks for this commit reference only (can be any git commit-ish reference). Mutually   -                           exclusive with --last-commit.                                                               -                           (TEXT)                                                                                      -  --verbose            -v  Print verbose information about performed steps.                                            -  --dry-run            -D  If dry-run is set, commands are only printed, not executed.                                 -  --github-repository  -g  GitHub repository used to pull, push run images. (TEXT) [default: apache/airflow]           -  --help               -h  Show this message and exit.                                                                 -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze static-checks [OPTIONS] [PRECOMMIT_ARGS]... + +Run static checks. + +╭─ Pre-commit flags ───────────────────────────────────────────────────────────────────────────────────────────────────╮ +--type-tType(s) of the static checks to run (multiple can be added).                             
+(all | black | blacken-docs | check-airflow-2-1-compatibility |                          +check-airflow-config-yaml-consistent | check-airflow-providers-have-extras |             +check-apache-license-rat | check-base-operator-usage | check-boring-cyborg-configuration +| check-breeze-top-dependencies-limited | check-builtin-literals |                       +check-changelog-has-no-duplicates | check-daysago-import-from-utils |                    +check-docstring-param-types | check-executables-have-shebangs |                          +check-extra-packages-references | check-extras-order | check-for-inclusive-language |    +check-hooks-apply | check-incorrect-use-of-LoggingMixin |                                +check-integrations-are-consistent | check-merge-conflict | check-newsfragments-are-valid +| check-no-providers-in-core-examples | check-no-relative-imports |                      +check-persist-credentials-disabled-in-github-workflows |                                 +check-pre-commit-information-consistent | check-provide-create-sessions-imports |        +check-provider-yaml-valid | check-providers-init-file-missing |                          +check-providers-subpackages-init-file-exist | check-pydevd-left-in-code |                +check-revision-heads-map | check-safe-filter-usage-in-html | check-setup-order |         +check-start-date-not-used-in-defaults | check-system-tests-present | check-xml |         +codespell | debug-statements | detect-private-key | doctoc | end-of-file-fixer |         +fix-encoding-pragma | flynt | forbid-tabs | identity | insert-license | isort |          +lint-chart-schema | lint-css | lint-dockerfile | lint-helm-chart | lint-javascript |     +lint-json-schema | lint-markdown | lint-openapi | mixed-line-ending | pretty-format-json +| pydocstyle | python-no-log-warn | pyupgrade | rst-backticks | run-flake8 | run-mypy |  +run-shellcheck | static-check-autoflake | trailing-whitespace | update-breeze-file |     +update-breeze-readme-config-hash | update-extras | update-in-the-wild-to-be-sorted |     +update-inlined-dockerfile-scripts | update-local-yml-file | update-migration-references  +| update-providers-dependencies | update-setup-cfg-file |                                +update-spelling-wordlist-to-be-sorted | update-supported-versions |                      +update-vendored-in-k8s-json-schema | update-version | yamllint | yesqa)                  +--file-fList of files to run the checks on.(PATH) +--all-files-aRun checks on all files. +--show-diff-on-failure-sShow diff for files modified by the checks. +--last-commit-cRun checks for all files in last commit. Mutually exclusive with --commit-ref. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--commit-ref-rRun checks for this commit reference only (can be any git commit-ish reference). Mutually   +exclusive with --last-commit.                                                               +(TEXT)                                                                                      +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-stop.svg b/images/breeze/output-stop.svg index 6f5a9ca2d86d7..5fc1819e95293 100644 --- a/images/breeze/output-stop.svg +++ b/images/breeze/output-stop.svg @@ -1,4 +1,4 @@ - + - Command: stop - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: stop + + + + + - + -                                                                                                                          - Usage: breeze stop [OPTIONS]                                                                                            -                                                                                                                         - Stop running breeze environment.                                                                                        -                                                                                                                         -╭─ Stop flags ─────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --preserve-volumes  -p  Skip removing volumes when stopping Breeze.                                                  -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --verbose  -v  Print verbose information about performed steps.                                                      -  --dry-run  -D  If dry-run is set, commands are only printed, not executed.                                           -  --help     -h  Show this message and exit.                                                                           -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze stop [OPTIONS] + +Stop running breeze environment. + +╭─ Stop flags ─────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--preserve-volumes-pSkip removing volumes when stopping Breeze. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-tests.svg b/images/breeze/output-tests.svg index 0b344da888747..7c02458342214 100644 --- a/images/breeze/output-tests.svg +++ b/images/breeze/output-tests.svg @@ -1,4 +1,4 @@ - + - Command: tests - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: tests + + + + + - + -                                                                                                                          - Usage: breeze tests [OPTIONS] [EXTRA_PYTEST_ARGS]...                                                                    
-                                                                                                                         - Run the specified unit test targets. Multiple targets may be specified separated by spaces.                             -                                                                                                                         -╭─ Basic flag for tests command ───────────────────────────────────────────────────────────────────────────────────────╮ -  --integration       Integration(s) to enable when running (can be more than one).                                    -                      (cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino | all)      -  --test-type    -tt  Type of test to run.                                                                             -                      (All | Always | Core | Providers | API | CLI | Integration | Other | WWW | Postgres | MySQL |    -                      Helm | Quarantined)                                                                              -  --db-reset     -d   Reset DB when entering the container.                                                            -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --dry-run  -D  If dry-run is set, commands are only printed, not executed.                                           -  --verbose  -v  Print verbose information about performed steps.                                                      -  --help     -h  Show this message and exit.                                                                           -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze tests [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Run the specified unit test targets. Multiple targets may be specified separated by spaces. + +╭─ Basic flag for tests command ───────────────────────────────────────────────────────────────────────────────────────╮ +--integrationIntegration(s) to enable when running (can be more than one).                               +(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino | all) +--test-type-ttType of test to run.                                                                             +(All | Always | Core | Providers | API | CLI | Integration | Other | WWW | Postgres | MySQL |    +Helm | Quarantined)                                                                              +--db-reset-dReset DB when entering the container. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-verify-image.svg b/images/breeze/output-verify-image.svg index e35e11819f8ed..b79efb93f2602 100644 --- a/images/breeze/output-verify-image.svg +++ b/images/breeze/output-verify-image.svg @@ -1,4 +1,4 @@ - + - Command: verify-image - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: verify-image + + + + + - + -                                                                                                                          - Usage: breeze verify-image [OPTIONS] [EXTRA_PYTEST_ARGS]...                                                             -                                                                                                                         - Verify CI image.                                                                                                        -                                                                                                                         -╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --image-name  -n  Name of the image to verify (overrides --python and --image-tag). (TEXT)                           -  --python      -p  Python major/minor version used in Airflow image for images. (>3.7< | 3.8 | 3.9 | 3.10)            -                    [default: 3.7]                                                                                     -  --image-tag   -t  Tag added to the default naming conventions of Airflow CI/PROD images. (TEXT)                      -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --verbose            -v  Print verbose information about performed steps.                                            -  --dry-run            -D  If dry-run is set, commands are only printed, not executed.                                 -  --github-repository  -g  GitHub repository used to pull, push run images. (TEXT) [default: apache/airflow]           -  --help               -h  Show this message and exit.                                                                 -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze verify-image [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Verify CI image. + +╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ +--image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--image-tag-tTag added to the default naming conventions of Airflow CI/PROD images.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. 
+--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-verify-prod-image.svg b/images/breeze/output-verify-prod-image.svg index 3dab7852ead6a..4bc7a36527741 100644 --- a/images/breeze/output-verify-prod-image.svg +++ b/images/breeze/output-verify-prod-image.svg @@ -1,4 +1,4 @@ - + - Command: verify-prod-image - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: verify-prod-image + + + + + - + -                                                                                                                          - Usage: breeze verify-prod-image [OPTIONS] [EXTRA_PYTEST_ARGS]...                                                        -                                                                                                                         - Verify Production image.                                                                                                -                                                                                                                         -╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --image-name  -n  Name of the image to verify (overrides --python and --image-tag). (TEXT)                           -  --python      -p  Python major/minor version used in Airflow image for images. (>3.7< | 3.8 | 3.9 | 3.10)            -                    [default: 3.7]                                                                                     -  --image-tag   -t  Tag added to the default naming conventions of Airflow CI/PROD images. (TEXT)                      -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --verbose            -v  Print verbose information about performed steps.                                            -  --dry-run            -D  If dry-run is set, commands are only printed, not executed.                                 -  --github-repository  -g  GitHub repository used to pull, push run images. (TEXT) [default: apache/airflow]           -  --help               -h  Show this message and exit.                                                                 -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze verify-prod-image [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Verify Production image. 
+ +╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ +--image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--image-tag-tTag added to the default naming conventions of Airflow CI/PROD images.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-verify-provider-packages.svg b/images/breeze/output-verify-provider-packages.svg index 061622d35349f..cfb88fbcc4755 100644 --- a/images/breeze/output-verify-provider-packages.svg +++ b/images/breeze/output-verify-provider-packages.svg @@ -1,4 +1,4 @@ - + - Command: verify-provider-packages - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: verify-provider-packages + + + + + - + -                                                                                                                          - Usage: breeze verify-provider-packages [OPTIONS]                                                                        -                                                                                                                         - Verifies if all provider code is following expectations for providers.                                                  -                                                                                                                         -╭─ Provider verification flags ────────────────────────────────────────────────────────────────────────────────────────╮ -  --use-airflow-version              Use (reinstall at entry) Airflow version from PyPI. It can also be `none`,        -                                     `wheel`, or `sdist` if Airflow should be removed, installed from wheel packages   -                                     or sdist packages available in dist folder respectively. Implies                  -                                     --mount-sources `none`.                                                           -                                     (none | wheel | sdist | <airflow_version>)                                        -  --airflow-constraints-reference    Constraint reference to use. 
Useful with --use-airflow-version parameter to       -                                     specify constraints for the installed version and to find newer dependencies      -                                     (TEXT)                                                                            -  --airflow-extras                   Airflow extras to install when --use-airflow-version is used (TEXT)               -  --use-packages-from-dist           Install all found packages (--package-format determines type) from 'dist'         -                                     folder when entering breeze.                                                      -  --package-format                   Format of packages that should be installed from dist. (wheel | sdist)            -                                     [default: wheel]                                                                  -  --debug                            Drop user in shell instead of running the command. Useful for debugging.          -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --verbose            -v  Print verbose information about performed steps.                                            -  --dry-run            -D  If dry-run is set, commands are only printed, not executed.                                 -  --github-repository  -g  GitHub repository used to pull, push run images. (TEXT) [default: apache/airflow]           -  --help               -h  Show this message and exit.                                                                 -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze verify-provider-packages [OPTIONS] + +Verifies if all provider code is following expectations for providers. + +╭─ Provider verification flags ────────────────────────────────────────────────────────────────────────────────────────╮ +--use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It can also be `none`,        +`wheel`, or `sdist` if Airflow should be removed, installed from wheel packages   +or sdist packages available in dist folder respectively. Implies --mount-sources +`none`.                                                                           +(none | wheel | sdist | <airflow_version>)                                        +--airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to       +specify constraints for the installed version and to find newer dependencies      +(TEXT)                                                                            +--airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT) +--use-packages-from-distInstall all found packages (--package-format determines type) from 'dist' folder  +when entering breeze.                                                             +--package-formatFormat of packages that should be installed from dist.(wheel | sdist) +[default: wheel]                                       +--debugDrop user in shell instead of running the command. Useful for debugging. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/images/breeze/output-version.svg b/images/breeze/output-version.svg index d136f095946c2..9d37a0a7ac852 100644 --- a/images/breeze/output-version.svg +++ b/images/breeze/output-version.svg @@ -1,4 +1,4 @@ - + - Command: version - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: version + + + + + - + -                                                                                                                          - Usage: breeze version [OPTIONS]                                                                                         -                                                                                                                         - Print information about version of apache-airflow-breeze.                                                               -                                                                                                                         -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ -  --verbose  -v  Print verbose information about performed steps.                                                      -  --help     -h  Show this message and exit.                                                                           -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + +Usage: breeze version [OPTIONS] + +Print information about version of apache-airflow-breeze. + +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + diff --git a/setup.cfg b/setup.cfg index 6311319feb22e..bd7310499674b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -156,7 +156,7 @@ install_requires = python-dateutil>=2.3 python-nvd3>=0.15.0 python-slugify>=5.0 - rich>=12.4.1 + rich>=12.4.4 setproctitle>=1.1.8 # SQL Alchemy 1.4.10 introduces a bug where for PyODBC driver UTCDateTime fields get wrongly converted # as string and fail to be converted back to datetime. 
It was supposed to be fixed in From 1641b496141f36f74c6d600fde3bbca2bcf9a0e4 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sun, 12 Jun 2022 17:29:32 +0200 Subject: [PATCH 005/118] Refresh list of committers (#24398) (cherry picked from commit 4d2536584ba1e8fea79be93c009041d1d9deb933) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 05781d33d7cc2..32283ed26d488 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -80,7 +80,6 @@ jobs: "XD-DENG", "aijamalnk", "alexvanboxel", - "aneesh-joseph", "aoen", "artwr", "ashb", @@ -107,6 +106,7 @@ jobs: "milton0825", "mistercrunch", "msumit", + "pingzh", "potiuk", "r39132", "ryanahamilton", From 0c8cda1dd44a6e9688bbe5e3a5149e6c26f4424a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torbj=C3=B8rn=20Vatn?= Date: Tue, 21 Jun 2022 11:24:13 +0200 Subject: [PATCH 006/118] Switch Markdown engine to markdown-it-py (#19702) (cherry picked from commit 88363b543f6f963247c332e9d7830bc782ed6e2d) --- airflow/www/static/css/main.css | 4 ++ airflow/www/utils.py | 5 +- setup.cfg | 3 + tests/www/test_utils.py | 112 ++++++++++++++++++++++++++------ 4 files changed, 103 insertions(+), 21 deletions(-) diff --git a/airflow/www/static/css/main.css b/airflow/www/static/css/main.css index 37bee892bd132..8a7a7eeec77d9 100644 --- a/airflow/www/static/css/main.css +++ b/airflow/www/static/css/main.css @@ -465,6 +465,10 @@ label[for="timezone-other"], z-index: 1070; } +details summary { + display: list-item; +} + .menu-scroll { max-height: 300px; overflow-y: auto; diff --git a/airflow/www/utils.py b/airflow/www/utils.py index 8c05f378855e5..3c63584e88ad1 100644 --- a/airflow/www/utils.py +++ b/airflow/www/utils.py @@ -21,7 +21,6 @@ from typing import Any, Dict, List, Optional, Union from urllib.parse import urlencode -import markdown import sqlalchemy as sqla from flask import Response, request, url_for from flask.helpers import flash @@ -31,6 +30,7 @@ from flask_appbuilder.models.sqla.filters import get_field_setup_query, set_value_to_type from flask_appbuilder.models.sqla.interface import SQLAInterface from flask_babel import lazy_gettext +from markdown_it import MarkdownIt from markupsafe import Markup from pendulum.datetime import DateTime from pygments import highlight, lexers @@ -521,10 +521,11 @@ def json_render(obj, lexer): def wrapped_markdown(s, css_class='rich_doc'): """Convert a Markdown string to HTML.""" + md = MarkdownIt("gfm-like") if s is None: return None s = textwrap.dedent(s) - return Markup(f'
<div class="{css_class}" >' + markdown.markdown(s, extensions=['tables']) + "</div>")
+    return Markup(f'<div class="{css_class}" >{md.render(s)}</div>
') def get_attr_renderer(): diff --git a/setup.cfg b/setup.cfg index bd7310499674b..8d996337ebfb2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -135,6 +135,7 @@ install_requires = # we pin to the same upper-bound as connexion. jsonschema>=3.2.0, <5.0 lazy-object-proxy + linkify-it-py>=2.0.0 lockfile>=0.12.2 markdown>=3.0 # Markupsafe 2.1.0 breaks with error: import name 'soft_unicode' from 'markupsafe'. @@ -142,8 +143,10 @@ install_requires = # https://github.com/pallets/markupsafe/issues/284 # or when we will be able to upgrade JINJA to newer version (currently limited due to Flask and # Flask Application Builder) + markdown-it-py>=2.1.0 markupsafe>=1.1.1,<2.1.0 marshmallow-oneofschema>=2.0.1 + mdit-py-plugins>=0.3.0 packaging>=14.0 pathspec~=0.9.0 pendulum>=2.0 diff --git a/tests/www/test_utils.py b/tests/www/test_utils.py index 01c49e1fdcafd..8ac06f75de010 100644 --- a/tests/www/test_utils.py +++ b/tests/www/test_utils.py @@ -184,29 +184,54 @@ def test_markdown_none(self): class TestWrappedMarkdown(unittest.TestCase): def test_wrapped_markdown_with_docstring_curly_braces(self): rendered = wrapped_markdown("{braces}", css_class="a_class") - assert '

{braces}

' == rendered + assert ( + '''

{braces}

+
''' + == rendered + ) def test_wrapped_markdown_with_some_markdown(self): - rendered = wrapped_markdown("*italic*\n**bold**\n", css_class="a_class") + rendered = wrapped_markdown( + """*italic* + **bold** + """, + css_class="a_class", + ) + assert ( '''

italic -bold

''' +bold

+''' == rendered ) def test_wrapped_markdown_with_table(self): rendered = wrapped_markdown( - """| Job | Duration | - | ----------- | ----------- | - | ETL | 14m |""" + """ +| Job | Duration | +| ----------- | ----------- | +| ETL | 14m | +""" ) assert ( - '
\n\n\n\n' - '\n\n\n\n\n\n\n\n\n' - '
JobDuration
ETL' - '14m
' - ) == rendered + '''
+ + + + + + + + + + + + +
JobDuration
ETL14m
+
''' + == rendered + ) def test_wrapped_markdown_with_indented_lines(self): rendered = wrapped_markdown( @@ -217,7 +242,11 @@ def test_wrapped_markdown_with_indented_lines(self): """ ) - assert '

header

\n

1st line\n2nd line

' == rendered + assert ( + '''

header

\n

1st line\n2nd line

+
''' + == rendered + ) def test_wrapped_markdown_with_raw_code_block(self): rendered = wrapped_markdown( @@ -235,10 +264,12 @@ def test_wrapped_markdown_with_raw_code_block(self): ) assert ( - '

Markdown code block

\n' - '

Inline code works well.

\n' - '
Code block\ndoes not\nrespect\nnewlines\n
' - ) == rendered + '''

Markdown code block

+

Inline code works well.

+
Code block\ndoes not\nrespect\nnewlines\n
+
''' + == rendered + ) def test_wrapped_markdown_with_nested_list(self): rendered = wrapped_markdown( @@ -251,6 +282,49 @@ def test_wrapped_markdown_with_nested_list(self): ) assert ( - '

Docstring with a code block

\n' - '
    \n
  • And
      \n
    • A nested list
    • \n
    \n
  • \n
' - ) == rendered + '''

Docstring with a code block

+
    +
  • And +
      +
    • A nested list
    • +
    +
  • +
+
''' + == rendered + ) + + def test_wrapped_markdown_with_collapsible_section(self): + rendered = wrapped_markdown( + """ +# A collapsible section with markdown +
+ Click to expand! + + ## Heading + 1. A numbered + 2. list + * With some + * Sub bullets +
+ """ + ) + + assert ( + '''

A collapsible section with markdown

+
+ Click to expand! +

Heading

+
    +
  1. A numbered
  2. +
  3. list +
      +
    • With some
    • +
    • Sub bullets
    • +
    +
  4. +
+
+
''' + == rendered + ) From 86c4540dedef05f0fd751f79feac5efd86cfbec3 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sun, 5 Jun 2022 11:51:04 +0200 Subject: [PATCH 007/118] Remove warnings when starting breeze (#24183) Breeze when started produced three warnings that were harmless, but we should fix them to remove "false positives". (cherry picked from commit ac8a790ab9dabfa747964b65aa9b93451b735ede) --- dev/breeze/src/airflow_breeze/breeze.py | 6 ++++- .../commands/developer_commands.py | 3 ++- .../airflow_breeze/commands/main_command.py | 2 -- .../airflow_breeze/params/build_ci_params.py | 1 + .../params/build_prod_params.py | 3 ++- .../src/airflow_breeze/params/shell_params.py | 5 ++++ .../utils/docker_command_utils.py | 16 +++-------- .../src/airflow_breeze/utils/path_utils.py | 14 ++++++++++ scripts/ci/docker-compose/local.yml | 7 +---- scripts/ci/docker-compose/mypy.yml | 27 +++++++++++++++++++ scripts/ci/pre_commit/pre_commit_flake8.py | 3 ++- .../pre_commit_migration_reference.py | 3 ++- scripts/ci/pre_commit/pre_commit_mypy.py | 3 ++- scripts/ci/pre_commit/pre_commit_ui_lint.py | 3 ++- scripts/ci/pre_commit/pre_commit_www_lint.py | 3 ++- 15 files changed, 71 insertions(+), 28 deletions(-) create mode 100644 scripts/ci/docker-compose/mypy.yml diff --git a/dev/breeze/src/airflow_breeze/breeze.py b/dev/breeze/src/airflow_breeze/breeze.py index a596c2be4967e..608619ae8e487 100755 --- a/dev/breeze/src/airflow_breeze/breeze.py +++ b/dev/breeze/src/airflow_breeze/breeze.py @@ -17,9 +17,13 @@ # under the License. from airflow_breeze.configure_rich_click import click # isort: skip # noqa from airflow_breeze.commands.main_command import main -from airflow_breeze.utils.path_utils import find_airflow_sources_root_to_operate_on +from airflow_breeze.utils.path_utils import ( + create_directories_and_files, + find_airflow_sources_root_to_operate_on, +) find_airflow_sources_root_to_operate_on() +create_directories_and_files() if __name__ == '__main__': main() diff --git a/dev/breeze/src/airflow_breeze/commands/developer_commands.py b/dev/breeze/src/airflow_breeze/commands/developer_commands.py index 1719bab5fc7a8..e10e79d0a8126 100644 --- a/dev/breeze/src/airflow_breeze/commands/developer_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/developer_commands.py @@ -524,7 +524,7 @@ def stop(verbose: bool, dry_run: bool, preserve_volumes: bool): command_to_execute = ['docker-compose', 'down', "--remove-orphans"] if not preserve_volumes: command_to_execute.append("--volumes") - shell_params = ShellParams(verbose=verbose, backend="all") + shell_params = ShellParams(verbose=verbose, backend="all", include_mypy_volume=True) env_variables = get_env_variables_for_docker_commands(shell_params) run_command(command_to_execute, verbose=verbose, dry_run=dry_run, env=env_variables) @@ -579,6 +579,7 @@ def enter_shell(**kwargs) -> RunCommandResult: if read_from_cache_file('suppress_cheatsheet') is None: get_console().print(CHEATSHEET, style=CHEATSHEET_STYLE) enter_shell_params = ShellParams(**filter_out_none(**kwargs)) + enter_shell_params.include_mypy_volume = True rebuild_ci_image_if_needed(build_params=enter_shell_params, dry_run=dry_run, verbose=verbose) return run_shell(verbose, dry_run, enter_shell_params) diff --git a/dev/breeze/src/airflow_breeze/commands/main_command.py b/dev/breeze/src/airflow_breeze/commands/main_command.py index 8ed451b0e5073..c62101a44f440 100644 --- a/dev/breeze/src/airflow_breeze/commands/main_command.py +++ b/dev/breeze/src/airflow_breeze/commands/main_command.py @@ -37,7 
+37,6 @@ option_use_packages_from_dist, option_verbose, ) -from airflow_breeze.utils.path_utils import create_directories_and_files @click.group(invoke_without_command=True, context_settings={'help_option_names': ['-h', '--help']}) @@ -64,6 +63,5 @@ def main(ctx: click.Context, **kwargs): from airflow_breeze.commands.developer_commands import shell - create_directories_and_files() if not ctx.invoked_subcommand: ctx.forward(shell, extra_args={}) diff --git a/dev/breeze/src/airflow_breeze/params/build_ci_params.py b/dev/breeze/src/airflow_breeze/params/build_ci_params.py index aa9681fc955fe..52c242b65a77d 100644 --- a/dev/breeze/src/airflow_breeze/params/build_ci_params.py +++ b/dev/breeze/src/airflow_breeze/params/build_ci_params.py @@ -31,6 +31,7 @@ class BuildCiParams(CommonBuildParams): """ airflow_constraints_mode: str = "constraints-source-providers" + default_constraints_branch: str = DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH airflow_constraints_reference: str = DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH airflow_extras: str = "devel_ci" airflow_pre_cached_pip_packages: bool = True diff --git a/dev/breeze/src/airflow_breeze/params/build_prod_params.py b/dev/breeze/src/airflow_breeze/params/build_prod_params.py index e81a5d6739ac8..c37569f85f136 100644 --- a/dev/breeze/src/airflow_breeze/params/build_prod_params.py +++ b/dev/breeze/src/airflow_breeze/params/build_prod_params.py @@ -21,7 +21,7 @@ from dataclasses import dataclass from typing import List -from airflow_breeze.branch_defaults import AIRFLOW_BRANCH +from airflow_breeze.branch_defaults import AIRFLOW_BRANCH, DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH from airflow_breeze.global_constants import ( AIRFLOW_SOURCES_FROM, AIRFLOW_SOURCES_TO, @@ -41,6 +41,7 @@ class BuildProdParams(CommonBuildParams): """ airflow_constraints_mode: str = "constraints" + default_constraints_branch: str = DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH airflow_constraints_reference: str = "" airflow_is_in_context: bool = False cleanup_context: bool = False diff --git a/dev/breeze/src/airflow_breeze/params/shell_params.py b/dev/breeze/src/airflow_breeze/params/shell_params.py index 58107b9e27567..4ba4022651529 100644 --- a/dev/breeze/src/airflow_breeze/params/shell_params.py +++ b/dev/breeze/src/airflow_breeze/params/shell_params.py @@ -48,6 +48,7 @@ class ShellParams: """ airflow_branch: str = AIRFLOW_BRANCH + default_constraints_branch: str = DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH airflow_constraints_reference: str = DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH airflow_extras: str = "" answer: Optional[str] = None @@ -64,6 +65,7 @@ class ShellParams: github_repository: str = "apache/airflow" github_token: str = os.environ.get('GITHUB_TOKEN', "") image_tag: str = "latest" + include_mypy_volume: bool = False install_airflow_version: str = "" install_providers_from_sources: bool = True integration: Tuple[str, ...] 
= () @@ -189,6 +191,7 @@ def compose_files(self): local_all_sources_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/local-all-sources.yml" files_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/files.yml" remove_sources_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/remove-sources.yml" + mypy_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/mypy.yml" forward_credentials_docker_compose_file = ( f"{str(SCRIPTS_CI_DIR)}/docker-compose/forward-credentials.yml" ) @@ -213,6 +216,8 @@ def compose_files(self): compose_ci_file.append(forward_credentials_docker_compose_file) if self.use_airflow_version is not None: compose_ci_file.append(remove_sources_docker_compose_file) + if self.include_mypy_volume: + compose_ci_file.append(mypy_docker_compose_file) if "all" in self.integration: integrations = AVAILABLE_INTEGRATIONS else: diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py index 05bcc11d6cb87..1f30642edd6b0 100644 --- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py @@ -17,7 +17,6 @@ """Various utils to prepare docker and docker compose commands.""" import os import re -import subprocess import sys from copy import deepcopy from random import randint @@ -97,16 +96,6 @@ ] -def create_volume_if_missing(volume_name: str): - res_inspect = run_command(cmd=["docker", "inspect", volume_name], stdout=subprocess.DEVNULL, check=False) - if res_inspect.returncode != 0: - run_command(cmd=["docker", "volume", "create", volume_name], check=True) - - -def create_static_check_volumes(): - create_volume_if_missing("docker-compose_mypy-cache-volume") - - def get_extra_docker_flags(mount_sources: str) -> List[str]: """ Returns extra docker flags based on the type of mounting we want to do for sources. @@ -124,7 +113,7 @@ def get_extra_docker_flags(mount_sources: str) -> List[str]: ["--mount", f'type=bind,src={AIRFLOW_SOURCES_ROOT / src},dst={dst}'] ) extra_docker_flags.extend( - ['--mount', "type=volume,src=docker-compose_mypy-cache-volume,dst=/opt/airflow/.mypy_cache"] + ['--mount', "type=volume,src=mypy-cache-volume,dst=/opt/airflow/.mypy_cache"] ) else: # none extra_docker_flags.extend( @@ -512,6 +501,7 @@ def update_expected_environment_variables(env: Dict[str, str]) -> None: :param env: environment variables to update with missing values if not set. 
""" set_value_to_default_if_not_set(env, 'AIRFLOW_CONSTRAINTS_MODE', "constraints-source-providers") + set_value_to_default_if_not_set(env, 'AIRFLOW_CONSTRAINTS_REFERENCE', "constraints-source-providers") set_value_to_default_if_not_set(env, 'AIRFLOW_EXTRAS', "") set_value_to_default_if_not_set(env, 'ANSWER', "") set_value_to_default_if_not_set(env, 'BREEZE', "true") @@ -557,7 +547,9 @@ def update_expected_environment_variables(env: Dict[str, str]) -> None: "AIRFLOW_CI_IMAGE": "airflow_image_name", "AIRFLOW_CI_IMAGE_WITH_TAG": "airflow_image_name_with_tag", "AIRFLOW_EXTRAS": "airflow_extras", + "DEFAULT_CONSTRAINTS_BRANCH": "default-constraints-branch", "AIRFLOW_CONSTRAINTS_MODE": "airflow_constraints_mode", + "AIRFLOW_CONSTRAINTS_REFERENCE": "airflow_constraints_reference", "AIRFLOW_IMAGE_KUBERNETES": "airflow_image_kubernetes", "AIRFLOW_PROD_IMAGE": "airflow_image_name", "AIRFLOW_SOURCES": "airflow_sources", diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py index 759c3e09a7d6a..e4e79d55202ae 100644 --- a/dev/breeze/src/airflow_breeze/utils/path_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py @@ -19,6 +19,7 @@ """ import hashlib import os +import subprocess import sys import tempfile from functools import lru_cache @@ -249,6 +250,18 @@ def find_airflow_sources_root_to_operate_on() -> Path: BREEZE_SOURCES_ROOT = AIRFLOW_SOURCES_ROOT / "dev" / "breeze" +def create_volume_if_missing(volume_name: str): + from airflow_breeze.utils.run_utils import run_command + + res_inspect = run_command(cmd=["docker", "inspect", volume_name], stdout=subprocess.DEVNULL, check=False) + if res_inspect.returncode != 0: + run_command(cmd=["docker", "volume", "create", volume_name], check=True) + + +def create_static_check_volumes(): + create_volume_if_missing("mypy-cache-volume") + + def create_directories_and_files() -> None: """ Creates all directories and files that are needed for Breeze to work via docker-compose. @@ -264,3 +277,4 @@ def create_directories_and_files() -> None: (AIRFLOW_SOURCES_ROOT / ".bash_aliases").touch() (AIRFLOW_SOURCES_ROOT / ".bash_history").touch() (AIRFLOW_SOURCES_ROOT / ".inputrc").touch() + create_static_check_volumes() diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml index e356cb873611f..9e63ffa467d07 100644 --- a/scripts/ci/docker-compose/local.yml +++ b/scripts/ci/docker-compose/local.yml @@ -26,10 +26,7 @@ services: # or those that might be useful to see in the host as output of the # tests (such as logs) volumes: - - type: volume - source: mypy-cache-volume - target: /opt/airflow/.mypy_cache/ - # START automatically generated volumes from NECESSARY_HOST_VOLUMES in docker_command_utils.py + # START automatically generated volumes from NECESSARY_HOST_VOLUMES in docker_command_utils.py - type: bind source: ../../../.bash_aliases target: /root/.bash_aliases @@ -121,5 +118,3 @@ services: source: ../../../metastore_browser target: /opt/airflow/metastore_browser # END automatically generated volumes from NECESSARY_HOST_VOLUMES in docker_command_utils.py -volumes: - mypy-cache-volume: diff --git a/scripts/ci/docker-compose/mypy.yml b/scripts/ci/docker-compose/mypy.yml new file mode 100644 index 0000000000000..7287738690239 --- /dev/null +++ b/scripts/ci/docker-compose/mypy.yml @@ -0,0 +1,27 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +--- +version: "3.7" +services: + airflow: + volumes: + - type: volume + source: mypy-cache-volume + target: /opt/airflow/.mypy_cache/ +volumes: + mypy-cache-volume: + external: true diff --git a/scripts/ci/pre_commit/pre_commit_flake8.py b/scripts/ci/pre_commit/pre_commit_flake8.py index 59bd356d24df0..22b09978935b2 100755 --- a/scripts/ci/pre_commit/pre_commit_flake8.py +++ b/scripts/ci/pre_commit/pre_commit_flake8.py @@ -34,7 +34,8 @@ if __name__ == '__main__': sys.path.insert(0, str(AIRFLOW_SOURCES / "dev" / "breeze" / "src")) from airflow_breeze.global_constants import MOUNT_SELECTED - from airflow_breeze.utils.docker_command_utils import create_static_check_volumes, get_extra_docker_flags + from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags + from airflow_breeze.utils.path_utils import create_static_check_volumes from airflow_breeze.utils.run_utils import get_runnable_ci_image, run_command airflow_image = get_runnable_ci_image(verbose=VERBOSE, dry_run=DRY_RUN) diff --git a/scripts/ci/pre_commit/pre_commit_migration_reference.py b/scripts/ci/pre_commit/pre_commit_migration_reference.py index 154a795f3e84a..25ac796fdf4fc 100755 --- a/scripts/ci/pre_commit/pre_commit_migration_reference.py +++ b/scripts/ci/pre_commit/pre_commit_migration_reference.py @@ -34,7 +34,8 @@ if __name__ == '__main__': sys.path.insert(0, str(AIRFLOW_SOURCES / "dev" / "breeze" / "src")) from airflow_breeze.global_constants import MOUNT_SELECTED - from airflow_breeze.utils.docker_command_utils import create_static_check_volumes, get_extra_docker_flags + from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags + from airflow_breeze.utils.path_utils import create_static_check_volumes from airflow_breeze.utils.run_utils import get_runnable_ci_image, run_command airflow_image = get_runnable_ci_image(verbose=VERBOSE, dry_run=DRY_RUN) diff --git a/scripts/ci/pre_commit/pre_commit_mypy.py b/scripts/ci/pre_commit/pre_commit_mypy.py index 74d511a65b7ed..965bed9a74d3c 100755 --- a/scripts/ci/pre_commit/pre_commit_mypy.py +++ b/scripts/ci/pre_commit/pre_commit_mypy.py @@ -35,7 +35,8 @@ if __name__ == '__main__': sys.path.insert(0, str(AIRFLOW_SOURCES / "dev" / "breeze" / "src")) from airflow_breeze.global_constants import MOUNT_SELECTED - from airflow_breeze.utils.docker_command_utils import create_static_check_volumes, get_extra_docker_flags + from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags + from airflow_breeze.utils.path_utils import create_static_check_volumes from airflow_breeze.utils.run_utils import get_runnable_ci_image, run_command airflow_image = get_runnable_ci_image(verbose=VERBOSE, dry_run=DRY_RUN) diff --git a/scripts/ci/pre_commit/pre_commit_ui_lint.py b/scripts/ci/pre_commit/pre_commit_ui_lint.py index 7755e3d204bd8..5812344a45961 100755 --- 
a/scripts/ci/pre_commit/pre_commit_ui_lint.py +++ b/scripts/ci/pre_commit/pre_commit_ui_lint.py @@ -33,7 +33,8 @@ if __name__ == '__main__': sys.path.insert(0, str(AIRFLOW_SOURCES / "dev" / "breeze" / "src")) from airflow_breeze.global_constants import MOUNT_SELECTED - from airflow_breeze.utils.docker_command_utils import create_static_check_volumes, get_extra_docker_flags + from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags + from airflow_breeze.utils.path_utils import create_static_check_volumes from airflow_breeze.utils.run_utils import get_runnable_ci_image, run_command airflow_image = get_runnable_ci_image(verbose=VERBOSE, dry_run=DRY_RUN) diff --git a/scripts/ci/pre_commit/pre_commit_www_lint.py b/scripts/ci/pre_commit/pre_commit_www_lint.py index 65553c3d81345..b22750c56d07a 100755 --- a/scripts/ci/pre_commit/pre_commit_www_lint.py +++ b/scripts/ci/pre_commit/pre_commit_www_lint.py @@ -34,7 +34,8 @@ if __name__ == '__main__': sys.path.insert(0, str(AIRFLOW_SOURCES / "dev" / "breeze" / "src")) from airflow_breeze.global_constants import MOUNT_SELECTED - from airflow_breeze.utils.docker_command_utils import create_static_check_volumes, get_extra_docker_flags + from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags + from airflow_breeze.utils.path_utils import create_static_check_volumes from airflow_breeze.utils.run_utils import get_runnable_ci_image, run_command airflow_image = get_runnable_ci_image(verbose=VERBOSE, dry_run=DRY_RUN) From d7989b60f622775fc1695e6be6ba1e0849d9581d Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Tue, 7 Jun 2022 08:48:23 +0200 Subject: [PATCH 008/118] Fix choosing backend versions in breeze's command line (#24228) Choosing the backend version was broken when command-line switches were used: the _VERSION variables were "hard-coded" to their defaults rather than taken from the command line. This was a remnant of the initial implementation and of converting the parameters to "cacheable" ones. While looking at the versions we also found that PARAM_NAME_FLAG is no longer used, so we took the opportunity to remove it.
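In short, the backend-version environment variables now follow the attributes of the ShellParams object that click fills from the command-line switches (or from the cached defaults), instead of module-level *_VERSION constants. A minimal sketch of the idea, using simplified, made-up names (ENV_TO_ATTRIBUTE, build_env); the real mapping lives in dev/breeze/src/airflow_breeze/utils/docker_command_utils.py shown in the diff below:

    # Illustrative sketch only - not the actual Breeze implementation.
    from dataclasses import dataclass
    from typing import Dict

    @dataclass
    class ShellParams:
        backend: str = "sqlite"
        postgres_version: str = "10"
        mysql_version: str = "5.7"
        mssql_version: str = "2017-latest"

    # Each docker-compose environment variable is derived from the corresponding
    # ShellParams attribute. Before the fix, the *_VERSION entries were taken from
    # hard-coded module constants, so e.g. --postgres-version 14 had no effect.
    ENV_TO_ATTRIBUTE: Dict[str, str] = {
        "BACKEND": "backend",
        "POSTGRES_VERSION": "postgres_version",
        "MYSQL_VERSION": "mysql_version",
        "MSSQL_VERSION": "mssql_version",
    }

    def build_env(params: ShellParams) -> Dict[str, str]:
        """Build the environment passed to docker-compose from the parsed parameters."""
        return {env_var: getattr(params, attr) for env_var, attr in ENV_TO_ATTRIBUTE.items()}

    print(build_env(ShellParams(backend="postgres", postgres_version="14")))
    # {'BACKEND': 'postgres', 'POSTGRES_VERSION': '14', 'MYSQL_VERSION': '5.7', 'MSSQL_VERSION': '2017-latest'}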
(cherry picked from commit 4482eaefb02535317bc0d2378eba6bfcef3c45ff) --- dev/breeze/src/airflow_breeze/global_constants.py | 15 --------------- .../airflow_breeze/utils/docker_command_utils.py | 9 +++------ 2 files changed, 3 insertions(+), 21 deletions(-) diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index 4ed4c298ba230..327740f7bf12b 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -102,18 +102,6 @@ "MSSQL_VERSION": "MSSql version", } -PARAM_NAME_FLAG = { - "BACKEND": "--backend", - "MYSQL_VERSION": "--mysql-version", - "KUBERNETES_MODE": "--kubernetes-mode", - "KUBERNETES_VERSION": "--kubernetes-version", - "KIND_VERSION": "--kind-version", - "HELM_VERSION": "--helm-version", - "EXECUTOR": "--executor", - "POSTGRES_VERSION": "--postgres-version", - "MSSQL_VERSION": "--mssql-version", -} - EXCLUDE_DOCS_PACKAGE_FOLDER = [ 'exts', 'integration-logos', @@ -158,9 +146,6 @@ def get_available_packages(short_version=False) -> List[str]: CURRENT_POSTGRES_VERSIONS = ['10', '11', '12', '13', '14'] CURRENT_MYSQL_VERSIONS = ['5.7', '8'] CURRENT_MSSQL_VERSIONS = ['2017-latest', '2019-latest'] -POSTGRES_VERSION = CURRENT_POSTGRES_VERSIONS[0] -MYSQL_VERSION = CURRENT_MYSQL_VERSIONS[0] -MSSQL_VERSION = CURRENT_MSSQL_VERSIONS[0] DB_RESET = False START_AIRFLOW = "false" LOAD_EXAMPLES = False diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py index 1f30642edd6b0..fb38cefabacb4 100644 --- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py @@ -45,11 +45,8 @@ MOUNT_ALL, MOUNT_SELECTED, MSSQL_HOST_PORT, - MSSQL_VERSION, MYSQL_HOST_PORT, - MYSQL_VERSION, POSTGRES_HOST_PORT, - POSTGRES_VERSION, REDIS_HOST_PORT, SSH_PORT, WEBSERVER_HOST_PORT, @@ -565,9 +562,12 @@ def update_expected_environment_variables(env: Dict[str, str]) -> None: "ISSUE_ID": "issue_id", "LOAD_EXAMPLES": "load_example_dags", "LOAD_DEFAULT_CONNECTIONS": "load_default_connections", + "MYSQL_VERSION": "mysql_version", + "MSSQL_VERSION": "mssql_version", "NUM_RUNS": "num_runs", "PACKAGE_FORMAT": "package_format", "PYTHON_MAJOR_MINOR_VERSION": "python", + "POSTGRES_VERSION": "postgres_version", "SQLITE_URL": "sqlite_url", "START_AIRFLOW": "start_airflow", "SKIP_ENVIRONMENT_INITIALIZATION": "skip_environment_initialization", @@ -579,11 +579,8 @@ def update_expected_environment_variables(env: Dict[str, str]) -> None: DOCKER_VARIABLE_CONSTANTS = { "FLOWER_HOST_PORT": FLOWER_HOST_PORT, "MSSQL_HOST_PORT": MSSQL_HOST_PORT, - "MSSQL_VERSION": MSSQL_VERSION, "MYSQL_HOST_PORT": MYSQL_HOST_PORT, - "MYSQL_VERSION": MYSQL_VERSION, "POSTGRES_HOST_PORT": POSTGRES_HOST_PORT, - "POSTGRES_VERSION": POSTGRES_VERSION, "REDIS_HOST_PORT": REDIS_HOST_PORT, "SSH_PORT": SSH_PORT, "WEBSERVER_HOST_PORT": WEBSERVER_HOST_PORT, From 797fce76e3c870b08637a5c9426b00964fe623fb Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Tue, 7 Jun 2022 11:29:10 +0200 Subject: [PATCH 009/118] Add command to regenerate breeze command output images (#24216) (cherry picked from commit 2d687e4b7b39fc5c133440de7f66f58327d33db7) --- .gitignore | 4 + BREEZE.rst | 11 + .../configuration_and_maintenance_commands.py | 27 +- images/breeze/output-commands-hash.txt | 2 +- images/breeze/output-commands.svg | 364 +++++++++--------- .../output-regenerate-command-images.svg | 99 +++++ 6 files changed, 327 
insertions(+), 180 deletions(-) create mode 100644 images/breeze/output-regenerate-command-images.svg diff --git a/.gitignore b/.gitignore index 9a00d53fa3bda..fe3d74aa2344a 100644 --- a/.gitignore +++ b/.gitignore @@ -225,3 +225,7 @@ licenses/LICENSES-ui.txt # Packaged breeze on Windows /breeze.exe + +# Generated out dir + +/out diff --git a/BREEZE.rst b/BREEZE.rst index 1ddd32dfbefce..36b10759d3444 100644 --- a/BREEZE.rst +++ b/BREEZE.rst @@ -426,6 +426,17 @@ of help of the commands only when they change. :width: 100% :alt: Breeze command-hash-export +Regenerating images for documentation +===================================== + +This documentation contains exported images with "help" of their commands and parameters. You can +regenerate all those images (which might be needed in case new version of rich is used) via +``regenerate-breeze-images`` command. + +.. image:: ./images/breeze/output-regenerate-command-images.svg + :width: 100% + :alt: Breeze regenerate-command-images + Starting complete Airflow installation ====================================== diff --git a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py index 818ba49ee8b83..d4ca3bcf466ca 100644 --- a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py @@ -62,7 +62,7 @@ ) from airflow_breeze.utils.recording import output_file_for_recording from airflow_breeze.utils.reinstall import ask_to_reinstall_breeze, reinstall_breeze, warn_non_editable -from airflow_breeze.utils.run_utils import run_command +from airflow_breeze.utils.run_utils import assert_pre_commit_installed, run_command from airflow_breeze.utils.visuals import ASCIIART, ASCIIART_STYLE CONFIGURATION_AND_MAINTENANCE_COMMANDS = { @@ -75,6 +75,7 @@ "resource-check", "free-space", "fix-ownership", + "regenerate-command-images", "command-hash-export", "version", ], @@ -533,3 +534,27 @@ def remove_autogenerated_code(script_path: str): def backup(script_path_file: Path): shutil.copy(str(script_path_file), str(script_path_file) + ".bak") + + +@main.command(name="regenerate-command-images", help="Regenerate breeze command images.") +@option_verbose +@option_dry_run +def regenerate_command_images(verbose: bool, dry_run: bool): + assert_pre_commit_installed(verbose=verbose) + perform_environment_checks(verbose=verbose) + try: + (AIRFLOW_SOURCES_ROOT / "images" / "breeze" / "output-commands-hash.txt").unlink() + except FileNotFoundError: + # when we go to Python 3.8+ we can add missing_ok = True instead of try/except + pass + command_to_execute = [sys.executable, "-m", "pre_commit", 'run', 'update-breeze-file', '--all-files'] + env = os.environ.copy() + run_command( + command_to_execute, + verbose=verbose, + dry_run=dry_run, + check=False, + no_output_dump_on_exception=True, + text=True, + env=env, + ) diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index 08e0c94ed2eef..c6b12db9fa491 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -1 +1 @@ -c227c66369d92c76c0e52939e3e4ecf1 +ed70b784364f0b604176ea142305906b diff --git a/images/breeze/output-commands.svg b/images/breeze/output-commands.svg index be4ea5a514d12..a592f4642e045 100644 --- a/images/breeze/output-commands.svg +++ b/images/breeze/output-commands.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + 
[images/breeze/output-commands.svg: regenerated terminal screenshot of the `breeze` help output. The visible textual change is in the Configuration & maintenance table, which now lists the new regenerate-command-images command ("Regenerate breeze command images.") alongside cleanup, self-upgrade, setup-autocomplete, config, resource-check, free-space, fix-ownership, command-hash-export and version; the Developer tools, Testing, CI Image tools, Production Image tools and Release management sections keep the same entries.]
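As a usage note, the command added in configuration_and_maintenance_commands.py above removes images/breeze/output-commands-hash.txt and re-runs the update-breeze-file pre-commit hook, so the screenshots in this hunk can be refreshed locally with:

    breeze regenerate-command-images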
diff --git a/images/breeze/output-regenerate-command-images.svg b/images/breeze/output-regenerate-command-images.svg new file mode 100644 index 0000000000000..0842e64fa1da3 --- /dev/null +++ b/images/breeze/output-regenerate-command-images.svg @@ -0,0 +1,99 @@ [images/breeze/output-regenerate-command-images.svg: new terminal screenshot titled "Command: regenerate-command-images", showing "Usage: breeze regenerate-command-images [OPTIONS]", the description "Regenerate breeze command images." and the --verbose / --dry-run / --help options.] From b5cbad4d3c8c0bc8c3fd40b9993fc26c4fcec3de Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sun, 12 Jun 2022 17:14:09 +0200 Subject: [PATCH 010/118] Better diagnostics for ARM for MySQL and MSSQL (#24185) Until we have Debian support for MySQL and MSSQL on ARM, running those on the ARM platform is not supported. However, the error about it was not clear (pulling the docker image failed). This PR adds platform checking also in breeze and fails fast, without even attempting to enter the breeze shell, when you are on ARM and want to run an MsSQL or MySQL breeze shell. Also, some errors with running different backend versions via breeze have been removed. (cherry picked from commit 00d2a3c9319af6e145b8e03f5ac33a95fa8acd66) --- .../src/airflow_breeze/commands/developer_commands.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/dev/breeze/src/airflow_breeze/commands/developer_commands.py b/dev/breeze/src/airflow_breeze/commands/developer_commands.py index e10e79d0a8126..a8ad686c708c5 100644 --- a/dev/breeze/src/airflow_breeze/commands/developer_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/developer_commands.py @@ -25,6 +25,7 @@ from airflow_breeze.commands.main_command import main from airflow_breeze.global_constants import ( DEFAULT_PYTHON_MAJOR_MINOR_VERSION, + DOCKER_DEFAULT_PLATFORM, MOUNT_SELECTED, get_available_packages, ) @@ -601,7 +602,13 @@ def run_shell(verbose: bool, dry_run: bool, shell_params: ShellParams) -> RunCom env_variables = get_env_variables_for_docker_commands(shell_params) if cmd_added is not None: cmd.extend(['-c', cmd_added]) - + if "arm64" in DOCKER_DEFAULT_PLATFORM: + if shell_params.backend == "mysql": + get_console().print('\n[error]MySQL is not supported on ARM architecture.[/]\n') + sys.exit(1) + if shell_params.backend == "mssql": + get_console().print('\n[error]MSSQL is not supported on ARM architecture[/]\n') + sys.exit(1) command_result = run_command( cmd, verbose=verbose, dry_run=dry_run, env=env_variables, text=True, check=False ) From 0d0f57f3bd9e8022c71fb957bfdb60cd052d5cec Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Mon, 13 Jun 2022 19:38:20 +0200 Subject: [PATCH 011/118] Fix links to sources for examples (#24386) The links to example sources in exampleinclude have been broken in a number of providers, and they were additionally broken by AIP-47. This PR fixes it.
Fixes: #23632 Fixes: https://github.com/apache/airflow-site/issues/536 (cherry picked from commit 08b675cf6642171cb1c5ddfb09607b541db70b29) --- .pre-commit-config.yaml | 13 +- RELEASE_NOTES.rst | 1 + STATIC_CODE_CHECKS.rst | 4 + .../example_branch_datetime_operator.py | 20 +- .../example_external_task_marker_dag.py | 26 +- .../example_google_api_youtube_to_s3.py | 3 +- .../amazon/aws/example_dags/example_s3.py | 5 +- .../arangodb/example_dags/example_arangodb.py | 4 +- .../example_dags/example_automl_tables.py | 4 +- ...mple_cloud_storage_transfer_service_aws.py | 10 +- .../cloud/example_dags/example_pubsub.py | 8 +- .../cloud/example_dags/example_vertex_ai.py | 5 +- airflow/providers/mongo/hooks/mongo.py | 2 +- .../src/airflow_breeze/pre_commit_ids.py | 2 + docs/__init__.py | 16 ++ .../index.rst | 6 + .../apache-airflow-providers-amazon/index.rst | 6 + .../index.rst | 11 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 12 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../apache-airflow-providers-docker/index.rst | 6 + .../apache-airflow-providers-google/index.rst | 6 + .../operators/cloud/bigquery.rst | 50 ++-- .../operators/cloud/datastore.rst | 26 +- .../operators/cloud/kubernetes_engine.rst | 10 +- docs/apache-airflow-providers-http/index.rst | 6 + docs/apache-airflow-providers-jdbc/index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + docs/apache-airflow-providers-mysql/index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../apache-airflow-providers-presto/index.rst | 6 + .../apache-airflow-providers-qubole/index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + .../index.rst | 6 + docs/apache-airflow-providers-trino/index.rst | 6 + .../apache-airflow-providers-yandex/index.rst | 6 + docs/apache-airflow/python-api-ref.rst | 9 + docs/build_docs.py | 14 +- docs/conf.py | 37 ++- docs/exts/docs_build/docs_builder.py | 8 +- docs/exts/provider_yaml_utils.py | 32 ++- docs/publish_docs.py | 2 + docs/spelling_wordlist.txt | 9 + images/breeze/output-commands-hash.txt | 2 +- images/breeze/output-static-checks.svg | 224 +++++++++--------- .../pre_commit_check_init_in_tests.py | 50 ++++ ...e_commit_check_setup_extra_packages_ref.py | 4 +- ...mmit_check_system_tests_hidden_in_index.py | 72 ++++++ scripts/in_container/run_docs_build.sh | 3 +- setup.cfg | 2 +- tests/config_templates/__init__.py | 16 ++ tests/core/__init__.py | 16 ++ tests/dags/__init__.py | 16 ++ tests/dags/subdir1/__init__.py | 16 ++ tests/dags/subdir2/__init__.py | 16 ++ tests/dags/subdir2/subdir3/__init__.py | 16 ++ tests/dags_corrupted/__init__.py | 16 ++ tests/dags_with_system_exit/__init__.py | 16 ++ .../__init__.py | 16 ++ tests/jobs/test_scheduler_job.py | 1 + tests/secrets/__init__.py | 16 ++ tests/system/README.md | 2 +- tests/system/providers/amazon/__init__.py | 16 ++ tests/system/providers/amazon/aws/__init__.py | 16 ++ .../system/providers/apache/kylin/__init__.py | 16 ++ tests/system/providers/cncf/__init__.py | 16 ++ .../providers/elasticsearch/__init__.py | 16 ++ tests/system/providers/github/__init__.py | 16 ++ tests/system/providers/google/README.md | 2 +- .../system/providers/google/cloud/__init__.py | 16 ++ .../providers/google/cloud/azure/__init__.py | 16 ++ .../google/cloud/bigquery/__init__.py | 16 ++ .../bigquery/example_bigquery_dataset.py | 0 
.../bigquery/example_bigquery_operations.py | 0 .../example_bigquery_operations_location.py | 0 .../bigquery/example_bigquery_queries.py | 0 .../bigquery/example_bigquery_sensors.py | 0 .../bigquery/example_bigquery_tables.py | 0 .../bigquery/example_bigquery_to_bigquery.py | 0 .../bigquery/example_bigquery_to_gcs.py | 0 .../cloud/bigquery/resources/__init__.py | 16 ++ .../resources/example_bigquery_query.sql | 0 .../resources/update_table_schema.json | 0 .../bigquery/resources/us-states.csv | 0 .../google/cloud/cloud_sql/__init__.py | 16 ++ .../google/cloud/dataproc/__init__.py | 16 ++ .../dataproc/example_dataproc_gke.py | 0 .../cloud/dataproc/resources/__init__.py | 16 ++ .../google/cloud/datastore/__init__.py | 16 ++ .../datastore/example_datastore_commit.py | 0 .../example_datastore_export_import.py | 0 .../datastore/example_datastore_query.py | 0 .../datastore/example_datastore_rollback.py | 0 .../providers/google/cloud/gcs/__init__.py | 16 ++ .../gcs/example_gcs_to_bigquery.py | 0 .../google/cloud/gcs/resources/__init__.py | 16 ++ .../cloud/kubernetes_engine/__init__.py | 16 ++ .../example_kubernetes_engine.py | 0 .../google/cloud/spanner/__init__.py | 16 ++ .../google/cloud/speech_to_text/__init__.py | 16 ++ .../google/cloud/stackdriver/__init__.py | 16 ++ .../providers/google/cloud/tasks/__init__.py | 16 ++ .../google/cloud/text_to_speech/__init__.py | 16 ++ .../google/cloud/workflows/__init__.py | 16 ++ .../providers/google/workplace/__init__.py | 16 ++ tests/system/providers/presto/__init__.py | 16 ++ tests/test_utils/operators/__init__.py | 16 ++ tests/test_utils/perf/__init__.py | 16 ++ tests/test_utils/perf/dags/__init__.py | 16 ++ tests/testconfig/__init__.py | 16 ++ tests/testconfig/conf/__init__.py | 16 ++ tests/triggers/__init__.py | 16 ++ tests/www/test_logs/__init__.py | 16 ++ tests/www/views/__init__.py | 16 ++ 131 files changed, 1349 insertions(+), 242 deletions(-) create mode 100644 docs/__init__.py create mode 100755 scripts/ci/pre_commit/pre_commit_check_init_in_tests.py create mode 100755 scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py create mode 100644 tests/config_templates/__init__.py create mode 100644 tests/core/__init__.py create mode 100644 tests/dags/__init__.py create mode 100644 tests/dags/subdir1/__init__.py create mode 100644 tests/dags/subdir2/__init__.py create mode 100644 tests/dags/subdir2/subdir3/__init__.py create mode 100644 tests/dags_corrupted/__init__.py create mode 100644 tests/dags_with_system_exit/__init__.py create mode 100644 tests/executors/kubernetes_executor_template_files/__init__.py create mode 100644 tests/secrets/__init__.py create mode 100644 tests/system/providers/amazon/__init__.py create mode 100644 tests/system/providers/amazon/aws/__init__.py create mode 100644 tests/system/providers/apache/kylin/__init__.py create mode 100644 tests/system/providers/cncf/__init__.py create mode 100644 tests/system/providers/elasticsearch/__init__.py create mode 100644 tests/system/providers/github/__init__.py create mode 100644 tests/system/providers/google/cloud/__init__.py create mode 100644 tests/system/providers/google/cloud/azure/__init__.py create mode 100644 tests/system/providers/google/cloud/bigquery/__init__.py rename tests/system/providers/google/{ => cloud}/bigquery/example_bigquery_dataset.py (100%) rename tests/system/providers/google/{ => cloud}/bigquery/example_bigquery_operations.py (100%) rename tests/system/providers/google/{ => cloud}/bigquery/example_bigquery_operations_location.py (100%) rename 
tests/system/providers/google/{ => cloud}/bigquery/example_bigquery_queries.py (100%) rename tests/system/providers/google/{ => cloud}/bigquery/example_bigquery_sensors.py (100%) rename tests/system/providers/google/{ => cloud}/bigquery/example_bigquery_tables.py (100%) rename tests/system/providers/google/{ => cloud}/bigquery/example_bigquery_to_bigquery.py (100%) rename tests/system/providers/google/{ => cloud}/bigquery/example_bigquery_to_gcs.py (100%) create mode 100644 tests/system/providers/google/cloud/bigquery/resources/__init__.py rename tests/system/providers/google/{ => cloud}/bigquery/resources/example_bigquery_query.sql (100%) rename tests/system/providers/google/{ => cloud}/bigquery/resources/update_table_schema.json (100%) rename tests/system/providers/google/{ => cloud}/bigquery/resources/us-states.csv (100%) create mode 100644 tests/system/providers/google/cloud/cloud_sql/__init__.py create mode 100644 tests/system/providers/google/cloud/dataproc/__init__.py rename tests/system/providers/google/{ => cloud}/dataproc/example_dataproc_gke.py (100%) create mode 100644 tests/system/providers/google/cloud/dataproc/resources/__init__.py create mode 100644 tests/system/providers/google/cloud/datastore/__init__.py rename tests/system/providers/google/{ => cloud}/datastore/example_datastore_commit.py (100%) rename tests/system/providers/google/{ => cloud}/datastore/example_datastore_export_import.py (100%) rename tests/system/providers/google/{ => cloud}/datastore/example_datastore_query.py (100%) rename tests/system/providers/google/{ => cloud}/datastore/example_datastore_rollback.py (100%) create mode 100644 tests/system/providers/google/cloud/gcs/__init__.py rename tests/system/providers/google/{ => cloud}/gcs/example_gcs_to_bigquery.py (100%) create mode 100644 tests/system/providers/google/cloud/gcs/resources/__init__.py create mode 100644 tests/system/providers/google/cloud/kubernetes_engine/__init__.py rename tests/system/providers/google/{ => cloud}/kubernetes_engine/example_kubernetes_engine.py (100%) create mode 100644 tests/system/providers/google/cloud/spanner/__init__.py create mode 100644 tests/system/providers/google/cloud/speech_to_text/__init__.py create mode 100644 tests/system/providers/google/cloud/stackdriver/__init__.py create mode 100644 tests/system/providers/google/cloud/tasks/__init__.py create mode 100644 tests/system/providers/google/cloud/text_to_speech/__init__.py create mode 100644 tests/system/providers/google/cloud/workflows/__init__.py create mode 100644 tests/system/providers/google/workplace/__init__.py create mode 100644 tests/system/providers/presto/__init__.py create mode 100644 tests/test_utils/operators/__init__.py create mode 100644 tests/test_utils/perf/__init__.py create mode 100644 tests/test_utils/perf/dags/__init__.py create mode 100644 tests/testconfig/__init__.py create mode 100644 tests/testconfig/conf/__init__.py create mode 100644 tests/triggers/__init__.py create mode 100644 tests/www/test_logs/__init__.py create mode 100644 tests/www/views/__init__.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 22d9712590add..15d18dd1b096b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -28,7 +28,7 @@ repos: - id: check-hooks-apply name: Check if all hooks apply to the repository - repo: https://github.com/thlorenz/doctoc.git - rev: v2.1.0 + rev: v2.2.0 hooks: - id: doctoc name: Add TOC for md and rst files @@ -39,7 +39,7 @@ repos: - "--maxlevel" - "2" - repo: 
https://github.com/Lucas-C/pre-commit-hooks - rev: v1.1.13 + rev: v1.2.0 hooks: - id: forbid-tabs name: Fail if tabs are used in the project @@ -204,7 +204,7 @@ repos: pass_filenames: true # TODO: Bump to Python 3.8 when support for Python 3.7 is dropped in Airflow. - repo: https://github.com/asottile/pyupgrade - rev: v2.32.0 + rev: v2.32.1 hooks: - id: pyupgrade name: Upgrade Python code automatically @@ -242,7 +242,7 @@ repos: name: Run pydocstyle args: - --convention=pep257 - - --add-ignore=D100,D102,D103,D104,D105,D107,D202,D205,D400,D401 + - --add-ignore=D100,D102,D103,D104,D105,D107,D205,D400,D401 exclude: | (?x) ^tests/.*\.py$| @@ -422,11 +422,11 @@ repos: - id: check-no-relative-imports language: pygrep name: No relative imports - description: Airflow style is to use absolute imports only + description: Airflow style is to use absolute imports only (except docs building) entry: "^\\s*from\\s+\\." pass_filenames: true files: \.py$ - exclude: ^tests/|^airflow/_vendor/ + exclude: ^tests/|^airflow/_vendor/|^docs/ - id: check-for-inclusive-language language: pygrep name: Check for language that we do not accept as community @@ -449,6 +449,7 @@ repos: ^airflow/www/static/| ^airflow/providers/| ^tests/providers/apache/cassandra/hooks/test_cassandra.py$| + ^tests/system/providers/apache/spark/example_spark_dag.py$| ^docs/apache-airflow-providers-apache-cassandra/connections/cassandra.rst$| ^docs/apache-airflow-providers-apache-hive/commits.rst$| ^airflow/api_connexion/openapi/v1.yaml$| diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index edc7de3df967b..66edcf9c90e90 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -704,6 +704,7 @@ Misc/Internal - Remove deprecated usage of ``init_role()`` from API (#18820) - Remove duplicate code on dbapi hook (#18821) + Airflow 2.2.5, (2022-04-04) --------------------------- diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst index e2b57eae6c69b..5b7fa05908e46 100644 --- a/STATIC_CODE_CHECKS.rst +++ b/STATIC_CODE_CHECKS.rst @@ -205,10 +205,14 @@ require Breeze Docker image to be build locally. 
+--------------------------------------------------------+------------------------------------------------------------------+---------+ | check-system-tests-present | Check if system tests have required segments of code | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ +| check-system-tests-tocs | Check that system tests is properly added | | ++--------------------------------------------------------+------------------------------------------------------------------+---------+ | check-xml | Check XML files with xmllint | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ | codespell | Run codespell to check for common misspellings in files | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ +| create-missing-init-py-files-tests | Create missing init.py files in tests | | ++--------------------------------------------------------+------------------------------------------------------------------+---------+ | debug-statements | Detect accidentally committed debug statements | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ | detect-private-key | Detect if private key is added to the repository | | diff --git a/airflow/example_dags/example_branch_datetime_operator.py b/airflow/example_dags/example_branch_datetime_operator.py index 3c86e40402aef..e707514c868a0 100644 --- a/airflow/example_dags/example_branch_datetime_operator.py +++ b/airflow/example_dags/example_branch_datetime_operator.py @@ -26,7 +26,7 @@ from airflow.operators.datetime import BranchDateTimeOperator from airflow.operators.empty import EmptyOperator -dag = DAG( +dag1 = DAG( dag_id="example_branch_datetime_operator", start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), catchup=False, @@ -35,8 +35,8 @@ ) # [START howto_branch_datetime_operator] -empty_task_1 = EmptyOperator(task_id='date_in_range', dag=dag) -empty_task_2 = EmptyOperator(task_id='date_outside_range', dag=dag) +empty_task_11 = EmptyOperator(task_id='date_in_range', dag=dag1) +empty_task_21 = EmptyOperator(task_id='date_outside_range', dag=dag1) cond1 = BranchDateTimeOperator( task_id='datetime_branch', @@ -44,15 +44,15 @@ follow_task_ids_if_false=['date_outside_range'], target_upper=pendulum.datetime(2020, 10, 10, 15, 0, 0), target_lower=pendulum.datetime(2020, 10, 10, 14, 0, 0), - dag=dag, + dag=dag1, ) # Run empty_task_1 if cond1 executes between 2020-10-10 14:00:00 and 2020-10-10 15:00:00 -cond1 >> [empty_task_1, empty_task_2] +cond1 >> [empty_task_11, empty_task_21] # [END howto_branch_datetime_operator] -dag = DAG( +dag2 = DAG( dag_id="example_branch_datetime_operator_2", start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), catchup=False, @@ -60,8 +60,8 @@ schedule_interval="@daily", ) # [START howto_branch_datetime_operator_next_day] -empty_task_1 = EmptyOperator(task_id='date_in_range', dag=dag) -empty_task_2 = EmptyOperator(task_id='date_outside_range', dag=dag) +empty_task_12 = EmptyOperator(task_id='date_in_range', dag=dag2) +empty_task_22 = EmptyOperator(task_id='date_outside_range', dag=dag2) cond2 = BranchDateTimeOperator( task_id='datetime_branch', @@ -69,10 +69,10 @@ follow_task_ids_if_false=['date_outside_range'], target_upper=pendulum.time(0, 0, 0), target_lower=pendulum.time(15, 0, 0), - dag=dag, + 
dag=dag2, ) # Since target_lower happens after target_upper, target_upper will be moved to the following day # Run empty_task_1 if cond2 executes between 15:00:00, and 00:00:00 of the following day -cond2 >> [empty_task_1, empty_task_2] +cond2 >> [empty_task_12, empty_task_22] # [END howto_branch_datetime_operator_next_day] diff --git a/airflow/example_dags/example_external_task_marker_dag.py b/airflow/example_dags/example_external_task_marker_dag.py index 0c4479a0d66f0..733b732756633 100644 --- a/airflow/example_dags/example_external_task_marker_dag.py +++ b/airflow/example_dags/example_external_task_marker_dag.py @@ -18,23 +18,25 @@ """ Example DAG demonstrating setting up inter-DAG dependencies using ExternalTaskSensor and -ExternalTaskMarker +ExternalTaskMarker. In this example, child_task1 in example_external_task_marker_child depends on parent_task in -example_external_task_marker_parent. When parent_task is cleared with "Recursive" selected, -the presence of ExternalTaskMarker tells Airflow to clear child_task1 and its -downstream tasks. +example_external_task_marker_parent. When parent_task is cleared with 'Recursive' selected, +the presence of ExternalTaskMarker tells Airflow to clear child_task1 and its downstream tasks. ExternalTaskSensor will keep poking for the status of remote ExternalTaskMarker task at a regular interval till one of the following will happen: -1. ExternalTaskMarker reaches the states mentioned in the allowed_states list - In this case, ExternalTaskSensor will exit with a success status code -2. ExternalTaskMarker reaches the states mentioned in the failed_states list - In this case, ExternalTaskSensor will raise an AirflowException and user need to handle this - with multiple downstream tasks -3. ExternalTaskSensor times out - In this case, ExternalTaskSensor will raise AirflowSkipException or AirflowSensorTimeout - exception + +ExternalTaskMarker reaches the states mentioned in the allowed_states list. +In this case, ExternalTaskSensor will exit with a success status code + +ExternalTaskMarker reaches the states mentioned in the failed_states list +In this case, ExternalTaskSensor will raise an AirflowException and user need to handle this +with multiple downstream tasks + +ExternalTaskSensor times out. In this case, ExternalTaskSensor will raise AirflowSkipException +or AirflowSensorTimeout exception + """ import pendulum diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py index 241a551320efd..2bbe4fac58e2c 100644 --- a/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py +++ b/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py @@ -26,7 +26,8 @@ Further information: YOUTUBE_VIDEO_PUBLISHED_AFTER and YOUTUBE_VIDEO_PUBLISHED_BEFORE needs to be formatted -"YYYY-MM-DDThh:mm:ss.sZ". See https://developers.google.com/youtube/v3/docs/search/list for more information. +``YYYY-MM-DDThh:mm:ss.sZ``. +See https://developers.google.com/youtube/v3/docs/search/list for more information. YOUTUBE_VIDEO_PARTS depends on the fields you pass via YOUTUBE_VIDEO_FIELDS. See https://developers.google.com/youtube/v3/docs/videos/list#parameters for more information. YOUTUBE_CONN_ID is optional for public videos. 
It does only need to authenticate when there are private videos diff --git a/airflow/providers/amazon/aws/example_dags/example_s3.py b/airflow/providers/amazon/aws/example_dags/example_s3.py index ecd9d374cf688..83a6825d72fe0 100644 --- a/airflow/providers/amazon/aws/example_dags/example_s3.py +++ b/airflow/providers/amazon/aws/example_dags/example_s3.py @@ -62,12 +62,9 @@ # [START howto_sensor_s3_key_function_definition] def check_fn(files: List) -> bool: """ - Example of custom check: check if all files are bigger than 1kB + Example of custom check: check if all files are bigger than ``1kB`` :param files: List of S3 object attributes. - Format: [{ - 'Size': int - }] :return: true if the criteria is met :rtype: bool """ diff --git a/airflow/providers/arangodb/example_dags/example_arangodb.py b/airflow/providers/arangodb/example_dags/example_arangodb.py index f9da187cfb665..37a8250dfd16a 100644 --- a/airflow/providers/arangodb/example_dags/example_arangodb.py +++ b/airflow/providers/arangodb/example_dags/example_arangodb.py @@ -41,7 +41,7 @@ # [START howto_aql_sensor_template_file_arangodb] -sensor = AQLSensor( +sensor2 = AQLSensor( task_id="aql_sensor_template_file", query="search_judy.sql", timeout=60, @@ -65,7 +65,7 @@ # [START howto_aql_operator_template_file_arangodb] -operator = AQLOperator( +operator2 = AQLOperator( task_id='aql_operator_template_file', dag=dag, result_processor=lambda cursor: print([document["name"] for document in cursor]), diff --git a/airflow/providers/google/cloud/example_dags/example_automl_tables.py b/airflow/providers/google/cloud/example_dags/example_automl_tables.py index 9ba0314dae777..c13de99fa8512 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_tables.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_tables.py @@ -204,14 +204,14 @@ def get_target_column_spec(columns_specs: List[Dict], column_name: str) -> str: catchup=False, user_defined_macros={"extract_object_id": extract_object_id}, ) as example_dag: - create_dataset_task = AutoMLCreateDatasetOperator( + create_dataset_task2 = AutoMLCreateDatasetOperator( task_id="create_dataset_task", dataset=DATASET, location=GCP_AUTOML_LOCATION, project_id=GCP_PROJECT_ID, ) - dataset_id = create_dataset_task.output['dataset_id'] + dataset_id = create_dataset_task2.output['dataset_id'] import_dataset_task = AutoMLImportDataOperator( task_id="import_dataset_task", diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py index be858c4018753..bf73959d4ff72 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py @@ -17,17 +17,15 @@ # under the License. """ -Example Airflow DAG that demonstrates interactions with Google Cloud Transfer. +Example Airflow DAG that demonstrates interactions with Google Cloud Transfer. This DAG relies on +the following OS environment variables - -This DAG relies on the following OS environment variables +Note that you need to provide a large enough set of data so that operations do not execute too quickly. +Otherwise, DAG will fail. * GCP_PROJECT_ID - Google Cloud Project to use for the Google Cloud Transfer Service. * GCP_DESCRIPTION - Description of transfer job * GCP_TRANSFER_SOURCE_AWS_BUCKET - Amazon Web Services Storage bucket from which files are copied. - .. 
warning:: - You need to provide a large enough set of data so that operations do not execute too quickly. - Otherwise, DAG will fail. * GCP_TRANSFER_SECOND_TARGET_BUCKET - Google Cloud Storage bucket to which files are copied * WAIT_FOR_OPERATION_POKE_INTERVAL - interval of what to check the status of the operation A smaller value than the default value accelerates the system test and ensures its correct execution with diff --git a/airflow/providers/google/cloud/example_dags/example_pubsub.py b/airflow/providers/google/cloud/example_dags/example_pubsub.py index 8e3dd1fe8f01e..05ae16bd68f35 100644 --- a/airflow/providers/google/cloud/example_dags/example_pubsub.py +++ b/airflow/providers/google/cloud/example_dags/example_pubsub.py @@ -56,7 +56,7 @@ catchup=False, ) as example_sensor_dag: # [START howto_operator_gcp_pubsub_create_topic] - create_topic = PubSubCreateTopicOperator( + create_topic1 = PubSubCreateTopicOperator( task_id="create_topic", topic=TOPIC_FOR_SENSOR_DAG, project_id=GCP_PROJECT_ID, fail_if_exists=False ) # [END howto_operator_gcp_pubsub_create_topic] @@ -105,7 +105,7 @@ ) # [END howto_operator_gcp_pubsub_delete_topic] - create_topic >> subscribe_task >> publish_task + create_topic1 >> subscribe_task >> publish_task pull_messages >> pull_messages_result >> unsubscribe_task >> delete_topic # Task dependencies created via `XComArgs`: @@ -120,7 +120,7 @@ catchup=False, ) as example_operator_dag: # [START howto_operator_gcp_pubsub_create_topic] - create_topic = PubSubCreateTopicOperator( + create_topic2 = PubSubCreateTopicOperator( task_id="create_topic", topic=TOPIC_FOR_OPERATOR_DAG, project_id=GCP_PROJECT_ID ) # [END howto_operator_gcp_pubsub_create_topic] @@ -170,7 +170,7 @@ # [END howto_operator_gcp_pubsub_delete_topic] ( - create_topic + create_topic2 >> subscribe_task >> publish_task >> pull_messages_operator diff --git a/airflow/providers/google/cloud/example_dags/example_vertex_ai.py b/airflow/providers/google/cloud/example_dags/example_vertex_ai.py index cded48ae9b4de..a421c31a7ee2c 100644 --- a/airflow/providers/google/cloud/example_dags/example_vertex_ai.py +++ b/airflow/providers/google/cloud/example_dags/example_vertex_ai.py @@ -26,15 +26,16 @@ This DAG relies on the following OS environment variables: * GCP_VERTEX_AI_BUCKET - Google Cloud Storage bucket where the model will be saved -after training process was finished. + after training process was finished. * CUSTOM_CONTAINER_URI - path to container with model. * PYTHON_PACKAGE_GSC_URI - path to test model in archive. * LOCAL_TRAINING_SCRIPT_PATH - path to local training script. * DATASET_ID - ID of dataset which will be used in training process. * MODEL_ID - ID of model which will be used in predict process. * MODEL_ARTIFACT_URI - The artifact_uri should be the path to a GCS directory containing saved model -artifacts. + artifacts. """ + import os from datetime import datetime from uuid import uuid4 diff --git a/airflow/providers/mongo/hooks/mongo.py b/airflow/providers/mongo/hooks/mongo.py index 96a5ec800302a..c022ec4135ebd 100644 --- a/airflow/providers/mongo/hooks/mongo.py +++ b/airflow/providers/mongo/hooks/mongo.py @@ -266,7 +266,7 @@ def replace_many( :param mongo_collection: The name of the collection to update. :param docs: The new documents. :param filter_docs: A list of queries that match the documents to replace. - Can be omitted; then the _id fields from docs will be used. + Can be omitted; then the _id fields from airflow.docs will be used. :param mongo_db: The name of the database to use. 
Can be omitted; then the database from the connection string is used. :param upsert: If ``True``, perform an insert if no documents diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py b/dev/breeze/src/airflow_breeze/pre_commit_ids.py index 40ff16a7a03bd..2f8366b01ffd2 100644 --- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py +++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py @@ -59,8 +59,10 @@ 'check-setup-order', 'check-start-date-not-used-in-defaults', 'check-system-tests-present', + 'check-system-tests-tocs', 'check-xml', 'codespell', + 'create-missing-init-py-files-tests', 'debug-statements', 'detect-private-key', 'doctoc', diff --git a/docs/__init__.py b/docs/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/docs/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/docs/apache-airflow-providers-alibaba/index.rst b/docs/apache-airflow-providers-alibaba/index.rst index fca2d993f395d..d5a1292090ae1 100644 --- a/docs/apache-airflow-providers-alibaba/index.rst +++ b/docs/apache-airflow-providers-alibaba/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/alibaba/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/alibaba/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-amazon/index.rst b/docs/apache-airflow-providers-amazon/index.rst index 477b6f6a7573f..d1fd9c63006f8 100644 --- a/docs/apache-airflow-providers-amazon/index.rst +++ b/docs/apache-airflow-providers-amazon/index.rst @@ -36,6 +36,12 @@ Content Python API <_api/airflow/providers/amazon/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/amazon/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-beam/index.rst b/docs/apache-airflow-providers-apache-beam/index.rst index f9a7159ccb3ff..dc26b8c708522 100644 --- a/docs/apache-airflow-providers-apache-beam/index.rst +++ b/docs/apache-airflow-providers-apache-beam/index.rst @@ -26,6 +26,17 @@ Content :caption: References Python API <_api/airflow/providers/apache/beam/index> + +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/beam/index> + +.. 
toctree:: + :maxdepth: 1 + :caption: Resources + PyPI Repository Example DAGs diff --git a/docs/apache-airflow-providers-apache-cassandra/index.rst b/docs/apache-airflow-providers-apache-cassandra/index.rst index 4f207a2e3f1d5..6fcf012613868 100644 --- a/docs/apache-airflow-providers-apache-cassandra/index.rst +++ b/docs/apache-airflow-providers-apache-cassandra/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/apache/cassandra/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/cassandra/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-drill/index.rst b/docs/apache-airflow-providers-apache-drill/index.rst index d3d85f780fe75..bc8b7aa1113f2 100644 --- a/docs/apache-airflow-providers-apache-drill/index.rst +++ b/docs/apache-airflow-providers-apache-drill/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/apache/drill/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/drill/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-druid/index.rst b/docs/apache-airflow-providers-apache-druid/index.rst index 4946cc1fd143a..d8558d0b49eac 100644 --- a/docs/apache-airflow-providers-apache-druid/index.rst +++ b/docs/apache-airflow-providers-apache-druid/index.rst @@ -32,6 +32,18 @@ Content :caption: References Python API <_api/airflow/providers/apache/druid/index> + +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/druid/index> + + +.. toctree:: + :maxdepth: 1 + :caption: Resources + PyPI Repository Installing from sources Example DAGs diff --git a/docs/apache-airflow-providers-apache-hive/index.rst b/docs/apache-airflow-providers-apache-hive/index.rst index 0ca5ac916b47e..de8124e80cc03 100644 --- a/docs/apache-airflow-providers-apache-hive/index.rst +++ b/docs/apache-airflow-providers-apache-hive/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/apache/hive/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/hive/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-kylin/index.rst b/docs/apache-airflow-providers-apache-kylin/index.rst index 02fb13b255947..faf390c0b3fa9 100644 --- a/docs/apache-airflow-providers-apache-kylin/index.rst +++ b/docs/apache-airflow-providers-apache-kylin/index.rst @@ -28,6 +28,12 @@ Content Python API <_api/airflow/providers/apache/kylin/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/kylin/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-livy/index.rst b/docs/apache-airflow-providers-apache-livy/index.rst index cd2f5058c61fc..5b27808aaa716 100644 --- a/docs/apache-airflow-providers-apache-livy/index.rst +++ b/docs/apache-airflow-providers-apache-livy/index.rst @@ -33,6 +33,12 @@ Content Python API <_api/airflow/providers/apache/livy/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/livy/index> + .. 
toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-pig/index.rst b/docs/apache-airflow-providers-apache-pig/index.rst index 4fd1ffee28698..33dd0a1f095f2 100644 --- a/docs/apache-airflow-providers-apache-pig/index.rst +++ b/docs/apache-airflow-providers-apache-pig/index.rst @@ -33,6 +33,12 @@ Content Python API <_api/airflow/providers/apache/pig/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/pig/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-spark/index.rst b/docs/apache-airflow-providers-apache-spark/index.rst index 03f1449cb3d94..7bfecdc428377 100644 --- a/docs/apache-airflow-providers-apache-spark/index.rst +++ b/docs/apache-airflow-providers-apache-spark/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/apache/spark/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/spark/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-cncf-kubernetes/index.rst b/docs/apache-airflow-providers-cncf-kubernetes/index.rst index bc5b6b7d169ff..4ead6580fbdf6 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/index.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/cncf/kubernetes/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/cncf/kubernetes/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-databricks/index.rst b/docs/apache-airflow-providers-databricks/index.rst index 1a6d32cab3df5..712eee960ba38 100644 --- a/docs/apache-airflow-providers-databricks/index.rst +++ b/docs/apache-airflow-providers-databricks/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/databricks/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/databricks/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-dbt-cloud/index.rst b/docs/apache-airflow-providers-dbt-cloud/index.rst index ecfa51fc0ac59..2a1a9cf199358 100644 --- a/docs/apache-airflow-providers-dbt-cloud/index.rst +++ b/docs/apache-airflow-providers-dbt-cloud/index.rst @@ -39,6 +39,12 @@ Content Python API <_api/airflow/providers/dbt/cloud/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/dbt/cloud/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-dingding/index.rst b/docs/apache-airflow-providers-dingding/index.rst index ba3be6088494b..4966b0a174073 100644 --- a/docs/apache-airflow-providers-dingding/index.rst +++ b/docs/apache-airflow-providers-dingding/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/dingding/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/dingding/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-docker/index.rst b/docs/apache-airflow-providers-docker/index.rst index 218968f18ef96..3fa5c2b9c0fc0 100644 --- a/docs/apache-airflow-providers-docker/index.rst +++ b/docs/apache-airflow-providers-docker/index.rst @@ -29,6 +29,12 @@ Content Connection types Python API <_api/airflow/providers/docker/index> +.. 
toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/docker/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-google/index.rst b/docs/apache-airflow-providers-google/index.rst index f7ea7fae3149f..c22fa4bf57fac 100644 --- a/docs/apache-airflow-providers-google/index.rst +++ b/docs/apache-airflow-providers-google/index.rst @@ -38,6 +38,12 @@ Content Python API <_api/airflow/providers/google/index> Configuration +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/google/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst b/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst index 96038ce8c191a..57e4d87ff8c03 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst @@ -42,7 +42,7 @@ Create dataset To create an empty dataset in a BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_dataset] @@ -58,7 +58,7 @@ To get the details of an existing dataset you can use This operator returns a `Dataset Resource `__. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_dataset] @@ -72,7 +72,7 @@ List tables in dataset To retrieve the list of tables in a given dataset use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryGetDatasetTablesOperator`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_dataset_tables] @@ -89,7 +89,7 @@ To update a table in BigQuery you can use The update method replaces the entire Table resource, whereas the patch method only replaces fields that are provided in the submitted Table resource. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_table] @@ -106,7 +106,7 @@ To update a dataset in BigQuery you can use The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_dataset] @@ -120,7 +120,7 @@ Delete dataset To delete an existing dataset from a BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteDatasetOperator`. -.. 
exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_dataset] @@ -143,7 +143,7 @@ ways. You may either directly pass the schema fields in, or you may point the operator to a Google Cloud Storage object name. The object in Google Cloud Storage must be a JSON file with the schema fields in it. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_table] @@ -151,7 +151,7 @@ Storage must be a JSON file with the schema fields in it. You can use this operator to create a view on top of an existing table. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_view] @@ -160,7 +160,7 @@ You can use this operator to create a view on top of an existing table. You can also use this operator to create a materialized view that periodically cache results of a query for increased performance and efficiency. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_materialized_view] @@ -179,7 +179,7 @@ Similarly to :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyTableOperator` you can directly pass the schema fields in. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_operations.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_external_table] @@ -187,7 +187,7 @@ you can directly pass the schema fields in. Or you may point the operator to a Google Cloud Storage object name where the schema is stored. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_table_schema_json] @@ -211,7 +211,7 @@ returned list will be equal to the number of rows fetched. Each element in the list will again be a list where elements would represent the column values for that row. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_get_data] @@ -228,7 +228,7 @@ To upsert a table you can use This operator either updates the existing table or creates a new, empty table in the given dataset. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_upsert_table] @@ -245,7 +245,7 @@ To update the schema of a table you can use This operator updates the schema field values supplied, while leaving the rest unchanged. This is useful for instance to set new field descriptions on an existing table schema. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_table_schema] @@ -259,7 +259,7 @@ Delete table To delete an existing table you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteTableOperator`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_table] @@ -267,7 +267,7 @@ To delete an existing table you can use You can also use this operator to delete a view. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_view] @@ -275,7 +275,7 @@ You can also use this operator to delete a view. You can also use this operator to delete a materialized view. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_materialized_view] @@ -288,7 +288,7 @@ Execute BigQuery jobs Let's say you would like to execute the following query. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 0 :start-after: [START howto_operator_bigquery_query] @@ -298,7 +298,7 @@ To execute the SQL query in a specific BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryInsertJobOperator` with proper query job configuration that can be Jinja templated. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_insert_job] @@ -310,7 +310,7 @@ For more information on types of BigQuery job please check If you want to include some files in your configuration you can use ``include`` clause of Jinja template language as follow: -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_select_job] @@ -339,7 +339,7 @@ This operator expects a sql query that will return a single row. Each value on that first row is evaluated using python ``bool`` casting. 
If any of the values return ``False`` the check is failed and errors out. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_check] @@ -357,7 +357,7 @@ This operator expects a sql query that will return a single row. Each value on that first row is evaluated against ``pass_value`` which can be either a string or numeric value. If numeric, you can also specify ``tolerance``. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_value_check] @@ -372,7 +372,7 @@ To check that the values of metrics given as SQL expressions are within a certai tolerance of the ones from ``days_back`` before you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryIntervalCheckOperator`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_interval_check] @@ -390,7 +390,7 @@ use the ``{{ ds_nodash }}`` macro as the table name suffix. :class:`~airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistenceSensor`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table] @@ -402,7 +402,7 @@ Check that a Table Partition exists To check that a table exists and has a partition you can use. :class:`~airflow.providers.google.cloud.sensors.bigquery.BigQueryTablePartitionExistenceSensor`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table_partition] diff --git a/docs/apache-airflow-providers-google/operators/cloud/datastore.rst b/docs/apache-airflow-providers-google/operators/cloud/datastore.rst index 4a8e623d6ee14..05da36a3f83a2 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/datastore.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datastore.rst @@ -38,7 +38,7 @@ Export Entities To export entities from Google Cloud Datastore to Cloud Storage use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreExportEntitiesOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_export_import.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_export_import.py :language: python :dedent: 4 :start-after: [START how_to_export_task] @@ -52,7 +52,7 @@ Import Entities To import entities from Cloud Storage to Google Cloud Datastore use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreImportEntitiesOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_export_import.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_export_import.py :language: python :dedent: 4 :start-after: [START how_to_import_task] @@ -66,7 +66,7 @@ Allocate Ids To allocate IDs for incomplete keys use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreAllocateIdsOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_allocate_ids] @@ -74,7 +74,7 @@ To allocate IDs for incomplete keys use An example of a partial keys required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_keys_def] @@ -88,7 +88,7 @@ Begin transaction To begin a new transaction use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreBeginTransactionOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_begin_transaction] @@ -96,7 +96,7 @@ To begin a new transaction use An example of a transaction options required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_transaction_def] @@ -110,7 +110,7 @@ Commit transaction To commit a transaction, optionally creating, deleting or modifying some entities use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreCommitOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_commit_task] @@ -118,7 +118,7 @@ use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreCo An example of a commit information required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_commit_def] @@ -132,7 +132,7 @@ Run query To run a query for entities use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRunQueryOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_query.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_query.py :language: python :dedent: 4 :start-after: [START how_to_run_query] @@ -140,7 +140,7 @@ To run a query for entities use An example of a query required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_query.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_query.py :language: python :dedent: 0 :start-after: [START how_to_query_def] @@ -154,7 +154,7 @@ Roll back transaction To roll back a transaction use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRollbackOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_rollback.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_rollback.py :language: python :dedent: 4 :start-after: [START how_to_rollback_transaction] @@ -168,7 +168,7 @@ Get operation state To get the current state of a long-running operation use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreGetOperationOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_export_import.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_export_import.py :language: python :dedent: 4 :start-after: [START get_operation_state] @@ -182,7 +182,7 @@ Delete operation To delete an operation use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreDeleteOperationOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_export_import.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_export_import.py :language: python :dedent: 4 :start-after: [START delete_operation] diff --git a/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst index 39da66207a965..23ed67d8aa9b1 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst @@ -43,7 +43,7 @@ Create GKE cluster Here is an example of a cluster definition: -.. exampleinclude:: /../../tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :start-after: [START howto_operator_gcp_gke_create_cluster_definition] :end-before: [END howto_operator_gcp_gke_create_cluster_definition] @@ -53,7 +53,7 @@ A dict object like this, or a definition, is required when creating a cluster with :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKECreateClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_create_cluster] @@ -68,7 +68,7 @@ To delete a cluster, use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEDeleteClusterOperator`. This would also delete all the nodes allocated to the cluster. -.. exampleinclude:: /../../tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_delete_cluster] @@ -117,7 +117,7 @@ is the path ``/airflow/xcom``. To provide values to the XCom, ensure your Pod wr ``return.json`` in the sidecar. The contents of this can then be used downstream in your DAG. 
Here is an example of it being used: -.. exampleinclude:: /../../tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_start_pod_xcom] @@ -125,7 +125,7 @@ Here is an example of it being used: And then use it in other operators: -.. exampleinclude:: /../../tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_xcom_result] diff --git a/docs/apache-airflow-providers-http/index.rst b/docs/apache-airflow-providers-http/index.rst index 2b7ff3930dd0f..e9bf6af3116b7 100644 --- a/docs/apache-airflow-providers-http/index.rst +++ b/docs/apache-airflow-providers-http/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/http/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/http/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-jdbc/index.rst b/docs/apache-airflow-providers-jdbc/index.rst index c46fc1e43a880..9cc096df79492 100644 --- a/docs/apache-airflow-providers-jdbc/index.rst +++ b/docs/apache-airflow-providers-jdbc/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/jdbc/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/jdbc/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-jenkins/index.rst b/docs/apache-airflow-providers-jenkins/index.rst index 9a23555000ed5..92107cd33d587 100644 --- a/docs/apache-airflow-providers-jenkins/index.rst +++ b/docs/apache-airflow-providers-jenkins/index.rst @@ -33,6 +33,12 @@ Content Python API <_api/airflow/providers/jenkins/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/jenkins/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-microsoft-azure/index.rst b/docs/apache-airflow-providers-microsoft-azure/index.rst index ddbc51d490c4a..681ee497f2adc 100644 --- a/docs/apache-airflow-providers-microsoft-azure/index.rst +++ b/docs/apache-airflow-providers-microsoft-azure/index.rst @@ -37,6 +37,12 @@ Content Python API <_api/airflow/providers/microsoft/azure/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/microsoft/azure/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-microsoft-mssql/index.rst b/docs/apache-airflow-providers-microsoft-mssql/index.rst index ac42801752ede..bb3df09b2cf85 100644 --- a/docs/apache-airflow-providers-microsoft-mssql/index.rst +++ b/docs/apache-airflow-providers-microsoft-mssql/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/microsoft/mssql/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/microsoft/mssql/index> + .. 
toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-microsoft-winrm/index.rst b/docs/apache-airflow-providers-microsoft-winrm/index.rst index 5f3d5d148a16e..1c15797fa605a 100644 --- a/docs/apache-airflow-providers-microsoft-winrm/index.rst +++ b/docs/apache-airflow-providers-microsoft-winrm/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/microsoft/winrm/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/microsoft/winrm/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-mysql/index.rst b/docs/apache-airflow-providers-mysql/index.rst index 6fb518d7e9a5f..90cfe7b235ddb 100644 --- a/docs/apache-airflow-providers-mysql/index.rst +++ b/docs/apache-airflow-providers-mysql/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/mysql/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/mysql/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-opsgenie/index.rst b/docs/apache-airflow-providers-opsgenie/index.rst index 3197d6d77fe56..776d096bdda58 100644 --- a/docs/apache-airflow-providers-opsgenie/index.rst +++ b/docs/apache-airflow-providers-opsgenie/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/opsgenie/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/opsgenie/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-papermill/index.rst b/docs/apache-airflow-providers-papermill/index.rst index 7effd4b35ca76..424ecb67398cd 100644 --- a/docs/apache-airflow-providers-papermill/index.rst +++ b/docs/apache-airflow-providers-papermill/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/papermill/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/papermill/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-presto/index.rst b/docs/apache-airflow-providers-presto/index.rst index b3c4b6f09f768..6c066e3602ade 100644 --- a/docs/apache-airflow-providers-presto/index.rst +++ b/docs/apache-airflow-providers-presto/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/presto/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/presto/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-qubole/index.rst b/docs/apache-airflow-providers-qubole/index.rst index 1e5b673d2cc80..aa687242250c3 100644 --- a/docs/apache-airflow-providers-qubole/index.rst +++ b/docs/apache-airflow-providers-qubole/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/qubole/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/qubole/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-salesforce/index.rst b/docs/apache-airflow-providers-salesforce/index.rst index 2da4b06569dee..37d7956ea4cf5 100644 --- a/docs/apache-airflow-providers-salesforce/index.rst +++ b/docs/apache-airflow-providers-salesforce/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/salesforce/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/salesforce/index> + .. 
toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-singularity/index.rst b/docs/apache-airflow-providers-singularity/index.rst index 32acfebd72098..75b63f96eb4de 100644 --- a/docs/apache-airflow-providers-singularity/index.rst +++ b/docs/apache-airflow-providers-singularity/index.rst @@ -28,6 +28,12 @@ Content Python API <_api/airflow/providers/singularity/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/singularity/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-snowflake/index.rst b/docs/apache-airflow-providers-snowflake/index.rst index 5d0a6b317cc14..2ff510af123ff 100644 --- a/docs/apache-airflow-providers-snowflake/index.rst +++ b/docs/apache-airflow-providers-snowflake/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/snowflake/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/snowflake/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-tableau/index.rst b/docs/apache-airflow-providers-tableau/index.rst index 41abe988f1b44..27d08fe67a153 100644 --- a/docs/apache-airflow-providers-tableau/index.rst +++ b/docs/apache-airflow-providers-tableau/index.rst @@ -30,6 +30,12 @@ Content Operators Python API <_api/airflow/providers/tableau/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/tableau/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-telegram/index.rst b/docs/apache-airflow-providers-telegram/index.rst index 782de1e712d21..f3a9a3fd87763 100644 --- a/docs/apache-airflow-providers-telegram/index.rst +++ b/docs/apache-airflow-providers-telegram/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/telegram/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/telegram/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-trino/index.rst b/docs/apache-airflow-providers-trino/index.rst index 239ec03cd8002..c56a9b85897db 100644 --- a/docs/apache-airflow-providers-trino/index.rst +++ b/docs/apache-airflow-providers-trino/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/trino/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/trino/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-yandex/index.rst b/docs/apache-airflow-providers-yandex/index.rst index 7ad2303510141..7699d53bd2c04 100644 --- a/docs/apache-airflow-providers-yandex/index.rst +++ b/docs/apache-airflow-providers-yandex/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/yandex/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/yandex/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow/python-api-ref.rst b/docs/apache-airflow/python-api-ref.rst index e8c4dff3c499c..28a4545859db6 100644 --- a/docs/apache-airflow/python-api-ref.rst +++ b/docs/apache-airflow/python-api-ref.rst @@ -154,3 +154,12 @@ schedule DAG runs in ways not possible with built-in schedule expressions. :maxdepth: 1 _api/airflow/timetables/index + +Example DAGs +------------ + +.. 
toctree:: + :includehidden: + :maxdepth: 1 + + _api/airflow/example_dags/index diff --git a/docs/build_docs.py b/docs/build_docs.py index d7edad1cad97e..9f4217bd106c4 100755 --- a/docs/build_docs.py +++ b/docs/build_docs.py @@ -15,17 +15,19 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + + import argparse import multiprocessing import os import sys from collections import defaultdict -from typing import Dict, List, NamedTuple, Optional, Tuple +from itertools import filterfalse, tee +from typing import Callable, Dict, Iterable, List, NamedTuple, Optional, Tuple, TypeVar from rich.console import Console from tabulate import tabulate -from airflow.utils.helpers import partition from docs.exts.docs_build import dev_index_generator, lint_checks from docs.exts.docs_build.code_utils import CONSOLE_WIDTH, PROVIDER_INIT_FILE from docs.exts.docs_build.docs_builder import DOCS_DIR, AirflowDocsBuilder, get_available_packages @@ -62,6 +64,14 @@ console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH) +T = TypeVar('T') + + +def partition(pred: Callable[[T], bool], iterable: Iterable[T]) -> Tuple[Iterable[T], Iterable[T]]: + """Use a predicate to partition entries into false entries and true entries""" + iter_1, iter_2 = tee(iterable) + return filterfalse(pred, iter_1), filter(pred, iter_2) + def _promote_new_flags(): console.print() diff --git a/docs/conf.py b/docs/conf.py index b1742f0c00202..47334381fc8e4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -35,20 +35,17 @@ import os import sys from collections import defaultdict +from pathlib import Path from typing import Any, Dict, List, Optional, Tuple import yaml -try: - from yaml import CSafeLoader as SafeLoader -except ImportError: - from yaml import SafeLoader # type: ignore[misc] - import airflow from airflow.configuration import AirflowConfigParser, default_config_yaml -from docs.exts.docs_build.third_party_inventories import THIRD_PARTY_INDEXES -sys.path.append(os.path.join(os.path.dirname(__file__), 'exts')) +sys.path.append(str(Path(__file__).parent / 'exts')) + +from docs_build.third_party_inventories import THIRD_PARTY_INDEXES # noqa: E402 CONF_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__))) INVENTORY_CACHE_DIR = os.path.join(CONF_DIR, '_inventory_cache') @@ -61,6 +58,7 @@ if PACKAGE_NAME == 'apache-airflow': PACKAGE_DIR = os.path.join(ROOT_DIR, 'airflow') PACKAGE_VERSION = airflow.__version__ + SYSTEM_TESTS_DIR = None elif PACKAGE_NAME.startswith('apache-airflow-providers-'): from provider_yaml_utils import load_package_data @@ -75,23 +73,27 @@ raise Exception(f"Could not find provider.yaml file for package: {PACKAGE_NAME}") PACKAGE_DIR = CURRENT_PROVIDER['package-dir'] PACKAGE_VERSION = CURRENT_PROVIDER['versions'][0] + SYSTEM_TESTS_DIR = CURRENT_PROVIDER['system-tests-dir'] elif PACKAGE_NAME == 'apache-airflow-providers': from provider_yaml_utils import load_package_data PACKAGE_DIR = os.path.join(ROOT_DIR, 'airflow', 'providers') PACKAGE_VERSION = 'devel' ALL_PROVIDER_YAMLS = load_package_data() + SYSTEM_TESTS_DIR = None elif PACKAGE_NAME == 'helm-chart': PACKAGE_DIR = os.path.join(ROOT_DIR, 'chart') CHART_YAML_FILE = os.path.join(PACKAGE_DIR, 'Chart.yaml') with open(CHART_YAML_FILE) as chart_file: - chart_yaml_contents = yaml.load(chart_file, SafeLoader) + chart_yaml_contents = yaml.safe_load(chart_file) PACKAGE_VERSION = chart_yaml_contents['version'] + SYSTEM_TESTS_DIR = None else: PACKAGE_DIR = 
None PACKAGE_VERSION = 'devel' + SYSTEM_TESTS_DIR = None # Adds to environment variables for easy access from other plugins like airflow_intersphinx. os.environ['AIRFLOW_PACKAGE_NAME'] = PACKAGE_NAME if PACKAGE_DIR: @@ -220,6 +222,7 @@ def _get_rst_filepath_from_path(filepath: str): exclude_patterns.append(f"_api/airflow/{name.rpartition('.')[0]}") browsable_packages = [ "hooks", + "example_dags", "executors", "models", "operators", @@ -318,9 +321,12 @@ def _get_rst_filepath_from_path(filepath: str): html_show_copyright = False # Theme configuration -html_theme_options: Dict[str, Any] = { - 'hide_website_buttons': True, -} +if PACKAGE_NAME.startswith('apache-airflow-providers-'): + # Only hide hidden items for providers. For Chart and Airflow we are using the approach where + # TOC is hidden but sidebar still shows the content (but we are not doing it for providers). + html_theme_options: Dict[str, Any] = {'hide_website_buttons': True, 'sidebar_includehidden': False} +else: + html_theme_options = {'hide_website_buttons': True, 'sidebar_includehidden': True} if FOR_PRODUCTION: html_theme_options['navbar_links'] = [ {'href': '/community/', 'text': 'Community'}, @@ -415,7 +421,7 @@ def _load_config(): return {} with open(file_path) as f: - return yaml.load(f, SafeLoader) + return yaml.safe_load(f) config = _load_config() jinja_contexts = { @@ -668,6 +674,9 @@ def _get_params(root_schema: dict, prefix: str = "", default_section: str = "") PACKAGE_DIR, ] +if SYSTEM_TESTS_DIR and os.path.exists(SYSTEM_TESTS_DIR): + autoapi_dirs.append(SYSTEM_TESTS_DIR) + # A directory that has user-defined templates to override our default templates. if PACKAGE_NAME == 'apache-airflow': autoapi_template_dir = 'autoapi_templates' @@ -675,11 +684,13 @@ def _get_params(root_schema: dict, prefix: str = "", default_section: str = "") # A list of patterns to ignore when finding files autoapi_ignore = [ '*/airflow/_vendor/*', - '*/example_dags/*', '*/_internal*', '*/node_modules/*', '*/migrations/*', '*/contrib/*', + '**/example_sla_dag.py', + '**/example_taskflow_api_etl_docker_virtualenv.py', + '**/example_dag_decorator.py', ] if PACKAGE_NAME == 'apache-airflow': autoapi_ignore.append('*/airflow/providers/*') diff --git a/docs/exts/docs_build/docs_builder.py b/docs/exts/docs_build/docs_builder.py index cc65f3f1700fc..b8fcf6fd64ecf 100644 --- a/docs/exts/docs_build/docs_builder.py +++ b/docs/exts/docs_build/docs_builder.py @@ -24,7 +24,7 @@ from rich.console import Console -from docs.exts.docs_build.code_utils import ( +from .code_utils import ( AIRFLOW_SITE_DIR, ALL_PROVIDER_YAMLS, CONSOLE_WIDTH, @@ -32,9 +32,9 @@ PROCESS_TIMEOUT, pretty_format_path, ) -from docs.exts.docs_build.errors import DocBuildError, parse_sphinx_warnings -from docs.exts.docs_build.helm_chart_utils import chart_version -from docs.exts.docs_build.spelling_checks import SpellingError, parse_spelling_warnings +from .errors import DocBuildError, parse_sphinx_warnings +from .helm_chart_utils import chart_version +from .spelling_checks import SpellingError, parse_spelling_warnings console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH) diff --git a/docs/exts/provider_yaml_utils.py b/docs/exts/provider_yaml_utils.py index a6d1ee297d560..085cd480c722f 100644 --- a/docs/exts/provider_yaml_utils.py +++ b/docs/exts/provider_yaml_utils.py @@ -18,19 +18,14 @@ import json import os from glob import glob +from pathlib import Path from typing import Any, Dict, List import jsonschema import yaml -try: - from yaml import CSafeLoader 
as SafeLoader -except ImportError: - from yaml import SafeLoader # type: ignore[misc] - - -ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) -PROVIDER_DATA_SCHEMA_PATH = os.path.join(ROOT_DIR, "airflow", "provider.yaml.schema.json") +ROOT_DIR = Path(__file__).parents[2].resolve() +PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR / "airflow" / "provider.yaml.schema.json" def _load_schema() -> Dict[str, Any]: @@ -40,8 +35,17 @@ def _load_schema() -> Dict[str, Any]: def _filepath_to_module(filepath: str): - filepath = os.path.relpath(os.path.abspath(filepath), ROOT_DIR) - return filepath.replace("/", ".") + return str(Path(filepath).relative_to(ROOT_DIR)).replace("/", ".") + + +def _filepath_to_system_tests(filepath: str): + return str( + ROOT_DIR + / "tests" + / "system" + / "providers" + / Path(filepath).relative_to(ROOT_DIR / "airflow" / "providers") + ) def get_provider_yaml_paths(): @@ -59,12 +63,14 @@ def load_package_data() -> List[Dict[str, Any]]: result = [] for provider_yaml_path in get_provider_yaml_paths(): with open(provider_yaml_path) as yaml_file: - provider = yaml.load(yaml_file, SafeLoader) + provider = yaml.safe_load(yaml_file) try: jsonschema.validate(provider, schema=schema) except jsonschema.ValidationError: raise Exception(f"Unable to parse: {provider_yaml_path}.") - provider['python-module'] = _filepath_to_module(os.path.dirname(provider_yaml_path)) - provider['package-dir'] = os.path.dirname(provider_yaml_path) + provider_yaml_dir = os.path.dirname(provider_yaml_path) + provider['python-module'] = _filepath_to_module(provider_yaml_dir) + provider['package-dir'] = provider_yaml_dir + provider['system-tests-dir'] = _filepath_to_system_tests(provider_yaml_dir) result.append(provider) return result diff --git a/docs/publish_docs.py b/docs/publish_docs.py index 60c89d10e424c..7451033f16c20 100755 --- a/docs/publish_docs.py +++ b/docs/publish_docs.py @@ -17,6 +17,7 @@ # specific language governing permissions and limitations # under the License. + import argparse import os @@ -26,6 +27,7 @@ AIRFLOW_SITE_DIR = os.environ.get('AIRFLOW_SITE_DIRECTORY') + if __name__ != "__main__": raise SystemExit( "This file is intended to be executed as an executable program. You cannot use it as a module." 
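As a rough illustration of the new ``system-tests-dir`` value that ``load_package_data`` now attaches to each provider (see the ``provider_yaml_utils.py`` hunk above), the mapping simply mirrors a provider package directory under ``tests/system/providers``; this is also why the ``System tests`` toctrees added throughout the provider docs point at ``_api/tests/system/providers/<provider>/index``. A minimal sketch, assuming a hypothetical checkout root of ``/opt/airflow`` used only for illustration::

    from pathlib import Path

    # Hypothetical checkout root, for illustration only.
    ROOT_DIR = Path("/opt/airflow")

    def filepath_to_system_tests(provider_package_dir: str) -> str:
        # airflow/providers/<provider> is mirrored under tests/system/providers/<provider>
        relative = Path(provider_package_dir).relative_to(ROOT_DIR / "airflow" / "providers")
        return str(ROOT_DIR / "tests" / "system" / "providers" / relative)

    print(filepath_to_system_tests("/opt/airflow/airflow/providers/google"))
    # -> /opt/airflow/tests/system/providers/google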
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index b9f2aad173a90..69130e7d058d6 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -193,6 +193,7 @@ Hou Http HttpError HttpRequest +IGM IdP ImageAnnotatorClient Imap @@ -281,6 +282,7 @@ OSS Oauth Oauthlib Okta +OnFailure Oozie Opsgenie Optimise @@ -794,6 +796,7 @@ evals eventlet evo exasol +executables execvp exitcode explicit @@ -819,12 +822,14 @@ filehandle fileloc filelocs filepath +fileshare filesize filesystem filesystems filetype finalizers findall +firestore firstname fluentd fmt @@ -873,6 +878,7 @@ greenlet groupId grpc gz +gzipped hadoop hadoopcmd hardcoded @@ -1184,6 +1190,7 @@ preloading prepend prepended preprocess +preprocessing presign presigned prestocmd @@ -1339,6 +1346,7 @@ sourceRepository sourceUploadUrl sparkApplication sparkcmd +sparkr sparksql spegno spotahome @@ -1469,6 +1477,7 @@ tsql tsv ttl tunables +twitterHandle txt typeahead tz diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index c6b12db9fa491..96155ac41a84c 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -1 +1 @@ -ed70b784364f0b604176ea142305906b +2f24dd5bade0ac4b635ef6ada358590b diff --git a/images/breeze/output-static-checks.svg b/images/breeze/output-static-checks.svg index edbc2fee71d47..1e8d54ca8ec49 100644 --- a/images/breeze/output-static-checks.svg +++ b/images/breeze/output-static-checks.svg @@ -1,4 +1,4 @@ - Command: static-checks + Command: static-checks -Usage: breeze static-checks [OPTIONS] [PRECOMMIT_ARGS]... -Run static checks. -╭─ Pre-commit flags ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---type-tType(s) of the static checks to run (multiple can be added).
[This hunk updates the pre-rendered `breeze static-checks --help` screenshot. The only textual change in the regenerated image is that the list of available --type values now also contains the two new checks introduced by this patch, check-system-tests-tocs and create-missing-init-py-files-tests; the remaining flags and options are unchanged.]
diff --git a/scripts/ci/pre_commit/pre_commit_check_init_in_tests.py b/scripts/ci/pre_commit/pre_commit_check_init_in_tests.py new file mode 100755 index 0000000000000..947f284e1661d --- /dev/null +++ b/scripts/ci/pre_commit/pre_commit_check_init_in_tests.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import os +import pathlib +import sys +from pathlib import Path +from typing import List + +from rich.console import Console + +if __name__ not in ("__main__", "__mp_main__"): + raise SystemExit( + "This file is intended to be executed as an executable program. You cannot use it as a module." + f"To execute this script, run ./{__file__} [FILE] ..." + ) + +ROOT_DIR = pathlib.Path(__file__).resolve().parents[3] + + +console = Console(color_system="standard", width=200) + +errors: List[str] = [] + +added = False + +if __name__ == '__main__': + for dir, sub_dirs, files in os.walk(str(ROOT_DIR / "tests")): + for sub_dir in sub_dirs: + dir_to_check = dir + os.sep + sub_dir + init_py_path = Path(dir_to_check) / "__init__.py" + if not init_py_path.exists() and "/test_logs/" not in str(init_py_path): + init_py_path.touch() + console.print(f"[yellow] Created {init_py_path}[/]") + added = True + sys.exit(1 if added else 0) diff --git a/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py b/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py index 6097ac22512b5..28d2bbc7c9e5b 100755 --- a/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py +++ b/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py @@ -59,7 +59,7 @@ def get_extras_from_setup() -> Set[str]: def get_extras_from_docs() -> Set[str]: """ - Returns a list of extras from docs. + Returns a list of extras from airflow.docs. 
""" docs_content = get_file_content(DOCS_FILE) extras_section_regex = re.compile( @@ -86,7 +86,7 @@ def get_preinstalled_providers_from_docs() -> List[str]: def get_deprecated_extras_from_docs() -> Dict[str, str]: """ - Returns dict of deprecated extras from docs (alias -> target extra) + Returns dict of deprecated extras from airflow.docs (alias -> target extra) """ deprecated_extras = {} docs_content = get_file_content(DOCS_FILE) diff --git a/scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py b/scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py new file mode 100755 index 0000000000000..72f8b4fe78cc0 --- /dev/null +++ b/scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import sys +from pathlib import Path + +from rich.console import Console + +if __name__ not in ("__main__", "__mp_main__"): + raise SystemExit( + "This file is intended to be executed as an executable program. You cannot use it as a module." + f"To run this script, run the ./{__file__} command [FILE] ..." + ) + + +console = Console(color_system="standard", width=200) + +AIRFLOW_SOURCES_ROOT = Path(__file__).parents[3].resolve() +DOCS_ROOT = AIRFLOW_SOURCES_ROOT / "docs" + +PREFIX = "apache-airflow-providers-" + + +errors = [] + + +def check_system_test_entry_hidden(provider_index: Path): + console.print(f"[bright_blue]Checking {provider_index}") + provider_folder = provider_index.parent.name + if not provider_folder.startswith(PREFIX): + console.print(f"[red]Bad provider index passed: {provider_index}") + errors.append(provider_index) + provider_path = provider_folder[len(PREFIX) :].replace("-", "/") + expected_text = f""" +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/{provider_path}/index> +""" + index_text = provider_index.read_text() + system_tests_path = AIRFLOW_SOURCES_ROOT / "tests" / "system" / "providers" / provider_path + if system_tests_path.exists(): + if expected_text not in index_text: + console.print(f"[red]The {provider_index} does not contain System Tests TOC.\n") + console.print(f"[yellow]Make sure to add those lines to {provider_index}:\n") + console.print(expected_text, markup=False) + errors.append(provider_index) + else: + console.print(f"[green]All ok. The {provider_index} contains hidden index.\n") + else: + console.print(f"[yellow]All ok. 
The {provider_index} does not contain system tests.\n") + + +if __name__ == '__main__': + for file in sys.argv[1:]: + check_system_test_entry_hidden(Path(file)) + sys.exit(0 if len(errors) == 0 else 1) diff --git a/scripts/in_container/run_docs_build.sh b/scripts/in_container/run_docs_build.sh index b96be023048c9..85e7c85dafae3 100755 --- a/scripts/in_container/run_docs_build.sh +++ b/scripts/in_container/run_docs_build.sh @@ -18,7 +18,8 @@ # shellcheck source=scripts/in_container/_in_container_script_init.sh . "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh" -sudo -E "${AIRFLOW_SOURCES}/docs/build_docs.py" "${@}" +cd "${AIRFLOW_SOURCES}" || exit 1 +python -m docs.build_docs "${@}" if [[ ( ${CI:="false"} == "true" || ${CI} == "True" ) && -d "${AIRFLOW_SOURCES}/docs/_build/docs/" ]]; then diff --git a/setup.cfg b/setup.cfg index 8d996337ebfb2..1512a6201c46e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -229,7 +229,7 @@ no_implicit_optional = False line_length=110 combine_as_imports = true default_section = THIRDPARTY -known_first_party=airflow,airflow_breeze,tests +known_first_party=airflow,airflow_breeze,tests,docs # Need to be consistent with the exclude config defined in pre-commit-config.yaml skip=build,.tox,venv profile = black diff --git a/tests/config_templates/__init__.py b/tests/config_templates/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/config_templates/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/core/__init__.py b/tests/core/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/core/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags/__init__.py b/tests/dags/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags/subdir1/__init__.py b/tests/dags/subdir1/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags/subdir1/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags/subdir2/__init__.py b/tests/dags/subdir2/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags/subdir2/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags/subdir2/subdir3/__init__.py b/tests/dags/subdir2/subdir3/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags/subdir2/subdir3/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags_corrupted/__init__.py b/tests/dags_corrupted/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags_corrupted/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags_with_system_exit/__init__.py b/tests/dags_with_system_exit/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags_with_system_exit/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/executors/kubernetes_executor_template_files/__init__.py b/tests/executors/kubernetes_executor_template_files/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/executors/kubernetes_executor_template_files/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index 29642e493bf16..6f50b1c759a1e 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -2665,6 +2665,7 @@ def test_list_py_file_paths(self): 'test_ignore_this.py', 'test_invalid_param.py', 'test_nested_dag.py', + '__init__.py', } for root, _, files in os.walk(TEST_DAG_FOLDER): for file_name in files: diff --git a/tests/secrets/__init__.py b/tests/secrets/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/secrets/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/README.md b/tests/system/README.md index 912bfc248ce0d..c1452bfe6e5df 100644 --- a/tests/system/README.md +++ b/tests/system/README.md @@ -67,7 +67,7 @@ example of command: ```commandline # pytest --system [provider_name] [path_to_test(s)] -pytest --system google tests/system/providers/google/bigquery/example_bigquery_queries.py +pytest --system google tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py ``` You can specify several `--system` flags if you want to execute tests for several providers: diff --git a/tests/system/providers/amazon/__init__.py b/tests/system/providers/amazon/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/amazon/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/amazon/aws/__init__.py b/tests/system/providers/amazon/aws/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/amazon/aws/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/apache/kylin/__init__.py b/tests/system/providers/apache/kylin/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/apache/kylin/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/cncf/__init__.py b/tests/system/providers/cncf/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/cncf/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/elasticsearch/__init__.py b/tests/system/providers/elasticsearch/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/elasticsearch/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/github/__init__.py b/tests/system/providers/github/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/github/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/README.md b/tests/system/providers/google/README.md index 35d07c8e1e72d..da8709cb901e1 100644 --- a/tests/system/providers/google/README.md +++ b/tests/system/providers/google/README.md @@ -23,7 +23,7 @@ All Google-related system tests are located inside this subdirectory of system tests which is `tests/system/providers/google/`. They are grouped in directories by the related service name, e.g. all BigQuery -tests are stored inside `tests/system/providers/google/bigquery/` directory. In each directory you will find test files +tests are stored inside `tests/system/providers/google/cloud/bigquery/` directory. In each directory you will find test files as self-contained DAGs (one DAG per file). Each test may require some additional resources which should be placed in `resources` directory found on the same level as tests. Each test file should start with prefix `example_*`. If there is anything more needed for the test to be executed, it should be documented in the docstrings. diff --git a/tests/system/providers/google/cloud/__init__.py b/tests/system/providers/google/cloud/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
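To make the layout described in the Google system tests README above more concrete, here is a minimal sketch of what a self-contained `example_*` test DAG could look like. It is illustrative only and is not part of this patch; the DAG id, dataset name, environment variable and the choice of BigQuery operators are assumptions, not the contents of the renamed example files.

```python
# Hypothetical example_* system test DAG (sketch only, not taken from the patch).
import os
from datetime import datetime

from airflow import models
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
    BigQueryDeleteDatasetOperator,
)

ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")  # assumed naming convention
DAG_ID = "example_bigquery_dataset_sketch"
DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}"

with models.DAG(
    DAG_ID,
    schedule_interval="@once",  # system tests are run once per invocation
    start_date=datetime(2021, 1, 1),
    catchup=False,
    tags=["example", "bigquery"],
) as dag:
    create_dataset = BigQueryCreateEmptyDatasetOperator(
        task_id="create_dataset", dataset_id=DATASET_NAME
    )
    delete_dataset = BigQueryDeleteDatasetOperator(
        task_id="delete_dataset", dataset_id=DATASET_NAME, delete_contents=True
    )
    # Keep the test self-contained: create the resource, then clean it up.
    create_dataset >> delete_dataset
```

Such a file would live under `tests/system/providers/google/cloud/bigquery/`, next to its `resources/` directory, following the structure introduced by this patch.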
diff --git a/tests/system/providers/google/cloud/azure/__init__.py b/tests/system/providers/google/cloud/azure/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/azure/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/cloud/bigquery/__init__.py b/tests/system/providers/google/cloud/bigquery/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/bigquery/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/system/providers/google/bigquery/example_bigquery_dataset.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_dataset.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_operations.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_operations.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_operations_location.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_operations_location.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_operations_location.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_operations_location.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_queries.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_queries.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_sensors.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_sensors.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_tables.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_tables.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_to_bigquery.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_to_bigquery.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_to_gcs.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_to_gcs.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py diff --git a/tests/system/providers/google/cloud/bigquery/resources/__init__.py b/tests/system/providers/google/cloud/bigquery/resources/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/bigquery/resources/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/bigquery/resources/example_bigquery_query.sql b/tests/system/providers/google/cloud/bigquery/resources/example_bigquery_query.sql similarity index 100% rename from tests/system/providers/google/bigquery/resources/example_bigquery_query.sql rename to tests/system/providers/google/cloud/bigquery/resources/example_bigquery_query.sql diff --git a/tests/system/providers/google/bigquery/resources/update_table_schema.json b/tests/system/providers/google/cloud/bigquery/resources/update_table_schema.json similarity index 100% rename from tests/system/providers/google/bigquery/resources/update_table_schema.json rename to tests/system/providers/google/cloud/bigquery/resources/update_table_schema.json diff --git a/tests/system/providers/google/bigquery/resources/us-states.csv b/tests/system/providers/google/cloud/bigquery/resources/us-states.csv similarity index 100% rename from tests/system/providers/google/bigquery/resources/us-states.csv rename to tests/system/providers/google/cloud/bigquery/resources/us-states.csv diff --git a/tests/system/providers/google/cloud/cloud_sql/__init__.py b/tests/system/providers/google/cloud/cloud_sql/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/cloud_sql/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/cloud/dataproc/__init__.py b/tests/system/providers/google/cloud/dataproc/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/dataproc/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/dataproc/example_dataproc_gke.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_gke.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py diff --git a/tests/system/providers/google/cloud/dataproc/resources/__init__.py b/tests/system/providers/google/cloud/dataproc/resources/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/dataproc/resources/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/cloud/datastore/__init__.py b/tests/system/providers/google/cloud/datastore/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/datastore/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/system/providers/google/datastore/example_datastore_commit.py b/tests/system/providers/google/cloud/datastore/example_datastore_commit.py similarity index 100% rename from tests/system/providers/google/datastore/example_datastore_commit.py rename to tests/system/providers/google/cloud/datastore/example_datastore_commit.py diff --git a/tests/system/providers/google/datastore/example_datastore_export_import.py b/tests/system/providers/google/cloud/datastore/example_datastore_export_import.py similarity index 100% rename from tests/system/providers/google/datastore/example_datastore_export_import.py rename to tests/system/providers/google/cloud/datastore/example_datastore_export_import.py diff --git a/tests/system/providers/google/datastore/example_datastore_query.py b/tests/system/providers/google/cloud/datastore/example_datastore_query.py similarity index 100% rename from tests/system/providers/google/datastore/example_datastore_query.py rename to tests/system/providers/google/cloud/datastore/example_datastore_query.py diff --git a/tests/system/providers/google/datastore/example_datastore_rollback.py b/tests/system/providers/google/cloud/datastore/example_datastore_rollback.py similarity index 100% rename from tests/system/providers/google/datastore/example_datastore_rollback.py rename to tests/system/providers/google/cloud/datastore/example_datastore_rollback.py diff --git a/tests/system/providers/google/cloud/gcs/__init__.py b/tests/system/providers/google/cloud/gcs/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/gcs/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/gcs/example_gcs_to_bigquery.py b/tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py similarity index 100% rename from tests/system/providers/google/gcs/example_gcs_to_bigquery.py rename to tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py diff --git a/tests/system/providers/google/cloud/gcs/resources/__init__.py b/tests/system/providers/google/cloud/gcs/resources/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/gcs/resources/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/cloud/kubernetes_engine/__init__.py b/tests/system/providers/google/cloud/kubernetes_engine/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/kubernetes_engine/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py b/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py similarity index 100% rename from tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py rename to tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py diff --git a/tests/system/providers/google/cloud/spanner/__init__.py b/tests/system/providers/google/cloud/spanner/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/spanner/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/cloud/speech_to_text/__init__.py b/tests/system/providers/google/cloud/speech_to_text/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/speech_to_text/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/cloud/stackdriver/__init__.py b/tests/system/providers/google/cloud/stackdriver/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/stackdriver/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/cloud/tasks/__init__.py b/tests/system/providers/google/cloud/tasks/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/tasks/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/cloud/text_to_speech/__init__.py b/tests/system/providers/google/cloud/text_to_speech/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/text_to_speech/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/cloud/workflows/__init__.py b/tests/system/providers/google/cloud/workflows/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/workflows/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/workplace/__init__.py b/tests/system/providers/google/workplace/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/workplace/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/presto/__init__.py b/tests/system/providers/presto/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/presto/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/test_utils/operators/__init__.py b/tests/test_utils/operators/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/test_utils/operators/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/test_utils/perf/__init__.py b/tests/test_utils/perf/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/test_utils/perf/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/test_utils/perf/dags/__init__.py b/tests/test_utils/perf/dags/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/test_utils/perf/dags/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/testconfig/__init__.py b/tests/testconfig/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/testconfig/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/testconfig/conf/__init__.py b/tests/testconfig/conf/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/testconfig/conf/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/triggers/__init__.py b/tests/triggers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/triggers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/www/test_logs/__init__.py b/tests/www/test_logs/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/www/test_logs/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/www/views/__init__.py b/tests/www/views/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/www/views/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
From a66af3b17deb5d19258342bc89ff88466a63477d Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Tue, 14 Jun 2022 22:32:49 +0200 Subject: [PATCH 012/118] Add CI-friendly progress output for tests (#24236)
This is the first step to run breeze tests in parallel in CI. This flag adds "limited progress" output when running tests, which means that the running tests will just print a few lines with percent progress and color status indication from the last few progress lines of Pytest output, but when it completes, the whole output is printed in a CI group - colored depending on status.
The final version (when we implement parallel test execution) should also defer writing the output until all tests are completed, but this should be a follow-up PR.
(cherry picked from commit 41fefa146ac64379447db503b7dba82d5121f06a) --- TESTING.rst | 15 ++ .../configuration_and_maintenance_commands.py | 6 +- .../commands/testing_commands.py | 163 +++++++++++++++++- .../src/airflow_breeze/utils/ci_group.py | 8 +- .../src/airflow_breeze/utils/console.py | 14 ++ images/breeze/output-commands-hash.txt | 2 +- images/breeze/output-docker-compose-tests.svg | 92 +++++----- images/breeze/output-tests.svg | 132 ++++++++------ 8 files changed, 319 insertions(+), 113 deletions(-)
diff --git a/TESTING.rst b/TESTING.rst index 12983726e1ebb..2271e73ecfd8c 100644 --- a/TESTING.rst +++ b/TESTING.rst @@ -182,6 +182,21 @@ You can also specify individual tests or a group of tests: breeze tests --db-reset tests/core/test_core.py::TestCore
+You can also limit the tests to execute to a specific group of tests: + +.. code-block:: bash + + breeze tests --test-type Core + + +You can also run tests in "limited progress" mode (useful in the future for running tests in CI). In this mode each +test just prints a "percentage" summary of the run as a single line and only dumps the full output of the test +after it completes. + +.. 
code-block:: bash + + breeze tests --test-type Core --limit-progress-output + Running Tests of a specified type from the Host ----------------------------------------------- diff --git a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py index d4ca3bcf466ca..116319a2efca3 100644 --- a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py @@ -157,9 +157,9 @@ def cleanup(verbose: bool, dry_run: bool, github_repository: str, all: bool, ans ) images = command_result.stdout.splitlines() if command_result and command_result.stdout else [] if images: - get_console().print("[light_blue]Removing images:[/]") + get_console().print("[info]Removing images:[/]") for image in images: - get_console().print(f"[light_blue] * {image}[/]") + get_console().print(f"[info] * {image}[/]") get_console().print() docker_rmi_command_to_execute = [ 'docker', @@ -173,7 +173,7 @@ def cleanup(verbose: bool, dry_run: bool, github_repository: str, all: bool, ans elif given_answer == Answer.QUIT: sys.exit(0) else: - get_console().print("[light_blue]No locally downloaded images to remove[/]\n") + get_console().print("[info]No locally downloaded images to remove[/]\n") get_console().print("Pruning docker images") given_answer = user_confirm("Are you sure with the removal?") if given_answer == Answer.YES: diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py index 84bfd29d0ea5c..ebe4701b73ceb 100644 --- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py @@ -14,10 +14,16 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
- +import errno import os +import re +import shutil +import subprocess import sys -from typing import Tuple +import tempfile +from threading import Event, Thread +from time import sleep +from typing import Dict, List, Tuple import click @@ -25,24 +31,29 @@ from airflow_breeze.global_constants import ALLOWED_TEST_TYPES from airflow_breeze.params.build_prod_params import BuildProdParams from airflow_breeze.params.shell_params import ShellParams +from airflow_breeze.utils.ci_group import ci_group from airflow_breeze.utils.common_options import ( + option_backend, option_db_reset, option_dry_run, option_github_repository, option_image_name, option_image_tag, option_integration, + option_mssql_version, + option_mysql_version, + option_postgres_version, option_python, option_verbose, ) -from airflow_breeze.utils.console import get_console +from airflow_breeze.utils.console import get_console, message_type_from_return_code from airflow_breeze.utils.custom_param_types import BetterChoice from airflow_breeze.utils.docker_command_utils import ( get_env_variables_for_docker_commands, perform_environment_checks, ) from airflow_breeze.utils.run_tests import run_docker_compose_tests -from airflow_breeze.utils.run_utils import run_command +from airflow_breeze.utils.run_utils import RunCommandResult, run_command TESTING_COMMANDS = { "name": "Testing", @@ -55,8 +66,8 @@ "name": "Docker-compose tests flag", "options": [ "--image-name", - "--python", "--image-tag", + "--python", ], } ], @@ -66,7 +77,13 @@ "options": [ "--integration", "--test-type", + "--limit-progress-output", "--db-reset", + "--backend", + "--python", + "--postgres-version", + "--mysql-version", + "--mssql-version", ], } ], @@ -112,6 +129,91 @@ def docker_compose_tests( sys.exit(return_code) +class MonitoringThread(Thread): + """Thread class with a stop() method. 
The thread itself has to check + regularly for the stopped() condition.""" + + def __init__(self, title: str, file_name: str): + super().__init__(target=self.peek_percent_at_last_lines_of_file, daemon=True) + self._stop_event = Event() + self.title = title + self.file_name = file_name + + def peek_percent_at_last_lines_of_file(self) -> None: + max_line_length = 400 + matcher = re.compile(r"^.*\[([^\]]*)\]$") + while not self.stopped(): + if os.path.exists(self.file_name): + try: + with open(self.file_name, 'rb') as temp_f: + temp_f.seek(-(max_line_length * 2), os.SEEK_END) + tail = temp_f.read().decode() + try: + two_last_lines = tail.splitlines()[-2:] + previous_no_ansi_line = escape_ansi(two_last_lines[0]) + m = matcher.match(previous_no_ansi_line) + if m: + get_console().print(f"[info]{self.title}:[/] {m.group(1).strip()}") + print(f"\r{two_last_lines[0]}\r") + print(f"\r{two_last_lines[1]}\r") + except IndexError: + pass + except OSError as e: + if e.errno == errno.EINVAL: + pass + else: + raise + sleep(5) + + def stop(self): + self._stop_event.set() + + def stopped(self): + return self._stop_event.is_set() + + +def escape_ansi(line): + ansi_escape = re.compile(r'(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]') + return ansi_escape.sub('', line) + + +def run_with_progress( + cmd: List[str], + env_variables: Dict[str, str], + test_type: str, + python: str, + backend: str, + version: str, + verbose: bool, + dry_run: bool, +) -> RunCommandResult: + title = f"Running tests: {test_type}, Python: {python}, Backend: {backend}:{version}" + try: + with tempfile.NamedTemporaryFile(mode='w+t', delete=False) as f: + get_console().print(f"[info]Starting test = {title}[/]") + thread = MonitoringThread(title=title, file_name=f.name) + thread.start() + try: + result = run_command( + cmd, + verbose=verbose, + dry_run=dry_run, + env=env_variables, + check=False, + stdout=f, + stderr=subprocess.STDOUT, + ) + finally: + thread.stop() + thread.join() + with ci_group(f"Result of {title}", message_type=message_type_from_return_code(result.returncode)): + with open(f.name) as f: + shutil.copyfileobj(f, sys.stdout) + finally: + os.unlink(f.name) + return result + + @main.command( name='tests', help="Run the specified unit test targets. 
Multiple targets may be specified separated by spaces.", @@ -122,10 +224,19 @@ def docker_compose_tests( ) @option_dry_run @option_verbose +@option_python +@option_backend +@option_postgres_version +@option_mysql_version +@option_mssql_version @option_integration +@click.option( + '--limit-progress-output', + help="Limit progress to percentage only and just show the summary when tests complete.", + is_flag=True, +) @click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED) @click.option( - "-tt", "--test-type", help="Type of test to run.", default="All", @@ -135,6 +246,12 @@ def docker_compose_tests( def tests( dry_run: bool, verbose: bool, + python: str, + backend: str, + postgres_version: str, + mysql_version: str, + mssql_version: str, + limit_progress_output: bool, integration: Tuple, extra_pytest_args: Tuple, test_type: str, @@ -149,11 +266,39 @@ def tests( os.environ["LIST_OF_INTEGRATION_TESTS_TO_RUN"] = ' '.join(list(integration)) if db_reset: os.environ["DB_RESET"] = "true" - - exec_shell_params = ShellParams(verbose=verbose, dry_run=dry_run) + exec_shell_params = ShellParams( + verbose=verbose, + dry_run=dry_run, + python=python, + backend=backend, + postgres_version=postgres_version, + mysql_version=mysql_version, + mssql_version=mssql_version, + ) env_variables = get_env_variables_for_docker_commands(exec_shell_params) perform_environment_checks(verbose=verbose) cmd = ['docker-compose', 'run', '--service-ports', '--rm', 'airflow'] cmd.extend(list(extra_pytest_args)) - result = run_command(cmd, verbose=verbose, dry_run=dry_run, env=env_variables, check=False) + version = ( + mssql_version + if backend == "mssql" + else mysql_version + if backend == "mysql" + else postgres_version + if backend == "postgres" + else "none" + ) + if limit_progress_output: + result = run_with_progress( + cmd=cmd, + env_variables=env_variables, + test_type=test_type, + python=python, + backend=backend, + version=version, + verbose=verbose, + dry_run=dry_run, + ) + else: + result = run_command(cmd, verbose=verbose, dry_run=dry_run, env=env_variables, check=False) sys.exit(result.returncode) diff --git a/dev/breeze/src/airflow_breeze/utils/ci_group.py b/dev/breeze/src/airflow_breeze/utils/ci_group.py index e65751a322a2e..96525b55253a8 100644 --- a/dev/breeze/src/airflow_breeze/utils/ci_group.py +++ b/dev/breeze/src/airflow_breeze/utils/ci_group.py @@ -18,11 +18,11 @@ import os from contextlib import contextmanager -from airflow_breeze.utils.console import get_console +from airflow_breeze.utils.console import MessageType, get_console @contextmanager -def ci_group(title: str, enabled: bool = True): +def ci_group(title: str, enabled: bool = True, message_type: MessageType = MessageType.INFO): """ If used in GitHub Action, creates an expandable group in the GitHub Action log. Otherwise, display simple text groups. 
@@ -34,9 +34,9 @@ def ci_group(title: str, enabled: bool = True): yield return if os.environ.get('GITHUB_ACTIONS', 'false') != "true": - get_console().print(f"[info]{title}[/]") + get_console().print(f"[{message_type.value}]{title}[/]") yield return - get_console().print(f"::group::: [info]{title}[/]") + get_console().print(f"::group::: [{message_type.value}]{title}[/]") yield get_console().print("::endgroup::") diff --git a/dev/breeze/src/airflow_breeze/utils/console.py b/dev/breeze/src/airflow_breeze/utils/console.py index 9a14d91eaed89..41ae65ef61158 100644 --- a/dev/breeze/src/airflow_breeze/utils/console.py +++ b/dev/breeze/src/airflow_breeze/utils/console.py @@ -19,6 +19,7 @@ to be only run in CI or real development terminal - in both cases we want to have colors on. """ import os +from enum import Enum from functools import lru_cache from rich.console import Console @@ -56,6 +57,19 @@ def get_theme() -> Theme: ) +class MessageType(Enum): + SUCCESS = "success" + INFO = "info" + WARNING = "warning" + ERROR = "error" + + +def message_type_from_return_code(return_code: int) -> MessageType: + if return_code == 0: + return MessageType.SUCCESS + return MessageType.ERROR + + @lru_cache(maxsize=None) def get_console() -> Console: return Console( diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index 96155ac41a84c..aedf286361b03 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -1 +1 @@ -2f24dd5bade0ac4b635ef6ada358590b +4715fa0a006457cc3f4f6447e3fecc95 diff --git a/images/breeze/output-docker-compose-tests.svg b/images/breeze/output-docker-compose-tests.svg index 4830ca1215289..75f5c1a31b102 100644 --- a/images/breeze/output-docker-compose-tests.svg +++ b/images/breeze/output-docker-compose-tests.svg @@ -19,109 +19,109 @@ font-weight: 700; } - .terminal-25948600-matrix { + .terminal-1448538552-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-25948600-title { + .terminal-1448538552-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-25948600-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-25948600-r2 { fill: #c5c8c6 } -.terminal-25948600-r3 { fill: #d0b344;font-weight: bold } -.terminal-25948600-r4 { fill: #868887 } -.terminal-25948600-r5 { fill: #68a0b3;font-weight: bold } -.terminal-25948600-r6 { fill: #98a84b;font-weight: bold } -.terminal-25948600-r7 { fill: #8d7b39 } + .terminal-1448538552-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-1448538552-r2 { fill: #c5c8c6 } +.terminal-1448538552-r3 { fill: #d0b344;font-weight: bold } +.terminal-1448538552-r4 { fill: #868887 } +.terminal-1448538552-r5 { fill: #68a0b3;font-weight: bold } +.terminal-1448538552-r6 { fill: #98a84b;font-weight: bold } +.terminal-1448538552-r7 { fill: #8d7b39 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: docker-compose-tests + Command: docker-compose-tests - + - - -Usage: breeze docker-compose-tests [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Run docker-compose tests. 
- -╭─ Docker-compose tests flag ──────────────────────────────────────────────────────────────────────────────────────────╮ ---image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---image-tag-tTag added to the default naming conventions of Airflow CI/PROD images.(TEXT) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze docker-compose-tests [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Run docker-compose tests. + +╭─ Docker-compose tests flag ──────────────────────────────────────────────────────────────────────────────────────────╮ +--image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) +--image-tag-tTag added to the default naming conventions of Airflow CI/PROD images.(TEXT) +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-tests.svg b/images/breeze/output-tests.svg index 7c02458342214..914f2c4587a0b 100644 --- a/images/breeze/output-tests.svg +++ b/images/breeze/output-tests.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + + + + + + + + + + + + + + + + + + + - Command: tests + Command: tests - + - - -Usage: breeze tests [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Run the specified unit test targets. Multiple targets may be specified separated by spaces. - -╭─ Basic flag for tests command ───────────────────────────────────────────────────────────────────────────────────────╮ ---integrationIntegration(s) to enable when running (can be more than one).                               -(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino | all) ---test-type-ttType of test to run.                                                                             -(All | Always | Core | Providers | API | CLI | Integration | Other | WWW | Postgres | MySQL |    -Helm | Quarantined)                                                                              ---db-reset-dReset DB when entering the container. 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze tests [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Run the specified unit test targets. Multiple targets may be specified separated by spaces. + +╭─ Basic flag for tests command ───────────────────────────────────────────────────────────────────────────────────────╮ +--integrationIntegration(s) to enable when running (can be more than one).                           +(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino |  +all)                                                                                    +--test-typeType of test to run.                                                                    +(All | Always | Core | Providers | API | CLI | Integration | Other | WWW | Postgres |   +MySQL | Helm | Quarantined)                                                             +--limit-progress-outputLimit progress to percentage only and just show the summary when tests complete. +--db-reset-dReset DB when entering the container. +--backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] +--mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] +--mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ From 6a05f043004482a7928712e2b4d6ef360a950018 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Wed, 15 Jun 2022 11:42:41 +0200 Subject: [PATCH 013/118] First attempt to have CI-controlled process of releasing PROD image (#24433) (cherry picked from commit b79790d8e5de18782a3179a8a55457eafa529a88) --- .github/workflows/release_dockerhub_image.yml | 125 ++++++++++++++++++ dev/MANUALLY_BUILDING_IMAGES.md | 96 ++++++++++++++ dev/README_RELEASE_AIRFLOW.md | 109 +++------------ .../commands/release_management_commands.py | 7 +- dev/images/release_prod_image.png | Bin 0 -> 112569 bytes 5 files changed, 245 insertions(+), 92 deletions(-) create mode 100644 .github/workflows/release_dockerhub_image.yml create mode 100644 dev/MANUALLY_BUILDING_IMAGES.md create mode 100644 dev/images/release_prod_image.png diff --git a/.github/workflows/release_dockerhub_image.yml b/.github/workflows/release_dockerhub_image.yml new file mode 100644 index 0000000000000..bd9fdaec1e6a7 --- /dev/null +++ b/.github/workflows/release_dockerhub_image.yml @@ -0,0 +1,125 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +--- +name: "Release PROD image" +on: # yamllint disable-line rule:truthy + workflow_dispatch: + inputs: + airflowVersion: + description: 'Airflow version' + required: true + skipLatest: + description: 'Skip Latest: Set to true if not latest.' 
+ default: '' + required: false +jobs: + build-info: + timeout-minutes: 10 + name: "Build Info" + runs-on: ${{ github.repository == 'apache/airflow' && 'self-hosted' || 'ubuntu-20.04' }} + outputs: + pythonVersions: ${{ steps.selective-checks.outputs.python-versions }} + allPythonVersions: ${{ steps.selective-checks.outputs.all-python-versions }} + defaultPythonVersion: ${{ steps.selective-checks.outputs.default-python-version }} + skipLatest: ${{ github.event.inputs.skipLatest == '' && ' ' || '--skip-latest' }} + limitPlatform: ${{ github.repository == 'apache/airflow' && ' ' || '--limit-platform linux/amd64' }} + env: + GITHUB_CONTEXT: ${{ toJson(github) }} + steps: + - name: Cleanup repo + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v2 + with: + persist-credentials: false + submodules: recursive + - name: Selective checks + id: selective-checks + run: ./scripts/ci/selective_ci_checks.sh + release-images: + timeout-minutes: 120 + name: "Release images" + runs-on: ${{ github.repository == 'apache/airflow' && 'self-hosted' || 'ubuntu-20.04' }} + needs: [build-info] + strategy: + fail-fast: false + matrix: + python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }} + env: + RUNS_ON: ${{ github.repository == 'apache/airflow' && 'self-hosted' || 'ubuntu-20.04' }} + if: contains(fromJSON('[ + "ashb", + "ephraimbuddy", + "jedcunningham", + "kaxil", + "potiuk", + ]'), github.event.sender.login) + steps: + - name: Cleanup repo + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v2 + with: + persist-credentials: false + - name: "Setup python" + uses: actions/setup-python@v2 + with: + python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + cache: 'pip' + cache-dependency-path: ./dev/breeze/setup* + - run: ./scripts/ci/install_breeze.sh + - name: "Free space" + run: breeze free-space + - name: Build CI image for PROD build ${{ needs.build-info.outputs.defaultPythonVersion }} + run: breeze build-image + env: + PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }} + - name: "Cleanup dist and context file" + run: rm -fv ./dist/* ./docker-context-files/* + - name: "Start ARM instance" + run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh + if: github.repository == 'apache/airflow' + - name: "Login to docker" + run: > + echo ${{ secrets.DOCKERHUB_TOKEN }} | + docker login --password-stdin --username ${{ secrets.DOCKERHUB_USER }} + - name: "Release regular images" + run: > + breeze release-prod-images + --dockerhub-repo ${{ github.repository }} + --airflow-version ${{ github.event.inputs.airflowVersion }} + ${{ needs.build-info.outputs.skipLatest }} + ${{ needs.build-info.outputs.limitPlatform }} + --limit-python ${{ matrix.python-version }} + - name: "Release slim images" + run: > + breeze release-prod-images + --dockerhub-repo ${{ github.repository }} + --airflow-version ${{ github.event.inputs.airflowVersion }} + ${{ needs.build-info.outputs.skipLatest }} + ${{ needs.build-info.outputs.limitPlatform }} + --limit-python ${{ matrix.python-version }} --slim-images + - name: "Docker logout" + run: docker logout + if: always() + - name: "Stop ARM instance" + run: ./scripts/ci/images/ci_stop_arm_instance.sh + if: always() && github.repository == 'apache/airflow' + - name: "Fix 
ownership" + run: breeze fix-ownership + if: always() diff --git a/dev/MANUALLY_BUILDING_IMAGES.md b/dev/MANUALLY_BUILDING_IMAGES.md new file mode 100644 index 0000000000000..99cf589d691d4 --- /dev/null +++ b/dev/MANUALLY_BUILDING_IMAGES.md @@ -0,0 +1,96 @@ + + + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Building docker images](#building-docker-images) +- [Setting environment with emulation](#setting-environment-with-emulation) +- [Setting up cache refreshing with hardware ARM/AMD support](#setting-up-cache-refreshing-with-hardware-armamd-support) + + + +## Building docker images + +In order to build images on local hardware, you need to have the buildx plugin installed to run the build. +Also, you need to have regctl installed from https://github.com/regclient/regclient in order to tag +the multi-platform images in DockerHub. The script to build images will refuse to work if +you do not have those two installed. + +You also need to have the right permissions to push the images, so you should run +`docker login` before and authenticate with your DockerHub token. + +## Setting environment with emulation + +According to the [official installation instructions](https://docs.docker.com/buildx/working-with-buildx/#build-multi-platform-images) +this can be achieved via: + +```shell +docker run --privileged --rm tonistiigi/binfmt --install all +``` + +More information can be found [here](https://docs.docker.com/engine/reference/commandline/buildx_create/). + +However, emulation is very slow - more than 10x slower than hardware-backed builds. + +## Setting up cache refreshing with hardware ARM/AMD support + +If you plan to build a number of images, it's probably better to set up a hardware remote builder +for your ARM or AMD builds (depending which platform you build images on - the "other" platform should be +remote). + +This can be achieved by settings build as described in +[this blog post](https://www.docker.com/blog/speed-up-building-with-docker-buildx-and-graviton2-ec2/) and +adding it to docker buildx `airflow_cache` builder. + +This usually can be done with those two commands: + +```bash +docker buildx create --name airflow_cache # your local builder +docker buildx create --name airflow_cache --append HOST:PORT # your remote builder +``` + +One of the ways to have HOST:PORT is to login to the remote machine via SSH and forward the port to +the docker engine running on the remote machine. + +When everything is fine you should see both local and remote builder configured and reporting status: + +```bash +docker buildx ls + + airflow_cache docker-container + airflow_cache0 unix:///var/run/docker.sock + airflow_cache1 tcp://127.0.0.1:2375 +``` + +Preparing regular images: + +```shell script +breeze release-prod-images --airflow-version "${VERSION}" +``` + +Preparing slim images: + +```shell script +breeze release-prod-images --airflow-version "${VERSION}" --slim-images +``` + +This will wipe Breeze cache and docker-context-files in order to make sure the build is "clean". It +also performs image verification after pushing the images. 
diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index 6dce6e487c64a..3d242ff9d4843 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -28,9 +28,6 @@ - [[\Optional\] Prepare new release branches and cache](#%5Coptional%5C-prepare-new-release-branches-and-cache) - [Prepare PyPI convenience "snapshot" packages](#prepare-pypi-convenience-snapshot-packages) - [Prepare production Docker Image](#prepare-production-docker-image) - - [Prerequisites](#prerequisites) - - [Setting environment with emulation](#setting-environment-with-emulation) - - [Setting up cache refreshing with hardware ARM/AMD support](#setting-up-cache-refreshing-with-hardware-armamd-support) - [Prepare issue for testing status of rc](#prepare-issue-for-testing-status-of-rc) - [Prepare Vote email on the Apache Airflow release candidate](#prepare-vote-email-on-the-apache-airflow-release-candidate) - [Verify the release candidate by PMCs](#verify-the-release-candidate-by-pmcs) @@ -493,76 +490,23 @@ is not supposed to be used by and advertised to the end-users who do not read th Production Docker images should be manually prepared and pushed by the release manager or another committer who has access to Airflow's DockerHub. Note that we started releasing a multi-platform build, so you need -to have an environment prepared to build multi-platform images. You can achieve it with either emulation -(very slow) or if you have two types of hardware (AMD64 and ARM64) you can configure Hardware builders. +to have an environment prepared to build multi-platform images. You can achieve it with: -## Prerequisites +* GitHub Actions Manual Job (easiest) +* Emulation (very slow) +* Hardware builders if you have both AMD64 and ARM64 hardware locally -You need to have buildx plugin installed to run the build. Also, you need to have regctl -installed from https://github.com/regclient/regclient in order to tag the multi-platform images in -DockerHub. The script to build images will refuse to work if you do not have those two installed. +Building the image is triggered by running the `Release PROD image` workflow via +[GitHub Actions](https://github.com/apache/airflow/actions). -You also need to have the right permissions to push the images, so you should run -`docker login` before and authenticate with your DockerHub token. +When you trigger it you need to pass: -## Setting environment with emulation +* Airflow Version +* Optional "true" in skip latest field if you do not want to retag the latest image -According to the [official installation instructions](https://docs.docker.com/buildx/working-with-buildx/#build-multi-platform-images) -this can be achieved via: - -```shell -docker run --privileged --rm tonistiigi/binfmt --install all -``` - -More information can be found [here](https://docs.docker.com/engine/reference/commandline/buildx_create/) - -However, emulation is very slow - more than 10x slower than hardware-backed builds. - -## Setting up cache refreshing with hardware ARM/AMD support - -If you plan to build a number of images, probably better solution is to set up a hardware remote builder -for your ARM or AMD builds (depending which platform you build images on - the "other" platform should be -remote. - -This can be achieved by settings build as described in -[this guideline](https://www.docker.com/blog/speed-up-building-with-docker-buildx-and-graviton2-ec2/) and -adding it to docker buildx `airflow_cache` builder. 
- -This usually can be done with those two commands: - -```bash -docker buildx create --name airflow_cache # your local builder -docker buildx create --name airflow_cache --append HOST:PORT # your remote builder -``` - -One of the ways to have HOST:PORT is to login to the remote machine via SSH and forward the port to -the docker engine running on the remote machine. - -When everything is fine you should see both local and remote builder configured and reporting status: - -```bash -docker buildx ls - - airflow_cache docker-container - airflow_cache0 unix:///var/run/docker.sock - airflow_cache1 tcp://127.0.0.1:2375 -``` - -Preparing regular images: - -```shell script -breeze release-prod-images --airflow-version "${VERSION}" -``` - -Preparing slim images: - -```shell script -breeze release-prod-images --airflow-version "${VERSION}" --slim-images -``` - -This will wipe Breeze cache and docker-context-files in order to make sure the build is "clean". It -also performs image verification after pushing the images. +![Release prod image](images/release_prod_image.png) +The manual building is described in [MANUALLY_BUILDING_IMAGES.md](MANUALLY_BUILDING_IMAGES.md). ## Prepare issue for testing status of rc @@ -1013,33 +957,22 @@ At this point we release an official package: ## Manually prepare production Docker Image -Note that this scripts prepares multi-platform image, so you need to fulfill prerequisites as -described above in the preparation of RC images. +Building the image is triggered by running the `Release PROD image` workflow via +[GitHub Actions](https://github.com/apache/airflow/actions). + +When you trigger it you need to pass: + +* Airflow Version +* Optional "true" in skip latest field if you do not want to retag the latest image + +![Release prod image](images/release_prod_image.png) Note that by default the `latest` images tagged are aliased to the just released image which is the usual way we release. For example when you are releasing 2.3.N image and 2.3 is our latest branch the new image is marked as "latest". In case we are releasing (which almost never happens so far) a critical bugfix release in one of -the older branches, you should add the `--skip-latest` flag. - -Preparing regular images: - -```shell script -breeze release-prod-images --airflow-version "${VERSION}" -``` - -Preparing slim images: - -```shell script -breeze release-prod-images --airflow-version "${VERSION}" --slim-images -``` - -Preparing a release that is not in the latest branch: - -```shell script -breeze release-prod-images --airflow-version "${VERSION}" --slim-images --skip-latest -``` +the older branches, you should set the "skip" field to true. 
## Publish documentation diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index ce80e6a1932a8..839b966faebb5 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -650,12 +650,11 @@ def release_prod_images( ["docker", 'buildx', 'inspect', 'airflow_cache'], check=False, dry_run=dry_run, verbose=verbose ) if result_inspect_builder.returncode != 0: - get_console().print("[error]Regctl must be installed and on PATH to release the images[/]") + get_console().print("[error]Airflow Cache builder must be configured to release the images[/]") get_console().print() get_console().print( - "See https://github.com/apache/airflow/blob/main/dev/README_RELEASE_AIRFLOW.md" - "#setting-up-cache-refreshing-with-hardware-armamd-support for " - "instructions on setting it up." + "See https://github.com/apache/airflow/blob/main/dev/MANUALLY_BUILDING_IMAGES.md" + " for instructions on setting it up." ) sys.exit(1) result_regctl = run_command(["regctl", 'version'], check=False, dry_run=dry_run, verbose=verbose)
diff --git a/dev/images/release_prod_image.png b/dev/images/release_prod_image.png new file mode 100644 index 0000000000000000000000000000000000000000..78f941a43b4c9ac23929eda5e742eab83279fbd0 GIT binary patch literal 112569
[base85-encoded binary data for dev/images/release_prod_image.png omitted]
zd+Ri7(1(mDY9*GJ;ve~TQuX++y6?APL*tqU!HNT~d;#`6@NbNY2jOh_m!u1WV0Nvj z0`0kJd~$eT^dNB3{Vsx~(wa_D&Bm>xNvI%seEQ2mM^PW27O02Q1v}rafbNQYZk&$H zzzM`nQ`tC|>w(@gz%hEX`NqvIK4EW}szIevW|mp@dV4?#%Z#cxzqi7amYQ2bsc1e@ zfO|l>oQY)SE{V$_U(Sa&CSgr<&k{pXrw8{8{h2<12Xh zFCZAelW45q3C33-W%yP5;!wF?;@j54xx%Fb?w1Kf99-4=gSh^7*?Leb#>U_unu;Pl z@&;WvyU&QP^s!uTGFz79moXyYE#5o04Zu4`KL5r>DxBhR&*6`|rb(84|zT$!BQ-$AF+F=IF z65CdX3NO3Nw9XDhl)jSUJD9goU+9gM6q>JheOt|5NZ#FfvS~(*S_UJKmlDT;9SHUD z!>R?cUM7Kl66347|7k-g_Mr>1moP{&Hp_>ecfzjfMocVk zV3+@w@TS0X1Q;!_TC6s(d#sH=w&n%o8o{XT^pU7hWWCx%;`PM}YcZr*Fqtexlc}(+$hW zu8`C1}x)Q&!gcbhvZ!=uLE(WtU)2_%0@Dg|0h)oHNkAt)eR!JAta{5k9tVTuDGB38!r@>$t_HF~KS`omVYk*VC$M+eKCEmC#5h7`2?T zhmFKhgPQ4+{o_i%T5BkTZkq5aqtUfSQZ#3~Nbn%V-mXOHNr&8K^|m6dQ=PwBTt}8Y zXn@{aFZEzsL?+V18<`E#9>Nn%GMIcDP-LX5III@A9++xA4o=ZBK+BDZ^v~O^$vYp& z(x2qMwEF2^;18dmb6y$z!0R$MIW{7mI6~;aX8(s{wnCNl9KRd)^@GT?qz-^~H=9p~ zC%|`@e^Fhns1hhha-9)pF1dVFLcTmp^UT!8OZIc2f}6F*_*x#n#RLJl%%aM8`nOebc`0Jd zh(QH(c*6LZ<*CBU($V6WJdE=lpv&QKwpU$YOqris>DGj)e+(bJ>&P2>-}1Zd+S;?=$tpj#7h~{_ffuQ#N;n2SeWi2m4~&(Z+)WO4e#a#R zin3P;gSOwi8&21kD9aV&PcO@+^vhO~UD3(xDsdq9cJdT*`IoK%aP_^2hv3V9*Z?$+ zbHHa3?}DE}Co~2bgS3x}fMi5KC=?g4U96XF>wB=_qAtmLxitIe_9&*1-vfCzW;V2B|=G&z4_V{Yv!cM5A z4k7M^A<`3HAm3v1l~cndnZiqE=Y{|$&nF;R+#X43o%&xVF?VQpP>O+2+z3|*VQDFm!^KDK=&$nyww<@*7{ zht#PX9Z%55!MYzsSF68Y`%9HXmAO@$4CDiC)Vk#0_L)2y$}I}e3oCD5KCtw&ik;&@ zLrLALtdLQyBPKlU^FfJxtr6;}P)q6Cl7ti}0!*;=uXhRQA6;l{0^6e|k{r@IC~&_x zP_qr41Kan*QxlLeCfP*`&@QTP2V=ZuTw^kH>9!sRiy3r)$ziSJDCN^ftWcWHS-E!g zH>PQ>T<2-zQiq3OuY<%x#ic?d#ihmjK2MMIUO4bw$06q%mpy)2v~OQNu1XVlWV76I zD&2}T?^;=Jn|Aj3=h@-g6_t3qN-Q4GE`Cei;B?CxEs$KV^KO@{K?@^tD*RrAisMw< zbYcZ%Go8igEWwOH<0S}7Z@4d+dWD~zY{S{0vt!mrn?=qu8h<5^@~F7W-G=R>^wqM; zc&^XEwZ#?f+`1sNOVHm?hmrhU0vm18J`;dS;QIWnO9wLnv}ReL^KS&t4$!A|B!BVV zvVpM;+hIZo0uOa`Y&@Niku1jhM1&bN{!4TCk<^C*e5j*@dBxrm1oh8+%e(3_mexH%??&j;GLKm&H`3PdyY?wm3s;#;9? zH|05Jj$gY?>*Y5S2UO#W@@!6!-!an3j`&P}BjqVDSAi2hqwAHpGBuV-rH?IG^tYP| zngX29c=}v!$T_yPV7G^y#TWqJ5vV4l1t{J6W5g?nC$daW#uGQD|JCdEr$8-p_4wmI z+Wl?~Xt-6%!y8keJ!`jatCY1IYk1hJzd>L-hG6@a$68fvmhJ|6>wWUPpkh7B@3A4G z7-4BEn(5liIzP(X7u~v(Ugr0TxTZrDI9J~JA-pE1d9L1zekNhw@AxH0oQ|h{cgGRE|7XsCJ$25`S5A353C8A~?m)4o)8(HjUEk^XG8SP3&AJFsPUXnZHYeUEx zQVDf@f)K@PQ+}8yh+_V2cmo@+j}VO`nl32vP}x986xOW_7jplXP5Y>e)7m`X}Wn5gSm&nPZy-m=7b#lGF)8JU^5O{$+FfP30y zt^78D`Mod+!#Z}!ib!3Y*&eNbjnCd=EeRIt9uBU)pcH^T&tsusud7ZmYu)3I&9JzA zOK+|?z@lV((iti7k-T?E1$KnrL9WPOMm93MmlEW+)0^fenK}xazVdX$Cb_(HD z$hhq<)iOK2996=>XmS;&zIJ*7oR`~y$E$fE_64)QyH*dn42rM~j6f{N*NMxe$ge{Y zV-800V7;nuiEJ~v3s*Hk$4bpi{RI7bDorLzPzu`=uA}w?@PY<$79ILsTj&v z6D*jnDz0CXFb(C;ZhX9y^T&;TFAjAy@{f^~@s9KiHFnxjBC-csK>d43|FvyE>ggyM zo6-7d17JWep>ojuShmPINVIvSB-IrM4>1+kbW=+p-%y<9yUM61@=^0#{^d5jIDpR#8U=m-S`tTWb2WG_5APXp z#`mrZ?7sgq3CAStR6s|q#QS3aK=hNc9ExIX!`sxRtLv927|60})~MR~$;%cW-b+$> z6p4Ku7<7DeEGLL1He48lqu%+hv?c7Yf;_uRPOk!=M? 
zZ@X^TT@3>GvRW|JqRv%b%X+PSDI;?o$QUccPS%o}c9uj})2R>#SRwusKu(!cucUle zbHuWa8jVLkp{gZ5${G(|9?n}p0(|ZKDtr6T-LkfeO0aneRo68)YpjaPgnK{6ABXVy zNyIv)lWXf4PO8#2AEjI5@mZ$`uR)2bRiv8;bd6t?fbwHP5~P9_a929?0__ zx1-q(1Dr*2=a0oEKXh0~Pxh~(Giz;cd~B=W`wdJ@M2nwHo#Wi(lsaTweV(a_ys$Fr z_uE`u90pTp9&mPL+N!h7X*A+3BGz@tLyE~}nXPdiM{#0yY?$pJ9Mygr6{4+Pm#W$$ ztB9rVe_*~jdxn@w;NHIEhC-l{9MBuSA_HlSNRi{ZpM7=};|R9V%9G-3Lksz@#Wb&8 z(Ko)fw*=6^qrDw3)pi@4TDKZgP@}0D?zJ5EqSNwEl)Y{rZs-KnfLf@UZSr{N!(0E` z9}(s7e@z z%q4ll?unfJ+Z86pk8U53bN9Ee4B1X9VxwK~ ze=0b<;s@ZOu+_Mbd~uHaMgQ5p>(Y#1#ibwUz?5B$UO^`Ow+SuJKg?Kk28qv&7lv*C z+Twrof3M}4pqn)Co-ey;I1tk;D(w-N$43y0NPY7PZz^U`Ec29g+~ZffhSaB?UYSE7 zU)e1l7A1TW^EQ^lYVpaybJZJc^{OSH5vPDhLkD@RS!zeBo1+WG-NARgV$n) z%EbdSbJ%`r3zh=Sv{*HRaK*-&P++wHn8DQGa1O^07noX)gTaMWOde#$85_kC4t~2c zq_%@9P&98cYRAt+X!6MC^YvYw2;Td_`aRNANB zQ6d?f&&C@p6!a$Ur$cFaiA&n~<~DHp6!)}R0htpMd8rHwjHM1x+Gz{Qhv>qS>Cr!l z*v(_0#qh-*W6kxE=STOx%z@Ulg^S?wotfU~8sk4*&E)2f9GFDfeyMg&F_+O%1$=El zb(L+23NOa8T3r33eW(7kjEa<&or}EPk&OTDD-+dwV;UPeu9|q2nCb~i&Db4Cq^6#U zM9hU_Y)*Uc8asD3agjm($wsG51cn8>>sI?SIA=LZT%7YxgULlDzVsMF8vW)b2iC(R zn~a#d0fOPbh?KA9z;9<44-G+$JiHE$)m#`zo-OvHxo>x9h3>t^A1yv<@Gf_@d=wQYC-MRUxoRX-RlW{&H zQWO2x8l~r>2rVAool1QZaNu90Va{9&Bq?=%PsGV;U@mo2_Y7GV^KBxg{G)fME0iBP zFt{>0twciU@4|rzTOd6k+%t@1-PWbIg$Y#o%XaJ0aH<2}hdVJgc(T#PwP=5}s;hY6 z`GUCP9f}T;C(h%Cnoq+)d~$+{vVAW?eSUb(IxjD)qN1{JPuc2|Dk(kRgCISvIvuP0 z4Bg)RIeFwe|`& zJUp-vAI!*x8BhCQjYv!hD9AV$|I7>(zyc@+<^LGe2eB>396}#z>2k(z*XntZcte*^ zGacR`E%5z^4`kH?6moN}JwDoyE+BjV^r?lfObG1(;*0#x|NS_}a=*>7b)YxJ zK_xyC$;Wc8_v4vQkR1QvSw2i!YlfMiV6lejaIJL|s6Eb=J<)m^hfnUx^(j>7ZUK-6 z`t$#mMkX7tMYG^9anG+l1`^@R;R2k141E4mR0m{W1OkfD>=fBYe!vr-RQPpSbrL24 zTiEh{&P$}p5svDK?oHrx)BFkvi3BKIKg!U!^r<7oyX9LCr|qWZdOLhXtOB#qwoqQO3uHmi9x#xZXk93@(OGK zOOaFYYuo%vh5B!p=$X^~Pua`F_SX*(zsBSLwWKp7px_B=xPlX$d%T~i&ic_98_W2% zV9^YQ=B{|Wnro+a@t+?qTKQOfa|YO3;TVMc-(lE}n0IB7K1zXFsHf}Z&zjzi)X^@+ zy^0Nsry4S_#-fvEw2}3^`YE}1gwM-anQds z99?r6b()AAghptwe-{ue0B`{G3)DO2hj8BoAr933CEy8I+?$l>`n_cZQN1FKDq23i z53InHgM>~0aA}@LH$Cbh-9wbXlZ=Ayy3o4@G@>ITqTVcQFC7sUb@vUFE6ZW%O3Pmq z{FX|Hj3~{uJT4rD`1b^%#qLg(F>r8DxSNOn?9nYQFfEP7Zc%(}aL_YN!hP*?Au;A= zXQ{yHus!X!f0I8dyrwPln!`{}6o|VFEvKI2JO~1{*$2V3>LGj1Dt-C_u0<=G! zp|A8ZbWS=lgZ8Aax_8Hj$wH^L=75>YKticU)?l#KJGOz_<1oz$qw(NfGX?uOmJ`Y7 zO8>Gsm`6&20k>OVu4<_SFhlQ~24s*M4ybY0Tt6f;xWt zmr}amM0hWR%j;ju!Togt>&XzsvVwamw>BAzI?`4ewc>{@N%XSCkcpp(I!B9@Y&2xB z3$JKRjg^%l2vzzNPRn5>J%pZ)Ug|41H$GEg{$koF9Hj45veh{g)Cg%q1)W$LIM38fTfG5jZnCF;y6>SJVY_ zEzs|n6GA`6pdW>7@44Al??hQ|m!c^tLGmqb&-`I-Pv%c~FP5%Cl0bxIG{M2r9~=bS z?lAta2&hV$lupd1rmA17C7O$Oi$RY}a1$C~_dTz`>v?oj3y`42Lk!Np%XX3@ z)+};7+^bFM4_xX7N>pS=E3CU-vqC*~?F+hq|6*kS&bIRiPYtd( zjkA)S8+6+J`3^B#9##!-1vl^qzKstj+VAj4iJ5!ckHq#>HEh<&-aptY;Dp+~F1;0v z`w_duN`QBybbT)1W>!eeC&2ZB=Y zb1`&e259r;xRncBhG#1r=J}x)EB&;mRIg=!Xe2ad3T~b7YATgA)7FNZ#ZdH0Gv;{I zNix|mDCpCe_ec56ymqQ9OfR=g=1L;}ES1ViL1x#pDX)W>_ff&PZkV9cJdY7-Z|V5a z@?z&;;)id{PRmI7e)OJN^HNE~*MqfJ_ths$o4rK(!Ur4$uwni48G$TiM`9QxY5J`GJn z!s~oOg?nX5Ny1X<$sGnXBE?=jSr^u*8~)s`HdPySU8r%-^#1a&xk%#Jw4AGE7nbsg z{F9d|oUaiDp>YfS{3{+co4$(I`HtuP{bvn#hN-tj;g4TGrp?jEX;{=Ur(GwILF=UE zA){-W2tuu5GuCjos<7R@w7JdzrM&UwI!u&sz(QW3kCIvuUvuRy)rGuvx|BNH2Fh|W z5jYujuE*3|AQDPC7egm1C;S0L?EPMj7gn;_7LGT$5FUxVPN-WT|GLv#NN$nmK+*;_ z%pR})qXy2A;QoZ(t9T~;mK(ca)Rio8{7DQUXMBCvk)C`*WRfWFNeX8LC-|qJ2rTpz z6>?r(p5YUk;0aQ^ft=SPQA!jIzt0 z;G4&FY&*F4jjFDw1CX~V@X{kM(A!zx7C}WkQkMuWk1Lpb%knrrEqxKXCe>#Aw^)?! 
zsp1@4G3|a+NL*L`iMspDr9E)+5$4(&7M9I+)Js_^qN1JVUdr1@+C_@Jff(A6UtDXT ztd>XiL}HywmUj1Tr~8)^rU}g7@iKh%TkhX15=WUS|6G6;3!vokak~wxOYRXY9`M`A zTojd>lnlEO>h)uW_9_HCpRbl;Tt`OZWthECb>$!W<2&3S{dxccbyTYE9B>TElMa9|N8lIuO`6k zr0sx1ko9IxEVVRbcXmCym8)(FXt1Sge5o;fF?Px$C+W6)*w61`wMlQUIkkH=;r2%a z9VEH@XX2+VQ3#={Yk=9xWgO}?6He$yRum-U=NXH>`q@UDoEgHMxld<7o5NB~(Ojz* zl4bf3Q^eFa@>3f^&~@d0MB1;G=Z4gMTf2Yy^|$Juo7h$Kmr18fwR)?84dPCLdiPvwcR9~>73#XgA=32 z-xpWr{<(LRG;_~qbWkP$`LFJy4|6jc<=}$pDnJEm_J=*}W&H|xT%28$C|bLSq@~QW z&_aT;NfU18gnrg473$0^3JI8Obyy(b@f)k?mN1$L`_%qzI zzBlZvCKQa>t!??8ksrSJN(ah1NqKITUsZlGu@PdhW1>2nvuePSum zsjLS-TU%E7a(6WfYLpQ{Tq{~r8-xApAMbU}n*=|R4iW$y565D*G=B?2!F(VA;-^YcJ z-x$=V75Guiut6W|^+(gR zZMZBM%W`s(ty=&j=1n{J?SP>1dG=>D{A2O@bBc`C3#kpQqCOmX!H@>T`?>@N$Inq4 zwn|qR&a$!j*@mf`>YuooQZi%LlLR;XU`CFaEMRk}nNbM`8++7#)sJILky%Auh^&~Q zi=Bd7MH}`GbKV4#N@WE<0}Bapedj{mHNF@GxzxClds*O!8k~D%9aJi5f;%-ZU7M7Q zn|jMHO1}FW=j~cjL zTHYz?+J5!!GG$ZfG!Z?<8?p>wrz~p_Ia8KP7r*YpbTN=MaBM{t`ShxGv#w8lEur}R zuRb116iyzG_=a(xfD$|BKZZ?P-Oq`|dCMqf-)~*ML%(v?C>mm{QL#-qeKc+-H>BU< z$+~-f^U*ILI+ka;uP-O$l8U%F{ZaFh0BBw?vm$W6BG}lPr@?Rn}Uzn59gm|r};EyET*-(fgtJuHUP9_=xa2A_RHuZgaZxR zj3!fq=2B>Kk7Lfw4cU3?{N2QugndwnR_>eXJL)i zr?*&%J`zKgt~y{?=BP{_lin)UFvFyCGcOp_I6JkddmU`VT32t1Ha+23=gZn~yr)w6 zl|qBq$Mxy@7^cw^VN=U^uUBL6r9g z)5!5CZ?@X0L1AIZk@`11d+$td*!xh7sv%2nni4zuQk?SiPr1;cA$_`FOa>B4k>CCw zakHq(wME8qoKbWX_)1XGO^ewP2zsYImEtKu_n~+Wg%7sF^VLP3B4ND(_6ihYli$xOGRj z9CyEcZunxf^vz5#+>!~u)QFQ=T(cr4_tS5zW;{kBh$|tJJ`>)4#o5)jN#2=*9q&?r z8YR53?Nl!Y?}t-oOnkg5Ph!tMz4`6G>1z~sY1q*`zU{y}(2d1UDfZ5!P|GWrgn)0} z>f52a)iV=bJeEyZ#m(T+^bz*D)08>my?TfV?CgNhBI)Gdnb^u~%pgfVWL7A6a)^XqbS+eh@DJCawz+QCeT_D;jfSiuiq@r^*ob z4tBr1I<(J!xwHj@21i&Rv7j<k`*JTyOzCnw%)5-M4^-%l!y^Ea>_ZN|<^UA_TDn(s7c*%M|izC_zRIPUZ%c>{XV z`R*V7-RVQIpPw+Be5feK1TThxnZjcC*c1xSHJsGqGYzc+^AEP(G@dwc~Lm zEFw|)Q{^gbgCC+6vtQ_W+N(ZPY|}!^!;$0{z>2z-p^b+3Gq0@h$8o*E^4dE2F3z&# zrYFCsPYUG#k38#6bwGb=!k4@}A86XM4;S)ZLUX)acrKi8L)TUAgkK5xzmeZ$aT#8P z^;~ggs1;`XSSEHD4I!4*&Ak{+&b1aU_xfxF5^`ZvV$nP62&3Yqwq?P#Dxtk4^PW3f z;&g6W#{?Di;^a1;q_CSSYEV3=f)JnzbD7#Lh*N^esj++F+hO@Ar(P41U5>_7>M6y_ zqv)r9ZfDEZCrL*;j2vxJz7%SASE;~}iNWOK0{oYlyX`TgbHz?Qqjkb?ban2VBQwEh4anvffa!TD>7SR{~QVGmzvMoSeul%YkQRXKW=LE@#^!0xz3TN)wmpW1yLY zz{DtDPJe+=L_JQM{Y6Pl1i6{@5I=w4^VP;l#n@Q~CGn^D5Q`I+!C9TfIcG+DrK-Ns z2klG^+1e@4pMJ(p|GP?qAZ?bil_+Hs#Ti=zS!#zz7zxWi`0#^%_2Gg1w_BYuy)yM} zlGtX<>{pA2lO&ufvnIFlk!R`{V%~S9Q6|eY4rt}$SY*^(X~ArUOIS`L*U)Xa(zY+H zMBxLrg!to=Cb~smevIt|A~6w(YSQVM@l~vfh)=!i>yG=;npAgN&=OrWn!|$a?$#;_ z4|Scl1O1!?*;B0naqJ5yrnQ8m{W$UyBj#~}@=*dly1F@fs_%Be(OH7=R5a6W!7ttT z_JfCt%T|!(1$>^TvGnd|4_Ou|T;`!PXb8Ba?CzXc7VV-KHotJcYsmiW+L?9!Q}>ER zzDS5fG2W<1_VkG4>H6U%G#p>xixPd5msPBeWa0*TiV&-M!zaTx)GgBxfd(L$I-n4S zyEE#tO6e;887|DBJe~qvlbgjDA@**0MvG~t_NL@MgZ-i~{5p5LD>idpy10b%6Dk{< z=8)ynNK}DFbC~iG4<8h1bsX;Akq+n2%BF^edO$_QJr6uvI$%@Vtuk+tku$=Tt=2@o?A|7gL zc{ZO5$n(mGEG%4@U@z4?S-xOab+0LP*K}K*oJSSOr}Vh7_LA0As*>9)9f3hC=)YJ{ z^OzryH91zRRgyNAb#f)mXo4T3OK=|NC{)(80{UZrYc8b|OyZdW1~-cSIp} zYc!i}DtkUCM>82I53bTB%xnz2R;DO>C3|W5O zYIv?HmNh;%I&5B9&W=}+K&yk*F9x5nUFH&lHzYf(899nuhL%9s+Y}si^f9Yut%TtQ z$vVm0nnkJn1FnTzF04DQK}>k}s;V@Z$#k|%+?zks#+o2(mNQ4zH|lE_p_`Z8sj-Y= zoSOBATq5wZ82gTatzQz%03hrB5JA+5!Si$w>fp;5Q?uQTEV`78-u?R{rQwwGDbz}< zditG~Vbn(>Z34<_r#Ld7b}j70$gewl`Hj~m}5&z1Wo`Qvwh z1K2#U>H9BJeE1EjEWeoi@`aLjFjKKPb|X|D-VAL%ci5r)Rz8Q5k4^0=W2noEUHEyo`3j}P-RT-Q5! 
zg^gAY5e@VS`eMBIMveqT;OCyt9uFO~EKvbz;BUGpMM)k725j)#mb54R_D=O$1UR7U z+9HEIMjY9um?(XnFCwhHuT|qC8sIyxMpHpHMr;ror?{a?;|$$~$s;O#-$ru%ogXW#Q^vn_ASNvtXK`khQMj z<+ShVSmuc}jB2D3ZU*8(owrgfM)R4A?>qY5t0}F&77%?cQG`8@T+o)shX!ClP3jMO zN(H{#ntG@9=WMHL*|vq@vh1utK>U_ZB;JCpL3*k&)!BMwfZ|+XoDgc>czXM7{skc+ zW4!qFX~{J#_(f;*H33MhjUuv#h;H2c-m=$($F|&Q;h3g{>h5TKht7Gb*3G)94Snn;Z3rOA5Rax>t5<}JHy836)e*pExn->pz z&ICq%iK<*Ln`;W4%!?cje`73Y6|0g&vz{hCzP^f+Wii+$%H6|BnW?RLZl{1@K9qnL*r9IO$WNkd74*k1W$Q{6A@GL^>>gaapyC~QevVQXJq$KJHVwTUSIN#OBf z#^t8_7y%sWwoXekq~8~$$KF;=F5ytF@)}d$>B~nwnVK(G@-tH|wWLFr7`&-8M6{WM zE7Xlzle?#}Gq;4Gj+KBQBfHKmG5_`$o}gzi6CFk}hTbgj`mOFXpC(Oa;%Jb-5>h{avrk`hN7$q?l8$gKR;4vV4UW$g{7U+X)@nA@KQI6YG`wC4-w35O0Y$uo7V1UqDT zu)L*ulBdBZudWmq#rQ=u*4BE$it>5VIo&BCg2h=9HRWl#moS<2$Q?=6o#kq-APDFB zKCps{AOX_f(sDB|A96sizd4}t;+*Ckx9hGwRIF%k>_cT^4kZgUeV560hVfCXLi<9xS$no~xp!C^;}Zt$`PA%S=*ji;<+e~I<%?$5T|q&;hBaYJ0jB-A4PI$@x;e9-kt=ICsdYGZ2Y%kx5r0XfNU2@@7`oj-3Jp36 zlqln1qc?-1~cS`rQP}|>#)Gkti&vvhFF+dKsDn}E9Xq@hj zHdEmng&zCHl0iol8hcC~1NO&o;_<^Lh`?u3wQ9)Aad~!k)Sfgo_M@?!aQ@HTMiU)Y z@}lq+zKCy>QFEe00PEp^0Z7SZdbcWYZ8UqjsiL+hzP#FI##Slb?+gWSUTfqkiY$Xe zHJqsEBGP>6`6&5fv)!u@QMg|1&dT)ONUW+Jewvk> zyi-S7+WFJlQngoqNF$rcIn3FK#ZxJfN{ zA_Jseu;_XvYGm@eyN9luK9^R&aSAc3QA+O-b^u3nft$NIo5+Rl4_SV6bS}6MK%w`SCUoy-i z)DyR^)~CKYw;8bEqFhS^`gnYZ0))QYE(U+3<3&4j!cy^!pyvXJ`+6}lv^{Z<9wP2H zK*SyMK1188i`JMrFFw8q9QB*QB1%@;tmK`wZ8GSsVY{Nx`L&iFv`C<{!xuMXm3P z_eRPWW+xfX%WMK|#rEUCEk3{&{T!12dOJCh*zeRVo?a9Nw!U>wbttc{{F)aA{X3<8Qj5FYhjP zeOxUjcT-DG{-<~8iYxva0LM3Q!AXtA#2b4|y9^=GN-$K_s-jQS!u0$h>a07{)ioi0 zzNzbMF9p^%&FK(V2af?xnt`9uyM zUPQj&HWU<9tL{+^;ax_49W>aXH>5 zW+ojX`c$O7pC8;$S1*P`4vz4iiVEHrUowXb?xn)kgq15;G;$Aq++hg+M8P-akT1W%5Mn{++;!&lir zgzJd@fGRzHKrmOlB477~PN7TG^Nl@I-ESm8Lsl?~XjXSC9{YeuH0SyqcJK{Fn<;Q8 zc068M@vvssoFjjE%hLsz3^1o5#-CPk20?KBn*;J&jlo_W&4oYk;lzI}T|~PM5Wgn~RT)g@>CzeqaY_ z?USkCYp2IuHk|?h>FyYq+TVemCV!Z~p@EVzeZ4XgeM>CB4a1A+y?MW{aIFx^AbC0g z+WdFm$BNN+Ab-Ec4>#N%SE5pQewSi-;&1+6M`0cY82^c%1RT}H3hF|Qghbzm0RpGT zmmL^LT6e^MyCfnZeW8mMg{LnT{n9gii&6XQ)Yb9DK~ng72(R7BBs>bJwGjPh%@Uc~ zI;%7-`=>?H^FM6gt-a{;eRZ^Rh(?Okg#57W4;L^$To8jzp_mwrq%a_wGG3%cBA{^t z$@j5!B(3yMK>6|EN>j}Z^@|MvT59VDRBYm6BJ^9ye-V*9ZzMv)Ku;~#NT8KUVeQ3v z?&caqrQ%0wa?-#%W(*o$&-z)dQAtQaP|LZ``5(hrGXg-??|ztTpp}_(nsYv!$wb)l*N^ z#x_2LH!5An(Nc`BP$xocBg1SCDdhzFdeM0=)aWSo+9a;~Kd&sOKsKaN_kN)$UZL23 zp+tV6G+otS`29y07CZluCypQv&|2a@f}?jDXVd=aQycnrg*{Vn)sLpi?+d)9b^+!# zT%;anu&|v6u|tq|lT#K#_SZ&RP|`e+O;&1wl8$D;6Pd0WnPkNdTH|X}YfWQX(2A7Y zpfbEDLPK+?rhg{2FZ<6M-gx*!P1nQx!ej+5e9y19Nv_B1;IS$){Fh7u%of8WP70~p zF68vpmk0O1PDa%zgJz=PPa%GUd*0Pn7iXvU9qc)ppQ7mv{kLw!OlKfs!QtydWVgW6 zl+xx~&h#$?Kv-cT^YIXwFgFi_OHs`;#4<}vj0>;(Jlkxw;+6?Tt)ts9f!+2M14;+& z(=#s6JRW`n#CJaywO!TdZNrwU9w~a8fwX#RqQXFbD^>}hpL+vM|op~C#vch$mlmzc$xCi;HWCjtAY&rvg# z)?^n)3p3B^CvvnG3a`9t2pgMt!;#qk>jnEWEM#DF7y!~-^9WuX9qy~1TMx?viNTFW z!2;*u3w(sx-0kB;@z`HpiHu5sujhp(I(kec%QI*Q(X3OU`ut za0J##NcV+oN~U#BIIGtnud!_%!x?9N-ug~wG`mIS>3QY}I?9Ob{2GIp%D&Z9kd%4O z_;rnV*;Nw=cMfKcN?hsB{8Jc7=BXR%C9(I)?VAnRMMXL)rl{ElDd^&MJRZ^z**?)V z-+azP9JxL#E>RDK?%)NJ7#7PbenO`zPN`r zaPQv((+S)NOa)W{kmujjvY?arN@r!PLTVM2!Bq*AWsf^C=c3>?2u4fE8f0n2FxL*6zD{dl!|@y_q&r(11Kj4t{= zFUazz^Bu->5UbU1F8#*Ovy-@JLgu9G$4;A4e3DrxE*28z_JnLoDSR2<4KS}pH6T4Z zoCV?YgA|dqUS}W58^1_+obfaz&V2F`6J@SrYo{OL3&qwfV=JY z+X(BVXvgb`%xlVVY0hHP%Zzj;u^sN8YvHT2uX$L;pZN}iL{&wBEGeXJ? 
zQjdLVU5Rr*lW+1i@$B?BEb4=Dj86B#tXH(ZhTP0M+I8 z{%PE{YL#}=+q16;E3Ir4acSgt53<4z@h48bj<=q!DuHrY_poNh9sr0bc}4Z64h!>a zwF59YGwzW#a!MNu1}h5h^USN+N0(j)v*v zN4t(vu_Cc3JTJ3=N@V*ZkInPT5scgOloj#rn}yDc%I_VawPTKZw(FNlO>}52q<}N} z*A#wYQI=@G8%#!hZAYc?z>kGCE53`Ns57IO+&AM=PhTA%=wc@d`!f>(OR|&`BsJ=1 z6DqV4)^+4{PhoWlJq#6|lpQ(kG0cuU0k%N3E&fWKHX{D z8*qO|nXAVVqakWw8QRxFFaR=~@m42VK zYz8}y61?ThP%R<>JWZ5p{Tqfyp~2D-ZR6KP&8A_$XHQwJ7Bo*J^6Zp%K$|qz`x^`- z5T@Tjbj9=6O=g-?f&OYu*ux&+YxhCTP(^uE2rMVOEMP<@t0B1Zb0CzC{3g06pZNx; zHAg<^Jb*o$5Z&|Fltra99!AT47KBluq5aS4d0!rVOc*7rnl|fCaEh-q-tjZ{a%Wo= zh^GnkZkF!Da8YS$b@{Y!1LlWV1zT55*H4~SVwOm$M#IG9HDn~_X}wdgHd1cL;o#$7aT1X6E*)*Nd1;&u1EUlGEHPTh| zufR)ZoL2CzNieoJHXx!R#&`2;x1ABF@_2r{y#&(-( z-I=V~3m_zyK2TK)kKaiDDbJ(z}P2g8uKwN6g+C zDyC3@mEOVkyga#(y*nJVAi$g>uT@gKGxQmd)s;Q@Q?xlY5jq<47JQQZ(w2Uo=6ikXTQRDiB z@YV|ZYA=%i>wU`n=-mP_<**S1{A85J(nW_cLedl|p-=rN2 zs36Srx3J@pjdItOv2do>x+)VCb*OVBHaW_0mFa0Em2FY4jK?1ROn+({;Yje}Fto&1 z!W@Y@PgOL~_XAiUdDwj6Zb5!L)686;p+1HsK35P=d6H(p?YOa1>%_A$}AhR^qwtF4Q#t)i5f_}Qz z?-QY1LF}NPwJxKPR5#kTf&z8D=#RtD;0?p*!=L9Ejk)90Pd|oifH8JQW-OF5Ij)a z8xvRlq3onxo^@1KHs?q5Y>c0nP2UV74}4y>MyJkB^@~wt-#Qs)$#7pZ>uu?(C?GZE zM<*kXL_6sgJoj$9#^mKYx-Fa6|c+7JgZbhg--QkUypX!eJmy zR5=5P#!6g&2g8U_?6wP8G)*QiK5^n#lM?mQOG|NNi063;z6#jNNrE(Kfkk8}lY)?fIC;k*BVZ-9N|2wUur_~`1MVOMeQBG-Wm1C}Pzybre*^h1!B9Vm=8UJwW3&U$RP=IIP1d&w z^}wWu_A7wT$QtrfAO*1Un?BtrfZ9Yoqc7r&K&S9#zldadb-UIOz`cc42GoINQ|;5~ z21|x0a1_%YzClm*a+9t9+TNXTfz|)xqA?tBA!v$HxM_x7c_FV&jaKjlJj0*?$02S= z-Rq>_;L2wQ01f);i_Ffe0Y(t50j`2vvY-Q!t$5aLr!@=^Yn84i4v{qf_mL}8QGIcm zhN`~hwhEAzS*~FPuTZ_^UZu;iF_3;Ol!1>^wmrgax{7CP*%uTny~H;#cv!kowVB9e zL`VxD{)%b$&nJge}6T$_Ql}j8nfW_NoO;Lz1O2rzxHvXEMt?P@`$Q%2s&EL{S3uOGI=>?dnwCT z6Mw%>!1#C5wg7GiC97yVdQ-(xjic+P%(?b8b=leoo1#2vm;3X4tn+jI)LoXvvxHro zS&Yz-VdkDGowNn@G&ar^Bh&7Z!`-~)5`;Rd2%k=QQ!ONf=8Xv7e1 z$mOxx>fXJCwqkQH!mMujZtI15J!Hv(<_r7*Z69IV^u3d{p|pdt?peE@!Frxqw?=Wo z-OZ^ZcYzg-y-;0pP$?(pRt4_5^XOl27L8fszZX$&$mz8!im?X~CiG13M+AmpyEfEF zK7O(A705YZPTSSJE{9gVoxIfoZ+m+jo2@BqZ#I~Foe>8$4`ud#A@dymHp+2Z8#)>nXdle>#4a%xIvc(lJdkbd<;BgXhcR+rD#e=kV% z1q49E)j+OqZ-{@pM`&={H>taV z8K#U(X-{a7STl0Y>Z_L857vuiEc83F`pep*|ROuMB`y zg0@G`EmfslVR#9z5*v5;&H~y4KiT)-0aZZ0_?*~+M(%>-TwRP;lK0zjmq#l`IVzT_ z!c!jTE&x0lNy{(x*|sh86cg{M!DR;w>gGT-YE;KhV(D$(N8-=uMvP5!vU0?G4Ha!- zA{~sMtxKhJ)@ju?ytv^jmVa+F{p}lxWzrU7=BqFR3hWJC_kbDg7r8~H+BXFS>ybI0 zeAM35PHM$9^CqFFl~|Hf@j1rT4>8OkV`QNEA)VexyKREgrU8h+Hw{?Xb`1Zk6v4Cl zZs2BC8QgSy3MglpHF-r=La7nrw6j~Bk-LubN+PI{I^yj!$9iU6!h>b!GsNGPC#Cec z0w6hN9@;^iA)yQUnv2`<6x8$qaF18>U2Ww)+o7F^pkt2&_61rx%L;V>ya-C-cGT-#h#qR_mE}PH5CEpNH8(a7?DH>P_E#ehzUO_=JOhL%NwQU;vPmZ|I?Q! 
z=zD$!B5E%ASkvxMzzxCYz1(aLtaKd3ilHQie{VM~D*hxXmlxzk%@rf>!~igs+Opae zSQ|SWz%rJA=}xsX(W2j^0bwy$RXkN)U{wc`MohWxu{zNEdWdkw2hvY@fC~c_EGXPM z$RxJB^LXfz8rmEcQ)4Y$4;}Yw(~rNs%CFq!;G;}P$_%g&!L*2!DENy-@^5>EbwaMF)0Z%Zp)*T-y zY*(F}@F);e=FLM+;1(bayZ6l(SWO5jOvz)@G!v9jRolv1xq+B7^WL4Zn=8Fd+ACK0 z@eWJ0y3^kqPuqD{cWJ{^kYvRT+mD%qf+dK247k~CKh6E6`fS9QC) zJEFdJzK6kW4SLxx$<7O8&o*>>&Q_+2LfBvt2Z>o}v+n_oKPSGZrKI3p$&kSC%?itO zOXMruGMkhZd$J@Nv0by7b;BJeW3{wqYbp_2Wh%+ldX)aL8ZANG2G*<9zP#B_h|W8V zg*k6br!D6a)3^Skm|a`X0dch)>jxe31Uu@h>BPHKx>57_a=k0KeaZLD<+2>M@MRlm zxb7!NUSoY=wfDw_kYA#`-4YC8POQEn?~It3dDqj(I#Gx)8@-^=-o>RD8&JF8&i@iO zsLVzxX>Xk{c#_GjYl6iwu88o@haD1OqGygn_5z{ki6B0KtOD*C`@ni}ueE70T#*os zQ-bqVI2E~USnxb3EpikYrbFapRj)tVf12F?x9_0d=TGKf(v&!nZOIi4?_Uc)B9Q}i zt-xx=(XANzdy&JjX%8tmg`E79$uK?QkNUeCR*+5WZK#-GzLZC-QO87>Y_gnK9A$qp z^fQ=Z0y!t+>Gw5M(<;r))0<2crXAb6Pkz65MC7AYPzp3|FUHzfdCjz6zW|PW$Q)l!lkJe|pJ0S&fiMHCdne)yT zE2k@fk;4rGa7|mau~NtMJc$R;SnVwA$t~DAWAxZ_;cD))(=$zZ@I<1_rHtt~v;12L zIlLaadRmZQrlFjM?h&Hqwyt533xqW_;v8R8%08XMpJ=>s->?F++i$XNQqkqgA*sq! z@stj`AYTO-QNT zN28w6LqApH9uD$5wcYm|2~iZ(Ac^0fUe3U`EG--LDyixO-IEt{iCNs@; z;GcZ4x9uJgoHTHJnELT4B!y?9!&VakqM-V$j`_n}Gu%GbHL`$ezb8O=b&tPOn`pOW z(wg2e?>CZ@Y!4KjRWF~CQgHaGn~djk8-78J_(5ugt4Li| zie82BPjY3DR;7Kt4E-rsT<3r3Ho$sUP0jEn>rI`F1H`QH%}BT6RAZrppP8$PC0PK? zY`&w;zG+fC2PkE#WA~mREnmpzln3(^f{hbqxkh88B#utC5K%@50w{in?NA{i+ROh* zHfT0!p+BCGSZR&WQF?Ey=V}14SFty7PGHrH)&8l--Lm3Rvo+zKkN=(@W@lVGJg%i^ zE;(9Xf=8-wLHrixSdNqoxjq3gSq`wH%f2PpO>WmFA_1FgJE%s(_Xo)5%9Z<6dLt#; zBfCkT&AQxi6Ui+R`1VgW;%qEM^mJ9Ve7yBuS0xG&zcT`R#u;jgn`J=j0T>^%~ zR2GMV21J&u37kutHbb`+;sGYKzyxd`ftmF@88B;T1Yl0_cDM1Thd0hT+j!Z}AE31e zi_l?>Vly0m>+}x1hy{*mVhc=EV!tng?ufq7An4R&Y3+B}gG+kTr}uMu0)zTebl%{b z3y8M2LghM_v~`ka`XAQ9A9dkA@TrN<4W|o(#{TG6C5j|zxC;?zC7Ak8maw%~J=`5D zSVFX!hKr_VA1!rBgqWAdXI2D2DdxBMbTWceXrn2kADjXGQ&XK|82M_;8il!@o*Bsc z8;%yHBh;2(Is9zU=J6`zoYkdd>TCcP+y-uIamx|(2#;kw8lNdH>N9hV2~$xUiyg>5 zXpbLABh8U6LIDMD{M+17Y|qZ_=FUt6pawO~sD+iQ-68>$*ezr;`nk0X@E0AT!`9cH zlHu6Qw~7PJFEE*?wPOtCcslbPA>J|GF=-9l(-SsGM`9wQ9N+n>?KvfzpAsH#%SlWF zp{w7PWmw-4$~^0>?ooMpB#MKHTLv{u?NxcM=nkG&ji&Jp?v=1+?ub=Rb60G=qy!=T;p>~}U=bgt3IweOk+Urkmc<@5(JVD-F@pjLCF<@Ll zcPlHHT;?`7Z0j;15zDZ&c4bcd5)YNvW9<8QnEfW0@>*LQhv@ov*+C!H?FACIWd%r) zeAXOz+fMa6l>>#96LY9>ArahYi$WnDTf{WDVVc}Q)*m$74dS}}-LmObEO_~Pf!c{gIi3?uwb`8=-8ex=fN5wWLHB5r zRQEX$1q1_ruKDK=2ALnpC*0ov)*^|Vg~9~Wl&oos3zTy7uN43&T`InOWVsEwj&5y z!)UwYVBiqdnXa-?Wlp?&L{MSj3(&yghY)$@o#M;zTL;w@Wl)lNUgP*a3wPdvrJYM_ zYUFdmY`MD}HXik4NRYoz>nI~FP&G4h2qFM5U03On zE7#lw@kIb)`#W8q5hAG?1K{zixvkY6@{HRvUy<`o28 z76wByr9ZB|!mE2jlhqCY9|xKYw~m4c;g75UG8!@K`mJc+wt`7QWkW^K*D{|WYEA98>#C$KFLWzx{;obU=(mj{ zx)mB)4~-?N?if~!RR^cS_em*=@PZ1byL#`75m2bTE?8_8ZEqv#!^a$6`cI(t3bfY! 
z(8TXnHQojt?Q7i!@IctpejE7y%i*a#0C(x3mDfbE$T6KNd@=|fdwqZQM}&1c0!{WX zT+2(i|Tr^TRpAXJL-&i3!`X*@1l++J#`x5k)X5sWRt4lC>7G!Ixvs z>SDug7EY(^z~oEUGOq7?4-*$KNiyPZ5szoICjx%BH4zV8TPv#|SQQ81T$-p)3pf`+ z>FKiT-i~|pV(qQv&+P`RpT{Ecb*D@Orku||WgIwV2#yZcf0qJ+3vV##3)1qeppTW_ z@kG_HnX)q;n~v#*vs7@g&t~hk%erWT5*>`GaHn z6?^S1C}lZ=##=Ib51UABk?mn)dIcKeL6jp3!~wSI?~PH^R9^?&+;s^PuBJTkmrA%c24Py?Kf zvv&gcV|CcfHNYn%5|-2wVmLemVC?*VCPr9>F?9@mnm6URR>n4rm&N6{ zd1rxxiKMX;nqo+3k?xrbZC&sMiaycV0=Q0ZTaq>qwzB2r@@jSyggZq^t;fF9k7oOR z1e&>ukNR|<1jbLy51NAcCnT6$d-DV3rjiWe13r7uIj{p2RIVbJ<;q(8O=;zj;nBaY z!(-9*>$sMk-ze;(U7!IKw?FiPUH^gy6#5MAsxE6?ok`W{Pzhj-+{N^R(m=t^E0u-e z!cZES)TmAc{h0<}3Ie15h)}e_-pr(r2qH;UH$m$h|yC zcSM3I)Se+$$e3$Zu3O8+bGpCTanl=)pqxnyG{WRD`EoNcpN^oZ_@_NOGsc~;#0OMQn$K_@gs+@YgUJtT2_aRZIa`jRge5h*~;(* z0E=A%7@0z}%qWR=>FaQ;$hjKP!lkh5C6^{GVqr_ zD5kZPtuB^?z%-wlyE{J9c!V|r8GvC0C0yg?Gn=>7CPCk#5-vs z1Z(KBD~^KPa6W5z#qyk6E!x`)0OefklQ)|-)~Gw;@S>Z@oe|%z6K$7HVId=***UMB zLzDw(5ZXu44@x0UXllQ|Q)fg}j40Fwti!X9iW7$9hj&*^S^>Nl?V6Vouv|vtr^;iI zC~+^(o1u}|$<Sdm}V#zIMHE&ThCkEXqoj&%Ix{?h0P|xu4?B zS}dwQz1y)0lS$*9oeEVV3%uP<8t-3aY|TNsr-Oa-!k5n>x17x%)9QBTT!tZ){LB%g z&inmvpeY76rVBHr90=vQF-aF&tkd9tDzK=LcRpnZ#EZ1x)8jpyX<1!C-X#~-{%We) zFgqw4^4uS#{L|+XkzUyohN_BfdO594ZtLOD3ED2xEhf3=3wm53CrSfFYj@SPEmS}5 z&ckK~h8dAtkBb9B>g#Oci*lQ^`S+Je7yGzgk!=$b$e$^iHL-ABe9=ip1mUKEN=X(c zm364lwb}k*PfgqRVSo=HP-50liS%a17uLrBviB|ge~;bmI*E2AEq@c~j(h-X@8UAW z0Y9VdS&)BC!}`0t4Q=srI->AX$6!N%g|=_`(@tPOXwBAl3UZLxbcWu8dV?JYx{!T) zXuPlxi=_4hn0Jr;ikas>>s-ae|3)=Ytr)M?6vRq2Qws>e_}vS8v(DN zG`q1S^Ou82od^)O1jCpQTdV%?<`3=-k_j(I^G=rZbpD>CciGA{);GKj zrG55=7K^is`-@rd*0i*y+gn3g!yyp!-{!oGfA>dd(j>PLwe0OBbn=?U6SsMcZ{e(n z&oQ{Ge9m;3aLXmz$dZ-yLgX*O3*bIU3z&nofF+un`{SAfPTAT~C5;IQ!@tYe4Lta3 zi~lC??&b)P@4aMF#uCv{;FFcuva8X}dM&R4mx^f@tNP+fh>f#XS^CXz5!uSCg;04Z zV=AE8%HXG7=j{#8RPZ=OHNErMT~Yjh*7Lvl+5ia%(73((KAcv_CT`%t;Wph>0p<{N zwiIJs-=DFn0@oL*E1v**JkWfjok!D%l4H;fC;^TW`cC(M5$C^YGhme0z&nI!9q{m8 zp28Pb^XQr($D6TCR5}Pg*8}FOdMB$Lbo)wS*f+0_8Ty^qOMn)Mg0?}ShYf#A8uWky zV8C@D@wEGOF6FirzW?eGVJ zz@(YlGR%R8x+bQ52kXEtZfqWiOr9>Y?CNSaz&ucif$gk!sr@&9$Jq8?l-EVVzlTPZ zYzYCtDmFfD2>7*%xK>g5aDW`NNu8~#nP$66+1a4E^0GV0#=D2x-%cgeO!cre*%K)p zVx58b^|KV<1!lt9)TlYTigmbw<`w3DR`E)c8Sc`sK(_AgMES2Yy4N?rB1Ee5UBMUs z040rHLY(KFxtdHFRh2mxyOEjeQ%9*6#tZvB=N2w|kzIV)Ui|QJZI_M7RC#AF7bHBY zHb#3gUo9^OsK=kIhglxxc=H4ECW9fHKYQ$}MegqTbZ43Z9fv0HQ&2MXMDsak1aE-yIm8RHoX6z{WJ%)pxY!!N_+Z;zd~`G6@; zSp(`!mGfS{M!|*rTFqrc?fs|RKi^CSQd+-09UX76E^)XIX$?rT0B%{f6B0# zxHOS@-90je&z!9yZZT1`5cjh&1=H8_2cWAh0332;_`mk#+tl55E$aUD0RO9~WB=Jd z{^1=T=&p1T@O}QLzUevN{eBLl-GNy3e+u~La( zh%)fg{MXI`QXTZyQ2c-Y9KnO}abS380=M4L3atj8AYy+5vn`oQVY#iY2yM@NoZ#tj z@c6~RL%OVp9C~h%_7g4sHbZWpjj~rEbfMDhq3EWg!-e13#QrqU>YDu%P;cX~?=8Jx zk1{c*=l&a#Kfln65X|gBCqOZ*0SiSKgJ$*FZmQ{}OQ1{C>gqaOhuERV1--tFiv-H8 z^@Hb&8m9yQk0VUz5=!b?3i&fXad4IlQJyr=K4F+nY<&9Dwc(|gcp0P7O1pWS6I>dy zAu-eS?mlT?Pp8a=sP4hKz+Uxg4ATI$*0ZzMdFNIKawn-a^l*DkZ!6((d}C7%wzrar z8lR$G(B2BXVVL_xsz400uC6nc{K7EDZIfK%qWCF2rHS}R0p_GBDXds9^wYGfv7IL_ z+!zzK&R0*O;B!6{B&rvf=G%zl{@kKYHb~-AlQAo5|CUjQ55U$Q|G&l7*qB2fwiXab zdsaKM3QgTci?)9whAQ^4hi~q&E3a5&GFx`ticV~P@sY#rNw&x;o^+h79aCH_RB3UV~VWe@dziGKimCv?jHF;5hil}_eCCjf7o}boBAtB zAlZ3Ye1Zv+p>$d!-+=6!jgezME5jV-+@6Xi>LR9|sypPAI0R}ncN#i>`m)HrK*T8T z;}d0HO{m$qn8uHTToVfm38Z;gSPYT?aX0eLyOH^! 
z=xxfO61&y$pH#Kv@Py_P6$}TDnV>8%-MjYRJC?|r#DYGBs><)8W}kvtJ!~w3z|mE6 zNvyKiM#IX=cw|{2{v+ZRiZ8jgf(kSewJLNyf)fd)L%y%eHw@3tX%I0~J-Ns(;_`+b zL&S#2IsPQaSn}4na+!f=f9qLM)|l5^1tj6{Y4m#9ys2?{1@bB*(B%gu=v)cj*@C|a zryt$gZ&0W&@){G@570)$FGc8PH|~djXZr?TN;5M!A`)KDOL>Wd+k5%?^lZb&;3dW> zF^L9Cj(ftiTw+*4{|OJy;mdk*peX(n{~@${O=*u^eUL?fWmiCUu&7G;%CX?$@JSjD zN#FCTap+n~_UT*M9JxdilIPaglI|uk5h*4k6-C{L)nl~{VEt8@BcrKvCvH_jOXIwc zNt>I~TQKjhM6$dhnxo@H(?n^6CYgaDr;kN2V#%T6c7I zu=(Ej%WLj9%IV{X=`Rj^brUR(15C%E`Avl_ahXL`V-oU_SHjkgC)S=jfs0?Aj6*+p4ej!0=8kGbG3*J0wm<4nnXz3tKYT}X%u$Z!wk|&f%`*Oq%bCYS)Yz) z$8yFM3M?ht2WJ+T8hv(4uB*=nYkbBvG;aE4lX}qz1dCtCvcz#+WYCL-B=;;u^wa$s z2xxVN=c!~r$$Y6(cWuFn!Kiu%fq5OAJ6$Pow%%L8DJp-0f<9quRcPX*?if6Iqjg|r z_WKJgP0qDZbNYw3vJ{te1(I5k=?7iK>(N1Zt@{d0zJxmA$R=7RJq73J<>~L0Vr_=A z!t;Var_HS(?s|1jR(jgz-)7b?taJCnG}~J*BWCe7X&yeYQ7FigK6hz1AYjeomvD{f zbaptwN?!gd7@7$1Y%1!c-X)ZQ6TBVop)I0Jk$cVQwTCzG5|43#?S?(|ex%ER*lT|l zY_5l;AKtg1O^XvzzR$#cP4@Uh<}#Wy5pT7D#LV-MUzi9={?KeX_&fc!*L|YxL=tr3 z8SEPyalHpgZs)7FUfvDfEjNfg_vxyF{04>>Wo7dgFckuo-RYBA{CU^~9y1gNV24Vq z8y_;AG3AhBG<(qDkC}%8jIfq+Dr{;*-fRApo(!q0YYlrKTig$U+gd7M#wy#1&-yyP z*0VRi@o=mx96&k{yvftuH&a_EFEFrYoKs*N1d&+H^=7)l2?`vP=EOHjyS3@Vp0Da~ znh3m#h8Sa>ja!a)4F;F~ZN<7U6vm&NBcS@brB#fFG)zEZtoLgPkHEQqedh7 z_796(*f$D{3RJt*`YPA!XVeYO0)#xitVUUdufw(UkhY|@xI=p_@4ao9P;a|8w?TX} zs|YXadvArX-jofx|xtwkKn8t)`T6|C=roca~G`d8Lh&?%<6=*wQ z@V27%dQ+~NO#Q4vLP(dm(c^gx{129p6Uq^Tqkn+%1_3T4%E5$MD#P$>8aXrXc3p7wwxgC$y` zGq`-+EQphhj&oGQ`8~4$!ez!x<>hP8ILz`3*;(vK5@P<+-aMSgTmD8(}s(lfR)?2OW+rd z*7DMM#b)n(!-i`g-!tV&eVJe>XV<4by!Rp?;?1n`q;FYTf{gwyAYfg6-41PQA4Q({ zJR!ieiA_f|>A1-MDj3ynUg!^#UkN!U-OYI5b5q1ZR;0r-X*_SxZ8X&sTE3KC*v8o4 zc#0m{$7b|)H)2VsoNzCuJXx8BQ_cmYjw=0Ad(=Ce!DH%zer8ZiP-1#+88*?58nAN} z?>%rNTSO6GS82D2@hPR|gmx(u%(8|JkNwIeE}{`{=ZuM43MKC}X7C@LlHZH!&0gxr zoXh|IvaPVfOFo%j?`@ZY^H09TrIW7y>2B?#*_KIX5-Z~9sg1RWj^1ox^L{7sAG7Xt zSnHZLlV)*A%!0f~o>w+Guc|`%UTI&6=>B{PSzT@Ql0W;I!7s$hCCC`F@CY`Os<=zd zNB#=ELp_o$naq4c-~#QbTk>yL@FGQu<8Dl>EPf2xc99Gs=k;U~HH0h$yt=fVuiSkm=Gx0O32k)$I@YLrvg`s5hFr=83!K9IMJLKb0 z?*LAhtHR?DXuwW_VH+DlXuF=}`jgk;y^FzXU&5f<^)Dx=F96EG%yU>viA{F6fy4N$ zBJB0y&k@186aZfir_lnFCdO1rW6Yg@P8a6K>Fm~=6$Ltw;#=jNV%$jP8JC)=ot5)M zCua!w+(S)HvGR7XRabsw4ts_DboB zj3l+ANc@&yqn@Q0$zv?K7e10Dbkcm;di@H&*MLS?wv z(Rs{Wno%(E1VxKZ%XQ}|q{jZtIz0%mAvof}mmQvj9nlWHv~oNZ2DKr?pG4c&NHwJu zgAxnyS{;d$dln>Q)Sm+}Bs7w#3qc!0s+6P%!%3SzfdVxq*2VGu-a63 z=tnHcdV*A42+P0Csv-PRA2`+OO)=iB{RZME&jbpUTJF-q4nCehFGvJT$o|7%63mkD zc?r57ZIz|ez}a(19YzUy!PXAU2-snul$rqGDvLnPCGyzwP+CM;j&I7%`oolz&@?F(O4 zMSV7PlGV=Ab8+zWXufu?70ipdKkB6yERd-+!XmdW!&{K~KEzu5{I?XjHDp{}(B?Ud zKjCd-GA=6`roH8=SzN%?)EA;1v){*$^Ll*bwVgh+z83e=;)olR4;Oy#mLqCzOm4>; z0!`iE$Ka}Q3_0%*O^GyfdgR{rTsLGbY=CHL`Zv!SWW`Jk^rk-)ldw&c5^Egb*ZGGR zD5;7DLT?fd>Ugf|NJRHOnTiqxy2Aj&x%4rri4(`D+IHp@p8xM+;a58N2{Rv%r$G}H zhk;8&w5d|^T=EUz@k&s{CDOg4L0msIfhE!WCq+PoY97}J?-n7LvKIjiL|f9^)_;A=qc zfo_^zypK^|yzh9JY8oqcBILX)a1jyxLrIP5gSPv3TZMy(&dIfDoi`YPj+fZao2Bj} zvX=oXCujej2@jwJ>9mxSD$0Fu(;n0@#Z`SVJ~urSeg8!QoSreiR0KoWjXH8ZBjvJw zx@`x{MV$eEJ4J@wFn=cAnOioThb2qTwS{S1yP)&R_XF?rRRiplY=9}}T%qC`05tCp zKK>*e+FoPHGRE`e9lS4$WROKi`XT>MNcZ&+ zXjTN9%*_i*v}99A%?r%1i$C6dzg(_-w|QEyu(A1)De)n{Ll?M}4+wc+VI7EJ85maN z;KjWf;z#;A6SOBKbC1PP@NxHIJeg9w#G#k&t{8CJ8Cq!x^7Sb~L+VbyJ7V{s9_2sf{a5 zfAG;`;Ry_slBsd~q53xO*fn_^j~&wc<;iV6@07oUS8s~cd9I7mpIuAlD3#1FNaS`j zR^ISxB*tOriE0I#eo(}HPOoAENLGYasG&inpggR25&kA}w^7A@5ak-5g-ff0l22_>tRsU&KTZHd=afWCKYyDJ|dvQkD4+vZ?xg2 z?WAx}`9z|9gr)0+5Q}F>-UuGT*l|TfK~m56%c4DI(yzAT=EzN5(v*_hrXQOK%k9$~ zJWO==%TDB$84WR@FEqYpYNB2w!wY9+Ax0^1FZ#hae_ zJ(yHocnrj)ENR1^NMtqLcWVbeGQCY%`@W=5L~7nqOnc`5Ns*y0nL}$}#c$YzI0qx5 
z463BX%;U}CaHD_fs*TOAY@J92Kyw>tf+(8kuJMITe{0q$!xZH7y^6J9S4TH0x|7;_%C}tsvt%42$I9fi+MKLYi1Q!R=}M zHSG3SL0~0M{Ok57b1RA&>GQt)pIk&|O813CH|{Z}Vbs1<8ICOR-Xoxw z-fZ}R^y)i>I%c)d6o7`Amml44`XI$~^`oUEBZ0G8z1d`X@RO5bRS7V&c1;_X=&Id= zm0mIO+fL7Cp-|N}7M8TX~^OP67;8hZ=6ROZ*{Lkq zkeC0tpu6#)VDFS&)BGans|8`hvjh7_>t(l#CF@evJe@WB><4xmP6rMw*J#R3t^Os< z>Z3W9y&3w&(2{q)0u$zyv{1Z}D)tv^8x3B5DW$v(fL%41`-)BA#Z=-b#~rAcLd5bjY!a zDO6AkSks`t4Rcy*Utmha{Dj*oSt^;lEwbF6GtDxWBqjUAzk}%R3y|@xE&RVnv(^kHfit+CxGpDC44~qECL-b#Mtb0R0 z)g63T&Y0-rVIJB^dT7(q-d&=Bm`*I(RZtJm*Nu;lkEX?rBHuVtF=0CVMekrPpn4gZ z%EzT7q8mWquKoF`STt*0Fx16nQpjHS((GMPuy?@t*K{vOh&xP)S*j$DZA+lC)%S`Q>RvoZW+LFs>%cI+u7i>?Hc=B=YJR{M+csLPZI-=(#Eocq(QY+A`8@l7B5LaHD#6;dF zkK|R0jeeiqorsaBEo{D6mgb(9#RVF#D@=MT7hJ8fn)q^L#0A`NIeaOxF|&34n=w0A z=ef9qrj-~i`-wldRj~tx1@U%?94>9P9oyB*%N@H|wc15a+M>pJI=SPWwTiW0!rdjU zzBV@1JXxSVubL3zP*7@pKb`6t*)vzZxs#K@F(($;Vx8kA3XDkDb5WQQXnr_<=clnC+F# zqx;%Etg*XLIS0QYG%>6uuOP4LBg272vpxvU3YIY1wCtbF)NzQ*A?56eO_^;&upbe~ zW?4g#ubp#QJ9B5uc&d^Z!r9m+ReC&7`c->bcjFXW+{W@b2tz{_{3)Cr?V+VZ)*qjy zr5ku6nqI?qf0e+3nOG^kTsc1WV~=`Sb|5m)Z45lwR}ti3?9NL`t7S-G@S&Ost2Vv);Fh{xRX8HxET!27MS{PJO4CAZ&-DOyjH%XSor1ef!2S=ZGCb-sNl zap&&7@ZC-Q71d>WwukW5z2PiBg0@#e>mv!po=S4XD(#vVEml&7wY~Y{B+;dcW7em; zd82qDzcvcwQoa8mBP3f&*;@DCK=aylwKZrTHVb-^==7J0b2`XY{MoTr;~OqcTVOmB zkf7GWc~E=IX8LDv6HD1t^sMEImCbNe5Bo|C>pnbR*$`$kCG*C>Jmz1oHW}6J#b$w>qpeLpU)VCm=t`IUu;Mx#BwXm*5F5ec@`_Rm#T`E0S5Q(cjRmup_hxU&O6;m z)2OYG)jij(rPK8|nzWx~z9gT!jhPD_m!#(Q|D0y%Rk|YD7suZ33=yrOG;m+o8x3~l zYkTOavJ3GnT9`BT45*M`;bDc{tF3j{MsaJEx8P~$GXj9-mz-|V2r8^_(pQNNJFu*_ zwjy@TEzl!U0FiZ&hq--n);2ZaFzlY2o0~Rja&WA*+fiI)V#BcC$P@e{I(LdtefucY zqSG)!)7X7YP+w#&NI!*~@Fj*tP?q9rfMG2^>SY{6B_*DFrgZfNT)eVh)BcJgRK^E} zGAw{qJZ)6Y?7k#TQM4Cb#EtKdvb6?`!t|zr`=GwW_b#c)u+uAwjJW}XaF_K}q*#lH zr{7E$$UbzM0c>Ivneq~SXOx*zg+LAa7EVB0v8y?+qu!}|+3aOvxurGiF!p6qxZ)Uk z(xK*rpO&V0LJlMRKs|$o5ZiNV{D2fhxZK!pZx22Iq)ppllN|64FaE6*S7dawlpdha zMX!rMUrnrC4p@lDVk25OS?LJH3ON!ZdfDL^N191fO)uwU zuLjGd2Asrwl(C@k_K3ixa^YVQ4drrQtIhLb=blHoM=x2?UxGy(zI|&)OQfnO#9-;!@1e#};-jDAD{l>(KPwh>_^I%_lTt`50}jBe$2W;i zpqLl)))OhN+>`zB4;#!7r7yh9g+DX1z)3uV+MSJ9m}*J)(!Rd`Rk?A>phlFc)fzj+ zyEL5WMs*EEa}7dyOJ5)OkhgSPIfE@tR5B3Dipt;5(a=Q-rPh^k!NP~`4IT|^DLz%1 zA6lQPrGvCR>)MwViV@*n?kU;WE^lr&@z?RZ0zPQqn^boxZ?e#`;IKQ z24wwTXodzindtx97I3-$)9rEv3!7H)VxM8MMj_oPOmsEAY=p}!@$);C$<5!!C508z z{IPg-)}lv5?zQJaCr1Wj?91CqBQ#+W3Dh#ufZH3xO=2P@_^~&}0 z382~XpF-|pQi?t~Z`Z3F?`O&F?v?hbf5wn#@?*fq0T8Ns7IU!m6l$N&gMY_^Sky1gok3V5GV9*GQth zNaMj}%T+=xPu-OjYVr?$WeEOah#<80@&pynz+F-Pa&Aq$1$YZMu0dYsYP+!{@KKoG zQ2D;M%OQ@PWhc&LP4~h)SrY}(TlrF_QRy5G!tGT7d1W~A0s#8qVkjIj< zAQHBhac|Uf&rqy3{zeUSq#WBDJ)@jMe_8q)CGS@W5TznJPWjOp+BMuNOp~oq5wyxJ zaQbX*WiZv++?46}P>^3Pm{f?Sfbw_7+%i{eNisp=lM4ITcY1T!26s2H&MWQJYb!UD z=OYKdRUesb=~%4K7uY=YqY)bH3wfh(C-a5w!b}E#fXeN&-AJS`Vm0sbyZZR2 zoM)UxFTlFz@;r~i!8#|oDaBbKez6z82LJmNjKO_WeOG9u%~~u6x1C?rte5BhS6O*N z^=vCyK09ac&xO&aeM=CeoEJ#7uDzLACaQ6NaySEowaJaY9$07?3hjjR^ z!A=0yT4Lzg1-?l!xW*QBSkv#MHe!Zo(r0fnL7q1k3!~@rH^4|>z+iWA*y$O28O%&u z$t`^jBr}tiG4-r^zZbPg{+$e)Y{!5q^MSwe_|W1o*EZSkC#eV1<=_D*AZl3BLR}d6Kb~@e4;BdF2 z;73Tn7Jm(@Cc2^4eV)6CI`Lg70yx+0yh8j0)yCuOflaZiM|CHNhhct`oPuL>U+T>! zWSzfGF7W)YlHyW|b6;up9?bXE&0E9Yn#@{uw6hV>5NI-!oN_y0nZ)sQn`5vW6-ulb zJnNRXW6PiHcPDxB(xLICqyk7*{_;G-)WqcKB1aq5asTbMG`skS$+J`};_ zmiI+nkjR~V^eTbVSzA)C79{@Qe9! 
zEv8EJvs2v4AE*$X35n3m!C9jFir2+M#0>m6bfAZVq!eH-?79-a*s9hb?}A3NNZ!y`7!`4}^U5yxMIV?+@{v0q-XL4G*KQ0c0Q)u;m}+ zwof;qo$V7DJd8&QsiTDj=F=Mhimb4KQ;$reql+7O9rwaSa6Ue<(yVUS1oX#YSQ+$U zSAAb0uxG@n=wXN-K$@J14Hyy9Ezy$0({?I#M?eD_^ROk<`%QKrSnu-qh#O@+t+?0S z(*Cvi6SwyX+wk}tFg(Tyt(NfK>#TJt8(gt7jg-geizu9oQkPALR;#Cx&7EEta1yDg z34MeGfGdvlVNuahM-u;v9E{sO_jn?OQ$SBl3(4ty(0=FWT&vS9u>-NN=5w>SBUp)I@wy_%}b^rk9ZxtLz7R1|2n=hzbZwC_BV`X@qNNjxo-?cZPV7OFR zTmPWrkfiJ(j6(jkvLG4O`yd|-;Rp52bD8;69n@mVGG)G?M7%0(^mmr*!B9- zX^0F#2R9*Z+0Hc)LBkPJE2Jj%y+*KyW3qg~&zE6y6Ug3@WMZLFK~f&%G^%;|ckINP z+ct{}^WRl?tc-wb1F06C_X-Mqlez45(Z1N;gs9E_J(Q0vam&f1^VL;Hnr;a6ZGBn-@4G((fsMxDhL($U}eFw}Jm^$Sba*oV4 z`YH;y_inbo@zDXs`eZ=BO-M7yeh@}1%Is^I=Gg4Ik#ay1Xco( zZ@79#ZB^A&ipG)B%xgX+#JXBve9C`g;=~ED@4Y3rw*6ID6j&csoh1660Wx&lvdRpVGuT%LX znQP-)k<%FW5La%OVns#hmZUO%1;+a8k4V%?5Me%YT$FpEw+(DY^ayaZgQ~Id=O!dt z@)BNM-}rHjfZ>#IU(Y|P@1TRV{YYXf*_ zb@z&l^itH(2YE7AJeblbKIRqDw6NM7Ut_l*hU|bqK}9kV7`e*-i+@BVTZWfotGHoK z16rx5(|4}-wPnUwq4F3;n?0^oFu)XI6_ou`P7WdzT3s{Dft2_4qjxKv4pj6etCX*) zVzd2{LAPqZl6ykaJpr*=kQvp(Iasok&%M&o?_>1&++$S=?LO{K$<|6)@gky8yn|ut zojN{!?POh3a+Lx2_4`B5U5LTiVJsMeKO7+8K9?Z@^xG@1yz>Ute*?rT1xN$X-<`{P*sC>&@dTII`hNGWc8OolZywxL_=R=!@zXLn@Xg*aRDE&du1 z6y&$Ifz{cY)o|-xp0hxaoW~yL`3#`cSkQY!XE}FKr_iRVXth$NqbcyN&fk^R`Wh0! zeg{vS>z4#p1^2v=9p+o-1AU&y4=IRwIYCnB_{y4ZZPdkYAoYGC2aT*ZD#PVMw6)|z zLM~kTj)VRtR9JnOFl7v}{T;qLc|<4DIeVUPy9w|keAhq|9yOM+&*NU!zT34IS2)#oWOM-B1I*|>SL+EUXg&AXLVr+l%YMnJi`ODjx}AhTb2)zIO1S{pU^VB(DpE-3Om->;g3iK;Qv*SQ!C|vc&bI_Ufcj zbgbE@R#uSBEbt7#U6ZmEg$2$YVo9zJ^Z`_Ydv(-Ncq)4+O(upkJ#Ay1@jOHkFLA}z z8)hV50fgA^W>q}2(;R^~PKLmJVgjmx(@fz-O#tEs_!l0Bbg19l^P>%^5kr6KU99GV zKvi|D?hPkP`}neSiTi=(*)*8Gqi*I1`PyFp2HGyYX3vUsBYg|>k zX(s7iB4G7WRZvJ~pM?l!FFFh=xd@ZK<6>FmuXZ?lzrJ$qsg&JRmGI^5TjkTQ2K3( zcP;cURt3O8qB0`hUZ<1WsXJ7#p3wdE3fXPWFw4`R$h!-RBGfQp-a^ZBi&9g?4@pN!ip$j6g6C3(1*F78SeRu`6 z0mpHkJ2lS@?X>+YS*Dz}Fy^?$QkX0y@n!b4>tfL5X5WUwxsr)n-P#^2(rUuJq?*Jc zXRPt9=mla2{b*|m#Ig|C%bhpHqfYrqBKMg=Cz;_Lk=7^xPl6Mp2K)ZCX<>=f6qkuqc7T}Qk(mZ=Nz2;H6@Hv?)Y{s` z6gi9)5!}!$0KU6y=bS`$ zTOs$3H{$Yi36iN@wgd=Pdq|zPG0`~D@ens;dW9o5H>i9vZJ^!5Q{-+{0zI$VZ?O?@ zGm8o!Z*@(4Qb;Bm%Llvx<}m_Q697$-y!-hZ$myKkjy*Ddd%cy^PJcSCZVjUo2mZMS$9F~k+lkyumnp8wG9*~!K_-X+Vb_JHGwj@GJUb;XKI6JY-Q-ZK8! zuqkFY3b1e5lf+ekxym~vM@LEUG7o?dQUz9AjE*XS9o6@?j|~T2gsb&Uvo-$&(f$sQ z{~bHJxwZ9m{wF<-%gcaU@+a&Efm};aysGO?mNkknqWuAiFw*^7lwlNG<>myBZrU`jN&5>J zaATjg8x)^d*>sH^$w<4;s)hcfp~3lg{PHfl8z?C*LaO~yF?Wt+p&k%OT=2d`G+nh{ z?RlfYz#0m8u2bLrv0V?pKm5M?ni8n-)E^WoOd7t6PkWn~f!v{BuP zdFYA>{}d;XQqfkWZ-c@|6xcMgyu<>x|pLFj? zH70Y$2mgmf&bq5N^hs8t`x8J-Xh&+t*rsrj-9a;F}M58gR3m zhJmi#n}JeZk(mnS{ZU)a|;V<4c#n3iT9sNVD$N}DaoW_7pH_dg=d9NAD=dDU%jqn zv*334l_R)`A$Wf>>*b{Ahtf5oAO>kLw4`A|Kfe;FptGFPSAsp3uESC&Baqe(MP{n^ zT|zuZvo?Stuv@oM*#99GP?3ipi!o#z%TA)FYquG86{TGDfmEQLkKf&-8lkBBG-vB$ z_){#I4$bOx-eqa$?OxqR5Qj_pkXl@koJF}%&hRzVoGYYaxD7N ziBVYX!yM?eOXiw?QUQd+ryWs{o5nt0f)M3eST4jETDjCsu8P=fO?LQ8bQo^ZV1E%0 zaMcweFosBD7)b!Dhr_xD6JalE`~~BSnMOJlp@~2v>tqbX(ESln>jOvoWgEvTH;&hj zi(T4_1=U#6k7h@!^C>CbGuD0Ds$p|)TsXS_@Xud7q4rl>>X`Ru_z1R2Q#qXlc1yahXMEry);d6QUX+PsOSB_Q~G;J1U6U-uQJ9k+hc$xc9^@aOJFxd0C=iSe? 
zR*v5iaias_?WMM=EwQRr+tYj;1JhOg2JX|=h@nv@kNIFX8MNbRrktjUSh*nWd1&%% zxF><2x4b>CZ89dez#bj|Hvz~R&cB5s{awZ!42NVa9s|l$_*AR0);-LLIZN8V5^Fgp z&@V@qZj!71YuJ3@w)I6sJC7gU3-?9g~O zgf$D=gRE-RibzZ);f9*)N;%NOC?QgvF~7e6nBRnbT4hDgt#xZKG-74(uoUW)u3*`6 zQLf#e57Z}kRGBk>1RIhi!y2A?$U0YKHD8T*cQJXp381r2y$2satQw(lXz#PJk+(UH zKDCjm`9p>N*-a*}#T@NM_3;Dtt79>3%h0)vu1_a(@i39mY8S(79t>|bOKBXt_(Cq~ zMY|lRQLUyR!%ttZ)?z3y{JKO7w&od-YGO;m{Q@A&ufS_;id*E*nN~R9KY;1F?8HL& zG& zJ+d;WPQEpZR&wJ~VdEab(yQ@@_W1J72#h1&U_)UypcQ(WRKIE$Zi%2VIJ;0}#M9;M zTkejnlQsh-+09bcQE>~|YAz<=(PvMIf=feo-c)I>9LL$=$a=Rp{0=e1cKjJ_hof8X zc0PyI9PEt2BvD$Mxq!m?vj$tasGOosP?{F>5*M;|w(dJ2#K>fzm|lOPqt9GI6nn>t zztWM*C8)OIxwH=2ycJJG4X)VgJ3G2CmMQ1yGC{U@2A=&3)Qa-PUAWPVAU9A!a#p%9 zAnZ!6l8A)g;?C8D^-vAvJqA~X(HM`wi+ zg!?3+K!?SkP6n@8nCJE!!GK3`S{d`V1tG%~_|g;nX;VBvTzZ2$8jS=Xcw zC%|goO2p>17$0;xwwJfHJ0~UW@ zboxVXV9T|+Tk`xLmJtILq+~r0fbXu~X@7w$BN%7fQL0ia_)F}wx*x+@!N^1voHxNl z@2IJ5u_-OXAac6@wVOIEvEAu6EG{JDbg=&d)MS83);u*DHkZSDwJ}&wN$qbM$8@C= zerCn%C8Gy7_xH_JewJ*F>JHdi*lT;i2QfM?ND35+6VwqgjknXzh7M?!PbW_7^1Wz) zmrQV-eF*hivg<#~&=`uz7+Z`H79{mh#w^arr)e~>8_abgG4{4^z#qTe(Q=5njwCJH z!D}BMRz*I;Ai**%hwmoW;p_-kbB-&OvT=GEMO!@GZpr5+LpE`5Kp3T@dix5ugxW`$ zTtSE!Cg?#rGm~P+`mWPVg!qG-={tbEI4{? z_mj{6rx|ihIRkhJUE08m?O9B^-nzNJKptqU8&k}aM}tA~XOdX^IBIapePzoI?;B7d z`@7Mc#wFR>Me!MPPzhkCkc>7(xImXi!s^z$O~`Oxb?zcn#R z8hO9!cbld2wuqi6ezKmSu#jgo$rDfebd|g3rd5V1$HNhnZaS$+xPcy3>-oE+IUso=?Cb>Or1fMjPOuqfd>$Gg@~A0k6E z$CBe4yX5#N6~(c>`lIQOD2a6K#Ag9oj08b))8{4v{KmGT%N`(v#^&ZU#q-XG7C4wF zp74Apfv1RBjVw#rTi(8B_dGsxKSm{wvOps-jKu%kb*1eGfx5JsjRjtj(#9BYQMifT&mxn}H z41M=&(#CaXB*YIEQ|4_#Iu#+&a!_~B>nh2{AFLREcP5#) zzTW`)v63V)f=Gv_Oiqhc3?$1HP{_*4G4*4$*8sUf5{ui(i)2Vh4h#&D@0p3KV}ezG z466L2rKC9NmD0IW<7lv_x~Z-Z-q`xQUdcvh!L(<0ux5|Ss_W7bbP_L= zqjT_b3z#j&Hx*~N!I+;hcPd!D4OOY9Rv^IYC&2~V;HwqLv9TT>$eZ6o-N~4H3?xm) zV{C>?)@K=53i)$LAQLqJFDNS;0!Gu8oBCQ~MLP20Yt8#FA-_W_9X%PK^tf2|k_tlj zeAaIqSFcU=r|^GlG)h9XekIM=q~qhXg?4wp4xk)|zWXH>vWClZ^AnRe{X23tZjW?p zf(OdsTw)+%2$@PlJwpM??p#e;Xd6@U%tWuUsl@(8NOZnxli>{=PGq z<2?zaD)5>5JE5oP2F9~IsrG{yL-W%0f!qPUfK?jca9-N~0*a~ZZUd<|Cc+}H57SZR zezb5bMt5LQHw#E=7^z+gwnKf5oU}fjDoB7kiznbSN4ez(_imJQ34DOx-~9f{d0_K& z8lF>maa!HFw_HNycQXyuNOf)D*RPV{CG*pMitA=4`;Qxd3dm+Z^G_uly*pSH%Rqf0gg;M`xUqv9Ne* zuTEM9MHuy*g7Lw{ImDa-++KgOrs*|}#7nr(=>V~oX{l1T%);F03uN-0cuYaHe44Hi zv~fs0bQLX>@3zW~vMV3*HkgzhT7ODG2eB0(g=Q9$B|+U^!BrR3m*mloH8KQaekBF) zT|nuRWca~I(DVQh)9POcm*O`Cj(>Zof-=+`uw9q_r|-N5`cz%>;JbN0H~qr!sFW^8 z^4M~^h03!V- zhB(Ez>a^LRCCDM`02*fI_}MNCFtH5O={5VzTx?_29Jc&>l$xJ76j;5A!yv?BfM9J9 zI08=lt&)g;cn>fL1)%q@{SACA(j9`qKI8DBL)yM~J!DwzMO|A0-J!G%TsnDg9P)_; z&j4@Z8RbJ248WX@-@2WhzF~=y!C(?PH>)-9F|^(e-{_fgO*io49pki5WOi{*S9-J- zSjRFH09?>Wqfw3l&ZHbNEJ(!EhCi%Gy;5rY;4#0*MPF9qmeizU0A$KeESwV=XQr>W zmTlzDW3mC)Jl%cHF%xnyYP`&33aamrVL;~ptgVi97Me}sXkshC)WE{|Px@irBBiLeQhCCqs`TB}mewHHmnCbM& zolLX)tE4ypA#z&_C*Trm^oC|z;UXm3k2BSHB#Idz&Qtph>6+?~x(TpY_6L>pUZZwW zVBHycr*nhKfug+GI1) z&sCR`FVAmi#Z#LOv>QMeUQ8@yY_v;{S958H+fOI2y-5?Nl_f$YUnTUpu`ZN!$oK-? 
zQq)|S?9j_v``qG)W!wBAhKJUjM)an2_NN-GS;+ugtIPT~4OR-b$je>RuG*)skagTRlJL`j__W0!!0{dqZFvYi*MFPATEs=(&?bf-WH+%@{Km z)uI|)I|$MS5xDZDt#+44Bf#AfA96Z%jGM~C};o*5mWPN5WYMdR4K zvH5Af1+wGh7pMMr7gCY6&A~tMxR=e$zncktT?+QY2l`2fT!WI`O}PftbR!}X8cH+M zsD5~Dr2#h(9_j<~Lu$uJJl=oZ(||9!o9CO^GRGvxOq|6J%ry_mPtXVBJ6$QEUYhin zJEWnMt5*MH_{Zk->eke$aznWt&Mf-e0lsqC6nBCb<^y2N!p9Mxafej zTn5Wsl+AZ6S2-^6I2ko=eh%OYNv-YkOW_tWml;vSGOD>g$K24;op`+IMDuP{nAA^F z^cTVC0~eR=_rA-$U-Qgd^l)-Q5h_&0E!u$YD2-Ly%)`R+31n-1;%su)r>E^rhQYR9 zg4?eO99L1Rq12q~3TZK*e;)Yb@1Tu3lQ>`f#~PdAJ3#-L;#$??e=T`6(K=-sp3 z`y-!}ecSQn$D{Ya)n;)FV(yFS=@y^6e6)Q+?pvNqHyM7<-aM|z7S4v%E$U^!0bR@d<0k{Hwu6D`H*r8d`AF_!RC<8`KSkY5de#maw(c3i=Y701X&96)1z#- zhnxV?iEGqTQtvTDOD&uo*;`*nPwrF$Y z=fHcOP?ZHnU{qjhMrt|H#cq!_?>d(bhy8Y<-WBZ_w8Tig6-@?4UtE;F)P4e9;*E0! zwvHmz1A>A=1JfULe-PLfbni>FDA<@kBcdm)y|Yr(GhK~3>6@ozn-V|1;H(IkWO~e=uf;8SJJ6Q(+eSex( zlLySTlwJ{qMjW9eoeh5+W2y^r(r-fjX<}ZG%LCmr z+%vpVt~@lffQ#F%6FMm8as+Jr)~oM8G!w{iB<{l^99#cTn|ae-V|J)@_yhppK%y1| zewfAouA|kzM)LR*C2QdL9zfP27kN~}`i@j-l6Gw9CI$#BcT-1yjrI`Rs~+=q+5lL} zufyp5Ppa?+MrdlrKbaLkb~!J`4bsZKK79%#jAXocuWk|>EC7`BEtwzz>kd(W0XMTZ zvZPYoTzVS+u|6qQ@!SSB1~dy$a=_v(X0GcPF`%mj>$C+ct?BCN==hWn1*=6u;ZwIZjZrz*u( zp2!ySB`~kxo?LlP-n!?u8mxcm7{ocu#0Lsv_=zVkr>R$6M@NI8nF=<(KbIgLaq=Zb z%C6VNN#@~@&I&0%KkGd6H#nCbMf$;(;G)EnSVTi`CiJ;Xz0-uNAT8X+A^bAxEbdKka4lh%90(&3CI|qoTfh?1IX9%M)av{$ zCXefjGUS2zq+(yeW8<0aBZgU2d@XD$AWP;oOc$d4CA#?DnP*_lvUE9lQD~UUtF=91 zD<*JHvs5wJbNAg*YV&339_sB+13lXPmfB*XAYy`^tmxxO;^n%KpS_N9OeAtu>H0`o z->T=R>yLf?&&RF?`0=(hmSG;19k5G-dN%B0+Lcw1YroeGbn>d`Xs;kT~dAiT#6N% zF|@-NJ7lk{y79xtY5a!(TRVKmv)>fiI72UM9r=Cb zXU>dTLMCGx5j){%DI+Z(TSG-@=hDGp@bO`olOu{xJE1Usa{tkul=jA^d}^J0MZS00 z#_;!?AuF6o3Uf^z87;T>-HnPn?n3P|lLd>XZJlL%8#9zgQ;S;o!h^~wM2(&=$mFEc z3L)wVtZbd53|YIkVS?aD(l%OE*VeNKmi4ckblDP#bbaQhJ~xIYkw5$@%`mlL?r3Xv zNT59MM%KKrJYvX{zQEq)tga(q_cXyG)8f_g;QC!59NsVhIP4)IMo)qdfhk>qZI#>`0BB&WY=#s=tEH7RV#T2ua5%GVL^sBYrV19^OLyqVA5nu#~;UX8J- z&!iqCx^sMR3FF3(GtB6qo}6D260O$V9o?Z1#3&QQ#UeS)O6S7gq^G8PP~&u zCSnj)Gn8lZbfE4j-Vd1y<&JvjjSqH^mkO%3VvC{QX5XfO^?yf9D>evrXfGHJb}^8U zayyIE;BcdwLlcru9q`ku6NgtwmW8AVU8h9ETCz!)bfBS= zDqC3?fb;+A#zp?CWtoLAR4#9>jK1$%-S^OYvdo3vVWBb6r=ERxXpS&Qu{N}wRAuF} zw578&jWP=PmtH_sD_YlyRbkKgHf`~{s6=bjLo{*TQ-d{I8aNXxZ8@kcFS^v$j$j8= z3M=OQcpw`aMeKH?enm(2S%^H2&^&u#4;2Xvjcu#4?6DDuSPQ?i^cG?sH;t(T;mI4O zD%x&nD=3sN7I+Z9Fs=O}^21~%y{TOIKwmd172URR({N8>*(gC2aWAUKXcc13WN3Mn2(S4sxcXnoSKEM($# z>ETzTRfeV%5H3WO8&iLmo{77%%OAtbawcrR(m zL)xs2H*0pJoL?;cHKsG3lY!+=8fA@hfPG5$_mWCn2cP~X|03Rk$TF)g9-GEpU3VDU zLG3T#x>5;Ap0EDW_lC$w=Br~n#lyzgG^>g;3aa1amvk{FUw+Qm$C!{6F#sE|Ofebr z=Sy28ZKKQ42D!E?k*SjQecRaEtvXD4{ks#S#r{|E3%kJ)3u}D8u7=cJP-D*Gh1w)h z=cL0Mky~cAlg{eK=51m*YBjo~srl~|IkaSg{UD5Ntt5hh-AYeW%TuT5BIQ~Gr0P`f zQ-9Soy-eYA{bdrIwMp*$t92tJwlb`4x#n}}7S|*@eL1|BIv`TccQ9rn_)*P~9Z4T- zrUcH)tLfn|WB+zW{+$Jh^V`~E|3-4$ldf)`%am;jNmnl&OAi&sCJZ;2aL%NplfYZ- zZ{;*#KK{W0_x;muwr`)ALKZKe&r7E!TDU z2DUVh)F6FFdMj~=ouc5kYV(D$v!&wEJ7|7+Mq{J&g{Zl5v~mwfGBMD+?OAH2Ijel}YR3+!D>E)L_`l$cR{i#|k+xRRxO-cfA5TFz$cg!yCC ztg+Rt51J)-l-^|+z>>zLStf!e*0UX%9{71i6I_`5S+uv!V-3enlly5Q$T`U&D{uIV z!jdU`dLAz!lMYP>LYyY6To!E%;`%siqKrX zw;?k$>^RRERQpvW*}84q4B{J#e`Y=a1g(Nu4TovB>&OAn6_9OLJYI`c&nHSku9b1E z1g$BZtVh``KSj_&hFyk0VH9_g4jVRi=_0wehM8V52jy_x%r3lSxR5?D4vMIN2 z+z+7pomg|xEbi6g6XQX#cQ6jgC(d@nO1B@nGSbqHFk`P|woFvOZ9+v8*pX$;_X1e# zi48%UD95NpO+%Gt|JTxwl)o7`Q)n}D^wT}4ORh}IAm;TtB6Cz58&-Yd#61Q>bwm9( zA}#1xw;r~Qi*`{@&pY&GmP69M5*@=1s+%xhy@Lk;G_N%4__pqS(TA)<0 ze+TiJKHyPxQT3fNa*S@P8sy85;`h>tSj6*RMZf;mCIX`8%*11PtQG!EpVK4pCsS&~ zpFy*qm6IBUT%5V1T$Qnz=E9f9UeH19go*%kU(aSwQandFe_g?&O@^=)DxBk<`0njK zHzz&f8LAG8@v5fY$PRnM8ZS(!t|@6 
z6c+DM!HL-bGhQ`2zP+Xzc&ZV^;aqA&yT-bgQ)=UC0vx! zndbTea5234t2iwUKq2n{rU-J9@Kl!?*d_?1jq{n7SN}9ZwiVWWzb4zT zV~nqfmcgHCvZhvl>a>t>q(gTrE%Al8FFq(BjUIZ{JX*-q9mCAr@jjh{oz9I$0T^|- zLt7`HxUlWGJ|HF{+2F>98eR@G`2HEah0unWD#DWHWqS96u#0+(vENcI@_6)fGW(SK z86{uTEB1|M-R>itK~k=i3?@k%R~g6Cj^3YXoRnyYP<$+(__UUdNKC>&;DyDo38rg;Kmh&eJAsI05MQ&y&<%H>LmTRfR@m@P}#KWH{=bBDELvx z_LwP%F~%cp-T%?;pJAudz_h#{a+f)n5@BGTRQI*DIXwiOVe>;LbYqsEaA#J3xak>( zYH`cF9_oejs~SD)F&gVs!3V9!R7u^{QX5)8?CAaYZzf=986H2RCUCc5zqlBX)h{>w zhi_L!KmuhS;JeMEF;&aGUv!nECXx-QZ-=iGr!<)F^-O(CHxM4JtWL-zkE51UoGayW z4zhvJL?;(+Bt1^qr7n~|S92`8WZM5fY<+cDRPEL`wt|3w)KCfn5(2`|sS?uN<J1(hbtmAd*8UL#K3uLl2!ZG~b5jc;5Fr=bb;iE}1>|-uqtbUcG)x9<5d{)o=>u zDbZDavh{YP$p^!>OK)opCBoUG9_LDLe4txH_H9>|c%LP9(UDSEzx(^h>0mf4|5>#U z6*0WfBMg>*UnLYV9B4(NQcNweny6bXTOyLdaQD~Xr@32Rt2Qiha%%iW;1mtUG-c0& z+4lP-TDCQvI%OsC^?0v0brkjn!%i_HX06!xwP|UV13GS-M1*H}42JsvLS!Ig@5FR> zgMV}H#HVV5BK@C|h~?=KiU0xTd!E&s(?f%G)(fLm%Nios=2;N4I)|SFW##85;V^_; z$%$|1bJw zO#Onk;#ilF0A1EaPm`$&G%_TXsc+mg6M|T0U|q6HZ?)8K`td}N@nt7<{GTG?kU^CJ z4n`q{ysu`5UCa8#Mw!U+hNofa*8qKaS(SBK4FK?ExHkKC*1}jNH;e@tRzfBray|yV!8_(!l<&hRI`Vik2Y~xz# z?ufjlzaBSgVrJ4tKfa(6AG}0&Gl`pW^>o$CRp{w7q-X!O*Lq0e!SA|+Zejc%d<{Jh zMlwTTwVJv5tnHBaYVtGJOI^)Nz`wEP2s3xfMDhmi)*L(69C;p7h(XWB!QI1 zZN;HYqSY)CaQsJuH@iH4u>z^;brD+={ca&B8+9NXlWgnP0*eF^Zo0g8${`7Xp5LE3 z#U6pxxvj!>$eTc0H0rFeO{=zPQlbGCwIBCC2v1h8Bq0q}YH?_YY1X$x46RlezwJ?> zt9uiF*{G5nSR@o8*DW*V)}Q4wz{qa*Yq7#w8`hbfeIEc2vLBivhBm4~DRnL1bbs(+ z(GQsJPif{~$KSHmPh}F4>jgV0a%p_)o7O`BRR`ab4FmFPArzt~uboI}D)#6*>u!*4l=DV$*fA9E)i6e|IYAT+-piL4@^9Pl~gE1E4 zW4rh0K%)sfTlwbSKlD`wR0QB-Iuq*+$tkTlYkYRmp_e#hNmJEURoeKW-O7mv41Zs(bEN+u?1&nFUQkrF0(Bu6ZLx{3utIrPtE!WX zPOu(q=yQdLk}*fd;P^+aBMd6ZiCPRA z^m41#Fn_-H)xOZ096;_f^n5qh{BW0Pb(=*fx2#T^Eo;EDX-w1+VCxicU{lbjfyDW$ z#S~|`R+%1=KeNZc_i@_BnFAD~1mcY+O+YZ^Vr@sF<6ugh0gnQ4*G>C-SyrTd2I&(t zZ-8;@zC`x+Imuq(2Qz1C17%93?@B$xZJn3>&jcyNz^wJ}Pe+K-g#`)eUL#JqNpQUs zU?r0`$GT>`g)P!2>I}d(8%O>GxT5glv^xH$?fU?NUkGcApxxZ6g46cA?-Jrd@|DD7 zW8Ky#tJB4iWrL|y+%;VsPpIG6)3i>>Oo)GJvew4ylqMQm-MYX_`d@W&BNRXmg5%l$ zNS0PvaL*-u4cP$oxMuPT7}F4Q?R_x~?k#@B2SwXLtU!hm3)|*>l)=DX||O)YJ1Ofs&$2yxaNx;+t|?5WPP2tMvl|0z{zG@7BZtg;w*f!wV*NO zxu%blU4v3tteLCh0|rdqLE@gQaMt%UuY5059p!RS5`C-&$Gjf_b&2mJy|z1>lW3;u`*PH6=KN18~ypRARj#o;pMS zfwImX#W%B6sqhdguP>m3Oo(wiO^k)Ro%wpo?Sh&8?qr`za-DPh4eOcwi?DbetJ5V= z6J=C2O4j$P;Y^g}g=TU7ZaDSAbm4oUI{P4_AVs|9@rm!?fo`zmwlaIS=4DL)955!20;e*y zlHuyR&T4qh-0V??1|yBm2UR7yHv7U0?5jYPV!_+Ar56*m4i3Lm`0vlw!h@#1=i6t1 z8Fhwdt4it&d?*AIer_AP7dgGzu2LTsG%}niP&KDxcGJgki$hmIZFIEYMi3ua9KY8E zu92>yez2jyJPA5boWV{iuJgd%+)?|Somb;ksvB9Wfh* zf#W!GWra#1kmYyQk380?;2LcDdIps>o)c|x!uOJ`DQcc;F!Q~0Zj#J`DB^}uUsQ89 zpRd*f$Vs@=m%p=+w@yck=SCXM$GRq4`?R^2aK^az&1d=!K{-DI+`@F3XCwM3Ui#Y3 zmbH(ejkz>;X+DGtAIjE3`#cKQb!+qH8h!Lj&iX6!4rIJ^Wo`kFXT41l zf%N{^(}21+rjCl+1rFxUKRqSC11#K4b^)tIm6vGUqbeY&6cmdzsM6-=b}_fMy%Vn@ zy>2CmxH}fsWl(OG_m~t7otjqq@`#EgP3`wjd7(vo_qo@;o~it9sIE9RPwSSxs?o_V zN*6gIRjnDZ{+~;n@!Ya33-wDujadfV>E~IFB*B=WDR^Ehd6pI^R*klF#J`snEdw zD0pO%;LKw#u&%=+;!)<~x80WtAD!N=k`IYn5B~OjOl+>*6z(;q6sud?OrXehgH+0t!H{o2Zscp~!{rBeMs8`_!=gcm^)ZqKfo`z9F*F7i9mKiI{m z2a&9CZ4@YcHyf?$%A5G${{r{yK~KpQ5}*Rs7=3H$gmj{;?$TVerQKZ8@@$RrnJiA< zMal9#(y7Jy?UZjDBNREW7tba-#1SW@6z7fB0MzjIK96J5@4y-xOD z8O~J+(eqKYr*5qdIZqREW{(YcCsV$3x|BT^_@r2&W2Tj*HWA%vCEd3OWiYGO%|+RM zAtkn!rcLgZmg68xo+TMj=Gq9#KU?RbbG=QI#}0`2SyQ+~;;uyNGi@Vpq0*jvUP9Mk zfSjz`#x((6q=eB?na+fZkY9d}I2{SipZoE?3+DC`9XWi^-yN5deL=}|+o7qv)*63n zX$$=PQULNil**4oTzg?-(J)qS`em)I=Gjm62QaFXa8idI*(h8zH>fHU%mYue?;Vm4coY$Y9c z)jv&Aq$Xaq%w$~m#9g@+04ksZwESrAgOBwx|AJA`a(43`OgOFW9A1EefB5x{h%V+4 
z$~KJ`9Xuk*IJ!gcmZoP8KD=&XdvBrjI5jDz>^Y^)RG0OJ(F?V6t0zfhq-Q0`*sxZ z8?vyd>F&{LaP{uW=7n(OgdCN<@U@x*M#p88HIygF-@Re6`0SO~Lo8!_AdK-&oZhr< ztk^mW$~Znz>EQFmL(*PmsCn~|ZrRqm(+{($id#Ar!=~iMt+tyiJg(>}H)@-idZ+>n zsmG^RAGvVjDnIt9>$NZ_ywvfn(EUSR#SAQ%J+yfZ1E?l}pme-J0Df08Fj2NtWLp~4Eqz20izg4Y_) zB@vbsGW&G}sv?^vbfQ>vQRv5!s-IKdZcG68s{8-$WiHBpZ{*p#=bw@Y7sZYG*|6RZ z+_^e^z(=eDyVZgZlV4C@V#Tw4i{tHQSZ7d6nm!g*7S8{?QZVpJ_%E-%SKtd7ufV~2 z=lr*D;L&&Bp>JP-4F0dD1Nzpk?b!F_S3?i~xevg~kM0A+;o$k7X9Hg6=2i1^+duV8 zz=cnNuHf6MR#K=@RYmXlVFlMn86&TYsD9xe$VEmP_X~o(-!AS1+U04N9%@%rq5o^+ z0rc?ugl?pi-b>mV{k-YN2V-7Iia$sw?=NpjejoJTO6IlmwO@GSO3>RlcAPcW*ZqoHtN)fCy#I3bNZA(drxW2L{oJzD-jFxznrYssKyzTJ zr`jiB={_OMz`*SO*Y3ye$IMj}(iZv!lu`=ZBgRgZMs5MO^FwoZ^O7u_?qVSSsZ1A0r)IYGyF@RS3iy3p&rpOvfA%d7=<1Z zy-Yb1Izm+J3JjEG4UM>zV+5N#Qx7f=1)EOM>`k&&UmES=%A7yF4<0@{uPd zPeFJ$=>~QRn~v~M_D6d!k;sPr;mANgzipZVsQM+wxB$(rcRDEFsMd<{-S_va$Mlc+ z?xv3UoxaI<-5TM0X_m=Z68i<+bbh*>t!OZ&mp=e-N;>|<8<5}bnBhRGwJ7G1W&H)5 z1_z(!Vvz9@RP=nmU-9y2I_J`7bMDe7La2UcNYiYKEoPLK;%OL--)=envH#P{B{y|T zNkm|yOANSiw>{lVnEN!6C?cBp^7x_&>R1T_W;S2!(#!^r1;bW^^iNNlE+fGgyXk%+ zr^^AQ=Zh46$76<_2*bv6>y=-}yKlV0-&|o^{(pzY{?Sn=Y|~42&KF~sjbK#RB-8<{ zIiERx1)YnX2+>rJeQ&G46-n3qbp5BdzZRxhtJuDBJP9j}#*=5Sl6jH6FE66RaLJBB z4yylBopVq6ap1k5=$!FBV@6Wx z1zx|4q~C<&P7ksFOD_2T^a5|JI;URj@_#=7>dOR=wJpHZxPBMRq_~$04Soe(QhH|- zrh|=`vsWV|&vX@qUkL#A13x5I=YCbXE(U)TDo*Qq}e{-XGq^14m3B<0yDP5otG({de--f#Fi;5w7A*nH>#On~C_jn}wjx)G^9tZz&#sBi`*wC-o{UocP_!v+d zpvQT0M|D;buM_0Y#x>JI@ela#0z;6o$SD3(s3cvUFB$p?yDoV!DH9(7Q{GQEebp5+ z&MQjM`IDB-4cnho>a`VnFLv4j+YM=+iTYhui=On(`7$2VUgAujO|xHqhLck6j=9Y- z;A^7&Qf4Lvy&;B7e@HTsVeGb9r*8*#>diII_Rsx%c98cL6Tc_#k>ld*7vcM?KZkE6 z^7apf+;k6r^6II!7iXnSQ4;u{wSjW8Qs0uqEjqwNZ>pa2G2cG+ry~lV$7Lp^ zFZ~)X+0`#kR%mL;>n#!}$D1xs)0BcSewTY8>6c6Kuf5tD_wen!-RJM+)h5*gzdUyn zjnnYhj}UFh=l(TXm8Vs6Nx|=!0YpLMJh6vdyc&-mdK`r`T`>F1vDPK`TO2uQ8a}}f z7<|M$cRaFgGSFALNkn~j=l?t_JHQ~ikXT&bV{|DXuW}@>IQmo4kG#)YtS0%tfZiIr zvkl$sDS&alTw?q%68YzWO@0f&!!G!<+dSDc$y0&byTREx6E}j{g}kHtzZ1EU-hW|` z2|@BWivzYowcoM+pyQg9VHO-X4ZJC5**lrq={;GR&q|SAmrF7RtU`4bNl`X zgQ~V)oCCUsPBy~8L&56DCey`s;AMmKi1`y4QKxkK!s;iI2*IYRZHDxo*~d70*l9kS zIN7EXY-auz2;P#b2obHYiYa3W!Ha z_R)q0hy6CdIC{qIdn~1Aa*T%i2_6(@rJt;&<+1yPpVeUhxQWI|vmYLBXjNOAXG8AIz|&O}Ie_h0UH_b9{< z_r3$m2|1rB4<23I+nzg>OZ`311!ABEbf8H6?$e23r#YY9f%qz0;lmc-R7Z_zTAQ@t*Lby-|rIUOlDFMe}NfE>St566`H+V;d2IT(04Z1 z4Mu?Zd1cMcnE~pIlv-bZ0xquZ?EdWeY0t+59@F`ovJ-#Eq>SMW4|z@Jj{IE* z&Y0*MsSnpp>{nn%s0SuBjQ))Wcf^iF#2U%&&prBNhcm?A2|WDuGeZH#jrubTvU>gc z9L!mes(*n3ok{7#0)X;|>AZqRxQIvp%gG7t0{UhCg@Sa%{x}P{!ML*MqRv>gQsF`K zo?vnDY2G9L$p7?Lpgv%4%;yl;5Kqi9aPcOS(Utq8BC`4Hf3DsY1d-fxyRzo5K3V#J zxAh>zaj&j`N>RNm5?Gr&mCX@5k|;7`vmuOcVFQe%8AcW&>N5BKey)yI1fSdey!$RxC=*dd+ zU#dvv{tkmNhG4RYm}Q><+p7HJ{reQ^*HjOteekf_$o~D_EZJGe&a-Kt=3Bh)qAOHE zvnwyPYh;A0wbgvx614_1+dsVq(7cC zV{xEwi-Ji0R%nwUp_bqc>A=b?2(rrw=$`txI-LDUf*4eLEpTmo@s&q@kf_I)I|`A!O9^>+y< z4&49;H(ZJfl=fWzk>eQ9j0mhP83MR+^J7?BEFUlyTEeFPKFFKbSm0Oj=&Oc-t2je7 zH;8(>_Cov$jmTd28xZIVfeZCWZznu*qEW^Fzj$pyhi; zOdt|JB`L&wS@_=@+uWA{hKZ3B0PiDE1cXOP)2~;D;uKqGv9e?UlltD&3xMJp2_L=$ znKczY|I5AiMI?bIs0KuL0T~;0Qdw(~yoWk812gHgb6fI}UsRew+5A4bRvg%Q8uqQPa6w5J&;Gs}Pg zc?=+gbv*F}!N`9Hem#o(5cQ?**_iHzS@CsqY6##9+%mx2p~tv(S3!N)Z7Iu)D9q zCiE&CHlhk+{JZfrGyjN<4^jw6`G0l@5gH65_TK{@(W7hUpN2+Yb-Hoj5B+yCsx8Lf z)*=s5j2u5$0risxaG+U8%s%o){6o0O&LR9a2LCutq$%P zjVTk{>*Ox*@Vx1Dj18>%OX98`=SI{o&BCNlz^=XoX(*DT32^fSu%%4FDK=Fb9X))! 
zg^R_^V_rA~9hVRGb}W2nr)%7+M1dazk-t@-Z%GHW^C%0_n?$GrX4MOE6n&P>*THQi z=Z*azx8SB+2U_k`VYh&>kL=WKPRKIIXF%H?HQZ63ZBSpLoq22S3RLl4eX@X@uT~=7 zIp@;}VW1ahFFSabLIef&IcI8D|tWxzSouc#_;OPFU=HA zN{bAL3%IBwfDv%@&+{1wT-P&^CKXH5MT9(O-1EiEnq8~uB9zevQJQLfEDqzvHutdP z34&_z`V&>RI?5$DfPMo2)~p4G17ya4w>}c{8^~%W5ptR38qEVix}U`ia^wt%600Yp z-xxB2VI#F=5mtmmk`ha`@pweiEXq+@km-Kz6psV*BYA1-mk>`{xdr}CGkPHaA$jk}Vy z@5nqNV6kW6u&~jX`^hFU{Y(s`g$`Up1m#gXNjJ$FrRyklh^r~9v3_k>nVsN(hd1| zKZ!-(^Kh?|=oXbA4;;s4Z3OXRBvo@leYiwl6tg!$*B~rkSU@P(^YU8)X9~W}_G**; z7N~-1&n%UE)xCZo@tfP0hcLAx*$O-5w&-Z1Z3SG2u8f!JT4?@EcmgmE%D$`{mxTe( z%Sz^vYHshNixy%Ou&-jj9{{aXclE5gIC!xuRAIQ{2oY>tqI_-8K-od;0$K`jV=VF= zz+ZLA6OA-LL@&naR%#_)gWPZS&oBwVFYP*9&=14IX#ldqMY;R8l9$&tioz48CViIc zJc&nRab|keUxLbuXxg2iJ6c0Gn{FBXBSwsyfde@b0DxywlrlY4)W#;=l44Z*r*rH5 z*C%fZl%!tQpQf0Pt1FPBmE~X4fpS9|E!^ShnAClgg)cRnH0um#t6Yf~$x%?Y>#_r$ z<^oW6-6Pc#FC`{F(kK^gOa`FufQY`RYEYd9kIF%W8NJq2hdYa!+MUkbz4SkN(itSO z5~!N4(dhrkgq(R5fkuwf)-M^Cq4nYNrai*7675j2VC%{SJkw_9HBGA(`_1;0pZ z_J0-nyFuO{WQrXFK=*KF5BqCSs@#(dnX&y0EuR~OykrpDFnsnU}osQz`^A0tM`S{0KLC1hSp z$B5Xd_)+qlHgYPmqF$)<3GXZNWKXWxjlz_vctnIxr`mwMpVR!aT*Vr0yUp?;=M-jA z6(z~?vGCeFi+)si*;r-Mfluwq`WQT3Yw6=N)froM)hWO8-l2x%)ZMkPPZE8R4%fx4f42cHc}{XaA;sf z-=1YCfR|4Ym|O)Zr6dp)X_B@G=CKSk=4XVHANIg__IwmEqGp^CV6KJlFs4~9BV zq0EWVlY(VjfTI4rLUF?0GTOJ5{I%w*n$~Zx3 ztMpXpmkwG4xD`Z!`QHg&3wXC51w}=KX3e~rb|77$bDNQ`1^y*;?7oZ)I8PV!N&4D9 zv~p=Ya|7!`i0oVGGSl0U^iT#k>3iMJbVhNrGy)Pujp28gf*5 z6!n_`J+11PPf+d$4SM6hV~`KhENZ?$AqCRY;TA_vlf&d)?x=t%+p-ZVY?Qe)xI|71 z^Ao}+%Zgyi@*2!MX7O`@S1nM|O=>IRorYbe#)jqv4!gIhDyAP&%RD}j0z@Sbc?1$%EFZ&tBSS>x3eMzK1HZO5J6)a*D8mAjlyo-uTsLaO)NfvAE z91G^QuoR2w8+5!FOFNgsiEAqZ7s;m=!<15AG>R&<4ynVhEIu+@ARZV!g0lZQP*#_F zI~h;al>W$K{GCA2!_T$OTI^4FsEMqNN;hx3dux_!()Yt@T*9~3`gKiN>QQ3;hRSP0`d@+WbdB3~mb(HHhT&}z@PXxu-@?k=PXiFJBClpOK9c87&D9D||p z&Ase+|DALD4_d_&yEs1^1=xVwBMS|}wwuB;2WP*nNhg;<$$}eFU?DLnL_{pkW?Pol zo;NQ>xnDMoJ7xTA!)*~Q+Yde|I!pe|A+}N=HEI?da>k$bakD)?EM>i5Uwp&>1r(Fi zhV^%@IL%e{QhB1*5@$-1`l5$R1uWuEc&AX+PoC$II4rGo{;FUM=tPx1q$ZCcO@+F# zj8^heH@+?YhWQ|)R(~bUv9ja=qY{;=912TxP6&ee#Ki6}QsuHI1UM()e{L{u`Ow~i z_2_oh51SkC4#QW|F(n+TEstfzO%|obC9pmaNYdH`l|9C039Q9(g1B~-d0Ln`K^Q&NGS_nl9D)Gi8XaCNPw^2N8E6&v(KXlG}5o>R4rY|(cP5-=3e#6%zuyEk!K zBviY3ah0Sl>S+(Vz0N+?XT3ReEl`m7R9vO6U{mT=`t}P+4sf;V0E>TeSB?aU)SF9w z%Yi>=R)P9aV=$-gj_oIY(wATxKxl$mi;*X|E5ZuscJ z!RpQI$Y z;uEpW%eZ?Ago%L%Vm%YN-|5e3TiOFS@E`8ieW0MlPzk#?PUm7=wR1J zVu!M7@o}ynoww3CACGnioU_xvJYYgh#|I7Uv3?#XGsu7>EicXm+En>* z39EOs*=d7qf|$P93OK(~W%2ajLqTf)M&f>pfSyzr2gY=cCzv}xs>l+`5VIRLcHajN0&#oC+}*d`5r z%BK%8wud9qr+!O}cUjYHi^?koh|DbzTAz9*6PC2AjlH7jSGLS7&sZSPUQf&5*^^PM zgnlqi`~_6u?*C3*aWyM2La6C;uA??{ik`{if2W@8e*^p!@T5(FYQzar^!O$1SP$v`s6 zWTWOQ(Z%=SEaKGr6xab_E6oBq&(z$~jq@JjGl7<^a3fkQ5_+Au^9Ld6`vG@;1;GrP zd{OFmby*K%F9bXlmUL2`dk{Jp?fl%ek?=bz%il~7CNb@Zbr{NjyRU_ zOZ{-_ATfNL zNh|GCFZKQ0zE=SB-N_*WSKtn12N7gWyvl07vfKH_?L7t+>7_g6Y5|%IZWO}!nB!ii zr1k|mP*n|lbz*f^(2Fb#W{7gZA#%ABcLF_e;hRHcE=;y8(+#u8jkNDbVuqO~eO0Ug zTWvu%LHeA(m|cljXw^b%*We5O2eI53s3h;i|AlElY% zq!7+jK^)5^&IaEq80m%w%Ix9|L^KS=6^+%F-*@LWp}f)R1=vysdKeT_URP~K^J5}ZuK1wxWtVB52D#u zRjM?71-57C+vDUfSSkl?Z5dzT4dJRe4~2QHTGWhDwP+&>NDV!#^aa#JV-2hdZ1b#S z4uq51w|IMY@`=kApSwg^^fvRkV_S%XN4d*vFL#F3_b`;WiXXJuTmlQ*$O;zWV`BRhE@%1TGb zt(<4$ObC!VEKHG{`qw~NzYb#iWo_Bdw*r*e+K5>_fuYNB9k-8KAH__H1q-%Nchn;_ z7(qNEuHT@uOCv~iwfD(d>P-t;bKPpg9RSK4q`XTolqpq^>-VmER(DCqp$fH6&}RO6 z&z2B>%`nm&?0u5h>k*uoeIbr0P22Rn@}U)&ds)KuW2|uvx3!A4zCx^0jR>~}BrD7# z4F}4K8zUw=jukW?S>s}cnPhe)3FxUO{Dy9Az&cU0z?w6a}Xkoc=yuq3QNCm-!ghhHws6i^*&fH zwVq78#CoW$;cMyYt8HKIrJTga|B}&FW|(Sm#bJoZvHSb}-V3A+FRohx7o`bl{sW?| 
z6lX=}uAUzs4>snVr5bz9k|{-$XZ${T$mr=1&M|VqJJRAT!@?TOg=Wm>v&VZ9I}=~3 z(yFZ&Ol~Knh*RslP!%C05$=skj?<4JG}qV;chziZu}J2oA4^jgcTG)8Oa^mV?qy4_ zK^L{<>ufQf9OF#VrTP>wO_~Ft&32fJ5#8s->J|hMZHBua8W^L_bC2=5eG_Te6z?;!_?%Pq|t&4Zym^)Ux$a!{P5 z+GBoIMipU3`bo!H^u~1aV=s*;VfHY;gyd+;IJi7`s}8RDp5yjA@b;JF<>ACO2ngT*YZ)qFJYPSCuwDEt=Mx#7QyFUL%*TpFt>c z9*05%n z6T~zK(x9gWw3c)S0Fr41$G_Af;hbk){&yy1JK7I*+^pgQ5uidho(!DT1L!zCwyQ>Q z2ZL@~0yEm(i&0(D;iRX$+~85+uceX&#z1iZiy5w=XJbX$%>A6Y_xJ*Nbtt{KdJmjM@+mXyzON?h&K;EFNJDxG2}+gzh_6?cp`S(T5=XO@sE}|n;Nbe&u*SL zZj~q}=%Ib#CMoFH$%fta-H|g-E@jmcKRN9SVsk8#PknkLMk|A!AKJ|4aJUFJ2g)|d2hK3IQ3gyzc@B02F!sfNc1lms7Z;APd-a_Gt!eN%;TXCj|! zMBm2&lK8xAX<^Pe?=;uYUwYAYRs^YdUWsmUJbhaOyoLkzTJDlFT*2h|oBQfc3bFiu zTB%!$8|nvwV2K~gBpD=_#F!r5wE$32T(e$-GUCzSjLnqAq4!Bb|aG zZf^~`!~OWH5EKumQ{22@u#3Uww*)I@g*g`^<|Sc_1-lU@eo-#fz)b?w!*F z&HbS|*Sj@vW?8@U6P}j6k&jHXM2ma5s-$QSi2Sm0i_#};hk3aY#Lm;g zJ7hzM!Y3A>g$)UQfZnq&Z0AcR6D1qoS!=XT7_x0|boi~CF6Bu$3Qv3VkIX7M1 zU`OFFc}RF~|KPrKzjNK=0BCH5R4MK-nVU;kmi_im{XRd16^A z-98mrA92Lv!Yq$JnLp-PghyV#)%iM(#$W4+SkEG)=QL9p$`mr-5vaQjNiSgLmxeLs3@78xh_qE^C$v^9~S z%a3*|C9Qu`fOiRc0TFlP_t1*5Y*1|oVA?&NCNOK0*u_WW@gD{5M#s)H=b1523ZJ?x zD}Mi`Not#02>Z@qlGaj9>0SYInHe`#muDI+0_TazB=Xc9|;e#jW2=d9^wU{6IUsB7V%8`a^C(b!4&tamOu+< z2Pdj*y;Pl?W`YIs`-x%&Q4 z&go-zZYtbp{9&9y?RKB9j!P1h4fxm7BA({}d7NCt2SrLW3zbH{?_iRM&_SqHK*Rfc z@1HpJl{oY@#sj{a?0C3eGKj`5-KBI$otmZ<(9J6cXGY&cyuA|4ygr;$Z$WA%l;!d0 z*e9B8Ug^cqvSO#l^-`4~tc>a$*h7Z5CVU~DHJ<*Sv|B-GFT zZ4Q@t4~Z5JfUcrqCIpaoSK!fIH6r-+nx}iK`_UVqIm^Z>mKI*`iY|8)DCt#N)GK`m zj+K>QWMwK0_mR+XHvKxwR+>YK8`wJPqU3g72Ml(vjA@scEWj9iSy97-y6u#|YxbsCmietRg887Q*0FY5$$V(C zZIa%lk|im*>X6z)JJK?0#l7ZLtbPGB#0_chWj;kjy_woD({IQp)%)#`vPUCc5?pG` zmxaVqe6jU}{kJSTE}Ey0UGZ(tTc%R$HT5~P5&>>uu-7+HC8?#gp5T8-y-KVE7E)#e zq4gv=i2?%?@YdyNcpg2Hh1tjAVZGAeF>idYzgQ~9!~H8sAi0Qh3UozfbGeM@jZBUg zaup5FHvuY}$WgnIc}Sa?;UJfbygD;8e`p~y=mXaiWBDXTY^|y?_3fG_X$hlcYZ@N) zX*=x&cR68qf5$c>b$HhVY8&LInn=?J7W(rQ4ou z&8d?KKy8fLSz+-?TEg0VWqN#CKx*4P6Pfv7a)9GhxFzr%;K&Hj)4@KnNpA4^1A?|w zZ|Kb&+eRhP)c!aAJc2vL>3JavdT`J0ms(Q&R388JK$obr4JYJsF?=O z+U-PRs=$-eRlU8*fOh7VXC-30nA7RIinwT8gT`mzw6$sH&oa9PTXs`b4GxX+Z)Ni< zOVX)Vx3|_T=ca~chvwXZ@h(bSe?Kj4Exzy6xpntmotG#4v~MtDr8X-p=xzbzp(fY{ zTee7aN8U=R6b~#g>|?a-)+p8#E21a-Jtw%9D}xz?Mp?}*Ku3D4w_#i$yn|uRfYvdW z=gW^-^gtV%U-2Z#ah+(sHL|Gr&ETsA?FwBbgx}rf;&T`W(kG_#Wd;JQhDqNwi@n7* zYKzoww=5jqGVarB&6YA1hfsI;Y)w^nqp1rKeH z+OSKfcL;~3q_tB?zUl&-;Y4RTO`9l7d#3CtttN7*0#j~+`<4ty@&V%NpCW)b#ahattR)_I>T08R?4JK#6=Cu+Y5Vd7OU%6__%7L;DQtBH6_TMMwm!OUpw8@PE%?FW;LUP5s9lhB;&X+LD>DM1<7SV!d8>2#X(09d3b{1OgG(US z=(q>M2eb9gKPLJZ`NI_1(voXz!uLN=NcGs0m3=)s3i zpKzkTyOeB4|JtUpx>0&n_ z($ieU`O;}!qLehoPLY8)oJRRa3FcjY@ES9Jvzh#I>wCW7y4e+El6zMwd-O|M$;U;9 zE_8YLIj>44WfS36%!zs9I09*&JcONk?Rd(V zp$Xka+xdbN!1NW|)R`+O2IMwf%Fdt5#&}%tQZ!by#}qL@CeD086bLIZ=4RGzK3Dbr z)3wx?wz);AzI(4^^pkaWdGx_-DUf#naAerjrpnZ}-KA%TD+{GYmp!9?bvzuK>_hl# z#^%_y`cO^kYnG_8kE_hjPENTQP(4Z)9GR=CVs0C!8sS5b;X3WE?jPJR6h0Hnt-~Md zUDFzz^gY<%_uMKlKOIvP@!KfB>vq#{NjoIM;%Kl?Q!$Nfbr#+L^=0E)82^F{g9X}K z5$vkFOnFA6)Ol}>tj8PFdOLn?Iv)@S46xywtA*ASW(wfH0_e$5*Om-uvICA^CGgmRR@lj3@QC%*cLU+kwMSd zU_tE>w9Fuxk2bA}a}*BQ80d1nPsXM2(m0WmAlDq*I-prc;+eA(L)=#hek7)_br6mD;Q)aOi7yUyjapkIz~}&(_0$=fC#h_Oh(|M3PW6F?N$?g zx5J)J#|3>=&b>kswVnmMP69z`i~4q+_$f1&i39t5^Wn!*F0J)CTVGNv(=6QU`DZ&T zYxc~8U!?3_iRbX;et9LSws*G$PdNNzQtnuXLFH&!cL=aglX1HJYhrM`056gKZWcfk zipgnY#%Lr^;0XE7z$dfkm=LrQ)Z5?1omxs`(Ax9y%6+>pAvp+%Zj<=B+yW6DgfKrb zW3$MAua!oa)2If(WpQ&F72I+{7PlFc-1_-KwZeX)A%yQAsnn3)iQ*^T(ssIy!(6K# zr_n>K%iL8Qov1GErrljAeK|2f#689GoiXVDHFf53No`vk$0oa%OnlC>S1U(wik!_# z!?KJt5r-7*YHFrpk^^OiqNb=hk2f5_0TrxN$e|EtrL-Ko*`pk8Vrmp=Y4wiYPkr9` 
Date: Thu, 16 Jun 2022 10:50:19 +0200
Subject: [PATCH 014/118] Improve production image release workflow (#24481)

Few improvements:

* direct link to workflow in the docs
* "green" success screenshots (separate screenshot for rc)
* job and step names contain both Airflow version and python version
* running subsequent build with same version will cancel past
  in-progress build (in case you quickly make a constraint fix for
  example and re-run)

(cherry picked from commit 8ad18bf02924d1677502231e678951bc5b8cb420)
---
 .github/workflows/release_dockerhub_image.yml | 13 +++++++++----
 dev/README_RELEASE_AIRFLOW.md                 | 18 +++++++++---------
 dev/images/release_prod_image.png             | Bin 112569 -> 76309 bytes
 dev/images/release_prod_image_rc.png          | Bin 0 -> 65816 bytes
 4 files changed, 18 insertions(+), 13 deletions(-)
 create mode 100644 dev/images/release_prod_image_rc.png

diff --git a/.github/workflows/release_dockerhub_image.yml b/.github/workflows/release_dockerhub_image.yml
index bd9fdaec1e6a7..a6c3e86ddd814 100644
--- a/.github/workflows/release_dockerhub_image.yml
+++ b/.github/workflows/release_dockerhub_image.yml
@@ -16,7 +16,7 @@
 # under the License.
 #
 ---
-name: "Release PROD image"
+name: "Release PROD images"
 on: # yamllint disable-line rule:truthy
   workflow_dispatch:
     inputs:
        description: 'Skip Latest: Set to true if not latest.'
        default: ''
        required: false
+concurrency:
+  group: ${{ github.event.inputs.airflowVersion }}
+  cancel-in-progress: true
 jobs:
   build-info:
     timeout-minutes: 10
@@ -53,7 +56,7 @@ jobs:
         run: ./scripts/ci/selective_ci_checks.sh
   release-images:
     timeout-minutes: 120
-    name: "Release images"
+    name: "Release images: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }}"
     runs-on: ${{ github.repository == 'apache/airflow' && 'self-hosted' || 'ubuntu-20.04' }}
     needs: [build-info]
     strategy:
@@ -98,7 +101,8 @@ jobs:
         run: >
           echo ${{ secrets.DOCKERHUB_TOKEN }} |
           docker login --password-stdin --username ${{ secrets.DOCKERHUB_USER }}
-      - name: "Release regular images"
+      - name: >
+          Release regular images: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }}
         run: >
           breeze release-prod-images
           --dockerhub-repo ${{ github.repository }}
           ${{ needs.build-info.outputs.skipLatest }}
           ${{ needs.build-info.outputs.limitPlatform }}
           --limit-python ${{ matrix.python-version }}
-      - name: "Release slim images"
+      - name: >
+          Release slim images: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }}
         run: >
           breeze release-prod-images
           --dockerhub-repo ${{ github.repository }}
diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md
index 3d242ff9d4843..98f30621343da 100644
--- a/dev/README_RELEASE_AIRFLOW.md
+++ b/dev/README_RELEASE_AIRFLOW.md
@@ -27,7 +27,7 @@
 - [Build RC artifacts](#build-rc-artifacts)
 - [[\Optional\] Prepare new release branches and cache](#%5Coptional%5C-prepare-new-release-branches-and-cache)
 - [Prepare PyPI convenience "snapshot" packages](#prepare-pypi-convenience-snapshot-packages)
-- [Prepare production Docker Image](#prepare-production-docker-image)
+- [Prepare production Docker Image RC](#prepare-production-docker-image-rc)
 - [Prepare issue for testing status of rc](#prepare-issue-for-testing-status-of-rc)
 - [Prepare Vote email on the Apache Airflow release candidate](#prepare-vote-email-on-the-apache-airflow-release-candidate)
 - [Verify the release candidate by PMCs](#verify-the-release-candidate-by-pmcs)
@@ -486,7 +486,7 @@ is not supposed to be used by and advertised to the end-users who do not read th
    git push origin tag ${VERSION}
    ```

-## Prepare production Docker Image
+## Prepare production Docker Image RC

 Production Docker images should be manually prepared and pushed by the release manager or another
 committer who has access to Airflow's DockerHub. Note that we started releasing a multi-platform build, so you need
@@ -496,15 +496,15 @@ to have an environment prepared to build multi-platform images. You can achieve

 * Emulation (very slow)
 * Hardware builders if you have both AMD64 and ARM64 hardware locally

-Building the image is triggered by running the `Release PROD image` workflow via
-[GitHub Actions](https://github.com/apache/airflow/actions).
+Building the image is triggered by running the
+[Release PROD Images](https://github.com/apache/airflow/actions/workflows/release_dockerhub_image.yml) workflow.

 When you trigger it you need to pass:

-* Airflow Version
-* Optional "true" in skip latest field if you do not want to retag the latest image
+* Airflow Version (including the right rc suffix)
+* Optional "true" in the "Skip latest:" field if you do not want to re-tag the latest image

-![Release prod image](images/release_prod_image.png)
+![Release prod image](images/release_prod_image_rc.png)

 The manual building is described in [MANUALLY_BUILDING_IMAGES.md](MANUALLY_BUILDING_IMAGES.md).
@@ -957,8 +957,8 @@ At this point we release an official package:

 ## Manually prepare production Docker Image

-Building the image is triggered by running the `Release PROD image` workflow via
-[GitHub Actions](https://github.com/apache/airflow/actions).
+Building the image is triggered by running the
+[Release PROD Images](https://github.com/apache/airflow/actions/workflows/release_dockerhub_image.yml) workflow.

 When you trigger it you need to pass:

diff --git a/dev/images/release_prod_image.png b/dev/images/release_prod_image.png
index 78f941a43b4c9ac23929eda5e742eab83279fbd0..50336e5194f58020169c3aa070755aa183efb96c 100644
GIT binary patch
literal 76309
[base85-encoded binary patch data for dev/images/release_prod_image.png (Bin 112569 -> 76309 bytes) and dev/images/release_prod_image_rc.png (new file, Bin 0 -> 65816 bytes) not reproduced here]
zXk5E}DhE4T=}l5uJWb*x)++esv0XBy7l8Rv45T%aau^I($DUc*9Pm4hwD1n58~|^> zgwB5!83P|?2YA0L*m1JU1T;G?`i@!11i9@Id7bFX0}jXJz8atIoSGqT0vz|dgXc<2 zkSk~Rj2t}{_hlT-DqPT^`q0K0|Dw(5_f?mK>%#c0vSkIk0*-c26>Sg1XM{AT;EWMn zg@(sx&jJ=z+!qazoNW%zgUW@Klw^lrA3$ zUxCT^4)1Ug&=qxE*Ye7BUg9@yELVBP%%Ez(NawzRqMT=ZSOJ#A1X!8^KQ&I*m6z~HYPV5V zxr(nBI7-+b!{7Rrn9R=Yms!A;H%cz0T~|2&%syQ1H?PwotsGhrqg*zBg*$t3y`(19 zxhk?7ZK^%#LBJ)OW}uz~XT~47Cb+#^#M6GNAodW-xMqf;2JzRh=vF9bw$uGCW$fp0 z1zo53>{y2Q+J<7~4Le+l-)N(OR_>;c$>9j=u+HW&&93XqP`uL^W|6)1;=+yCp2L#5 z_B?6Igx7#~nB|{oc+czmOtHq^uTDw7n;624+l<^FhiT4xTMWfp7=9QBNAn2VONbBaJd4R*)KJFo> zwgTr}F{oZ=-zVAi0)A5cB+x!F?&z%vVz`TS?haA~F3}czltXgGjrPVsWHnVxns_AD zMFULB=0o&j+EyuWk_{+ej>Qyty}GnoH+O6w;)pRcv?+c&jqxIZ+X+Cn_KiK~pI6T- zS0p<@EeBDW*VQtVBns2u)a8zEtY=LmS1!LhU!*6Qq!6$b>MF#5&XZxAyZ~ENW#`=a zA}X|+-G)VG8ud=KjZ08Lkj~*C3x*?YgqlBF1+3VKlqw!>#I^Q`?4kZzP9A9y048a> z=tnBKK1M_@O`aXalK&bW|^0 z(~a(7jFHs$-|aolTGtA@Qklyf7ualN%3|Q)mL~p zji+Cr5v|MOTH!)Vlh7Dbs3n%7Htb5(ovQ11V*5I^v#To@h5^Qv!qR6e;Q&N6ey%#hR)6i&oD@E%zjGYJN+Dlh_;E>U0AQ_%MYFZ}Cs*)M^ zIe-6@RdW{Q$Ukoi3zNj)lPCQYhPBN~y{hsaz3UtopO@gUX04%oopg225PVk~$53Xz zvUXPFYC4^R08uP8cG0)c;!J}oo^A^-`=wEuX-ZszhD;UQ1c7~m+$>;+v9QQrOhdN5 zcY579w>AvbG@}4zz7@7*sGT1zS#`BmOoXX9#FNxQ0F(ZN?JRFM^|R;@b5Il(9?@mv zU*XWbr5|frap!dDlyDjP{*!H<;sXuHb`N_R+?LUBD)Wq~wHDx5;o&I5$+pQmeo>vH zG^geM+1fUhBCps=c|dSA_vJr#?T;g!sWc;zzjWRAB{Ym2R62MJD$q~y<1BpUB}Ulv zE6KUZH-5S&1H6B&it66YRe7ZmGx2o_stwF*;<)pk8+g6q$7`QUuZ#E&6YuHQ-QT`F z_8c=^=?ObHGyPF!;mwNTrf*NYAj6BQBye;z#KIR&VaZ}M>M>87SHl3)AbHu@p{+%I zis}?&7v)Wqu*wg$(4J_hIv!|8&9MBv$eW(iA?{jB-^;gp;QBnCncd|O+t*%OD&Bn6 zcf)jFv9c0XkrJ9Uv)&`wmb-Vf3=?g9uc=`h>t({NL;(?9aHs~0IjhDQ3VWon>(zHG zoc7_dC%4=LgGTwrC|FUd*_~uG*)@m}-71q>Ia3!j;BB)T=u`q!0eNrl?x#2$)8;mI z^Y9H(gU72oWuv?+R5$QwByg^Qi zN$$3T%+D7V6#05f=(4~!Y;CEA(>^Bc_^E}6sW|G5t`DfoH{AjtAIwqu#5s%6p53&i z>h7|3oB{eR^pwPQRkTb+^@!NcFeD4?;#oJD!wi?uvB-YFGodkE)(u&0YNdM9{YT_j z)yd9b3kGnsu5tB=&e)tli;8wIpOps(WVPOmxiuFM9mCNJ0E)`6A`f~yrfR#ms|pZ6 z0N14NK?+Op*Y31whHRQ3-B(^qkfh(n3R!}SI|Kx^DHV;(6~wY|3nxq((;Ty|Xwb>? 
zr!)<4BW^9T+~o|ian4-JQhiWO_K|cI7Gde;PoA`;4I1q)ooZk=9au{%H@yd6Os71` zXnWE$=3}0j0BkzV*F50bW-=*>zFoJRpj}UB(m|w;D+w>>%78F@8}cSC*X*k9cjIPm zW|L4~aQj?j_1dkrLRa>H6t#(Oj;a*+>+@2Ed`|j*SWlkn4|i!UmLlzN7T`ZKm{ zH#!K+;XHuDWYw0NH6eZ#K*@__6IB^6;#3`0pbT5+nLD`bBpJuz#axx-^CwpP{pgi{)CrWz80@!1FxS6&*5o92j^sxGUH)0>MU>W~b|(!Qki4sxB^I z?!Mv3YT}!%5Trb{B^88E$=HxBx*aY!c2IWyw$9AswO@qAPj3%YmV>z8o5p=AS^&Ri z`>%{DK$$daW0W2R_x&0hs^RN3+|M{)@?Q^xHoIc)RO%c+A&dU>{J2Vw%A!hepS?)IzvY&ZJC)6XL87sJkaxo2_ zpyHLMq!F0dkNX6(q7}|`=}G$7+!lBl8Hm9pDPT=j9KpuWdGc-yX4X)E$*XGxTI2+b z)rL{UVP+b$D)dLcjLh}Y!PM(UhPTyx+VlXyE`SQy#@^V6Pra_-Vcr7{vm?RHjByPr z+r^=r4hB(ls}BflWJ?MM*Xaczy$(_=TH=0uEpO2{Wckb164Fc%z}Rx}<3*#nm8IC71+>fe&>#k3|e|P2?y#7{MEfSKnBcUGhh1GKT@vdsXm|)W-Bg8tm zOu#RWxH4aHnI_SLkya>nm<=NiK=4fXLbmxJ1 zX1!rQgBSD?O@l%3#v(PXMxoYs4s>sw}}E-x$q;)_X+a#*GU(E0`K zN7@c5%+)CicbvcL&xg|tbY;1~!W2Dq%^nYV0ByS&M5#$ZQ-v`FF;21!89&>E{}lgw zEi6aby!f+>{-w6f7Mq6ZD#ovzJgV@AKxctVrDz7TZ6J8!C0PL$JXb2&3qKC3_zXU2 z2XhE{CGavfz?F9G_r)R_fQTF8XrnAB;8>AC+re}MWQ6F{0Q5Qt3@ZE(- zBUMTl*(;cZX`avr@}Meh^V5TxS&mS;ZI>G-8h8=Dz2oSggVPLV2DwtK(-N38Q?QRl zs#-)bQoavR%HKOQv!S99afIrYvMY2qeUpQD<~t>&R(qH-@}zfHa03=rR4vcxa;g(n z3UKt2VlkVn?y25C`;?C1tv2{%OQkq$e4LcdeY)d^;pp57m}q_}Ohcbi;(6EE)IoO> z7&RjvEQhV!V&oxfVm6oKXl9d++BvN~lXZw0 z&VD}osm^ix&@btxww;`bb@hlK?5uR7cdpL9K;J_#=xBlCAiu#t$LsbV%HXGR^asu6 zIt>C3u)D@pWXN+?6KI?B^bUyPV9c|^-RTAIgTG87BBd$}E7&W=?jM}{@pVFx#>1j= zuKrO@2Pi(4tqwiibZ3D1U8>4Oz6kDIlj{|8uYIInTPHG!&-lV?YU~RTBf>N-4Qk_; z?N3dTrxqNj6kQCUOwtV#ZA;pSp4J3%=Z}IsZs7_sAi0qKerr` zA@->G#BPQr06+6mm(FP%ixi7KX`3Q0{6Zn-j^qHbIlq)gzqX*w@$m zswpWW5W;Q)oF;83<&M1Vl799C%pF|q%yrqFf~ipFk*Z~%<8&)FCiPlH1T|$WkLO#> zBz6f0_Vtcs0Jnfe6-nWT&mhAPz*kxOVdqOS@NB_oVK#NKlcjbtz7#NR4GUX@N4B#G z(AB^>!vIVv?%(Pg*Lnm7%Z11;DXnqL4}{E=J(JIg1TZDV8&((}p9tn(n)6d_)6!AV zw8n)-=qI?9c5=X_U;IYB>_sowTds41#cqm+x}$U}c*e}LuZ0rqGn5mc!VZ-b&~VxN zU|m$>@=dn*;Ga9SJOla&33fY3okVFXpRyoricWiHe2D6Sib0dHBaG2o6c|%}Q_7v0a$AD|&kJtCsO-@p&)mU zC!Ynaf2^lYuzbSSZY%JbM^=A}4|300?@Zvp$?eGs$Tw6S&~$f@PEOBZ=y80^#K;%n zE5Ut(dO|I{Yr-iVwUwNrz7;ymM}3oH=6=<>d1WasW&s9-*+K;l+@9WuPGE@_?K;6W zRmvE4d#7PVu0IBMpRuNTNy+3eUXF^Dq7w7wr4axx!B9OoN6FVS!BeV`2_~c4Ln1?V z6tO`wlf|ca)0(h6R76PkP;4#}KvvtTcl7khG5C6l3tKz$AN%|4+q7U-0d4_q-oQZR zU694GGP=$hM(XN$R5#m<>@!`FPOxjFd{!b(Avq+P!&OdiI2N!?@v!eqKtuA8L-ZR^ z8IMtE)9~otcb$P87oZeS{-dEa)p^=T!hY6V$3$vfwTe34w8#urVxID1c)kDwOe91$ z>y4c;GNupOTy6(K70tdh5)MHDT&nr^ksW{ooZb3yqEl4LCYz5x=?$DFRBIMj_0nvUHVef^{-=27) zn6~m9+ELl=lI%1XrvaN-#WxYlAWh_8sDJcs&KFRFXFJt+a=7KBCQ{2$l^#~x zEq9k#`!a}CuAwlG>woGxL?9g?1<3R?wfcHqm>57CJlceH>FRQFu-BK7dS88fD0 zQDi{b1p?MT`{7m?trU>a>_q0fU$I(<5nCH+`Oz>YnbWDBJJ}QJnL{gds8S8XDTHt>RPn$xKDS% zelVgso$lcHT@~?juYLzZ_LHf-h8oaJKKFNE!M1x>|7xM1>2EUO^!ZE0fFBhiSq@x0 z;+2r4{>T%JgYJvn-u!^`^v4smzBjJffS&^i);HQD&d21MUY_oJ-?m)rT6TVt#|w&) zLC~B+EnuFY4VgUDW{oUUlCOTQp`qxT7GZ}2y0gO|@nARegh9=l8>brI<=T^@x=co# z0WNoC4x-B}B8rn3<4kvBfK&@bn$Cf94Z$r<5K>S?ufqA+r~*dNdqA`iD)8>@$H{tz zJ>kA|m((|FWM_os(Ji3KI7`|}nv*&{PVBn*mP)lb$7JmE%mp6B_3Q+9u+rz0%*gmx z)Xq$Sg~c2iXAuh@cPsq{qe-fQ6l-{17HN*@hM)?J&wXyL+`9ZU3D20=Z0N~S8MfKc zmG8n|>I2M@JQTz!*390nvQ;i z3C#_5g|D_f;=pQ!yN<+kjD{>rGL|jY(qe6155$qrZ~a^Cc-4{DS-*~Y2l5EJgOm#jcbW_~QylpYmNpaj(sC-6^ED zfNSOVH`@#L$2amb6C$7V>pM#XT7tb2#7&sMK7Owu1+&8jE?$Wq?=055Hf=l--(B*I&{!b&L1d$Pyx@AQAN||MqGUw# z_3>$iS$1|(YT`XwsTxi<uH9T3T!W+{Mx=*|t;ucT+1jhEL(mwG=()#RLiuFo&N z@4tBS^U;f$j!_?#j*#2BjRHTi5mnmPAM`Ht)`&d*LE<{n-AVp?ALEDb6io+ub5CCT@0UoTXEJo&J6G4#Xx09E zmKSI3m_`%L6sTzC`eyCeR*2UW$orM#X00Uo*;H1DH__1qabs-OG&St+!;^piNrBIQ z81(OibM3is z{BK<5axs)jU{IAi8ML4O<6y$%!JY#l`WCj|^Fqn|{M3-#)UYr5&urhF-6Ye0mHJ#{kvv!)3`lEBCjqqo&-(l?{^w_8$TWKW75;5? 
z+>@8nCC@pz#V)0h_P=Sses%SK75k@}|9_=ehCYTGKJT;GOUnTDB486xQZT2<=gKeS zG@6x?5R8WDgs_gyr3~Fus zzcX~X=R;HVhQoIRBm>%OmZNHi-DFR!Ll@F7HnZl7KN%DOwJO(Wqy>#i4NMu`vo0U< zE8B<|TgrMc&hT&Rb&kQbZSlP`Wl>-&f=OH`LR= zyNnYrdsG)|XEX*5=~zQuY_`q!Eml(kcuzdDvNKKElpt5#zJyx*cVk|tdlaiaq`$88 z>xSIww|d0rK!CJocv8$C-7Uz_)BmG)0XN}i8>$hUnFOZa8DE$1Js#9V`+~8{Q%U^) zAo+KnBQg#U6*oLymN^K0JpN@RUr4;bcwNHZPW%7RTi=(+#ErO}&J}?e<@3yHsVVMT z$^47=|Dmkn?q$FrR={l6@wk1h?_#rSej4|nCMfx88lvVdZ^j?B$Ot{K&-ef2p5^#t zkW=!2>(`7E)9;S*Q3-|HaTthVMSSh9!ovRUM@F>JsG)u`mE+Z~v>JxDe#Nj2s)2%3 zYQ{YYL%BSEQuzc#{N2IW7enyrf<+t@w^_M#|LTTuBTt6ht>eG9?+>l0{GeK@S(F$S(^C% znFEZM86j>jgm^+N4z@Aei;(^lL$z`=)B>A2bR@l5v6_(hK5%yQNP3V#K$&VnF>VnO ze$RX&*t-KG_ETH;XsCUFsu93swSC^Tkjku2-YyGjv5<{r;b)BSoa#Xbdubc^`p@pq zSdOKxqLm@{T8hX6YMAAw+~8#Rf5j}Lh^jDiZU})%HlXagwSTO8d`zupr+j#93X6DS zpsB$^UzU6~!>wT7w@G;>_wmVO^>8&~)4Jm2TRt5=q%wIx3*SEK*wZHF5@!*<_(%oR z4g)q-U^kaGJ_5p;QvO9(>(YU*q*Y3pg(3_8kie?HfOjhzXIxrmQWJKmoakpI4X!Wh z>tVPxn&Sgs?3rGlp} zAXwJc=$-v^-L&eMykeNsAGbUHD6p(*|2x6L%lg&Ico*lt@}yh1BU&SbI7X9X=-$L= z*Cyk-u}bfYV}hI-_wqFIVN^cZc}vh4H~Owqbk{ZylY~uZ#A*qe&G*mGNli<-cGyaU zHh4nj53__Xlg+Y=7Yf+mUZZ!H@3-ywWFfXs1CbSTy$Fayb8TOCp@)j zjEIHJhNM@AVI@*>E6ilY0Z+2dM_hOdT}g{KR0SB9RrH=H^a-m63Yi4-(aSQ-;`Irx zuYkgPE%-y;*2aLk0?>BXJrs3+x98&ct(1abgP2KeNp|Lgz1ViOlmd&AXW`47HI7tG z{4nr9YhlxRoa~$Af96srZYPvw^H9#K(BM;v`n|Dbyt;8L?RkLOAOoou-?r5sEPBh- znO|J%IMl5AE?hn~T=h-y!47PAMZ@NGWS5cK1 z9kIayhl75awW>MHtXV*%v$t=ctA2mHu3fc^$U*d8KEx9N8UC!gA^pt;-OAj^s#zCi znylguQaq+6%{Lvk==%z&$|=$t)}EN(tlO%o8&;zN`;>aG4(PX{%{mQi@rgH@IoI9w z2N~dko~x9*NfFtAjVVr`)TN@g>sCD9 zaeV&pG`!NBG#s3=^c;Qm^%(%{llAl?=T&Mu5Wf-PNqZxz(!|l4$}Sae#2;8&X5k{g z)fBZSmbrB}LtAh+yJd&C?JqfO&1rJdW;n6fB<*EYDKD#$u0?@$py$ zx;TO6{VyJ>cjRQPF)%f``hSx`&tku(2l>j?g*)smZHVtR?k>ytj=;Q@zbz3`r#B+E z$}{X@C{c-|5I^F3-_MuuSay19pYs4GxFW_{kT+6r%<_KE`E*jeUXuiCO>J!ui& z83SJ9HqABQdR8(&G3k3_8i{NUy?hy=1de6Jtda&#K!-86?ds-IeV$FX4$sQeOr!%gX0gi)sck5Y;2_aO zcm{l(_TX%x%F7{V)SiX20Inb89OuC-YTDF!oNcud9=6YPf8D-(P^orXj=+8Jm`@rv zOT?uWf~Va57mjz{25)8ua7Y@l<~hFzn&62Gr8w+Fc;yFW#A`ZD)-5^sR&0F^->JbO z)U;g;FrOOWIhRj; z&&8i)mxdRkvn-CK#5E_$e7beG8ou2oHj30CD!-bNEa_`-3+jTNKJzcpt!@;=3%$4lOXZNmjmYRJoY0(4o@q&>B+ z8C97U8k?L}OZM2NM6fYp_LKZFdWm~Ac*lNchz()I-1vpV&b9Q6jXKqAr98T(NB8HR z(wY>>$MPuF9B5j1FT4e|yP}RE2s;Nu%(qq`G@)@+`0?5|YsvN}?Tt#%J=}Ilj2MkR?w2ORlQZ4G}y>E&|-ahW6ZRAjB~LKo8qM@ENB)T7l+)w&5@m_ zy9icTN~SI?^a=_nmcsa%jygzg*{c@xKHXqxE^1JU>t3(x#rCCAX9ymN$fCDXY-7#O{oV!y zLcX{0nS!H7GCOPEl zooH4B%KHueUU@=0)bVTc&YV*lp*VO8JlUqSDcEkGBqSMPoPG7z6m2<|PZm2Wle9b+l7Wwn6j0 zh9i{SuGeK`W*~uE_ivgi#bp!(6eP6t~p;TW=DiF5fi|fUqy!4r3@%Q(sJ^j77G(o{F-L;UYJum-G!AsfK!Da!r zpt*@-lc;e7{bk~Sn=JJoxJXU>A8?51C>b>vQkW`2%@WL0I5LB#RVxoVes5zT?aiSz z$%Qw%LU8Y+f?L$**yxjV*m4ut@5H}tn|tob3!*hq8u zeQ?LQhqdE?_W=eE{a}9ZB!(^S9LlfhJD3tE-TEP0l`4CAh2SZV%^zx|!N8J-y;+<$ zaV`8VxGk5spojNY9mTgsgafW*XcF_8C1ISr&|hjrwT5lSZL>KXCJ1TG0ybv*DkDTM zX4oTWZf*>%n_Wk&^ms3J>RnK?JFJKfywuW4s-Zx0I6wD4TWe#HA&?6|$^gXSpcyfd zrzflB9^-j@F(s^tMS_{oQL0NQyR|U0(@_VG2g1y>;J}vXv)Xk;{|JqV!)2yW{_N|Y6@{q{V+7umZqghWf2ySE!fcABt=w} zGNr(tpi|oyT!<}(O?x`i$vh{&0!@GgmqF>G~(6k`W}(sX_6kXD@@-s&#jgF@`-WQbW9Rw5q%`ir}FdKJDtGkQ7Ys zj~d_im|<%2#FAoud_d_Uxvvw-Eab5(Fz>Y3DGb`&+}c`G(nG(|fZSnc&qDp{ z*#esZr_t0iQ%~Yb=hDjO0n`Wu4Z^!?X&`V@+!oc7xz4lp@zx9rLG$KZ%XSmWKi@w5 zIGyK28x&UyY4Pr$+1UE*n_;>5o5?uNzxXu|6~NE||}O?kwZIP7cJgmD6I z2xDywm>QmY$PGXk#fZ56WfqHx?#x&PIzpeO-Wn+QSj{gN^)M#~T{`=zniH}=p1rPa z!Z*zHJ$oU@)ted)so}6yJXS}FLk0ERg9-?#RQ&65R;#ZFmR2DfUc0wjB^UNIkAMYf z-{5U2m<8Iod$DtCqS=hV-ZgF%1FCJlW}G28TMus>_-q0lyttb;i#nlVq#P5w*A>;` zr%BY}Mdi?@@wY@;$!uW0=FNVOAC)ulE1P-H#PKkrm0^+Qabd0bZhUwJ+?taYGpIH_ 
zHbZ>jw=+v0wS{&tMvQXGd-VtUzFU*q(*s3&*{&2O8*o#%?8f7VquD6&Y0G|Bwf;g5 z_LJ5btHRJCaXqM=%^GlGvL>(O*(eX`Nrvw6+C>w8e0&f{KGA{eLVO+x_8@ai&?ofuZQC=pmxH-9ySb*&3={qnzHSj z5NO>HSL^TH{V*o_*gw|EttJfJ8_w5wCiV2^7<$g1NM;8I?F0#WM?by=3v-f)0U{RT z6l2Ta2L90%TesJ=^XP31RY78R%XG%+HmT9>+ysf5$!ye}xq}a}>+O#Ktn^9!;@*qY z0S|T0IvScPV)`)BoEg8xQBdAh2Rq$XHgeFgnUb8uhUA@CRtDw-^6nNagdYo@qBZ+h zs_lm*ijH~&6@s6CwU_23dph2`yonuEil!8MYJgDPA~W#w)>lk3H4(8h1c^#_cR?=y zpvC=L4IG7)W=rdR%87o%^1juF63=w}RUX<^7W@=>wY3GVW)}9dX!D#aBl{0k;xB0~ z{|75~B0H}wdN+<8fDbL4QhI)SxK(LL(5o1)QM3~UZ*tqcPR~y0wwu|C95by`^a~Zn zRU3w_Knh2HULl@pJA_5neGx~cf9zUfdGIB!(KEwpP9%nJGbOfX&QK8FH@0+Yvy2Pr zrV$HT-ENflMN^)WIo&d-7h$rRi;% zwu9`c-K$HsQBGYBrN)R44pf@U^@-Tu7fBy4*WIxm-2c2GFK$f!U|%bycW$QW{u#|D zF^Xo{=$mnEJJfHATgr?Pg`__DXDmlCVVeS0Dvdq5?~IwyM|q5B8%?;Oa!bI53xDmK ziiXZJw>p-g&8>Ptf<+JUlR@DxXP4RTU!!G^ZRv@t{Mk2rL6j1@rDXGc@HZ*3F^}?l;3I9Yy6W~P`k=NswCvJYB>R!Z z75`+7-1VN(G}#o(3P;=|e^BIOS;|Cl_-lc0Qd)y|pppem=zTaxz8D(p%;mwrP=u>`NAKrITSH)|Ip zRdbBt4-PaeL-9|a4d*<{G5!Y-c5-r{ZiK+~wVlhYzdKyl1}u86qjtUrO;R7oG91Gy z0zMH~n}(?aq*+~dv9a%Ic?^)$y(Y~bFJ^LIqjs~q&s&MOi5apD6AOAZX2yhBoh*Nq zWsaP#Z55h#_DFZ5R|TB9hSIlfgwf}+c6wk*4qTL~TmP_}72ElkaVbWRiJNrr`=}qGRO#n!VXIM8$-hM?i|}R)9(}G~MUQkezB11&b(rk` z+U$EKKjiSdSi^^+4x`&~r{p~IKBUjWB z+a7k<&zTDu{48t4Zw!Yt%>ElNZvHBAt{qh29b?5ZnzRISox95Ku5RIM=^U-JT(fVu zm##ipvJUefG2zF0P7yW5vs1n>=4=$RFz8NMyD9$$Wqdv~y#oyp_k(O66dpX6yZ@pt z{YDbWfoCpjHlXFIhuT&;UA)%X;c^6@s#PYxboH8h6SikLVt2O+l^&_7V>oD%Lh-GRYGh;E8cNhi=*$O_HspWtnKV zcgVA$zPPZ%b zzBsF!_#knoJRL0yf?5naYnR*Yr)TT)%(P1qxKM<5b-MvS9jM}DQg>i_9bj?NqzWLF zqE%}1?Ijo97n%MqT|rGMrYjL{Ld)14^CX1>SCOEKZo>4D(s5P0)$w&ZXC?r} zVTp%aUH-bUEi7YR8(x{I1je~+gb`eS4KTo?w?Ai*zuj&G@NL3_NH9+)V+4NDbfae9 zn$)q|P}0~{ST^rX)9LbN8vCB0Y|+PH0H(N3#Eh5|2k?jM!%Jif`!5K&LN;g<^=@e8 zBxo|_6q~1EVc(~QW*`-vZ#;A|{g57ZYK5RAu84@jsHM&h5$2L&U{Bfv>iym@(Ak4Y zsRM&`uhJq$YjCBhrN@siS58f8(|x_Z!DM0Eyi7*4 zD7Sq@t?J4wOj!8AM$jYB+Sevxy3yGQ2vFB=oa|~b#d=m(E9`)A$9=Q`uH3sJA&B>O-F-f?ON{Pk-)KC2?r{0QsSARysof3@61<)&f<$} z{tQt~9enU`-0c#539VE&OPuGpK|bG@&;}E3h&`59XSDci)*)~D-4{~q^C}Ja+i}*) zySv%LN=W`vG$@AI7_lOwD_!pBS|htX!Qa=eayZnD z87mlr0n0zKo{d+IfZzS2Z zda|(J6V!wNHmY-k`dpYF?$fV6=%eg)OK1(AHg7X-@*W2w7V2)TxF#D!c#wz+y;CnR zl(%_@btZqAt)Uco$1(m!(p?R{3;oofl~v!vyHWVqn~^iRDlMv;QH1`uC(?qmRi+1a z9EYv_qcWey!*}WhHGO}k5qaYPYY|x$FbsLyzlgCULm3~Psi-qmJ*K@0)Uf9Tw(&kY z5?yG5RzLa38r&+2UOxw(K!%N^!O#8V&EnF(@#oxDJa3RkFEvO!^I|t~qQ!2i~V16c1 zAP)=MoUQF;AbHpg#%V5J+AgalqP^HAMiP)edF?0|HEK2RFLMH31>ue@k8j4g0@X&C zv*pd7j})LP18)thlsWc;@I%pBC4g!F3FA9PHDSczRb)^WENm`f_nr@uoQ>t`UCmwOqk>kW+UMvww< zlb=t^a`QW}E&mr=UmXx-*Su{ah=7U;ETtly3QLD5ptSS?5=+Ap(hVvCl1g_s?9xla zDob}R4bt5m3*U`C@AEwG`~CO_SN4u`=9-yn&YUyD$Gw)c1!I3U(3>k5kVwvUw8t|l z7=DvT zI&9GfBFtq|M_0*VE5g^*SbZH;64hjkgxYtMrsqx!Nd_w;bgLkYvX3W9Fb)<*qhoRc=n&0WvdLD!9fjjF2$+$dX?{YNn3V9FzMDMr-LKAj|n4 zyZl3}nF=A{@X8`;=j+xtzNYZXl$sy11564Ms{Oi~`*XZW_sOds!iKZ0*IGahLi}2Q zo0)-d!H#m{Ys@v<0phQahblNv{4uJ7NYytW$m=GnbO>q}9H@)zDe*zB5Pzed_z=$= z^CZLRtAe_4i}9nsa?N!f?)I2M0r@1tvtdWQ3)QqG(Kevs%-A~29P{C$1F?F2?V!{) z)8xidd0Z6!#o=;Gj%=zXz~QW?OZ{!`N=@UJ^FJI-wepX6)Ci(l=GR@#)IcL5=7oye z1^XP`B5aB1kv`>)=PXH8pLQaG?UCZX^EC@(6tkr<1C)tcI=mqoXhQpTP+3kS7}z6p zkRgF7)D{?2he4Kd_xkWDd616qJVELjj8-B4ORQ2@S0{-xgvdqcsiQn2cdn7SdWqp+ zq6ac71V6JTx9fRvWcPl(=9A)^`&B`JT`5wNzyN1fgs4@_J_?70L19Ccs<{BTp{)11 zHE4$4?F-pRP$jw4x;2YYWz6`Qs6<4hqoy%WqY{Hd2nLIN3}M4X%H=+QO|29X&~o=o zhOACv$*lbwzygtKp=8vK-Q)zEITyHB}+iM4)fG;zw2u$q8C3kcu^5*DzwoZ5OvC)`-m^X zYrV@Xaa%>N*MfEGov9i&?Tzt{mhsmVKrcrr6o`L#?!K=YPA$pG?SfBB4upWs`*Nt& z(z9t13m$YW^HLJG-uNKlE0s6v^)^IGTI%*&?P4nMI|lVtI@v0i77CG=LP@~AaAPaV 
zq;qxzpFclBBRTE&6YVvEOH@=`yWT5lDu6}ydSe8oUFZC*N!giyKt=8Ox^sKR7Ob@FV=cKD zTU}XJ3FOJIXr|*_?Zw(eispE?W*{?eTxPN#WemI!Dyxm}HE>V{xme~Jv73Qh*)%T& z_Fy~0P_ji`Pwkfpt!J0i(a(=Hwhw;^ZVsmd@BOvROrjJBJvw?4$8ykIKbpboQChUx{JB$wT-q>H-0L8z$Zpzor4oA6?$^XCjxi_B&nX4-(s5x(j zUDzOFk24+?Q&5{EPI^V=@BHHmAYJNf&26PIA23dx4EG#yF<}`X zgPSWfBkD3+>h}2RiAj;!RRJ@8a?DmKOo&<3N72SbFPmwI_`+28zU(ZFuigwNFS1u~ zxT@P}gEm`rd8=_nY|#X#a<{bmu_mU_P()T8ZB|VK8}6IBCs5*JXa^e(KP@PL6#%C( z%O?t=4*YVA31Isn8SV}vNtLc{8|vuS6R;Nv#;uPf@+PR~I0OFP&2Y5eT+juo^Kc13 zVoNM2QSApy?-uFe8jm&L(+%Ch`+_G!K)^9xDoC?lIB?|$hh>nGb>+9e*C&7ke5^)2d$?f&- zV(_@EZ#rx>UQrU`Q4tiMW;#;FlNT4|ksD?#o!rGBm>kT(36a^(4}3(}Pvx;;NQD zmOkM_tHloj9za#hpMT)w*GhGYWf|#^#%tC-m(%zD$8;y}#(9aKCQTIiJA0PurFGw` zWt}I%?%X4YmmbQ4^Pl9Ervs^6(vcAnmmo&i@T4uMnU=CwKf}B{x^))oEUnh!Tuqce z&#E3`E1F2^@gGO6Pu#fERdw{SR41%5xf0kvF)R|X-AOwebU?bQ-@_k_ea)Jm z`}*vI0g&>vb;Cw6NC4+EPPTUQpEhTxN5?{#Enm*2BN*P95lju55sWpJBP$Z~ld-_| zV#0rKFIKYj48nYn&@&ntN-LJ<@2P6nc=rnVv3sNNpgTJkGT%-Zpodt0I;5?WdTF9*CSTjQPlKCX z=Fo04c_oQ4l89-r&3Qo$nrh54oAJj*#ZS%4+7RAEk`yPJ$^k%U@rd!yJU>Cbvj_dT^D z*hyR;iY%&~7KCzY&r^d66mvN>%|7DvEdv~zL_2rhe-)%d1E+;yKD?AwX8ztr%N{Qc zR(Zh|kxO`xA*EUX0M7nPa;{Ob<63J(E#Eftk)zVQVoJHj0i!*YrOBZFB4ew&qiZpR znpBLq1gp9DAbk4MMC-vhw@R_kyxKh})@^TOxwrJN_6poNcV!I&N<6VgwSXSNM~ z!lp9iIo+EPgQaANO@2w!-cnZ15r?&XP*agUPO?To)q+}KB;NHwx#OEF>gcX&?b@&U zDkCe8cEz6VE|ovT zz)Ce4Unh-Cy}A9!NUtYWUrY|6GFtA=dH#*wk=&s@2if<_Mb%0JN$RjSW>Lnqcxl7K z9n8;GC(NWjz##0BA*y{mPhIP1B6C4Noxyg;Rsq3nR4v#>9G5e$tJQB9bpa_rjapVU zAH7)G*vi}WCI%*;KrX_957;fb4eVxPV@ za-!ZdoVZ~R5b@~jry0c%CRkor93cPjo8BhH94Dow@MMwt&0$YW7O6Wh%h0&Z6AJj)0%8@fpKl-GU48$ zvFDjeS!%$k;G&Y{z?>?0?(3Y@-1K1ogYdqM;JT2>N9CYoYpIaCX-~8A%qDBo@+||6 z9g||Ou2d~5DbJrBVvu%0u)(|t{Tla__rl%867#B!Q!%7@S{CGQgy@0%46t06t6OcU zrGUT`7VnoX(lNSf;6OOmA1-|`u~c1?oKMhR4zoBZ_da`soAcV7NZ z;^MUa5jnP=7Wil$qm zU?0v?otKweoj0N-k2=kv+g9{1yLL`VpZli7XTtftG&jD`k$Z-3+=gWBF`jV9=OWi` zqlfwhH>uyFJhGVV-EXTY4Fq-*$A#yBJEo-H8LOjh`%)hnpDs6(R1Ocq* z3&n$3=F7r(26!IE4w&bfMwBgN_J!6eq>NgZd~bL~I7G~Mb1&EKOhBXfn+{f^GE(GP ze=&kGsyk`QT;bb6iV=bI=3{IkOy!&>zq8DN8Rn!abwYdM8cb-!b{A}BHP~J6!qxry zU80ls%b#2%E`u@iSsHbBiTTdomH4zC6@&Pls-Gr#ib2I?3DoL_wFzPi`RW>7z0Rj=7O>#h&>t0b`N5r^@%)tDn> zYpgbrK~hVFO0ePhntG{nC&TquMpzs29V9l;aBV}Mg36UJvQ|Nd_qICvJ$9kQ=XIGP zD*14Ah07vxX`msLX*=^xP=;GqP90$vNmss(a2@NO^0`OlcN~I#!^p29&F5H$EZv`O zj{G{VQ|39w7KK@M;IL2Rp%z8^3h4SW@%2#K?_=3^1SBuCT8S6iwLg;bvwW+L1j~G7 zN(o*`+kiddKD9Z=^f^?GQEJXjKKIv2A+_rnq~GgsfbCkC(yZ4~Jz3T(wD4k9FWoRC zV1I8(oRk#hp7Cgz_VMhwX+~C+2Wn_sf}nV_VaebH6^nL7V2zoIIA6!^OFPM(ZOiAL zN=p$X)m8?bH@Uv<9%jm8^2m}Ej=j~WL{5s1@xKP9YC*TPu$h zsZ;NwPK!5yU=3899fvu!ht(G$y{YX(70M zmCm#Zt{r8zoa{1SnK7~bR9Z1_ZlfI_DYvXLS~+j^*~irI-SR%n{Kd*N8wzIO8r?KJ zCozOS0_|QbsT8-jYC^{Ktn_nYLnuQ$w^nsrse8q6my?L9MHJO4j^slrhX$bH5%s8$ zDJ!aO<6=t^iKrf$6br9D*Yl=gA3TM=?#CV|(}_HaJ703}rE{wqJ>O^zcEH3onH|Q% znkR=3CZvC|PCUKP7Sp0jeS1PbGNl@we?ApW8Xt1tcXY*j+|1s?niB>&AzGNZxXL#c zRWp?_rHM;wPr!UQXiN}xL7rF8YenH|1qqvnJ5L>@dg-9t>^7fc!Crk}L4{Ugdik~G zRNZ5t`{qU3=f~e%52*r9(;Nu!<(KVGf@WuPUrmc-3D$9#d)di$xpME5JaJfLId*&~ zb)&3y@dnszAY0*ETx@(!lYF>n5D2`&qpYS?CGi#D_Y|<;$>YTz?VgiNI_QaKq0Xt> z&((iXmv>o4(-UP1C0V-(W$Wh)-gsTNGZhE8TMyFl*|Go@O&xFqxOLS3OgYNE>Kyk}b8 zAwmP;>CRxjx{Ph>Tmf-o2tunJwT9CtW@f$>oJ%xq#!n;@(wQIab&0h6!WKw{F260? 
z$5>rZ9PKw9+;ArIcxI|O?^s_;HF?pb;i0G2>i2pv*3$3Wyhpzu*MUOy1*z}#%{iC& z=ZRnn2AA$ccuHgZ#m`?e~N^I_m<@@uh0=3e6 zx2WyS<%v&Gd#9NN8IufEHnen#?ngG8g1kqteq?6bu3BEc zy`#T+q!&B2%0t}q&nJ+#l$xT|jNYuHb562VffTEf-|xq-izF$gbb~M~X`3Lr^aPY- z`T5#V+)#_Ob4iL;1$;csRjs76_=)haW93P}^_zL_pH;0;PxcQE&WHPZdX(*RS{)Fx z3*Y*}rcB3}1&_xa1~rz~=t4V044q+qF{{H>sp;~YJHl@aE^j z4syIZNw_}2Q*CMf6}$p6+Q>^9__7^1Xusq3EI%t}HiRQBB#Ytn$j};VhT(Xkw~Dud z2$L0DsXn$p?6lmTGBTER`E;aDk^zf;;c1modbU}sMH%HSS%OHBuKKu`mLIFUP;2Gb z6`Gnfk88eRog#5*Jj%=Is$O2ZG%9ausBXe|xf!t)CO0lzF8aX%89yt&m3ewxoLdc4 zKY$Ml=p;glwrH$XyfA{x!Dyh|Re{5=@=YREBKr)Rb@y(T+A$Hdsg10Kcmz@{__kMx}Eu2tq#RLQhkkx`rPe!KrV>A z!+ap2dCA$y*Rkb;I{ILtA3A8KS<@=OHR%I~dP*D44}GKVCART4MorczCL}DGy;x8# z`l(bw6IUy(B7BR=ZNMPpMAQD89&9?p@~5JKLvnfbtk|@RR%)KtxP&kag7IFM6)Gi0 zQxdke=9pAZbhEHO6+Euq^|sSO*Xr4=)Q2jq z7vsO)92+U6)p9fHaAj^Z(EcQslN6%kMMDr$aKlOKZC6B-X}}5d_Kf-HygTtBRRx=X zVc&IA=1w(%Q)PR=&ESUXHq-da`JHZQ*@v8~+X%pTf6_Cd3iGpeTQ}=nzi+rX%2L>V zsXnO?KKp#|ggb)cjaq7~Tk2GPJE$}zQl1twF@>79>vG7(&{ z=DjtC;cN3+Y&}?c%}`~V-7=?W#={kJtI;2{ttXxk^+CE3Pdk;vQ7bA)97268t{oJ~ zMPq+~q&pxumpap#(7o`qZ_tk0%lB+OzxFz-4qsXd*-V;psNI}Xdvb9d;5)=YQE{>2 z=@)_$r;k_a=j8bttb~_d6Q522Hvb{uVf(j#t<5)kv*QeBwG@9yk%#1%5tOXnTYS7M zAkzD|P9_+QYpAVM?L!*`L8auTg$bGC%Ac?$vGr2c0-iJzHoWBg@m0#ZCC$vwG<@?YG$KM#Uvq-bw>T8HL}dboDqpI zc3(%=iEHZUv|K2e0@=-FSU{ic?a;nr>=8vP55hL`jiI@$PBo3YeVy~wBp1F>P=rO- zUxkGc2rSAAo4l2pb7ubtH{tLI?_ zV`KRTp?hDqN=XO_uX>+v zlby@xa2l>?fcR1mz|!rv*zvg8r+3FQm@R~1?S3B$S%I6RNd##uHKyIEO{6_*uur}7 z`^G6C3{&s@RwRt_aqCB0shFm?cjEL{|v4CErBw6 z6SarPa{mRxeiJ8qq0vT;U~7-Nv!uP~&y!8VY9aj&64Y!^Nj9bfVq5}^vQT(o(X$pMDo4>CRLGSSZ?Ro1oSL%R{*666a z`)hGHMS2vQ+x}_i>?*iDORYc8QMk%+_gCZ@!)~aqt(;;V(qAG2<>sb`OrS8njMVC| zO!1OcxiXGazy}vZJqSme(88Q#QPo5HAlhXVM7Np$(rf(oG^`IPs8>)&ph1j5iy)hrS(a**=uqzqJ-(;TSxj4Ez0rP7r(PP-XTj_RB{E z69}pGzx;*ey;5_d6{NbT?h$OXwI{QjEC>QN**?AOb-i?pe?9T{H;aQwT*4Wo5PRbaf*L65yYw8= zZ0~^AIg!O+0tmje42xay`83(R|JZD((1>keaTPk4Smr~vNp>0T{uTA=E5)qo7BLR{ zE?4e`*RdzAUYeh51~_yJr)R7hEn4P-AZrq#|IPi4As3sxJC^zS?Zu$HY)T`#+_RZso&#SB| zNgvleACw)5bspcYvEtY;g+*^C{$~y^hmUA8Cdo~x&u3Ui?MCA?9Dj`@xWrDNQT~WW zlY`;d2KwGE@fA`Wmy02B-fxiE2b;s1F8P^HU>5g8E~MJJe}(RF*3n!}@>-0jGH+}vdh9%v|KF(nYxWQ7G~yH(;vp~id=6mK=3&db_SHrw z`NDX#w^HKG-?nVcg>_*`O zA@e=5mpZPaph)58Et8!80xO^=2ay_OFjKV<-66KSmZ>A&2Qb;Bf@*N%6#QWIlJX%P z2RR52omHjF{83~)t>_3H=X0BkUxc0=yp0Hl3sWaeI70Cb#N)uV~$NI5w-3jyu5^7)T6Qr=|`W&hgB&9x&P z89KZk*!O8Y=W}1cEkIoQZ=wn&RgcvvQ*@2}_?(x=qihd*T5O$KSK zvv1X9Um6~vIeuEs&GdamaTV6-*;KD~*2;zS4BJK8FU<{J+g_8D4*0d|9jA>r+}3kv z4He;rG0a02W9~$j(s_ela&&HK75C6Kkw@0CiDuCBSsZ2|b(_(a7$b`3D=|D!;> zP0zd1$fD#Qo8EZR`E=lCApZK>+a=MXcRl%#VfT}nK+_>#*$`TelA+fy22SKU2DugcJ*0KWM}yXzTAw{8r2 zY5=1blpeaAq*3p&~rJ8X))>Q!<@O%V{FWy^Qh&% zb#6F|UrjFK$&x1Zq`GT%tnUTb9qVnL7x%ej8WKtv7%@T&$ zQR0Ea>!qH>41D!0rOZ;Zb1x`W?yntlJL0hIk|+gU<*dBt;YR0`bRZ8fMe2r2g(j*C znmvbtbfWEjv9C#R+qc33EIoRIcR8aHoP*+Sji@We@_;$q8z_8sUx*-Ji;m~JA5Zv1 z&*tuu#myiy&7%x z#pgMVi>9PhQ_2HWGRLHQOn8)+@P7kCIqe*3ec%SQ*!&RxvqKMFH6kStRb(44X~~A^ z_OpvLSdbN&71Tp|MJGiOsPLdT0bBi>!5ksL|D({TRu`w^dcC}mK3jwGM{(Xk#rF~n zZZT&Tj!M%yYABpPbud!kLT+hl;Z+7FK8|0+_y41x)rtc9-r$W7koektlSP`MG9FLU z1zzl?etJx=D0dO51U$}mDvPY)V8KMIvwNSZDT(;fpuk8_)sIj={|lOceMk?Ek~;{W zq-NxQhZwBW&2asSSmw)8+(Je(vNYW8C#3TqVC=9H?7a)k6oBv&sB)4uYBSwlFS;*Z zHgBmzMocROPkR=)Y67M0+xcS$kfQ6GLg;QBw{x~E5J19|I0M&1?BuA#5aqk;_YyQ zZa-b#6I6=(WWpm08KUV(e=bF-lIx$^gG|^iY1{x2)#}S#=<*gw5NW zgd)t0TedM=&D*v~+$j)y-PBh5)Za@jA2y7KF8iRy-*sPmC1k!dm{4ZMIuEex} zAF$};W|=(7-jVmY&$uBknguQ`_@G;BJsqzEul{6BmkxjUS)wj8R<-YI@+O|D)qra~ zqfiOW@MXnW(G~BYAsaL(dQz(gqx| zhI+1bsgpv>17qdGn4bXgJtE-#U)aQGtdTrY@;M%VIqfEIM%ZUW$fW*dMlwy5)$Vp4 
zO4>f2QKnme-c|$o2GXfLU~~f}fEbu$^M^>I1|Q0mOdyQK6eq**e}@u6gsV?Z>mPvC z>p$c_&#tVL5tv%$444#c56QW$)tGP$UpYy91kT;YGRBHyUq{84&*=KMl4hKcwLf-W6?nUGYjfNd_T%ycA1n7 z4)usakB7~>%IOZ;VT%I20vP##P>>agX7f_qf|` zs-7aA8CKQWncZ+rW!pARko$I2vUKky04}?;>^E*u{{aD(3iKJ50f#1DTes!fIK8_) z6V}ovfjLG{kIoOWPKhzP8?cT(*@Jq>8An_QK4$taGt`kY{6fY)QdlRNDT#@}^rOO~ zCn`njZ{v(ic+{giA0%?Y>U2!TZ-wC5F5=Ti&WVA5s&}NUf;0F^*#c*oPn76mUbb>c z(e+D~e6i9L--JXPM|{Oo59;@ABpWv|G9Fw~`n1#f8%09@iz3&2)l=`AMv_=zbKqcG z?7N*C5gt&lGtmsAdhdC*XS7rA2!z5t$C(Uo%O`?GoHnkVvyfaq{h{CzzLG=}rO^{& z<{0gaJ9_q`5es_O{Ez5OG94jJCLYGH^O8@)yk|yLK8&Ac97q3~Sd4Na``BeTy&FBK z8Fe6QBt`;W!?B4xOgA-|vGor|+2PN~+ZHo+R0d2lKLjpcw>5y$+*Qh~R(e-m*B1L*{ z0+0Gw{hd+e`B&_#*||GH~v~LO>Db1uTAZl0SaMMP?))>R!j8fFuI&m)0Sg zd*TLt{jJQfIwv~mv90Dn!flJ0N+%!0*tEP_yE>9gE#o&zQvjsn?pqY)Oqd$Mt52b(~QAPQTP+?3K#9#CS1*uZkAm`mv`^ z*Gm-_zAXL7=&$W)>2M@?F8I6hcj9L{>F=gdGIf0{EG`Z8Yw%JcW?~*}@3%W)0&pzp zV|P+rVfwF5qadTYsegw|zRFu)Z`#uuMlTcf<+&c$E26f(#PkvyOJ4ojLULKfbdtd! zH^X)#UHw&ZSf7B5t*`0kR-uh=O!uAC-$SDM;2Q!gA^jbNQK?lkqH&JvgOz-+v(au) z&Lno01N?qBC{ic-Y?*{9{Z5+UWtV2st0uFBfYRy6*s*0+qIA0E73C~wXx&S zbIy;c9mYOij}wtTjhm&6{{wDfg(-T_6hs(Vh;%W$D#9| zuX@;Qau85v)}qfAuO3|YG9e%qGzL7g)xSKmofF_4T^S6A6E5fRNGR;ry*EvCA+`gT zirTLRJmHGdqZiYmmjD32?h@}XLF z9OBDFipp!`ib}Z1JBw@0Tf4f4Wrsu)de^!>P#wyHDaoVQqH2~sWipPxW{?aBo`E%08coBv- zckp|rQvDO&NHrA30+3_?MifnT^>;IDQpsmP{opTLB>m1Y3xxP08z}$|**(H1p|p08 zF3QZLt6WL9!@Kcj=x+^z4!#Hg<(vO8NR)NA<%;^)*Er+Dq1#dEW^Ei}>}xzaT_le* zd`xNXkFm4Fd-nA!*!}LVO%pH*W|3>?tAV#tK5~PaQbm#bibt@$roYf=S(~-m!xL2o zs8PM+gxjk}|MH?*YDKW<6&4ajA7Fdt=#7Tl9(H*8odCu!Qf*r@+b0Wi;h~8JeUF}{ z7(xIGtKoAYcwOtVFZhqIuZ$-}V=zdd=?IIozRryHxed7mbB7(E>T&txo>E^>}wt@cNo&&h@?uYgqtsf>fcZrRZtqG&m2%xS_vV`@ik7T zj4i4#XiD|!C!vh4`(EhjBDGx_UfZvDAR|4x9#`7VW1VI7>fY~u!Me{}+%@**Y7R4aPC^+A$@a`bl8{k8_8EA{kO zcS2qs>An=E>iKrN&)CQR>oJ}Kd^)89eetkxW*q)-G5H_OFw;SlsNzx;CzGpu{iEM6;)$-ABl68_gjFwx^GYxcRDVkI9TgS7o(@=RMHa@dZcPPs6 zGgwB1=qMn#aF6ekhfzy*Ia|T=juI4>2oJFHTfs%( zwK9P_m)Ziqt5*X!>noCCqRPJ?F?rVo264?ac59>8UBZ`M{J!U=$d4{v#J||AF1+kn z{qdf6SB=4KhXulRB|d$E)d}b+^~0wNlEC}^{`=)!egu$BM?#Oq_O$+*6X2AH&qkpy z$d>><4K3t0zS6v|` zY0bO|i;E#o?CxxKh;9oR@&=S<-3m^^jYYPe>vG98;uUxUZc8Gvh*A<3< z*9r~)+>N7t3j>&3*JA5`?jA_M1swDr=puALKyTF*TDNaq)ctv9QiW%*BT59ggNKm3 z3JsinyT%WE{%0!Od67Q8A#~Gl!z0frXX?MKZVP7Xv`TU$qY0{pm}azHZ2U8;moH@{ zF^gXfjC|iaVpn2>WC0HCBKh;afn31RI137+ap)juVH~Q)NxRS$kOVU!AZn%jXGB!Z zUYNp?NN55(3k1FSA89p#7C^oLH3BLC>dkWe)ffEAl`V&O4K$T>7{{LX#jQW@R@@VF z=$Q@fs75Ci`FO*j{|u!QkZLVH%P+n5LYpR&HGDuA&!uKO|I{s~3DzTM-zZE0ul=BO zU{rcl>N9Jf#Qb+n^N)wWfIZ5R?S;a!uPn*_8V^T27;Z{&pqzQ*pBJ)G);a2gpe0z* zsbHDzw-<{Ecz-}0B39-@pS@A&5R2gyF@X#PG(gbA=Zo@xX^(iZ&jr_~Ds&AP z2!iBZ(8f0t{39OrH%K2orD$|M*qmLqz6E5&_L4p^1jxNzD#e3LShNL&1JEEUk?}cC z(+`lxpGm!xtJAF=zZjq;4Y4TNLDOf7>I1Kc{+V|c!z5mzXfRbLm5scH%EP+3)8U1_aS%Lw@jD!Mu7DWd&Az1Byrc3aR zzyD06g?U&U?{?4vNzb$Zr%7+|Ll}^Nn44pLt1Ts=7-`T~&8`EK@bpl|4bc9Mv~jnb z#27N$7Z7ZsQk%@4JR@`p6gdGUQb%tB zsg$Vke_=sX5Js@-1|0zzxkGzFkuKo_I)%aU;PWc#-_4@H1CQI0>2XWPPlBRbZc zQXMb$SDjhqHFdP+?Zdu=W%o19ms{^W zE_=k0|AP$D*LONWkGinHo$@Y4{Du-J6F{3ijaD&}@&fMRN<^6^U|XVv zTMb?H>{ZHBGQ2>`gkWku z51(M5ZnQV$RM8;^rA}z2BAJX%(ZcwmPO1I=_&Rxo?crioo@eJ&nQQFDtu@N10?6x4 zh)tGOTnTgSU$VaD_V zr9PCKW6DZ}>pS482zA@15w3{NDY;vC&|ds->v;w9G3S@%PwG<*nOkJnwL~uGbBA1u z*II$!xyZ!5s4hhCY;d(~e5FQSTWZIJ(UItm-?-}%sk3#p3zglWaGiDa>Cy6T5njNE z>=K5UPh{DxGiolTW@4X4JsSRrg_Gv-7s1;4=UcmBJ`0+r?IJUdKt?A~;b3N_PHqFT6vE`#qE6?E?cOpcq1pQc%4<~D7sc7_?q*;~s1XqApdrxJB zl`MkN?z{U7R8?<2OFYV={6&7v%eb<21)PVfR!7lOm}Dois-A4#`EGCNlJLnz@pjdo zQ?}C&_M2f-N2j4tR);_J+vr}=bhgPB`Q;p*Bo*Ud?5j2_EA^1*Oc>mvzBstxTW`bx 
zNQs0MLEcScg4J{pREKmHw1EO^8n~aRo?@Qg#k(9N-(%WsqH5A1jA?nIv3!X$ZPL`U8t}1ZS!zpFc0s+3~~zT z5^CA(SFzd^&T5N5-?;@E9MX}i1D%4UPm=0D4-U`L#%1jY1P-Z>=jPNKX;F}=x992k z<$%+hsiQngj}Lx8%_q9a%lWpreHmSlUz-cxRb6zi`Y?C)%e z*F5(gfDm`IQP$P9)ln38CPai3ahUViLa%>Z7Wc5adPVnv0vtM90W^^)Tp|41 zZ+(2;L0%5{5RTtgHWhqEUr^>_yIa;963Z;R7D^9fnxAF9?1-q=xp0wQ@n99$k12+9 zHC;njm#m5GFlresp3~UPg~@k+7x7$jwb?J2`d;sF45I8lF`)#CQJ(4*liG@2JTo6I zeyu+Jc`idr4c=!{ZL>~i2;EsS>3=S6OyDclaK5!me6rXqaKIR#5Dqp**oA}FPU zb?#*z`B}JhbD%xboMl*YQ(MbbWpt)wNJ;kR z+9P0k>_XtvBF`~}g5YWjo<9@;u#H!mm_RyCUojl6MoZ%$ZbHz34a{k`96iv*Vd!JOq#o|EH@Z1%d;ZZGT|k6Gn<5U zFUY}tD)ZQLYimRVr_ooEB$-{Qo82oW@_p4a*&B4H5*LEDb&*;rgjtxam=%5p=U(eb zs_?GcIu%^fm1=!6cP3nhll@j%E$p@ux14B6oW#3u45yQOOdL9|T;ZrP0TOA74g!`C z>=h2+kSpcC=(E%9W>)mLI)xR1>k`OL+mfLB6DR0L%|esBS)?^SFl+zot$k%bEQAo@h$%fGiussC?P0&FaU_jFroy zQ_n6!w6DrQCBa$wk=u5cIog$hJ_xJ1swU{?(>ClWJ!;-Sd{S+6Y()(-MRda3)=yYk~q+hSV+l z?w^S45(R0EJ#gqWy!$$)(5*Kvq|ApoH(UO27Hxcn_`NJFz)WQ=d0juxq%r7?7QOeems+Ak!D62Oko^0&~@ zG}E!DKDwmZTTB22t|SF|lT`m>aDQl$rGiUv$g;yUL zt#?f}qt*cn;~prcei$~B3SPt1(Xw_W(->w6YsR4xFbYMlZf&Q`X5okvJCAng7F&7E z?62A}4@?h$FhUHpYfkUv^4~osSFs!|wfN?>f9YofSs^e;FuAJWOo}GD_)i#pDcvhI zBnvX;5DbS9{Tz_*;p(dcvpk@&BCtBEcw4j*SpZ3^wg;AYM6O$0E7 zYnPF+fLf|85YfyKev1cFtiW;ugIBcT&o%=~FW?=Ir5<~7vwqd2y4%7|^m{I@T+#Qx zSLCCv6AhfIhb1fN9HB1uqV54Ft!O&^OPup|h2t^=ER41m)#?-jYrbLT`nMaz+j-1K ztd}6(Y)+1c=kiOJej|%!qW0S|?y45fjqZuE(_gVcno;f*8YHtnURkEH7V%g*64gAo ze+RRGPZAr~u}9Nya?>WSnBIUsf-)1@5dPzP3ovi}*2PJehr^;r6H3sF`$Eo=ZyeTy zI@tb9&pk7gdFH7@-YgZPTsv@=dv{;q+?w~5Kl-qbGq3Ga0V5!8`T5c+x3aGxn3^ds zeYRHKBh6yr0CO3lbCi8YOru(2?#JS3SP`s(e5X`l(fsun>M_&Np2q1RjV<+Om-(m5 zA6=GTnP8UVdO%C2rV-E_L#Jnm|Hs~21;p_@ZKK2m7J`Lj@!;-m2^u_s;4JQoyF<|6 z1a}RP;BJdA?z*_UyW1J^d;ju&-?=?^XRac>Gu_oSB~Lw5tu3fOCel{0?Q{UG8LK=W zEGd;wb)(R94;h?rHUzv>ZQby#D#V42l{z#yJaLUq`M2KY8um_xB=z^2u$21-cbjx$ z#m`@iY)`YEb36WNh@XES1Nhu-R1{{bS|Gb}JlUyWmZ{8}(a)v2NZfbcxB#N=q*BsI zQ`?7t;oi1*J+@h;jEr!S4jNbNCi@DXctBG5+NRQ?yi=yHQ~2dyGmYu4urgGj(iW1mo75Ut}w>{ z0d_Z>KpeO{90>qK@*o{|zJKI_v~U`n!N#j?Td?^45izK*3JV^ZvudyO<#SXiFISGj z)c04i8aI-MqOUxl*X9ZHrnM{O@y+?bp?Vk(F+2joCfpT7G=uPCEaq{&xG+A;c0ztT zw~PD5Tz}K$D5Cx#8HOm{?VE0;lZsH%KFDf3N&OMh*IEE|)*|iOI^V zK9*Rh*zZ+G_rRvTT(kXU!tkM)#s4hf8d3Zum;uT;Iz9^HjOXxxEY80-w)k{itf^J>^2w4p)jqQ&o`y(VzWFEnk37)^sr1j(HrpAjBe|! zmvK`({&d0X&SBfy${@Fw6*Qu7_48Ga;>yMzCxxp!phaZ&-_1{e4C3dpZ%;MVgWARz z6v(xgjhp`RE>wd@LgqNSZ|`z!<5En9XJ&$U82_#() z;kjxje*Ocvpk=xUH1(;5=dE%yFco4Q^cc_Pi{7cLhWyqE^jd_BxE& zX?>9YRlOAP`Q5GDDg`XPM7PUm6m(@cGkx7_v{Y1E`*I~E zg34C^jeY*+C}WrYd69vj@p^$%R`$`|=?HXQ-{}|l@)>fKir=cv&KnjGLK^wkyJH@b0D{Po?;bq$2)oQop)@@W3vKoCvogQ`_|1Cd3kZku%+u0B? 
zd+Ri7(1(mDY9*GJ;ve~TQuX++y6?APL*tqU!HNT~d;#`6@NbNY2jOh_m!u1WV0Nvj z0`0kJd~$eT^dNB3{Vsx~(wa_D&Bm>xNvI%seEQ2mM^PW27O02Q1v}rafbNQYZk&$H zzzM`nQ`tC|>w(@gz%hEX`NqvIK4EW}szIevW|mp@dV4?#%Z#cxzqi7amYQ2bsc1e@ zfO|l>oQY)SE{V$_U(Sa&CSgr<&k{pXrw8{8{h2<12Xh zFCZAelW45q3C33-W%yP5;!wF?;@j54xx%Fb?w1Kf99-4=gSh^7*?Leb#>U_unu;Pl z@&;WvyU&QP^s!uTGFz79moXyYE#5o04Zu4`KL5r>DxBhR&*6`|rb(84|zT$!BQ-$AF+F=IF z65CdX3NO3Nw9XDhl)jSUJD9goU+9gM6q>JheOt|5NZ#FfvS~(*S_UJKmlDT;9SHUD z!>R?cUM7Kl66347|7k-g_Mr>1moP{&Hp_>ecfzjfMocVk zV3+@w@TS0X1Q;!_TC6s(d#sH=w&n%o8o{XT^pU7hWWCx%;`PM}YcZr*Fqtexlc}(+$hW zu8`C1}x)Q&!gcbhvZ!=uLE(WtU)2_%0@Dg|0h)oHNkAt)eR!JAta{5k9tVTuDGB38!r@>$t_HF~KS`omVYk*VC$M+eKCEmC#5h7`2?T zhmFKhgPQ4+{o_i%T5BkTZkq5aqtUfSQZ#3~Nbn%V-mXOHNr&8K^|m6dQ=PwBTt}8Y zXn@{aFZEzsL?+V18<`E#9>Nn%GMIcDP-LX5III@A9++xA4o=ZBK+BDZ^v~O^$vYp& z(x2qMwEF2^;18dmb6y$z!0R$MIW{7mI6~;aX8(s{wnCNl9KRd)^@GT?qz-^~H=9p~ zC%|`@e^Fhns1hhha-9)pF1dVFLcTmp^UT!8OZIc2f}6F*_*x#n#RLJl%%aM8`nOebc`0Jd zh(QH(c*6LZ<*CBU($V6WJdE=lpv&QKwpU$YOqris>DGj)e+(bJ>&P2>-}1Zd+S;?=$tpj#7h~{_ffuQ#N;n2SeWi2m4~&(Z+)WO4e#a#R zin3P;gSOwi8&21kD9aV&PcO@+^vhO~UD3(xDsdq9cJdT*`IoK%aP_^2hv3V9*Z?$+ zbHHa3?}DE}Co~2bgS3x}fMi5KC=?g4U96XF>wB=_qAtmLxitIe_9&*1-vfCzW;V2B|=G&z4_V{Yv!cM5A z4k7M^A<`3HAm3v1l~cndnZiqE=Y{|$&nF;R+#X43o%&xVF?VQpP>O+2+z3|*VQDFm!^KDK=&$nyww<@*7{ zht#PX9Z%55!MYzsSF68Y`%9HXmAO@$4CDiC)Vk#0_L)2y$}I}e3oCD5KCtw&ik;&@ zLrLALtdLQyBPKlU^FfJxtr6;}P)q6Cl7ti}0!*;=uXhRQA6;l{0^6e|k{r@IC~&_x zP_qr41Kan*QxlLeCfP*`&@QTP2V=ZuTw^kH>9!sRiy3r)$ziSJDCN^ftWcWHS-E!g zH>PQ>T<2-zQiq3OuY<%x#ic?d#ihmjK2MMIUO4bw$06q%mpy)2v~OQNu1XVlWV76I zD&2}T?^;=Jn|Aj3=h@-g6_t3qN-Q4GE`Cei;B?CxEs$KV^KO@{K?@^tD*RrAisMw< zbYcZ%Go8igEWwOH<0S}7Z@4d+dWD~zY{S{0vt!mrn?=qu8h<5^@~F7W-G=R>^wqM; zc&^XEwZ#?f+`1sNOVHm?hmrhU0vm18J`;dS;QIWnO9wLnv}ReL^KS&t4$!A|B!BVV zvVpM;+hIZo0uOa`Y&@Niku1jhM1&bN{!4TCk<^C*e5j*@dBxrm1oh8+%e(3_mexH%??&j;GLKm&H`3PdyY?wm3s;#;9? zH|05Jj$gY?>*Y5S2UO#W@@!6!-!an3j`&P}BjqVDSAi2hqwAHpGBuV-rH?IG^tYP| zngX29c=}v!$T_yPV7G^y#TWqJ5vV4l1t{J6W5g?nC$daW#uGQD|JCdEr$8-p_4wmI z+Wl?~Xt-6%!y8keJ!`jatCY1IYk1hJzd>L-hG6@a$68fvmhJ|6>wWUPpkh7B@3A4G z7-4BEn(5liIzP(X7u~v(Ugr0TxTZrDI9J~JA-pE1d9L1zekNhw@AxH0oQ|h{cgGRE|7XsCJ$25`S5A353C8A~?m)4o)8(HjUEk^XG8SP3&AJFsPUXnZHYeUEx zQVDf@f)K@PQ+}8yh+_V2cmo@+j}VO`nl32vP}x986xOW_7jplXP5Y>e)7m`X}Wn5gSm&nPZy-m=7b#lGF)8JU^5O{$+FfP30y zt^78D`Mod+!#Z}!ib!3Y*&eNbjnCd=EeRIt9uBU)pcH^T&tsusud7ZmYu)3I&9JzA zOK+|?z@lV((iti7k-T?E1$KnrL9WPOMm93MmlEW+)0^fenK}xazVdX$Cb_(HD z$hhq<)iOK2996=>XmS;&zIJ*7oR`~y$E$fE_64)QyH*dn42rM~j6f{N*NMxe$ge{Y zV-800V7;nuiEJ~v3s*Hk$4bpi{RI7bDorLzPzu`=uA}w?@PY<$79ILsTj&v z6D*jnDz0CXFb(C;ZhX9y^T&;TFAjAy@{f^~@s9KiHFnxjBC-csK>d43|FvyE>ggyM zo6-7d17JWep>ojuShmPINVIvSB-IrM4>1+kbW=+p-%y<9yUM61@=^0#{^d5jIDpR#8U=m-S`tTWb2WG_5APXp z#`mrZ?7sgq3CAStR6s|q#QS3aK=hNc9ExIX!`sxRtLv927|60})~MR~$;%cW-b+$> z6p4Ku7<7DeEGLL1He48lqu%+hv?c7Yf;_uRPOk!=M? 
zZ@X^TT@3>GvRW|JqRv%b%X+PSDI;?o$QUccPS%o}c9uj})2R>#SRwusKu(!cucUle zbHuWa8jVLkp{gZ5${G(|9?n}p0(|ZKDtr6T-LkfeO0aneRo68)YpjaPgnK{6ABXVy zNyIv)lWXf4PO8#2AEjI5@mZ$`uR)2bRiv8;bd6t?fbwHP5~P9_a929?0__ zx1-q(1Dr*2=a0oEKXh0~Pxh~(Giz;cd~B=W`wdJ@M2nwHo#Wi(lsaTweV(a_ys$Fr z_uE`u90pTp9&mPL+N!h7X*A+3BGz@tLyE~}nXPdiM{#0yY?$pJ9Mygr6{4+Pm#W$$ ztB9rVe_*~jdxn@w;NHIEhC-l{9MBuSA_HlSNRi{ZpM7=};|R9V%9G-3Lksz@#Wb&8 z(Ko)fw*=6^qrDw3)pi@4TDKZgP@}0D?zJ5EqSNwEl)Y{rZs-KnfLf@UZSr{N!(0E` z9}(s7e@z z%q4ll?unfJ+Z86pk8U53bN9Ee4B1X9VxwK~ ze=0b<;s@ZOu+_Mbd~uHaMgQ5p>(Y#1#ibwUz?5B$UO^`Ow+SuJKg?Kk28qv&7lv*C z+Twrof3M}4pqn)Co-ey;I1tk;D(w-N$43y0NPY7PZz^U`Ec29g+~ZffhSaB?UYSE7 zU)e1l7A1TW^EQ^lYVpaybJZJc^{OSH5vPDhLkD@RS!zeBo1+WG-NARgV$n) z%EbdSbJ%`r3zh=Sv{*HRaK*-&P++wHn8DQGa1O^07noX)gTaMWOde#$85_kC4t~2c zq_%@9P&98cYRAt+X!6MC^YvYw2;Td_`aRNANB zQ6d?f&&C@p6!a$Ur$cFaiA&n~<~DHp6!)}R0htpMd8rHwjHM1x+Gz{Qhv>qS>Cr!l z*v(_0#qh-*W6kxE=STOx%z@Ulg^S?wotfU~8sk4*&E)2f9GFDfeyMg&F_+O%1$=El zb(L+23NOa8T3r33eW(7kjEa<&or}EPk&OTDD-+dwV;UPeu9|q2nCb~i&Db4Cq^6#U zM9hU_Y)*Uc8asD3agjm($wsG51cn8>>sI?SIA=LZT%7YxgULlDzVsMF8vW)b2iC(R zn~a#d0fOPbh?KA9z;9<44-G+$JiHE$)m#`zo-OvHxo>x9h3>t^A1yv<@Gf_@d=wQYC-MRUxoRX-RlW{&H zQWO2x8l~r>2rVAool1QZaNu90Va{9&Bq?=%PsGV;U@mo2_Y7GV^KBxg{G)fME0iBP zFt{>0twciU@4|rzTOd6k+%t@1-PWbIg$Y#o%XaJ0aH<2}hdVJgc(T#PwP=5}s;hY6 z`GUCP9f}T;C(h%Cnoq+)d~$+{vVAW?eSUb(IxjD)qN1{JPuc2|Dk(kRgCISvIvuP0 z4Bg)RIeFwe|`& zJUp-vAI!*x8BhCQjYv!hD9AV$|I7>(zyc@+<^LGe2eB>396}#z>2k(z*XntZcte*^ zGacR`E%5z^4`kH?6moN}JwDoyE+BjV^r?lfObG1(;*0#x|NS_}a=*>7b)YxJ zK_xyC$;Wc8_v4vQkR1QvSw2i!YlfMiV6lejaIJL|s6Eb=J<)m^hfnUx^(j>7ZUK-6 z`t$#mMkX7tMYG^9anG+l1`^@R;R2k141E4mR0m{W1OkfD>=fBYe!vr-RQPpSbrL24 zTiEh{&P$}p5svDK?oHrx)BFkvi3BKIKg!U!^r<7oyX9LCr|qWZdOLhXtOB#qwoqQO3uHmi9x#xZXk93@(OGK zOOaFYYuo%vh5B!p=$X^~Pua`F_SX*(zsBSLwWKp7px_B=xPlX$d%T~i&ic_98_W2% zV9^YQ=B{|Wnro+a@t+?qTKQOfa|YO3;TVMc-(lE}n0IB7K1zXFsHf}Z&zjzi)X^@+ zy^0Nsry4S_#-fvEw2}3^`YE}1gwM-anQds z99?r6b()AAghptwe-{ue0B`{G3)DO2hj8BoAr933CEy8I+?$l>`n_cZQN1FKDq23i z53InHgM>~0aA}@LH$Cbh-9wbXlZ=Ayy3o4@G@>ITqTVcQFC7sUb@vUFE6ZW%O3Pmq z{FX|Hj3~{uJT4rD`1b^%#qLg(F>r8DxSNOn?9nYQFfEP7Zc%(}aL_YN!hP*?Au;A= zXQ{yHus!X!f0I8dyrwPln!`{}6o|VFEvKI2JO~1{*$2V3>LGj1Dt-C_u0<=G! zp|A8ZbWS=lgZ8Aax_8Hj$wH^L=75>YKticU)?l#KJGOz_<1oz$qw(NfGX?uOmJ`Y7 zO8>Gsm`6&20k>OVu4<_SFhlQ~24s*M4ybY0Tt6f;xWt zmr}amM0hWR%j;ju!Togt>&XzsvVwamw>BAzI?`4ewc>{@N%XSCkcpp(I!B9@Y&2xB z3$JKRjg^%l2vzzNPRn5>J%pZ)Ug|41H$GEg{$koF9Hj45veh{g)Cg%q1)W$LIM38fTfG5jZnCF;y6>SJVY_ zEzs|n6GA`6pdW>7@44Al??hQ|m!c^tLGmqb&-`I-Pv%c~FP5%Cl0bxIG{M2r9~=bS z?lAta2&hV$lupd1rmA17C7O$Oi$RY}a1$C~_dTz`>v?oj3y`42Lk!Np%XX3@ z)+};7+^bFM4_xX7N>pS=E3CU-vqC*~?F+hq|6*kS&bIRiPYtd( zjkA)S8+6+J`3^B#9##!-1vl^qzKstj+VAj4iJ5!ckHq#>HEh<&-aptY;Dp+~F1;0v z`w_duN`QBybbT)1W>!eeC&2ZB=Y zb1`&e259r;xRncBhG#1r=J}x)EB&;mRIg=!Xe2ad3T~b7YATgA)7FNZ#ZdH0Gv;{I zNix|mDCpCe_ec56ymqQ9OfR=g=1L;}ES1ViL1x#pDX)W>_ff&PZkV9cJdY7-Z|V5a z@?z&;;)id{PRmI7e)OJN^HNE~*MqfJ_ths$o4rK(!Ur4$uwni48G$TiM`9QxY5J`GJn z!s~oOg?nX5Ny1X<$sGnXBE?=jSr^u*8~)s`HdPySU8r%-^#1a&xk%#Jw4AGE7nbsg z{F9d|oUaiDp>YfS{3{+co4$(I`HtuP{bvn#hN-tj;g4TGrp?jEX;{=Ur(GwILF=UE zA){-W2tuu5GuCjos<7R@w7JdzrM&UwI!u&sz(QW3kCIvuUvuRy)rGuvx|BNH2Fh|W z5jYujuE*3|AQDPC7egm1C;S0L?EPMj7gn;_7LGT$5FUxVPN-WT|GLv#NN$nmK+*;_ z%pR})qXy2A;QoZ(t9T~;mK(ca)Rio8{7DQUXMBCvk)C`*WRfWFNeX8LC-|qJ2rTpz z6>?r(p5YUk;0aQ^ft=SPQA!jIzt0 z;G4&FY&*F4jjFDw1CX~V@X{kM(A!zx7C}WkQkMuWk1Lpb%knrrEqxKXCe>#Aw^)?! 
zsp1@4G3|a+NL*L`iMspDr9E)+5$4(&7M9I+)Js_^qN1JVUdr1@+C_@Jff(A6UtDXT ztd>XiL}HywmUj1Tr~8)^rU}g7@iKh%TkhX15=WUS|6G6;3!vokak~wxOYRXY9`M`A zTojd>lnlEO>h)uW_9_HCpRbl;Tt`OZWthECb>$!W<2&3S{dxccbyTYE9B>TElMa9|N8lIuO`6k zr0sx1ko9IxEVVRbcXmCym8)(FXt1Sge5o;fF?Px$C+W6)*w61`wMlQUIkkH=;r2%a z9VEH@XX2+VQ3#={Yk=9xWgO}?6He$yRum-U=NXH>`q@UDoEgHMxld<7o5NB~(Ojz* zl4bf3Q^eFa@>3f^&~@d0MB1;G=Z4gMTf2Yy^|$Juo7h$Kmr18fwR)?84dPCLdiPvwcR9~>73#XgA=32 z-xpWr{<(LRG;_~qbWkP$`LFJy4|6jc<=}$pDnJEm_J=*}W&H|xT%28$C|bLSq@~QW z&_aT;NfU18gnrg473$0^3JI8Obyy(b@f)k?mN1$L`_%qzI zzBlZvCKQa>t!??8ksrSJN(ah1NqKITUsZlGu@PdhW1>2nvuePSum zsjLS-TU%E7a(6WfYLpQ{Tq{~r8-xApAMbU}n*=|R4iW$y565D*G=B?2!F(VA;-^YcJ z-x$=V75Guiut6W|^+(gR zZMZBM%W`s(ty=&j=1n{J?SP>1dG=>D{A2O@bBc`C3#kpQqCOmX!H@>T`?>@N$Inq4 zwn|qR&a$!j*@mf`>YuooQZi%LlLR;XU`CFaEMRk}nNbM`8++7#)sJILky%Auh^&~Q zi=Bd7MH}`GbKV4#N@WE<0}Bapedj{mHNF@GxzxClds*O!8k~D%9aJi5f;%-ZU7M7Q zn|jMHO1}FW=j~cjL zTHYz?+J5!!GG$ZfG!Z?<8?p>wrz~p_Ia8KP7r*YpbTN=MaBM{t`ShxGv#w8lEur}R zuRb116iyzG_=a(xfD$|BKZZ?P-Oq`|dCMqf-)~*ML%(v?C>mm{QL#-qeKc+-H>BU< z$+~-f^U*ILI+ka;uP-O$l8U%F{ZaFh0BBw?vm$W6BG}lPr@?Rn}Uzn59gm|r};EyET*-(fgtJuHUP9_=xa2A_RHuZgaZxR zj3!fq=2B>Kk7Lfw4cU3?{N2QugndwnR_>eXJL)i zr?*&%J`zKgt~y{?=BP{_lin)UFvFyCGcOp_I6JkddmU`VT32t1Ha+23=gZn~yr)w6 zl|qBq$Mxy@7^cw^VN=U^uUBL6r9g z)5!5CZ?@X0L1AIZk@`11d+$td*!xh7sv%2nni4zuQk?SiPr1;cA$_`FOa>B4k>CCw zakHq(wME8qoKbWX_)1XGO^ewP2zsYImEtKu_n~+Wg%7sF^VLP3B4ND(_6ihYli$xOGRj z9CyEcZunxf^vz5#+>!~u)QFQ=T(cr4_tS5zW;{kBh$|tJJ`>)4#o5)jN#2=*9q&?r z8YR53?Nl!Y?}t-oOnkg5Ph!tMz4`6G>1z~sY1q*`zU{y}(2d1UDfZ5!P|GWrgn)0} z>f52a)iV=bJeEyZ#m(T+^bz*D)08>my?TfV?CgNhBI)Gdnb^u~%pgfVWL7A6a)^XqbS+eh@DJCawz+QCeT_D;jfSiuiq@r^*ob z4tBr1I<(J!xwHj@21i&Rv7j<k`*JTyOzCnw%)5-M4^-%l!y^Ea>_ZN|<^UA_TDn(s7c*%M|izC_zRIPUZ%c>{XV z`R*V7-RVQIpPw+Be5feK1TThxnZjcC*c1xSHJsGqGYzc+^AEP(G@dwc~Lm zEFw|)Q{^gbgCC+6vtQ_W+N(ZPY|}!^!;$0{z>2z-p^b+3Gq0@h$8o*E^4dE2F3z&# zrYFCsPYUG#k38#6bwGb=!k4@}A86XM4;S)ZLUX)acrKi8L)TUAgkK5xzmeZ$aT#8P z^;~ggs1;`XSSEHD4I!4*&Ak{+&b1aU_xfxF5^`ZvV$nP62&3Yqwq?P#Dxtk4^PW3f z;&g6W#{?Di;^a1;q_CSSYEV3=f)JnzbD7#Lh*N^esj++F+hO@Ar(P41U5>_7>M6y_ zqv)r9ZfDEZCrL*;j2vxJz7%SASE;~}iNWOK0{oYlyX`TgbHz?Qqjkb?ban2VBQwEh4anvffa!TD>7SR{~QVGmzvMoSeul%YkQRXKW=LE@#^!0xz3TN)wmpW1yLY zz{DtDPJe+=L_JQM{Y6Pl1i6{@5I=w4^VP;l#n@Q~CGn^D5Q`I+!C9TfIcG+DrK-Ns z2klG^+1e@4pMJ(p|GP?qAZ?bil_+Hs#Ti=zS!#zz7zxWi`0#^%_2Gg1w_BYuy)yM} zlGtX<>{pA2lO&ufvnIFlk!R`{V%~S9Q6|eY4rt}$SY*^(X~ArUOIS`L*U)Xa(zY+H zMBxLrg!to=Cb~smevIt|A~6w(YSQVM@l~vfh)=!i>yG=;npAgN&=OrWn!|$a?$#;_ z4|Scl1O1!?*;B0naqJ5yrnQ8m{W$UyBj#~}@=*dly1F@fs_%Be(OH7=R5a6W!7ttT z_JfCt%T|!(1$>^TvGnd|4_Ou|T;`!PXb8Ba?CzXc7VV-KHotJcYsmiW+L?9!Q}>ER zzDS5fG2W<1_VkG4>H6U%G#p>xixPd5msPBeWa0*TiV&-M!zaTx)GgBxfd(L$I-n4S zyEE#tO6e;887|DBJe~qvlbgjDA@**0MvG~t_NL@MgZ-i~{5p5LD>idpy10b%6Dk{< z=8)ynNK}DFbC~iG4<8h1bsX;Akq+n2%BF^edO$_QJr6uvI$%@Vtuk+tku$=Tt=2@o?A|7gL zc{ZO5$n(mGEG%4@U@z4?S-xOab+0LP*K}K*oJSSOr}Vh7_LA0As*>9)9f3hC=)YJ{ z^OzryH91zRRgyNAb#f)mXo4T3OK=|NC{)(80{UZrYc8b|OyZdW1~-cSIp} zYc!i}DtkUCM>82I53bTB%xnz2R;DO>C3|W5O zYIv?HmNh;%I&5B9&W=}+K&yk*F9x5nUFH&lHzYf(899nuhL%9s+Y}si^f9Yut%TtQ z$vVm0nnkJn1FnTzF04DQK}>k}s;V@Z$#k|%+?zks#+o2(mNQ4zH|lE_p_`Z8sj-Y= zoSOBATq5wZ82gTatzQz%03hrB5JA+5!Si$w>fp;5Q?uQTEV`78-u?R{rQwwGDbz}< zditG~Vbn(>Z34<_r#Ld7b}j70$gewl`Hj~m}5&z1Wo`Qvwh z1K2#U>H9BJeE1EjEWeoi@`aLjFjKKPb|X|D-VAL%ci5r)Rz8Q5k4^0=W2noEUHEyo`3j}P-RT-Q5! 
zg^gAY5e@VS`eMBIMveqT;OCyt9uFO~EKvbz;BUGpMM)k725j)#mb54R_D=O$1UR7U z+9HEIMjY9um?(XnFCwhHuT|qC8sIyxMpHpHMr;ror?{a?;|$$~$s;O#-$ru%ogXW#Q^vn_ASNvtXK`khQMj z<+ShVSmuc}jB2D3ZU*8(owrgfM)R4A?>qY5t0}F&77%?cQG`8@T+o)shX!ClP3jMO zN(H{#ntG@9=WMHL*|vq@vh1utK>U_ZB;JCpL3*k&)!BMwfZ|+XoDgc>czXM7{skc+ zW4!qFX~{J#_(f;*H33MhjUuv#h;H2c-m=$($F|&Q;h3g{>h5TKht7Gb*3G)94Snn;Z3rOA5Rax>t5<}JHy836)e*pExn->pz z&ICq%iK<*Ln`;W4%!?cje`73Y6|0g&vz{hCzP^f+Wii+$%H6|BnW?RLZl{1@K9qnL*r9IO$WNkd74*k1W$Q{6A@GL^>>gaapyC~QevVQXJq$KJHVwTUSIN#OBf z#^t8_7y%sWwoXekq~8~$$KF;=F5ytF@)}d$>B~nwnVK(G@-tH|wWLFr7`&-8M6{WM zE7Xlzle?#}Gq;4Gj+KBQBfHKmG5_`$o}gzi6CFk}hTbgj`mOFXpC(Oa;%Jb-5>h{avrk`hN7$q?l8$gKR;4vV4UW$g{7U+X)@nA@KQI6YG`wC4-w35O0Y$uo7V1UqDT zu)L*ulBdBZudWmq#rQ=u*4BE$it>5VIo&BCg2h=9HRWl#moS<2$Q?=6o#kq-APDFB zKCps{AOX_f(sDB|A96sizd4}t;+*Ckx9hGwRIF%k>_cT^4kZgUeV560hVfCXLi<9xS$no~xp!C^;}Zt$`PA%S=*ji;<+e~I<%?$5T|q&;hBaYJ0jB-A4PI$@x;e9-kt=ICsdYGZ2Y%kx5r0XfNU2@@7`oj-3Jp36 zlqln1qc?-1~cS`rQP}|>#)Gkti&vvhFF+dKsDn}E9Xq@hj zHdEmng&zCHl0iol8hcC~1NO&o;_<^Lh`?u3wQ9)Aad~!k)Sfgo_M@?!aQ@HTMiU)Y z@}lq+zKCy>QFEe00PEp^0Z7SZdbcWYZ8UqjsiL+hzP#FI##Slb?+gWSUTfqkiY$Xe zHJqsEBGP>6`6&5fv)!u@QMg|1&dT)ONUW+Jewvk> zyi-S7+WFJlQngoqNF$rcIn3FK#ZxJfN{ zA_Jseu;_XvYGm@eyN9luK9^R&aSAc3QA+O-b^u3nft$NIo5+Rl4_SV6bS}6MK%w`SCUoy-i z)DyR^)~CKYw;8bEqFhS^`gnYZ0))QYE(U+3<3&4j!cy^!pyvXJ`+6}lv^{Z<9wP2H zK*SyMK1188i`JMrFFw8q9QB*QB1%@;tmK`wZ8GSsVY{Nx`L&iFv`C<{!xuMXm3P z_eRPWW+xfX%WMK|#rEUCEk3{&{T!12dOJCh*zeRVo?a9Nw!U>wbttc{{F)aA{X3<8Qj5FYhjP zeOxUjcT-DG{-<~8iYxva0LM3Q!AXtA#2b4|y9^=GN-$K_s-jQS!u0$h>a07{)ioi0 zzNzbMF9p^%&FK(V2af?xnt`9uyM zUPQj&HWU<9tL{+^;ax_49W>aXH>5 zW+ojX`c$O7pC8;$S1*P`4vz4iiVEHrUowXb?xn)kgq15;G;$Aq++hg+M8P-akT1W%5Mn{++;!&lir zgzJd@fGRzHKrmOlB477~PN7TG^Nl@I-ESm8Lsl?~XjXSC9{YeuH0SyqcJK{Fn<;Q8 zc068M@vvssoFjjE%hLsz3^1o5#-CPk20?KBn*;J&jlo_W&4oYk;lzI}T|~PM5Wgn~RT)g@>CzeqaY_ z?USkCYp2IuHk|?h>FyYq+TVemCV!Z~p@EVzeZ4XgeM>CB4a1A+y?MW{aIFx^AbC0g z+WdFm$BNN+Ab-Ec4>#N%SE5pQewSi-;&1+6M`0cY82^c%1RT}H3hF|Qghbzm0RpGT zmmL^LT6e^MyCfnZeW8mMg{LnT{n9gii&6XQ)Yb9DK~ng72(R7BBs>bJwGjPh%@Uc~ zI;%7-`=>?H^FM6gt-a{;eRZ^Rh(?Okg#57W4;L^$To8jzp_mwrq%a_wGG3%cBA{^t z$@j5!B(3yMK>6|EN>j}Z^@|MvT59VDRBYm6BJ^9ye-V*9ZzMv)Ku;~#NT8KUVeQ3v z?&caqrQ%0wa?-#%W(*o$&-z)dQAtQaP|LZ``5(hrGXg-??|ztTpp}_(nsYv!$wb)l*N^ z#x_2LH!5An(Nc`BP$xocBg1SCDdhzFdeM0=)aWSo+9a;~Kd&sOKsKaN_kN)$UZL23 zp+tV6G+otS`29y07CZluCypQv&|2a@f}?jDXVd=aQycnrg*{Vn)sLpi?+d)9b^+!# zT%;anu&|v6u|tq|lT#K#_SZ&RP|`e+O;&1wl8$D;6Pd0WnPkNdTH|X}YfWQX(2A7Y zpfbEDLPK+?rhg{2FZ<6M-gx*!P1nQx!ej+5e9y19Nv_B1;IS$){Fh7u%of8WP70~p zF68vpmk0O1PDa%zgJz=PPa%GUd*0Pn7iXvU9qc)ppQ7mv{kLw!OlKfs!QtydWVgW6 zl+xx~&h#$?Kv-cT^YIXwFgFi_OHs`;#4<}vj0>;(Jlkxw;+6?Tt)ts9f!+2M14;+& z(=#s6JRW`n#CJaywO!TdZNrwU9w~a8fwX#RqQXFbD^>}hpL+vM|op~C#vch$mlmzc$xCi;HWCjtAY&rvg# z)?^n)3p3B^CvvnG3a`9t2pgMt!;#qk>jnEWEM#DF7y!~-^9WuX9qy~1TMx?viNTFW z!2;*u3w(sx-0kB;@z`HpiHu5sujhp(I(kec%QI*Q(X3OU`ut za0J##NcV+oN~U#BIIGtnud!_%!x?9N-ug~wG`mIS>3QY}I?9Ob{2GIp%D&Z9kd%4O z_;rnV*;Nw=cMfKcN?hsB{8Jc7=BXR%C9(I)?VAnRMMXL)rl{ElDd^&MJRZ^z**?)V z-+azP9JxL#E>RDK?%)NJ7#7PbenO`zPN`r zaPQv((+S)NOa)W{kmujjvY?arN@r!PLTVM2!Bq*AWsf^C=c3>?2u4fE8f0n2FxL*6zD{dl!|@y_q&r(11Kj4t{= zFUazz^Bu->5UbU1F8#*Ovy-@JLgu9G$4;A4e3DrxE*28z_JnLoDSR2<4KS}pH6T4Z zoCV?YgA|dqUS}W58^1_+obfaz&V2F`6J@SrYo{OL3&qwfV=JY z+X(BVXvgb`%xlVVY0hHP%Zzj;u^sN8YvHT2uX$L;pZN}iL{&wBEGeXJ? 
zQjdLVU5Rr*lW+1i@$B?BEb4=Dj86B#tXH(ZhTP0M+I8 z{%PE{YL#}=+q16;E3Ir4acSgt53<4z@h48bj<=q!DuHrY_poNh9sr0bc}4Z64h!>a zwF59YGwzW#a!MNu1}h5h^USN+N0(j)v*v zN4t(vu_Cc3JTJ3=N@V*ZkInPT5scgOloj#rn}yDc%I_VawPTKZw(FNlO>}52q<}N} z*A#wYQI=@G8%#!hZAYc?z>kGCE53`Ns57IO+&AM=PhTA%=wc@d`!f>(OR|&`BsJ=1 z6DqV4)^+4{PhoWlJq#6|lpQ(kG0cuU0k%N3E&fWKHX{D z8*qO|nXAVVqakWw8QRxFFaR=~@m42VK zYz8}y61?ThP%R<>JWZ5p{Tqfyp~2D-ZR6KP&8A_$XHQwJ7Bo*J^6Zp%K$|qz`x^`- z5T@Tjbj9=6O=g-?f&OYu*ux&+YxhCTP(^uE2rMVOEMP<@t0B1Zb0CzC{3g06pZNx; zHAg<^Jb*o$5Z&|Fltra99!AT47KBluq5aS4d0!rVOc*7rnl|fCaEh-q-tjZ{a%Wo= zh^GnkZkF!Da8YS$b@{Y!1LlWV1zT55*H4~SVwOm$M#IG9HDn~_X}wdgHd1cL;o#$7aT1X6E*)*Nd1;&u1EUlGEHPTh| zufR)ZoL2CzNieoJHXx!R#&`2;x1ABF@_2r{y#&(-( z-I=V~3m_zyK2TK)kKaiDDbJ(z}P2g8uKwN6g+C zDyC3@mEOVkyga#(y*nJVAi$g>uT@gKGxQmd)s;Q@Q?xlY5jq<47JQQZ(w2Uo=6ikXTQRDiB z@YV|ZYA=%i>wU`n=-mP_<**S1{A85J(nW_cLedl|p-=rN2 zs36Srx3J@pjdItOv2do>x+)VCb*OVBHaW_0mFa0Em2FY4jK?1ROn+({;Yje}Fto&1 z!W@Y@PgOL~_XAiUdDwj6Zb5!L)686;p+1HsK35P=d6H(p?YOa1>%_A$}AhR^qwtF4Q#t)i5f_}Qz z?-QY1LF}NPwJxKPR5#kTf&z8D=#RtD;0?p*!=L9Ejk)90Pd|oifH8JQW-OF5Ij)a z8xvRlq3onxo^@1KHs?q5Y>c0nP2UV74}4y>MyJkB^@~wt-#Qs)$#7pZ>uu?(C?GZE zM<*kXL_6sgJoj$9#^mKYx-Fa6|c+7JgZbhg--QkUypX!eJmy zR5=5P#!6g&2g8U_?6wP8G)*QiK5^n#lM?mQOG|NNi063;z6#jNNrE(Kfkk8}lY)?fIC;k*BVZ-9N|2wUur_~`1MVOMeQBG-Wm1C}Pzybre*^h1!B9Vm=8UJwW3&U$RP=IIP1d&w z^}wWu_A7wT$QtrfAO*1Un?BtrfZ9Yoqc7r&K&S9#zldadb-UIOz`cc42GoINQ|;5~ z21|x0a1_%YzClm*a+9t9+TNXTfz|)xqA?tBA!v$HxM_x7c_FV&jaKjlJj0*?$02S= z-Rq>_;L2wQ01f);i_Ffe0Y(t50j`2vvY-Q!t$5aLr!@=^Yn84i4v{qf_mL}8QGIcm zhN`~hwhEAzS*~FPuTZ_^UZu;iF_3;Ol!1>^wmrgax{7CP*%uTny~H;#cv!kowVB9e zL`VxD{)%b$&nJge}6T$_Ql}j8nfW_NoO;Lz1O2rzxHvXEMt?P@`$Q%2s&EL{S3uOGI=>?dnwCT z6Mw%>!1#C5wg7GiC97yVdQ-(xjic+P%(?b8b=leoo1#2vm;3X4tn+jI)LoXvvxHro zS&Yz-VdkDGowNn@G&ar^Bh&7Z!`-~)5`;Rd2%k=QQ!ONf=8Xv7e1 z$mOxx>fXJCwqkQH!mMujZtI15J!Hv(<_r7*Z69IV^u3d{p|pdt?peE@!Frxqw?=Wo z-OZ^ZcYzg-y-;0pP$?(pRt4_5^XOl27L8fszZX$&$mz8!im?X~CiG13M+AmpyEfEF zK7O(A705YZPTSSJE{9gVoxIfoZ+m+jo2@BqZ#I~Foe>8$4`ud#A@dymHp+2Z8#)>nXdle>#4a%xIvc(lJdkbd<;BgXhcR+rD#e=kV% z1q49E)j+OqZ-{@pM`&={H>taV z8K#U(X-{a7STl0Y>Z_L857vuiEc83F`pep*|ROuMB`y zg0@G`EmfslVR#9z5*v5;&H~y4KiT)-0aZZ0_?*~+M(%>-TwRP;lK0zjmq#l`IVzT_ z!c!jTE&x0lNy{(x*|sh86cg{M!DR;w>gGT-YE;KhV(D$(N8-=uMvP5!vU0?G4Ha!- zA{~sMtxKhJ)@ju?ytv^jmVa+F{p}lxWzrU7=BqFR3hWJC_kbDg7r8~H+BXFS>ybI0 zeAM35PHM$9^CqFFl~|Hf@j1rT4>8OkV`QNEA)VexyKREgrU8h+Hw{?Xb`1Zk6v4Cl zZs2BC8QgSy3MglpHF-r=La7nrw6j~Bk-LubN+PI{I^yj!$9iU6!h>b!GsNGPC#Cec z0w6hN9@;^iA)yQUnv2`<6x8$qaF18>U2Ww)+o7F^pkt2&_61rx%L;V>ya-C-cGT-#h#qR_mE}PH5CEpNH8(a7?DH>P_E#ehzUO_=JOhL%NwQU;vPmZ|I?Q! 
z=zD$!B5E%ASkvxMzzxCYz1(aLtaKd3ilHQie{VM~D*hxXmlxzk%@rf>!~igs+Opae zSQ|SWz%rJA=}xsX(W2j^0bwy$RXkN)U{wc`MohWxu{zNEdWdkw2hvY@fC~c_EGXPM z$RxJB^LXfz8rmEcQ)4Y$4;}Yw(~rNs%CFq!;G;}P$_%g&!L*2!DENy-@^5>EbwaMF)0Z%Zp)*T-y zY*(F}@F);e=FLM+;1(bayZ6l(SWO5jOvz)@G!v9jRolv1xq+B7^WL4Zn=8Fd+ACK0 z@eWJ0y3^kqPuqD{cWJ{^kYvRT+mD%qf+dK247k~CKh6E6`fS9QC) zJEFdJzK6kW4SLxx$<7O8&o*>>&Q_+2LfBvt2Z>o}v+n_oKPSGZrKI3p$&kSC%?itO zOXMruGMkhZd$J@Nv0by7b;BJeW3{wqYbp_2Wh%+ldX)aL8ZANG2G*<9zP#B_h|W8V zg*k6br!D6a)3^Skm|a`X0dch)>jxe31Uu@h>BPHKx>57_a=k0KeaZLD<+2>M@MRlm zxb7!NUSoY=wfDw_kYA#`-4YC8POQEn?~It3dDqj(I#Gx)8@-^=-o>RD8&JF8&i@iO zsLVzxX>Xk{c#_GjYl6iwu88o@haD1OqGygn_5z{ki6B0KtOD*C`@ni}ueE70T#*os zQ-bqVI2E~USnxb3EpikYrbFapRj)tVf12F?x9_0d=TGKf(v&!nZOIi4?_Uc)B9Q}i zt-xx=(XANzdy&JjX%8tmg`E79$uK?QkNUeCR*+5WZK#-GzLZC-QO87>Y_gnK9A$qp z^fQ=Z0y!t+>Gw5M(<;r))0<2crXAb6Pkz65MC7AYPzp3|FUHzfdCjz6zW|PW$Q)l!lkJe|pJ0S&fiMHCdne)yT zE2k@fk;4rGa7|mau~NtMJc$R;SnVwA$t~DAWAxZ_;cD))(=$zZ@I<1_rHtt~v;12L zIlLaadRmZQrlFjM?h&Hqwyt533xqW_;v8R8%08XMpJ=>s->?F++i$XNQqkqgA*sq! z@stj`AYTO-QNT zN28w6LqApH9uD$5wcYm|2~iZ(Ac^0fUe3U`EG--LDyixO-IEt{iCNs@; z;GcZ4x9uJgoHTHJnELT4B!y?9!&VakqM-V$j`_n}Gu%GbHL`$ezb8O=b&tPOn`pOW z(wg2e?>CZ@Y!4KjRWF~CQgHaGn~djk8-78J_(5ugt4Li| zie82BPjY3DR;7Kt4E-rsT<3r3Ho$sUP0jEn>rI`F1H`QH%}BT6RAZrppP8$PC0PK? zY`&w;zG+fC2PkE#WA~mREnmpzln3(^f{hbqxkh88B#utC5K%@50w{in?NA{i+ROh* zHfT0!p+BCGSZR&WQF?Ey=V}14SFty7PGHrH)&8l--Lm3Rvo+zKkN=(@W@lVGJg%i^ zE;(9Xf=8-wLHrixSdNqoxjq3gSq`wH%f2PpO>WmFA_1FgJE%s(_Xo)5%9Z<6dLt#; zBfCkT&AQxi6Ui+R`1VgW;%qEM^mJ9Ve7yBuS0xG&zcT`R#u;jgn`J=j0T>^%~ zR2GMV21J&u37kutHbb`+;sGYKzyxd`ftmF@88B;T1Yl0_cDM1Thd0hT+j!Z}AE31e zi_l?>Vly0m>+}x1hy{*mVhc=EV!tng?ufq7An4R&Y3+B}gG+kTr}uMu0)zTebl%{b z3y8M2LghM_v~`ka`XAQ9A9dkA@TrN<4W|o(#{TG6C5j|zxC;?zC7Ak8maw%~J=`5D zSVFX!hKr_VA1!rBgqWAdXI2D2DdxBMbTWceXrn2kADjXGQ&XK|82M_;8il!@o*Bsc z8;%yHBh;2(Is9zU=J6`zoYkdd>TCcP+y-uIamx|(2#;kw8lNdH>N9hV2~$xUiyg>5 zXpbLABh8U6LIDMD{M+17Y|qZ_=FUt6pawO~sD+iQ-68>$*ezr;`nk0X@E0AT!`9cH zlHu6Qw~7PJFEE*?wPOtCcslbPA>J|GF=-9l(-SsGM`9wQ9N+n>?KvfzpAsH#%SlWF zp{w7PWmw-4$~^0>?ooMpB#MKHTLv{u?NxcM=nkG&ji&Jp?v=1+?ub=Rb60G=qy!=T;p>~}U=bgt3IweOk+Urkmc<@5(JVD-F@pjLCF<@Ll zcPlHHT;?`7Z0j;15zDZ&c4bcd5)YNvW9<8QnEfW0@>*LQhv@ov*+C!H?FACIWd%r) zeAXOz+fMa6l>>#96LY9>ArahYi$WnDTf{WDVVc}Q)*m$74dS}}-LmObEO_~Pf!c{gIi3?uwb`8=-8ex=fN5wWLHB5r zRQEX$1q1_ruKDK=2ALnpC*0ov)*^|Vg~9~Wl&oos3zTy7uN43&T`InOWVsEwj&5y z!)UwYVBiqdnXa-?Wlp?&L{MSj3(&yghY)$@o#M;zTL;w@Wl)lNUgP*a3wPdvrJYM_ zYUFdmY`MD}HXik4NRYoz>nI~FP&G4h2qFM5U03On zE7#lw@kIb)`#W8q5hAG?1K{zixvkY6@{HRvUy<`o28 z76wByr9ZB|!mE2jlhqCY9|xKYw~m4c;g75UG8!@K`mJc+wt`7QWkW^K*D{|WYEA98>#C$KFLWzx{;obU=(mj{ zx)mB)4~-?N?if~!RR^cS_em*=@PZ1byL#`75m2bTE?8_8ZEqv#!^a$6`cI(t3bfY! 
z(8TXnHQojt?Q7i!@IctpejE7y%i*a#0C(x3mDfbE$T6KNd@=|fdwqZQM}&1c0!{WX zT+2(i|Tr^TRpAXJL-&i3!`X*@1l++J#`x5k)X5sWRt4lC>7G!Ixvs z>SDug7EY(^z~oEUGOq7?4-*$KNiyPZ5szoICjx%BH4zV8TPv#|SQQ81T$-p)3pf`+ z>FKiT-i~|pV(qQv&+P`RpT{Ecb*D@Orku||WgIwV2#yZcf0qJ+3vV##3)1qeppTW_ z@kG_HnX)q;n~v#*vs7@g&t~hk%erWT5*>`GaHn z6?^S1C}lZ=##=Ib51UABk?mn)dIcKeL6jp3!~wSI?~PH^R9^?&+;s^PuBJTkmrA%c24Py?Kf zvv&gcV|CcfHNYn%5|-2wVmLemVC?*VCPr9>F?9@mnm6URR>n4rm&N6{ zd1rxxiKMX;nqo+3k?xrbZC&sMiaycV0=Q0ZTaq>qwzB2r@@jSyggZq^t;fF9k7oOR z1e&>ukNR|<1jbLy51NAcCnT6$d-DV3rjiWe13r7uIj{p2RIVbJ<;q(8O=;zj;nBaY z!(-9*>$sMk-ze;(U7!IKw?FiPUH^gy6#5MAsxE6?ok`W{Pzhj-+{N^R(m=t^E0u-e z!cZES)TmAc{h0<}3Ie15h)}e_-pr(r2qH;UH$m$h|yC zcSM3I)Se+$$e3$Zu3O8+bGpCTanl=)pqxnyG{WRD`EoNcpN^oZ_@_NOGsc~;#0OMQn$K_@gs+@YgUJtT2_aRZIa`jRge5h*~;(* z0E=A%7@0z}%qWR=>FaQ;$hjKP!lkh5C6^{GVqr_ zD5kZPtuB^?z%-wlyE{J9c!V|r8GvC0C0yg?Gn=>7CPCk#5-vs z1Z(KBD~^KPa6W5z#qyk6E!x`)0OefklQ)|-)~Gw;@S>Z@oe|%z6K$7HVId=***UMB zLzDw(5ZXu44@x0UXllQ|Q)fg}j40Fwti!X9iW7$9hj&*^S^>Nl?V6Vouv|vtr^;iI zC~+^(o1u}|$<Sdm}V#zIMHE&ThCkEXqoj&%Ix{?h0P|xu4?B zS}dwQz1y)0lS$*9oeEVV3%uP<8t-3aY|TNsr-Oa-!k5n>x17x%)9QBTT!tZ){LB%g z&inmvpeY76rVBHr90=vQF-aF&tkd9tDzK=LcRpnZ#EZ1x)8jpyX<1!C-X#~-{%We) zFgqw4^4uS#{L|+XkzUyohN_BfdO594ZtLOD3ED2xEhf3=3wm53CrSfFYj@SPEmS}5 z&ckK~h8dAtkBb9B>g#Oci*lQ^`S+Je7yGzgk!=$b$e$^iHL-ABe9=ip1mUKEN=X(c zm364lwb}k*PfgqRVSo=HP-50liS%a17uLrBviB|ge~;bmI*E2AEq@c~j(h-X@8UAW z0Y9VdS&)BC!}`0t4Q=srI->AX$6!N%g|=_`(@tPOXwBAl3UZLxbcWu8dV?JYx{!T) zXuPlxi=_4hn0Jr;ikas>>s-ae|3)=Ytr)M?6vRq2Qws>e_}vS8v(DN zG`q1S^Ou82od^)O1jCpQTdV%?<`3=-k_j(I^G=rZbpD>CciGA{);GKj zrG55=7K^is`-@rd*0i*y+gn3g!yyp!-{!oGfA>dd(j>PLwe0OBbn=?U6SsMcZ{e(n z&oQ{Ge9m;3aLXmz$dZ-yLgX*O3*bIU3z&nofF+un`{SAfPTAT~C5;IQ!@tYe4Lta3 zi~lC??&b)P@4aMF#uCv{;FFcuva8X}dM&R4mx^f@tNP+fh>f#XS^CXz5!uSCg;04Z zV=AE8%HXG7=j{#8RPZ=OHNErMT~Yjh*7Lvl+5ia%(73((KAcv_CT`%t;Wph>0p<{N zwiIJs-=DFn0@oL*E1v**JkWfjok!D%l4H;fC;^TW`cC(M5$C^YGhme0z&nI!9q{m8 zp28Pb^XQr($D6TCR5}Pg*8}FOdMB$Lbo)wS*f+0_8Ty^qOMn)Mg0?}ShYf#A8uWky zV8C@D@wEGOF6FirzW?eGVJ zz@(YlGR%R8x+bQ52kXEtZfqWiOr9>Y?CNSaz&ucif$gk!sr@&9$Jq8?l-EVVzlTPZ zYzYCtDmFfD2>7*%xK>g5aDW`NNu8~#nP$66+1a4E^0GV0#=D2x-%cgeO!cre*%K)p zVx58b^|KV<1!lt9)TlYTigmbw<`w3DR`E)c8Sc`sK(_AgMES2Yy4N?rB1Ee5UBMUs z040rHLY(KFxtdHFRh2mxyOEjeQ%9*6#tZvB=N2w|kzIV)Ui|QJZI_M7RC#AF7bHBY zHb#3gUo9^OsK=kIhglxxc=H4ECW9fHKYQ$}MegqTbZ43Z9fv0HQ&2MXMDsak1aE-yIm8RHoX6z{WJ%)pxY!!N_+Z;zd~`G6@; zSp(`!mGfS{M!|*rTFqrc?fs|RKi^CSQd+-09UX76E^)XIX$?rT0B%{f6B0# zxHOS@-90je&z!9yZZT1`5cjh&1=H8_2cWAh0332;_`mk#+tl55E$aUD0RO9~WB=Jd z{^1=T=&p1T@O}QLzUevN{eBLl-GNy3e+u~La( zh%)fg{MXI`QXTZyQ2c-Y9KnO}abS380=M4L3atj8AYy+5vn`oQVY#iY2yM@NoZ#tj z@c6~RL%OVp9C~h%_7g4sHbZWpjj~rEbfMDhq3EWg!-e13#QrqU>YDu%P;cX~?=8Jx zk1{c*=l&a#Kfln65X|gBCqOZ*0SiSKgJ$*FZmQ{}OQ1{C>gqaOhuERV1--tFiv-H8 z^@Hb&8m9yQk0VUz5=!b?3i&fXad4IlQJyr=K4F+nY<&9Dwc(|gcp0P7O1pWS6I>dy zAu-eS?mlT?Pp8a=sP4hKz+Uxg4ATI$*0ZzMdFNIKawn-a^l*DkZ!6((d}C7%wzrar z8lR$G(B2BXVVL_xsz400uC6nc{K7EDZIfK%qWCF2rHS}R0p_GBDXds9^wYGfv7IL_ z+!zzK&R0*O;B!6{B&rvf=G%zl{@kKYHb~-AlQAo5|CUjQ55U$Q|G&l7*qB2fwiXab zdsaKM3QgTci?)9whAQ^4hi~q&E3a5&GFx`ticV~P@sY#rNw&x;o^+h79aCH_RB3UV~VWe@dziGKimCv?jHF;5hil}_eCCjf7o}boBAtB zAlZ3Ye1Zv+p>$d!-+=6!jgezME5jV-+@6Xi>LR9|sypPAI0R}ncN#i>`m)HrK*T8T z;}d0HO{m$qn8uHTToVfm38Z;gSPYT?aX0eLyOH^! 
zqu^WE)mV3;yM@A2!B2S9VKzjHQfOAt9n!J>?e1B_8Y$>09<`6SJhUH?C-zA z{a7)5xQ>|`U@F-g_w1`G8b>ohG*C*bqcw~06&l>g*RbTE5DYV=yz{pK3{d#AmvH7x9ZoG4NUB&^PNi<$(}#IX2CEsOr`T)9dgFuI z0_Nx167JC~3HkE{tW(RJ{w|kv^4;pN<{y-#SIu<_kboUPe5hx`_lqKn_8=!bQ&QF) zyE;(#$wMC_;7QO%7(W!A9gt|c8o8eR*;MZ%(kxP-kJKO4q@|Qv6u1tzD@VieYXCjT zX;+}S@3d7}@4KXv;l)6t3LA(UKN@jej#w5(seCRN*nmbuqDCFN<~8XX_aIJNUM6z_ zObJ2WV_7}uV~>dM&W>`4W&zs!4tkFM{J?bUtS|wypEDsA8b>*EH)T(Mt_*EXz@3kV zRm9h7w1*Kp@csU4u>H0505U2K6jxy6e%;f<6TZI^z}_EDgGSuseSp1FAvE2-37)>V zN|h3?5`f&jw{+<4!w~i-{{S+vXemtG%5V83Z2Q~}gQ;t8vzl!m_zo)!vb*my|DJ0m zXTXP|W^tNj7*pI^ew%~9DW=42TLwa(FC1(1If!%LxK?%j^tjUAF)1=oVQG#rgto|FTCYaZB z(I_!bT`>f@$9rz*ZGlDu09sa;^2*U^d*;G6lfgdzz~+8y{J#IFsc6DG+}RP8t%sdH zM!6kt#zt2K-M0w+)+^SP9nWd6=i>7L{Gn(aY#AOBMcN$RzoAZ5zY@s?~|jr0_;!cx=u_3 z0_@kLrsj9(+S2GE$)t}3V8JfXZMTGbW|a<$M~BgZhk({>jMZtorCvf7kBiToFnUh% z4%$pHVFG;^eja-Qkcq{8;1*TX_<^6vPV2)_yV<_^)p_n-Gu%ty-&#sII>tah{n5Am zHuDHq*InA5#KwbaT?DCaZJ)$gSg_ z)d#e!RWH6IYaCsV(izb(-QQ4~9qKXKZk|=u+dFh)sozKJPmF|2)od(!_7&+fA!>j^ zo_mZnd>VSGK$LiEIDZrH7@?r@**0^W>vagXG04{sa0^Qgx~L{u=GH7NDS7-W{)y)W zl5%wmhb4oU&3>U1QbV!b=q#H*Wgi$2qV`w~nA7XDlBId1r+ty9bb@ue3!=R&B5Sys z;cU9?vuYgRd0ubND)-%~pLP^oQks9VS{a0O8sS15G>a@38e?tTj)85Gz?GHxan0}- zBcx9p-lge0)sk{F_Dj{T5wCR}mDV%W?rB0`Fl2N>>2Z6cPBo4yeqA$%Lri|uH?KM0 zz^M48j%sM5yM0IXjng${*Hc%G_Plqhf&R9Z1rm4Kynza1a5z#PP{*cdg<7k`=^Y=` zL&Z$ro%nt20KdaEsn=DbZ%1_M{orPR`q~Y9v#C4Z=uSoop9Bi+K8KFbQzwZtp`x5* zJbv%!p~S>(j}#5sk{xp5Btu!}60|eQO zk*!kAe5jTHeX3yYW2egEZRmD<2IRLUzU!HxWB|92A23>Ri7iz6jaEbb)}Y8~Y1;LS zH=095BePR=iq)?|qC-GwO{y%3d(4c-ZO_a9)X7Xga?hmZPSK|qLs5A2=neyVIKQfZ zWsm}AgvxuYrX^@JmNSU4fAnr%Q+~bE-kIXe>q=b)LVp)rwz<0480p(Mx2(w`ds_BH z(!po_ds_}|EN2U=?%v!oB0XbXyj`GJEx|?{A8l(*mWva*A2|J7?AXSYjm+6bz4}W ziX>4Z2^jauD?~yjFO-P^BPKfpBQZk1gs9x+oOO8b^>@zd~}oFEstLo zJCq+84mBiuhFR*AC+-wjj!Z02$})L&$e!c8ZY z^DxV-;f~pS?=$)W`nQm9AOo}QDZ-(c06md8Y94Hw@IJNHQ`Xxmo-X0?6>y=2FpC@g z)BmypQ+18L8&1AHop#^7m)AC97&f>IdwK+f?WS@nuad5fp<;KR)VZy*K!HXC&Pvk>X_zwI47bw7p*slmE`9ir ze%uP3rj`>bXe2(`e;VpSLLGlPCmDvdOn&XnK!{E&<#Ve)=* zkZOdCLftd(gZBcJfi>Ev6ga@>n&+p$G+}K8R9v2&AY#4s#x}m2eaD^{T=+M~Vfdlu?#a`f)-=6DB;IVq+i4PkYw8(9 zKXty^0U{m&Zqt>Fr`mkqx#oh4WsTkfy;726MHT~g@i+ahhl1^A>&=TEwgV+DF32-HF(mzyef7Y*qi3no=L`=xg2fN^}Y9A0FH6EKv_b=1$n7mtj8WQ$EgfL z`gmjf7TOAcA|EsN>LgJgFP%k-1y&#YD#N#(w7}8n$58%E?D|p1V3y+z5Z0|%W9K-Y z?&@A5VLb&ffY;?ERkAG<~!GEcknLF)s7BC2%g zL%gcg=|)dFy6rhoMLy5Z1}LP43kWl%%O+Qa1Z(r~bB&01?^UW>DWrHEN(i462WjZL zK*VhRE6+Ppy@cV5nL)=_r~L$p$BNoniskvs4 zbJ(ghf&Xg(4)=@{;Qj}*Sz2rIGhcwCKzV#~dB1tnwQXxX6E??kKrzptgW+ubovnc? 
z?P#E&1p}b|&b92%3?r~Az53n`fm93kRRB5T_;y2Cg^t^n1dJ-fGZ#For$0juA8N+f zpBo~*5JM{dX6a)X-+nf#*W}tAtwUt76fx@f*SGzmQYm7s3|B3=B$$hWW&@QJHPaVN z&Mmuq(lnSL?#H~Qn!;mP@T05LBR+BjY=Om_F_YPemXThR=T;L?R?lA!V(-v#WlRW~ zkG_>gSc%yuzG^r1wUcH!{iyBvyT7$0j6y1{F7u=(oM%ed8VI5>1ZtW4fwS8E1_A!r z7HL75rU)4aRIz&>F28A8YPBsIaIJlESiBGH3^(CETJ=5wIi&BZH*|!PhO)Uj$enhF zLzUjGlKSf|)|FO@CE`e$My5}1+fohtdrU(Kz6{2+ZC|Xk@^@)QW7%kQx|h!6Co<0+ z%yJ)4@G0p=`*J&bN!evcu?IWpvg%~67<)S%k*!u-}Xo*5Q7>b;k0gAw97Bu-=bQ+1LEw#WtJ zEoQ<8{VKNrw{yA8y~vz4Ig8R$*Q-7<%uZFSB5{WRFj5EwexA)+bwY^%&OxLsM`!(` zo&uaxHL3o8?R|GpQ|;R(gdhe46a;)F6dR~0peS8p7p19`sFa9`CN zqNpIC6j2B@5!8SPNRv)N2N6i938d_k;QM}lzu&jJvokxhGvDl=hvb|*<$CUN-Pg?_ ztR<90>97Yaf{VtdfniHa>Tft?hJ`!q+3i`mrLj6O+KXa#d>EYih7{fvW57yM?@<)z zy=_4;eM8m#TclHLhJW8(eYRuZ#7+bM+XbCwxfQ8cvgrtCkurWYaw1MIS_#(&4jsQv zkal;>4uysbH?xJ5C#v zRq5R+giYCiHe;M4odeLEo~=!c#rk^NIW;qAQ@dVm)Qu_IWL0qd@qo2Jc~3}kGQhVKrkrk{!( zBo&(csva0%Y<1isi`MRYn0@X;k3X5##}IuPv-4~!c9?ZbmcCfjE77p??Zvitms8mO z{d!g@AG{VSfXCeyvcOLOR>V0QUZ+1l_SxTn_bwZR`K!Yd;FC|fcU0K;051cQCQJ+l zv<|g!Kny;51H^J}#mJ>AwPGsSOxMl?N6f|c^dlYi&6D`&Q3z5=KjFNzA@M{}50GCl zBBFts%x4p219WOmYSrg-jJhkp0NDg>Zw#PjoTYI(K_i(A?Y=?GE`F zgw#x1`NA`%^z}SU-Q&9?CF!HMiVGG`@X|`YA;#;Z@INL>eSb9tH$}^p8XB|ce(EZM z^Ty`_W@#~qnf`orLLmLt3#s+qm)@81^8{ulAgV4f$M%3cyszdH;(jH8n0yR`Tb{-K zQ31s>bc!FyC@Kk`Cd(IM1_G|18vDZae#9o+G^$v6{O_Own6%MO_w!}_2RhxoaM}Re z{n5=ZeSH8X#yq1b$BLAv#<^mUwm;6CTtxLn)$0NRUjdbBhw_Hx9PfTU8O*$$oMh+O zp8V}7h(cl=KKOx>v4>`@{f~bczV=zCI=8PesK>XYKVJ?jB6r2-udbtaA@hRO5-91FO!m{}HAINq(psRjw8@6c1cebJP>)br#hLx3~+9Zu^pn6K>< z9{YF$+BiLnVx5uFjG;Fr-@{@0x8nReL9x{gflUP=>?&oyfRMeI9i5JEjc(W%Pu3+T zDLc)6BYHW&F-pA^z*RkF(@Tt{r$nUxDh57{`D4(TuR*EF_Eub?#6lgKQJ}m93dI|@ zHbUV*GBVCj7f_qz&w`xFwGN}*VRx?O0Y1#=GV)~?sPb$%M|$TB3jn@}Z>y2p5x23d zpV2R3RN}Cefgq?Y0(0VCDYY1CQ(92kv*#Xesl@)pe7<*Wa9{Bjl@RJArYie>E~d*y z`9zaSUuy}b2!Om02xtNdKyDn=+;Vno_wMs_da<;zJW2<2Zn11HQczEv(u*LhUIA?S(eOjN*o@l=)++j! 
zm@mw>bJ0L|HIpiK&VRJfb=;Cqp-4s&9vHa$4+aJ-{PI7~JfNU8R+`Cn{q-3vzOu1v z?NN^RX5qG3qy*m-K3jQ0Pnc3Uwg`e*F(u>cqzbXkNKX%&8feJq>vvX$ZmpN8T(MQ- zAdX3Xby=~?WTHfBopG6a>zq?n+*p&(es;0B!MVv+ELE9PvY@=y&+&uHW*JZM(i`6o zL`8^sbz~JP$W@#Q=o4GD&$w)j$3l-*`GNZ&3U3BErZQ$lL`EZk_n72*+xClqB9uw> z06K)v8cjc5`cfuEq~P+s9ixQDZ8?B?1(0li(}v&Bs1jbO93Fvj>}w2~Q-ROBec*MQ z>;x-VRcTKG8YJh8y?x$KVn#E(>pzV28Tj|+;w&haQltEt8)UiU$dvDZyPS1l8rV-B zM%))a_%~&Jb+asbv1$?sT?Ci~+M=XL=&Job%T zLaR30Cj0)!hj0uRNb*veMtX(5zfY6fP-zpNN)oqRe)?&1GAOH}yuY|}P(q0W?R`gP zK*$sx{)vO$;{3l{2S{GIHm4<-%HfMATeTRpGf}*W+<%xz1 z%eozbk}0QL>~YHEIZ+6Hc>6htjsIG4EN^&5cjS%{5J@IOEAYdr=BJ&r#AWS*Vp zFYgT)?RU7GkL4pljStjy@WZd4_@oIEBWLiHJ;y#L%L-&wtN0>3gYCn~Tluy4OHaU8 z>Q4RtpZveRuxCKtFbNFi)U*|k-7(A371|@_{7~H5_FqcKME9vsmyA*Z=fdJ6MWMtn z?c))d|5mZiw5QS?E7%_%k6C*PU6BH>P)yI#c7ElWzg)WKi&W8|W>MvTwYAXtfAJh} zdgNToe%us##=316y0S7nKK1i*Ex)=99FPN1Ipu=zXNP6cdH$t2jYZa>!dKMNxQgJk?DN^XV%gdP z-&ORQ?Lbr>(l~fVVI5u#Sf4ALDCHxpwM0G;frfNT3f}$^N&^0N<(deZ7VfQ!K8%-v z<6c`FgbRsl{5CLfT-76+_v5%tzeg7YJavU8dUgyg^gX=tl1=g)E-uY$=Wl>JY)JMk znrtrz=h!h=`d+TY3j7z@^d#-E@Jw~{{hwnpJ0iz`$`{&wuO{ay)-uqM&f4=WFB zgoJjW#ZOjjCO8RSuv4!zj48NR&_e_Z8=kdfAJ z{22-t85LSRnh&y6NoSwFiw?MvqPyAfT%~iI$zvhE)4$>53Gwtf)4=Sf-k1)1PlS}p zud815`vH4|r+PnJe9sNznw1}x(}x@X5MA-^z_Nz{C9%d4(p<91jvnSSzs4qV-|7=8 zNFgJo-}mkk+&kOAddo8t39nK!#DcSS3BQc{S5NRStU|}VDc^7OgoVr|n|vFo6z=#f z=>Vq0Q$c!C&*##}HnH?(<-r#N9!pV&w!%%vUP$~c%H|7d!EzRfLya~97xZtRRWwVF zgC~@XA4pAu!lho$Hj3u?EPH*v`ItTHG?pXAx_dwEj}NRMk&zGZq1Ob8x846ISLqK* zEEU-kE_$fqUteY|3_7WuyiZZ}#muKu$sFd9zM?}TaK*_0J64;KC~RHzZv(NJ2DyR= zB>$RQ$msdt+55=W-;p`{QrKU9x(mGc+g0U*M^|ACg0mT8eL5jitYYD=bWuESp%Gys zz6ojUe~1~xi9uO;bsHZi@7G-xuBZzukSnZL9NAlt&NJc8#vSoObh-Sbfu2+~APcCI zu&%sJS{hwIYL+s})HU4!wDth`m=IsWPJ;keU{1?Iaj88b-}|m+>MVO_L&Z`*g@Bdu z#0(*HUrr_M0t`u1uftm1B;Mjh#w{Q_-TXIdB~&fRFGdKl6?@l_nA288nDx5)bFp48 zz3LtsgO&nnu;w*CiiS@=U#)ihpTX`=f$1|Bc&tr2VWE8Kt~8dF!Gp!<^5!T(7AlXqN9AG=ORv$n6e`xx zmEGGWAI?ZZrN+XqfyJGcwkv1${iGjEi*@#iLFKV-FJM_=^m6QU7vU{?;X{IPut%x= zlKRw#(Fxb8LvxAPYEd(_2-mp+D$hT_WbYPQXUt3U-a|QeQ3( zUg5MjY3AW(O5QiM&Tyu6%9GpiBaegQaz}D`H_QUUCyP|p_LPL>dW-$14cc!4V}2l9 zlT!TF!8||PwP`p-9=k*#akjY@R88(Pa1H6RxAM?HUFe_S1&vtYmS)lk*LoMJQ%5z- z$^Znqkah3!wYgCw?yhUiUc=8B=K(gj0|U75j|BF-YfZFOz-Y2IlS3e>aQrH*kw=p_ z^frVOm;3btqI0&5$FXNlB&y_0X$PJU$)&40)7e{)nA6-#l{k*8vK^2~x>kue)5B1l zytdgxnn8;{(G&!aqt*5G2ZVcwxyV6rgOh&BfIL;5-5)~|4{CMf%|cPoF`4Zp)5KfGYq%Ts8YECfNRuNXCGe28X|B)ZejBP(sDsc4by}+bDo3!o=y4V9B8Bd|=LrsiW!Aj|Z)zoR3|u>+ki3f**AYsB z{J>l)%6|>#eca@=@2{8;|A8v!LHnWnx#?GJlz~RMuN1_d91GSR$u61by?%|&NG}bo zMh~wsg=jVn$GvFJimzmtCbgU+E9`hWJ633g<5EyZ+a|vJG$*?n8}917=$LM2GgVDK z8+A-UNtzIRFe$R)%gsOSnI>LeI2T~v3OiNY{%{IGV(`jYV3dLOfmIex`Sw&OyD7>Y zXs55$KGZhxA2?(%kbl5Gy3;Q8%0R3?miw^*yMXFRmt--Utty-EuBeLREm3eh4vM$# z?Y8_O1W9Gmx96_4kFp2JDDr5o_h|CP7qMG~DcOq09s;th62@aPRY?5T>D28r8EFIq zZ|q!Ci#QYSLs2a;WM++cU2Xo}c4oDV_5;af=X9Jbx>}v)##^dGVV7u%F_fV_r>W2o z-gwOLwZEdN3!OKU881h5M$=wh54J!I##tc(FN`8#k-H>p?^^gQ|Ou6=jz7&cA!rcEq0rmXT^#brZ(pM4`fy z)Qo*yX1lfu6XuvlYCO`QY_QwBkp|K>>XufAlig}DQ20)tT^@BkV@sMW?o3}gefkAe zm$p|v9J=aK*s_O}K?}FMmq!EBM&h8Ru4R{;@pzdIHGcOD#=Li|A%?Vcv+izH+fsw! 
z#t=+>npvSOR@X5glHj?tr{s?AN~&S^jh~ySrijj5-gFnbxg)*~RIDzllxi1SJi=Gu z_)f$c*}xraw&HRb)R;1ydHtc*r+6fr7EkXoYU@q#p`7ms2LE}3+QUT8cC9G;S0pCN zpwXW&8CZ4=h5w`KS&@u94uQu_NS3O$V8MmOI!Z(WRi$?pk zl`F0qD`UQP_uB+36`(5%r!V(lr}0-_J_c<*a0h+>$M+KJ&d&GV8NBSPM!QR3S-x-X z>}}Etq~uBS=1DGQw(uEpdq7^rzrya>JhU5lbsn43Q1w>2jB&=eeXQ^RZ>fgoY4#?2 z^{R^C8?&+QqUK~UHH>{=Xpdd-dCx(vg+x_#lRv7{&Uc7QSLkmr(&`Y9+1Zw z6i&jvGq{Ifei>k#H-YK5<9@8+Z?ZEzQNi=B*tj{_F2vgo9*zE=@F^JBNAQ)H!07M#u|vU`r)6dy zF5Igy4A83GvW4+_9d!5WySismi96Qwtx7vsq?%G=`j?A+>Dt(l&LzG(Qbcxh`OfVq zmYqbH^rw6eC@_{qqw8f8B0rLF>;=9*jc$304})1bvT8C2;ZZ@roS6~I=eue>v5cx# z=cE`eqp@+O!Q2EZ@?UwVJa(AvsUs7{LtwWSX8YW+M3a(%!fE-CfjeDBy@kBO>2vF; z-eR(7IzL8epBCfIsMLIPYL;f*=*nH{}Zt<_nDKo4emnp#12iGlo%KnVRfXH_VMn4ss8{6q`62}?vEhz{FRme zah0ek%u9lPvjm*UyXRk^4!RbgQ9(%Ptg7&#Lw~~m&#SgiFedK=`ByUE_JPX#c9Ot2 zUp6Iw|4*j+e|`np%_faF+OQ`|Yo(r}RbRzu;#2PalL-VSwgE2FM_!E!eUr6I7|A|o4arF!6|i!b zb8BSj$VAXTh?lW(gyigAMxl`r*uMXwy%u<4Te*$GNITk{))lofXcvC_<+CR;Fk*;d zZ^deKheG2U$5Ch*=&1w!Hc-}P+NTI|s~Sz~vpUvg1PeCp*ADSVcn#kAlk>=%z&^#} z95?gn5<$Dv@7CTjH48G&&O3D*ARgnYkCWxhgc2;i3y^YjEdqG+mHZ}N+1gFQ$fp#+ zEK)4{WYMkS4%jq6F77X|$%V2zlA#U;Q-UK^cUD%7HoD3h%u&tS2cnsjxlBmDc7<@8 zq||Be#FOLjwTLy~faBB5w$QfA-k3Z{mNX;p<-HQn{#$RRZKib(;WcE@Lz$On*9`Uz z@*j2~%bYwU>z$0(AXQtC{S$ltT{l(y${KtMb z-?KwlT@%_1%4Ku1eE?5e)7@my9W4$IpbjjB$VA?0K@Z2;WARW2N6{P&VWa^^!$<`1 zvgb2S-h!5Kw1hx&HYns2XL1hGjAhWRR^izi!s?Gja@gOz!6$ce-N5$mO;D$8*9lO9 z5j!LBf@Kg^51D(xu9t%D^hWCHtZ|Yp&v1F(r>bPo{Re50 zU?JV5z>1%|bQ|j6a=Nz<>M*f|EC5ej2e|O=N7_T`T@3^rsca*tFvXc16XjEUMCVS) zIInUAh^0EM%q~a@}1f?s$2Qi3CW8p}IxSQ5ayBqHS0y0s@FYxGRs+r@fzQ>(HDL^Im=SUzLPnbA@0_zF zWj#M{Asf9HKb#69t{2=ED~BEFhN4yI8QenWoixGSuB(BCQ|3d!#dMHmXvT!0Ju_Jr%z1Hy&PlbHT0I zAelJ%?pV?+Y^Tu4IBnlZ*dFs2pdG2%{mCVS^;3*HOO^Nz7!?fIDs*c0eu1B;J9rVd z(Sf50mC8sb@V;s;-58ZgS)TVB=b)Waq&d=WMAXP_2Rj65`l?nP5r0Ie865erSPNw3!qjl8eEDroS}o1aaVqeQbS*pEEI|3aB0&3>lo@XigRxN{OP6gz>Z zcuj>hGGW4N;1yXCvd@0j<*85T2tCScDLa{a_0>`5&aC9hLMtEVn-a|qt;M+)baxQ` zit}-~E)~7#6>YY^g(!a5f}_`9hRa{Br3!-0_c z0tiy_ql+9deRVQUD#vaSt}N8lq+_Zq(^0g@wEuyV4I7F(@5Cwk3U+IbdUFe{o4aHn zc;4-`+QJm|sCbyZZKySjcz;Osr*&wY`j=Cpn$jOX6wx9_&I`3WH=5hnv!VyQTCe7W za`Z4T^kfSk&Cs$#zchAfmb?E$& zb@(qx$CjIM^*E5|mg%GG2}dGU}npO~Bfo4fT;lXF=2x z!m4v2BT6^3uYTk`G?dOg5Yp>pyEv~H#0@ryF)m3W*d6U z4DY{H73bq!sjzMD?! zOgC3z6kS{Krhq3uC_|fQ#sDMw#cbI=tDWLO*{{xRM{gI-&_G*4aw;Im5JzkNN)@Y< z4K@%9t8PM_N$w2MT}7`(LKytG8q!DrJ7{Lfde4^FTw zjuu;=k!0xkGQ32Vyv{;22e*)0vrWk;L8&wGs0oaiEn^+k3F z1?tQ1E#jr}hkfM&f+0C?$eCsj++LH7*4->BqPh8?(^l#+Ta<*R-($WUZ#ZUK;1wNr zE@;IY6Rus#QTJb@u`Q~8vof?6)H<2J0keewO*YKTo;XNjzRbUn*C`RI=oG3bb5o-} z+2`f@ZR#~3LZ=)*e89$v)O;ElAiQ6xso?mzAiEP5_O|JHvu_gak&J%evK=8CcO|v8 z-~dqeau$~Dq|Sqp>*UOt5;s}&=i3*Q6(AV^jfKFqP&`#K%ON9eH;V|TkQ$gYDIv+Y zRG00qTwlF+tM&5aS$v`fFro|>!ID}oP~EhIEoZ5~S&pFOzJBH)m!VV))2E7e(*crf zLw#~DXH8zGDS3S~Xj1- literal 0 HcmV?d00001 From 799074d7f9c67dfc61b2db3872c56c77a8705b85 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Fri, 17 Jun 2022 22:08:10 +0200 Subject: [PATCH 015/118] Restore capability of reproducing CI failures using new Breeze (#24402) The old breeze-legacy used to have a possibility of very easy reproduction of CI failures by executing the right breeze command that contained the commit hash of the PR being tested. This has been broken for some time after we migrated to the new breeze, but finally it was the time when it was needed again. 
This PR brings back the capability by: * addding --image-tag parameters for tests, shell and start-airflow commands * if --image-tag is specified, then rather than building the image, it is pulled using the specified hash * if --image-tag is specified, the local sources are not mounted to breeze when started, but the sources already embedded in the image are used ("skipped" set for --mount-sources). * new "removed" command value is added to --mount-sources, it causes breeze command to remove the sources from the image (it is used when installing airflow during the tests for specified version (it's automatically used when --use-airflow-version is used). (cherry picked from commit 7dc794a985c5e19126068ee1a83d09a21e894a3e) --- .github/workflows/ci.yml | 5 + CI.rst | 9 +- IMAGES.rst | 5 +- breeze-complete | 3 - breeze-legacy | 24 -- .../commands/ci_image_commands.py | 30 +- .../commands/developer_commands.py | 16 +- .../commands/release_management_commands.py | 14 +- .../commands/testing_commands.py | 22 +- .../src/airflow_breeze/global_constants.py | 5 +- .../src/airflow_breeze/params/shell_params.py | 27 +- .../airflow_breeze/utils/common_options.py | 13 +- .../utils/docker_command_utils.py | 3 +- dev/breeze/src/airflow_breeze/utils/image.py | 4 +- dev/breeze/tests/test_commands.py | 12 +- images/breeze/output-build-image.svg | 268 ++++++------- images/breeze/output-build-prod-image.svg | 336 ++++++++-------- images/breeze/output-commands-hash.txt | 2 +- images/breeze/output-commands.svg | 368 +++++++++--------- images/breeze/output-docker-compose-tests.svg | 104 ++--- images/breeze/output-generate-constraints.svg | 152 ++++---- images/breeze/output-pull-image.svg | 148 +++---- images/breeze/output-pull-prod-image.svg | 148 +++---- images/breeze/output-shell.svg | 224 ++++++----- images/breeze/output-start-airflow.svg | 232 ++++++----- images/breeze/output-tests.svg | 164 ++++---- images/breeze/output-verify-image.svg | 104 ++--- images/breeze/output-verify-prod-image.svg | 104 ++--- .../output-verify-provider-packages.svg | 132 +++---- .../ci_run_single_airflow_test_in_docker.sh | 6 +- 30 files changed, 1424 insertions(+), 1260 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 32283ed26d488..bde6fd15e62b3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -923,6 +923,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: ./scripts/ci/testing/ci_run_airflow_testing.sh env: PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Upload airflow logs" uses: actions/upload-artifact@v2 if: failure() @@ -996,6 +997,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: ./scripts/ci/testing/ci_run_airflow_testing.sh env: PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Upload airflow logs" uses: actions/upload-artifact@v2 if: failure() @@ -1068,6 +1070,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: ./scripts/ci/testing/ci_run_airflow_testing.sh env: PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Upload airflow logs" uses: actions/upload-artifact@v2 if: failure() @@ -1138,6 +1141,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: ./scripts/ci/testing/ci_run_airflow_testing.sh env: PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Upload airflow logs" uses: 
actions/upload-artifact@v2 if: failure() @@ -1206,6 +1210,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: ./scripts/ci/testing/ci_run_airflow_testing.sh env: PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Upload airflow logs" uses: actions/upload-artifact@v2 if: failure() diff --git a/CI.rst b/CI.rst index f24639271e977..7798b077acf0c 100644 --- a/CI.rst +++ b/CI.rst @@ -96,7 +96,7 @@ You can read more about Breeze in `BREEZE.rst `_ but in essence it i you to re-create CI environment in your local development instance and interact with it. In its basic form, when you do development you can run all the same tests that will be run in CI - but locally, before you submit them as PR. Another use case where Breeze is useful is when tests fail on CI. You can -take the full ``COMMIT_SHA`` of the failed build pass it as ``--github-image-id`` parameter of Breeze and it will +take the full ``COMMIT_SHA`` of the failed build pass it as ``--image-tag`` parameter of Breeze and it will download the very same version of image that was used in CI and run it locally. This way, you can very easily reproduce any failed test that happens in CI - even if you do not check out the sources connected with the run. @@ -275,7 +275,7 @@ You can use those variables when you try to reproduce the build locally. | | | | | should set it to false, especially | | | | | | in case our local sources are not the | | | | | | ones we intend to use (for example | -| | | | | when ``--github-image-id`` is used | +| | | | | when ``--image-tag`` is used | | | | | | in Breeze. | | | | | | | | | | | | In CI jobs it is set to true | @@ -668,12 +668,11 @@ For example knowing that the CI job was for commit ``cd27124534b46c9688a1d89e75f But you usually need to pass more variables and complex setup if you want to connect to a database or enable some integrations. Therefore it is easiest to use `Breeze `_ for that. For example if -you need to reproduce a MySQL environment with kerberos integration enabled for commit -cd27124534b46c9688a1d89e75fcd137ab5137e3, in python 3.8 environment you can run: +you need to reproduce a MySQL environment in python 3.8 environment you can run: .. code-block:: bash - ./breeze-legacy --github-image-id cd27124534b46c9688a1d89e75fcd137ab5137e3 --python 3.8 + breeze --image-tag cd27124534b46c9688a1d89e75fcd137ab5137e3 --python 3.8 --backend mysql You will be dropped into a shell with the exact version that was used during the CI run and you will be able to run pytest tests manually, easily reproducing the environment that was used in CI. Note that in diff --git a/IMAGES.rst b/IMAGES.rst index 58ef0cca54852..634c4ac8a0e12 100644 --- a/IMAGES.rst +++ b/IMAGES.rst @@ -281,7 +281,7 @@ to refresh them. Every developer can also pull and run images being result of a specific CI run in GitHub Actions. This is a powerful tool that allows to reproduce CI failures locally, enter the images and fix them much -faster. It is enough to pass ``--github-image-id`` and the registry and Breeze will download and execute +faster. It is enough to pass ``--image-tag`` and the registry and Breeze will download and execute commands using the same image that was used during the CI tests. For example this command will run the same Python 3.8 image as was used in build identified with @@ -289,8 +289,7 @@ For example this command will run the same Python 3.8 image as was used in build .. 
code-block:: bash - ./breeze-legacy --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e \ - --python 3.8 --integration rabbitmq + breeze --image-tag 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e --python 3.8 --integration rabbitmq You can see more details and examples in `Breeze `_ diff --git a/breeze-complete b/breeze-complete index 389d18f64d439..2fd1a9d1a8cfa 100644 --- a/breeze-complete +++ b/breeze-complete @@ -173,9 +173,6 @@ function breeze_complete::get_known_values_breeze() { -g | --github-repository) _breeze_known_values="${_breeze_default_github_repository}" ;; - -s | --github-image-id) - _breeze_known_values="${_breeze_default_github_image_id}" - ;; kind-cluster) _breeze_known_values="${_breeze_allowed_kind_operations}" ;; diff --git a/breeze-legacy b/breeze-legacy index d18d3c25e132b..0bb257494c915 100755 --- a/breeze-legacy +++ b/breeze-legacy @@ -970,21 +970,6 @@ function breeze::parse_arguments() { export GITHUB_REPOSITORY="${2}" shift 2 ;; - -s | --github-image-id) - echo - echo "GitHub image id: ${2}" - echo - echo "Force pulling the image, using GitHub registry and skip mounting local sources." - echo "This is in order to get the exact same version as used in CI environment for SHA!." - echo "You can specify --skip-mounting-local-sources to not mount local sources to get exact. " - echo "behaviour as in the CI environment." - echo - export GITHUB_REGISTRY_PULL_IMAGE_TAG="${2}" - export CHECK_IMAGE_FOR_REBUILD="false" - export SKIP_BUILDING_PROD_IMAGE="true" - export SKIP_CHECK_REMOTE_IMAGE="true" - shift 2 - ;; --init-script) export INIT_SCRIPT_FILE="${2}" echo "The initialization file is in ${INIT_SCRIPT_FILE}" @@ -1370,15 +1355,6 @@ ${CMDNAME} shell [FLAGS] [-- ] '${CMDNAME} shell -- -c \"ls -la\"' '${CMDNAME} -- -c \"ls -la\"' - For GitHub repository, the --github-repository flag can be used to specify the repository - to pull and push images. You can also use --github-image-id in case - you want to pull the image with specific COMMIT_SHA tag. - - '${CMDNAME} shell \\ - --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA - '${CMDNAME} \\ - --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA - " readonly DETAILED_USAGE_SHELL export DETAILED_USAGE_EXEC=" diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py index 889a9d7bef459..374dac76cf6c8 100644 --- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py @@ -319,7 +319,6 @@ def pull_image( verbose=verbose, wait_for_image=wait_for_image, tag_as_latest=tag_as_latest, - poll_time=10.0, ) if return_code != 0: get_console().print(f"[error]There was an error when pulling CI image: {info}[/]") @@ -530,26 +529,41 @@ def build_ci_image_in_parallel( pool.close() -def rebuild_ci_image_if_needed( - build_params: Union[ShellParams, BuildCiParams], dry_run: bool, verbose: bool +def rebuild_or_pull_ci_image_if_needed( + command_params: Union[ShellParams, BuildCiParams], dry_run: bool, verbose: bool ) -> None: """ Rebuilds CI image if needed and user confirms it. 
- :param build_params: parameters of the shell + :param command_params: parameters of the command to execute :param dry_run: whether it's a dry_run :param verbose: should we print verbose messages """ build_ci_image_check_cache = Path( - BUILD_CACHE_DIR, build_params.airflow_branch, f".built_{build_params.python}" + BUILD_CACHE_DIR, command_params.airflow_branch, f".built_{command_params.python}" ) - ci_image_params = BuildCiParams(python=build_params.python, upgrade_to_newer_dependencies=False) + ci_image_params = BuildCiParams( + python=command_params.python, upgrade_to_newer_dependencies=False, image_tag=command_params.image_tag + ) + if command_params.image_tag is not None: + return_code, message = run_pull_image( + image_params=ci_image_params, + dry_run=dry_run, + verbose=verbose, + parallel=False, + wait_for_image=True, + tag_as_latest=False, + ) + if return_code != 0: + get_console().print(f"[error]Pulling image with {command_params.image_tag} failed! {message}[/]") + sys.exit(return_code) + return if build_ci_image_check_cache.exists(): if verbose: - get_console().print(f'[info]{build_params.image_type} image already built locally.[/]') + get_console().print(f'[info]{command_params.image_type} image already built locally.[/]') else: get_console().print( - f'[warning]{build_params.image_type} image was never built locally or deleted. ' + f'[warning]{command_params.image_type} image was never built locally or deleted. ' 'Forcing build.[/]' ) ci_image_params.force_build = True diff --git a/dev/breeze/src/airflow_breeze/commands/developer_commands.py b/dev/breeze/src/airflow_breeze/commands/developer_commands.py index a8ad686c708c5..bc324bf7e3798 100644 --- a/dev/breeze/src/airflow_breeze/commands/developer_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/developer_commands.py @@ -21,7 +21,7 @@ import rich_click as click -from airflow_breeze.commands.ci_image_commands import rebuild_ci_image_if_needed +from airflow_breeze.commands.ci_image_commands import rebuild_or_pull_ci_image_if_needed from airflow_breeze.commands.main_command import main from airflow_breeze.global_constants import ( DEFAULT_PYTHON_MAJOR_MINOR_VERSION, @@ -45,6 +45,7 @@ option_force_build, option_forward_credentials, option_github_repository, + option_image_tag, option_installation_package_format, option_integration, option_load_default_connection, @@ -113,6 +114,7 @@ "--force-build", "--mount-sources", "--debian-version", + "--image-tag", ], }, ], @@ -141,6 +143,7 @@ "--force-build", "--mount-sources", "--debian-version", + "--image-tag", ], }, ], @@ -170,6 +173,7 @@ "--package-format", "--force-build", "--mount-sources", + "--image-tag", ], }, ], @@ -238,6 +242,7 @@ @option_mount_sources @option_integration @option_db_reset +@option_image_tag @option_answer @click.argument('extra-args', nargs=-1, type=click.UNPROCESSED) def shell( @@ -261,6 +266,7 @@ def shell( force_build: bool, db_reset: bool, answer: Optional[str], + image_tag: Optional[str], extra_args: Tuple, ): """Enter breeze.py environment. 
this is the default command use when no other is selected.""" @@ -289,6 +295,7 @@ def shell( extra_args=extra_args, answer=answer, debian_version=debian_version, + image_tag=image_tag, ) @@ -312,6 +319,7 @@ def shell( @option_installation_package_format @option_mount_sources @option_integration +@option_image_tag @option_db_reset @option_answer @click.argument('extra-args', nargs=-1, type=click.UNPROCESSED) @@ -335,6 +343,7 @@ def start_airflow( use_packages_from_dist: bool, package_format: str, force_build: bool, + image_tag: Optional[str], db_reset: bool, answer: Optional[str], extra_args: Tuple, @@ -362,6 +371,7 @@ def start_airflow( force_build=force_build, db_reset=db_reset, start_airflow=True, + image_tag=image_tag, extra_args=extra_args, answer=answer, ) @@ -398,7 +408,7 @@ def build_docs( """Build documentation in the container.""" perform_environment_checks(verbose=verbose) params = BuildCiParams(github_repository=github_repository, python=DEFAULT_PYTHON_MAJOR_MINOR_VERSION) - rebuild_ci_image_if_needed(build_params=params, dry_run=dry_run, verbose=verbose) + rebuild_or_pull_ci_image_if_needed(command_params=params, dry_run=dry_run, verbose=verbose) ci_image_name = params.airflow_image_name doc_builder = DocBuildParams( package_filter=package_filter, @@ -581,7 +591,7 @@ def enter_shell(**kwargs) -> RunCommandResult: get_console().print(CHEATSHEET, style=CHEATSHEET_STYLE) enter_shell_params = ShellParams(**filter_out_none(**kwargs)) enter_shell_params.include_mypy_volume = True - rebuild_ci_image_if_needed(build_params=enter_shell_params, dry_run=dry_run, verbose=verbose) + rebuild_or_pull_ci_image_if_needed(command_params=enter_shell_params, dry_run=dry_run, verbose=verbose) return run_shell(verbose, dry_run, enter_shell_params) diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index 839b966faebb5..a5333a08ab0b6 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -24,7 +24,7 @@ import click -from airflow_breeze.commands.ci_image_commands import rebuild_ci_image_if_needed +from airflow_breeze.commands.ci_image_commands import rebuild_or_pull_ci_image_if_needed from airflow_breeze.commands.main_command import main from airflow_breeze.global_constants import ( ALLOWED_PLATFORMS, @@ -260,7 +260,7 @@ def prepare_airflow_packages( install_providers_from_sources=False, mount_sources=MOUNT_ALL, ) - rebuild_ci_image_if_needed(build_params=shell_params, dry_run=dry_run, verbose=verbose) + rebuild_or_pull_ci_image_if_needed(command_params=shell_params, dry_run=dry_run, verbose=verbose) result_command = run_with_debug( params=shell_params, command=["/opt/airflow/scripts/in_container/run_prepare_airflow_packages.sh"], @@ -299,7 +299,7 @@ def prepare_provider_documentation( answer=answer, skip_environment_initialization=True, ) - rebuild_ci_image_if_needed(build_params=shell_params, dry_run=dry_run, verbose=verbose) + rebuild_or_pull_ci_image_if_needed(command_params=shell_params, dry_run=dry_run, verbose=verbose) cmd_to_run = ["/opt/airflow/scripts/in_container/run_prepare_provider_documentation.sh", *packages] result_command = run_with_debug( params=shell_params, @@ -351,7 +351,7 @@ def prepare_provider_packages( skip_environment_initialization=True, version_suffix_for_pypi=version_suffix_for_pypi, ) - rebuild_ci_image_if_needed(build_params=shell_params, dry_run=dry_run, 
verbose=verbose) + rebuild_or_pull_ci_image_if_needed(command_params=shell_params, dry_run=dry_run, verbose=verbose) cmd_to_run = ["/opt/airflow/scripts/in_container/run_prepare_provider_packages.sh", *packages_list] result_command = run_with_debug( params=shell_params, @@ -540,7 +540,7 @@ def verify_provider_packages( use_packages_from_dist=use_packages_from_dist, package_format=package_format, ) - rebuild_ci_image_if_needed(build_params=shell_params, dry_run=dry_run, verbose=verbose) + rebuild_or_pull_ci_image_if_needed(command_params=shell_params, dry_run=dry_run, verbose=verbose) cmd_to_run = [ "-c", "python /opt/airflow/scripts/in_container/verify_providers.py", @@ -621,8 +621,8 @@ def release_prod_images( dry_run: bool, ): perform_environment_checks(verbose=verbose) - rebuild_ci_image_if_needed( - build_params=ShellParams(verbose=verbose, python=DEFAULT_PYTHON_MAJOR_MINOR_VERSION), + rebuild_or_pull_ci_image_if_needed( + command_params=ShellParams(verbose=verbose, python=DEFAULT_PYTHON_MAJOR_MINOR_VERSION), dry_run=dry_run, verbose=verbose, ) diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py index ebe4701b73ceb..3946acd2ab3a9 100644 --- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py @@ -23,7 +23,7 @@ import tempfile from threading import Event, Thread from time import sleep -from typing import Dict, List, Tuple +from typing import Dict, List, Optional, Tuple import click @@ -40,6 +40,7 @@ option_image_name, option_image_tag, option_integration, + option_mount_sources, option_mssql_version, option_mysql_version, option_postgres_version, @@ -77,7 +78,6 @@ "options": [ "--integration", "--test-type", - "--limit-progress-output", "--db-reset", "--backend", "--python", @@ -85,7 +85,15 @@ "--mysql-version", "--mssql-version", ], - } + }, + { + "name": "Advanced flag for tests command", + "options": [ + "--limit-progress-output", + "--image-tag", + "--mount-sources", + ], + }, ], } @@ -235,7 +243,8 @@ def run_with_progress( help="Limit progress to percentage only and just show the summary when tests complete.", is_flag=True, ) -@click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED) +@option_image_tag +@option_mount_sources @click.option( "--test-type", help="Type of test to run.", @@ -243,6 +252,7 @@ def run_with_progress( type=BetterChoice(ALLOWED_TEST_TYPES), ) @option_db_reset +@click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED) def tests( dry_run: bool, verbose: bool, @@ -256,6 +266,8 @@ def tests( extra_pytest_args: Tuple, test_type: str, db_reset: bool, + image_tag: Optional[str], + mount_sources: str, ): os.environ["RUN_TESTS"] = "true" if test_type: @@ -274,6 +286,8 @@ def tests( postgres_version=postgres_version, mysql_version=mysql_version, mssql_version=mssql_version, + image_tag=image_tag, + mount_sources=mount_sources, ) env_variables = get_env_variables_for_docker_commands(exec_shell_params) perform_environment_checks(verbose=verbose) diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index 327740f7bf12b..0b8b220987c6c 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -60,9 +60,10 @@ MOUNT_SELECTED = "selected" MOUNT_ALL = "all" -MOUNT_NONE = "none" +MOUNT_SKIP = "skip" +MOUNT_REMOVE = "remove" -ALLOWED_MOUNT_OPTIONS = [MOUNT_SELECTED, MOUNT_ALL, 
MOUNT_NONE] +ALLOWED_MOUNT_OPTIONS = [MOUNT_SELECTED, MOUNT_ALL, MOUNT_SKIP, MOUNT_REMOVE] ALLOWED_POSTGRES_VERSIONS = ['10', '11', '12', '13', '14'] ALLOWED_MYSQL_VERSIONS = ['5.7', '8'] ALLOWED_MSSQL_VERSIONS = ['2017-latest', '2019-latest'] diff --git a/dev/breeze/src/airflow_breeze/params/shell_params.py b/dev/breeze/src/airflow_breeze/params/shell_params.py index 4ba4022651529..bb4ad3393ead3 100644 --- a/dev/breeze/src/airflow_breeze/params/shell_params.py +++ b/dev/breeze/src/airflow_breeze/params/shell_params.py @@ -32,8 +32,9 @@ ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS, AVAILABLE_INTEGRATIONS, MOUNT_ALL, - MOUNT_NONE, + MOUNT_REMOVE, MOUNT_SELECTED, + MOUNT_SKIP, get_airflow_version, ) from airflow_breeze.utils.console import get_console @@ -64,7 +65,7 @@ class ShellParams: github_actions: str = os.environ.get('GITHUB_ACTIONS', "false") github_repository: str = "apache/airflow" github_token: str = os.environ.get('GITHUB_TOKEN', "") - image_tag: str = "latest" + image_tag: Optional[str] = None include_mypy_volume: bool = False install_airflow_version: str = "" install_providers_from_sources: bool = True @@ -206,11 +207,23 @@ def compose_files(self): ) compose_ci_file.extend([main_ci_docker_compose_file, *backend_files, files_docker_compose_file]) + if self.image_tag is not None and self.image_tag != "latest": + get_console().print( + f"[warning]Running tagged image tag = {self.image_tag}. " + f"Forcing mounted sources to be 'skip'[/]" + ) + self.mount_sources = MOUNT_SKIP + if self.use_airflow_version is not None: + get_console().print( + "[info]Forcing --mount-sources to `remove` since we are not installing airflow " + f"from sources but from {self.use_airflow_version}[/]" + ) + self.mount_sources = MOUNT_REMOVE if self.mount_sources == MOUNT_SELECTED: compose_ci_file.extend([local_docker_compose_file]) elif self.mount_sources == MOUNT_ALL: compose_ci_file.extend([local_all_sources_docker_compose_file]) - else: # none + elif self.mount_sources == MOUNT_REMOVE: compose_ci_file.extend([remove_sources_docker_compose_file]) if self.forward_credentials: compose_ci_file.append(forward_credentials_docker_compose_file) @@ -233,11 +246,3 @@ def command_passed(self): if len(self.extra_args) > 0: cmd = str(self.extra_args[0]) return cmd - - def __post_init__(self): - if self.use_airflow_version is not None: - get_console().print( - "[info]Forcing --mount-sources to `none` since we are not installing airflow " - f"from sources but from {self.use_airflow_version}[/]" - ) - self.mount_sources = MOUNT_NONE diff --git a/dev/breeze/src/airflow_breeze/utils/common_options.py b/dev/breeze/src/airflow_breeze/utils/common_options.py index deca1b4882a4e..628bb8197972b 100644 --- a/dev/breeze/src/airflow_breeze/utils/common_options.py +++ b/dev/breeze/src/airflow_breeze/utils/common_options.py @@ -133,7 +133,7 @@ '--use-airflow-version', help="Use (reinstall at entry) Airflow version from PyPI. It can also be `none`, `wheel`, or `sdist`" " if Airflow should be removed, installed from wheel packages or sdist packages available in dist " - "folder respectively. Implies --mount-sources `none`.", + "folder respectively. 
Implies --mount-sources `remove`.", type=UseAirflowVersionType(ALLOWED_USE_AIRFLOW_VERSIONS), envvar='USE_AIRFLOW_VERSION', ) @@ -149,7 +149,7 @@ type=BetterChoice(ALLOWED_MOUNT_OPTIONS), default=ALLOWED_MOUNT_OPTIONS[0], show_default=True, - help="Choose scope of local sources should be mounted (default = selected).", + help="Choose scope of local sources that should be mounted, skipped, or removed (default = selected).", ) option_force_build = click.option( '--force-build', help="Force image build no matter if it is determined as needed.", is_flag=True @@ -186,16 +186,11 @@ help='The user name used to authenticate to GitHub.', envvar='GITHUB_USERNAME', ) -option_github_image_id = click.option( - '-s', - '--github-image-id', - help='Commit SHA of the image. \ - Breeze can automatically pull the commit SHA id specified Default: latest', -) option_image_tag = click.option( '-t', '--image-tag', - help='Tag added to the default naming conventions of Airflow CI/PROD images.', + help='Tag of the image which is used to pull or run the image (implies --mount-sources=skip' + ' when using to run shell or tests) ', envvar='IMAGE_TAG', ) option_image_name = click.option( diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py index fb38cefabacb4..96b83b5ed7945 100644 --- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py @@ -43,6 +43,7 @@ MIN_DOCKER_COMPOSE_VERSION, MIN_DOCKER_VERSION, MOUNT_ALL, + MOUNT_REMOVE, MOUNT_SELECTED, MSSQL_HOST_PORT, MYSQL_HOST_PORT, @@ -112,7 +113,7 @@ def get_extra_docker_flags(mount_sources: str) -> List[str]: extra_docker_flags.extend( ['--mount', "type=volume,src=mypy-cache-volume,dst=/opt/airflow/.mypy_cache"] ) - else: # none + elif mount_sources == MOUNT_REMOVE: extra_docker_flags.extend( ["--mount", f"type=bind,src={AIRFLOW_SOURCES_ROOT / 'empty'},dst=/opt/airflow/airflow"] ) diff --git a/dev/breeze/src/airflow_breeze/utils/image.py b/dev/breeze/src/airflow_breeze/utils/image.py index 8cd516bb47cfc..36c40cf05a3e7 100644 --- a/dev/breeze/src/airflow_breeze/utils/image.py +++ b/dev/breeze/src/airflow_breeze/utils/image.py @@ -88,7 +88,7 @@ def run_pull_image( verbose: bool, wait_for_image: bool, tag_as_latest: bool, - poll_time: float, + poll_time: float = 10.0, parallel: bool = False, ) -> Tuple[int, str]: """ @@ -98,7 +98,7 @@ def run_pull_image( :param verbose: whether it's verbose :param wait_for_image: whether we should wait for the image to be available :param tag_as_latest: tag the image as latest - :param poll_time: what's the polling time between checks if images are there + :param poll_time: what's the polling time between checks if images are there (default 10 s) :param parallel: whether the pull is run as part of parallel execution :return: Tuple of return code and description of the image pulled """ diff --git a/dev/breeze/tests/test_commands.py b/dev/breeze/tests/test_commands.py index 8866d75af073a..3d375e8613fae 100644 --- a/dev/breeze/tests/test_commands.py +++ b/dev/breeze/tests/test_commands.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-from airflow_breeze.global_constants import MOUNT_ALL, MOUNT_NONE, MOUNT_SELECTED +from airflow_breeze.global_constants import MOUNT_ALL, MOUNT_REMOVE, MOUNT_SELECTED, MOUNT_SKIP from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags from airflow_breeze.utils.visuals import ASCIIART @@ -35,7 +35,13 @@ def test_get_extra_docker_flags_selected(): assert len(flags) > 40 -def test_get_extra_docker_flags_none(): - flags = get_extra_docker_flags(MOUNT_NONE) +def test_get_extra_docker_flags_remove(): + flags = get_extra_docker_flags(MOUNT_REMOVE) assert "empty" in "".join(flags) assert len(flags) < 10 + + +def test_get_extra_docker_flags_skip(): + flags = get_extra_docker_flags(MOUNT_SKIP) + assert "empty" not in "".join(flags) + assert len(flags) < 10 diff --git a/images/breeze/output-build-image.svg b/images/breeze/output-build-image.svg index 34c28afd8b91b..81703735d0155 100644 --- a/images/breeze/output-build-image.svg +++ b/images/breeze/output-build-image.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + - Command: build-image + Command: build-image - + - - -Usage: breeze build-image [OPTIONS] - -Build CI image. Include building multiple images for all python versions (sequentially). - -╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.7< | 3.8 | 3.9 | 3.10)                                   -[default: 3.7]                                               ---upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. ---debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] ---image-tag-tTag added to the default naming conventions of Airflow CI/PROD images.(TEXT) ---tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful -when you build or pull image with --image-tag.                                  ---docker-cache-cCache option for image used during the build.(registry | local | disabled) -[default: registry]                           ---force-buildForce image build no matter if it is determined as needed. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Building multiple images ───────────────────────────────────────────────────────────────────────────────────────────╮ ---build-multiple-imagesRun the operation sequentially on all or selected subset of Python versions. ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced options (for power users) ─────────────────────────────────────────────────────────────────────────────────╮ ---install-providers-from-sourcesInstall providers from sources when installing. 
---airflow-constraints-modeMode of constraints for CI image building                               -(constraints-source-providers | constraints | constraints-no-providers) -[default: constraints-source-providers]                                 ---airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) ---additional-python-depsAdditional python dependencies to use when building the images.(TEXT) ---runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) ---runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) ---additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) ---additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) ---additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) ---additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) ---additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) ---additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) ---additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) ---dev-apt-depsApt dev dependencies to use when building the images.(TEXT) ---dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ ---github-tokenThe token used to authenticate to GitHub.(TEXT) ---github-usernameThe user name used to authenticate to GitHub.(TEXT) ---platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) ---push-imagePush image after building it. ---empty-imagePrepare empty image tagged with the same name as the Airflow image. ---prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  -image).                                                                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze build-image [OPTIONS] + +Build CI image. Include building multiple images for all python versions (sequentially). + +╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. +(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. 
+--debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] +--image-tag-tTag of the image which is used to pull or run the image (implies                +--mount-sources=skip when using to run shell or tests)                          +(TEXT)                                                                          +--tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful +when you build or pull image with --image-tag.                                  +--docker-cache-cCache option for image used during the build.(registry | local | disabled) +[default: registry]                           +--force-buildForce image build no matter if it is determined as needed. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Building multiple images ───────────────────────────────────────────────────────────────────────────────────────────╮ +--build-multiple-imagesRun the operation sequentially on all or selected subset of Python versions. +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.7 3.8 3.9 3.10]                                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced options (for power users) ─────────────────────────────────────────────────────────────────────────────────╮ +--install-providers-from-sourcesInstall providers from sources when installing. +--airflow-constraints-modeMode of constraints for CI image building                               +(constraints-source-providers | constraints | constraints-no-providers) +[default: constraints-source-providers]                                 +--airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) +--additional-python-depsAdditional python dependencies to use when building the images.(TEXT) +--runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) +--runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) +--additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) +--additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) +--additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) +--additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) +--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) +--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) +--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) +--dev-apt-depsApt dev dependencies to use when building the images.(TEXT) +--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ +--github-tokenThe token used to authenticate to GitHub.(TEXT) +--github-usernameThe user name used to authenticate to GitHub.(TEXT) +--platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) +--push-imagePush image after building it. 
+--empty-imagePrepare empty image tagged with the same name as the Airflow image. +--prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  +image).                                                                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
diff --git a/images/breeze/output-build-prod-image.svg b/images/breeze/output-build-prod-image.svg
index 494e77574c3e0..dcbd32a96662c 100644
[SVG screenshot diff omitted: regenerated "Command: build-prod-image" help output (--image-tag description updated)]
diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt
index aedf286361b03..dacbf6c6e2e96 100644
--- a/images/breeze/output-commands-hash.txt
+++ b/images/breeze/output-commands-hash.txt
@@ -1 +1 @@
-4715fa0a006457cc3f4f6447e3fecc95
+bdc63bfd5f0967029b8b32a8068303cf
diff --git a/images/breeze/output-commands.svg b/images/breeze/output-commands.svg
index a592f4642e045..e9dd89b22e4dc 100644
[SVG screenshot diff omitted: regenerated "Breeze commands" help output (--use-airflow-version and --mount-sources help text updated)]
diff --git a/images/breeze/output-docker-compose-tests.svg b/images/breeze/output-docker-compose-tests.svg
index 75f5c1a31b102..c652741d47579 100644
[SVG screenshot diff omitted: regenerated "Command: docker-compose-tests" help output (--image-tag description updated)]
diff --git a/images/breeze/output-generate-constraints.svg b/images/breeze/output-generate-constraints.svg
index ca049566070eb..e0b54f182ebba 100644
[SVG screenshot diff omitted: regenerated "Command: generate-constraints" help output (--image-tag description updated)]
diff --git a/images/breeze/output-pull-image.svg b/images/breeze/output-pull-image.svg
index e1cce7beebc1c..edb77e062ed5a 100644
[SVG screenshot diff omitted: regenerated "Command: pull-image" help output (--image-tag description updated)]
diff --git a/images/breeze/output-pull-prod-image.svg b/images/breeze/output-pull-prod-image.svg
index d6ee293d03b8a..e714d08acd989 100644
[SVG screenshot diff omitted: regenerated "Command: pull-prod-image" help output (--image-tag description updated)]
diff --git a/images/breeze/output-shell.svg b/images/breeze/output-shell.svg
index f279eab2d729e..2c29bda22ac20 100644
[SVG screenshot diff omitted: regenerated "Command: shell" help output (--use-airflow-version and --mount-sources help text updated, --image-tag option added)]
diff --git a/images/breeze/output-start-airflow.svg b/images/breeze/output-start-airflow.svg
index 159eec8cb29a0..3e21e1f5e97fa 100644
[SVG screenshot diff omitted: regenerated "Command: start-airflow" help output (--use-airflow-version and --mount-sources help text updated, --image-tag option added)]
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-tests.svg b/images/breeze/output-tests.svg index 914f2c4587a0b..1d819816f2596 100644 --- a/images/breeze/output-tests.svg +++ b/images/breeze/output-tests.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + + + + + + + + + + + + + + + + + + + - Command: tests + Command: tests - + - - -Usage: breeze tests [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Run the specified unit test targets. Multiple targets may be specified separated by spaces. - -╭─ Basic flag for tests command ───────────────────────────────────────────────────────────────────────────────────────╮ ---integrationIntegration(s) to enable when running (can be more than one).                           -(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino |  -all)                                                                                    ---test-typeType of test to run.                                                                    -(All | Always | Core | Providers | API | CLI | Integration | Other | WWW | Postgres |   -MySQL | Helm | Quarantined)                                                             ---limit-progress-outputLimit progress to percentage only and just show the summary when tests complete. ---db-reset-dReset DB when entering the container. ---backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] ---mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] ---mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze tests [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Run the specified unit test targets. Multiple targets may be specified separated by spaces. + +╭─ Basic flag for tests command ───────────────────────────────────────────────────────────────────────────────────────╮ +--integrationIntegration(s) to enable when running (can be more than one).                               +(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino | all) +--test-typeType of test to run.                                                                         +(All | Always | Core | Providers | API | CLI | Integration | Other | WWW | Postgres | MySQL  +| Helm | Quarantined)                                                                        +--db-reset-dReset DB when entering the container. 
+--backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] +--mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] +--mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced flag for tests command ────────────────────────────────────────────────────────────────────────────────────╮ +--limit-progress-outputLimit progress to percentage only and just show the summary when tests complete. +--image-tag-tTag of the image which is used to pull or run the image (implies --mount-sources=skip   +when using to run shell or tests)                                                       +(TEXT)                                                                                  +--mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed (default =    +selected).                                                                              +(selected | all | skip | remove)                                                        +[default: selected]                                                                     +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-verify-image.svg b/images/breeze/output-verify-image.svg index b79efb93f2602..9c6d1a641fc52 100644 --- a/images/breeze/output-verify-image.svg +++ b/images/breeze/output-verify-image.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + - Command: verify-image + Command: verify-image - + - - -Usage: breeze verify-image [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Verify CI image. - -╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ ---image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---image-tag-tTag added to the default naming conventions of Airflow CI/PROD images.(TEXT) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze verify-image [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Verify CI image. + +╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ +--image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--image-tag-tTag of the image which is used to pull or run the image (implies --mount-sources=skip when using   +to run shell or tests)                                                                             +(TEXT)                                                                                             +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-verify-prod-image.svg b/images/breeze/output-verify-prod-image.svg index 4bc7a36527741..48b61a30826b5 100644 --- a/images/breeze/output-verify-prod-image.svg +++ b/images/breeze/output-verify-prod-image.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + - Command: verify-prod-image + Command: verify-prod-image - + - - -Usage: breeze verify-prod-image [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Verify Production image. - -╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ ---image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---image-tag-tTag added to the default naming conventions of Airflow CI/PROD images.(TEXT) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze verify-prod-image [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Verify Production image. 
+ +╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ +--image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--image-tag-tTag of the image which is used to pull or run the image (implies --mount-sources=skip when using   +to run shell or tests)                                                                             +(TEXT)                                                                                             +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-verify-provider-packages.svg b/images/breeze/output-verify-provider-packages.svg index cfb88fbcc4755..12853b46a203f 100644 --- a/images/breeze/output-verify-provider-packages.svg +++ b/images/breeze/output-verify-provider-packages.svg @@ -19,149 +19,149 @@ font-weight: 700; } - .terminal-1322667406-matrix { + .terminal-1578520108-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1322667406-title { + .terminal-1578520108-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1322667406-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-1322667406-r2 { fill: #c5c8c6 } -.terminal-1322667406-r3 { fill: #d0b344;font-weight: bold } -.terminal-1322667406-r4 { fill: #868887 } -.terminal-1322667406-r5 { fill: #68a0b3;font-weight: bold } -.terminal-1322667406-r6 { fill: #8d7b39 } -.terminal-1322667406-r7 { fill: #98a84b;font-weight: bold } + .terminal-1578520108-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-1578520108-r2 { fill: #c5c8c6 } +.terminal-1578520108-r3 { fill: #d0b344;font-weight: bold } +.terminal-1578520108-r4 { fill: #868887 } +.terminal-1578520108-r5 { fill: #68a0b3;font-weight: bold } +.terminal-1578520108-r6 { fill: #8d7b39 } +.terminal-1578520108-r7 { fill: #98a84b;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: verify-provider-packages + Command: verify-provider-packages - + - - -Usage: breeze verify-provider-packages [OPTIONS] - -Verifies if all provider code is following expectations for providers. - -╭─ Provider verification flags ────────────────────────────────────────────────────────────────────────────────────────╮ ---use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It can also be `none`,        -`wheel`, or `sdist` if Airflow should be removed, installed from wheel packages   -or sdist packages available in dist folder respectively. Implies --mount-sources -`none`.                                                                           
-(none | wheel | sdist | <airflow_version>)                                        ---airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to       -specify constraints for the installed version and to find newer dependencies      -(TEXT)                                                                            ---airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT) ---use-packages-from-distInstall all found packages (--package-format determines type) from 'dist' folder  -when entering breeze.                                                             ---package-formatFormat of packages that should be installed from dist.(wheel | sdist) -[default: wheel]                                       ---debugDrop user in shell instead of running the command. Useful for debugging. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze verify-provider-packages [OPTIONS] + +Verifies if all provider code is following expectations for providers. + +╭─ Provider verification flags ────────────────────────────────────────────────────────────────────────────────────────╮ +--use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It can also be `none`,        +`wheel`, or `sdist` if Airflow should be removed, installed from wheel packages   +or sdist packages available in dist folder respectively. Implies --mount-sources +`remove`.                                                                         +(none | wheel | sdist | <airflow_version>)                                        +--airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to       +specify constraints for the installed version and to find newer dependencies      +(TEXT)                                                                            +--airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT) +--use-packages-from-distInstall all found packages (--package-format determines type) from 'dist' folder  +when entering breeze.                                                             +--package-formatFormat of packages that should be installed from dist.(wheel | sdist) +[default: wheel]                                       +--debugDrop user in shell instead of running the command. Useful for debugging. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh b/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh index d5db23fa7410a..d120b50ec75c4 100755 --- a/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh +++ b/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh @@ -133,10 +133,12 @@ function run_airflow_testing_in_docker() { echo "${COLOR_RED}***********************************************************************************************${COLOR_RESET}" echo echo "${COLOR_BLUE}***********************************************************************************************${COLOR_RESET}" - echo "${COLOR_BLUE}Reproduce the failed tests on your local machine (note that you need to use docker-compose v1 rather than v2 to enable Kerberos integration):${COLOR_RESET}" - echo "${COLOR_YELLOW}./breeze --github-image-id ${GITHUB_REGISTRY_PULL_IMAGE_TAG=} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} ${INTEGRATION_BREEZE_FLAGS[*]} shell${COLOR_RESET}" + echo "${COLOR_BLUE}Enter the same environment that was used for the tests:${COLOR_RESET}" + echo "${COLOR_YELLOW}breeze --image-tag ${IMAGE_TAG=} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} ${INTEGRATION_BREEZE_FLAGS[*]} shell${COLOR_RESET}" echo "${COLOR_BLUE}Then you can run failed tests with:${COLOR_RESET}" echo "${COLOR_YELLOW}pytest [TEST_NAME]${COLOR_RESET}" + echo "${COLOR_BLUE}Or you can run the tests: ${COLOR_RESET}" + echo "${COLOR_YELLOW}breeze --image-tag ${IMAGE_TAG=} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} ${INTEGRATION_BREEZE_FLAGS[*]} test${COLOR_RESET}" echo "${COLOR_BLUE}***********************************************************************************************${COLOR_RESET}" From e4c41ade22dac0c87e97e58801c1708efa8078a1 Mon Sep 17 00:00:00 2001 From: Josh Fell <48934154+josh-fell@users.noreply.github.com> Date: Sat, 18 Jun 2022 15:34:24 -0400 Subject: [PATCH 016/118] Move LOAD_DEFAULT_CONNECTIONS env var to database config section in CI (#24536) (cherry picked from commit fe105f8887d516c4dc6ecff55dded2215280716c) --- Dockerfile.ci | 2 +- scripts/docker/entrypoint_ci.sh | 2 +- scripts/in_container/airflow_ci.cfg | 4 +++- scripts/in_container/check_environment.sh | 2 +- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/Dockerfile.ci b/Dockerfile.ci index 41aaf579ec749..537f84a71f2d2 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -803,7 +803,7 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then cd "${AIRFLOW_SOURCES}" if [[ ${START_AIRFLOW:="false"} == "true" || ${START_AIRFLOW} == "True" ]]; then - export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS} + export AIRFLOW__DATABASE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS} export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES} # shellcheck source=scripts/in_container/bin/run_tmux exec run_tmux diff --git a/scripts/docker/entrypoint_ci.sh b/scripts/docker/entrypoint_ci.sh index 0aa7e5ddc645b..f5198a556c21c 100755 --- a/scripts/docker/entrypoint_ci.sh +++ b/scripts/docker/entrypoint_ci.sh @@ -211,7 +211,7 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then cd 
"${AIRFLOW_SOURCES}" if [[ ${START_AIRFLOW:="false"} == "true" || ${START_AIRFLOW} == "True" ]]; then - export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS} + export AIRFLOW__DATABASE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS} export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES} # shellcheck source=scripts/in_container/bin/run_tmux exec run_tmux diff --git a/scripts/in_container/airflow_ci.cfg b/scripts/in_container/airflow_ci.cfg index 60b6066144d6a..57863bd9c69f6 100644 --- a/scripts/in_container/airflow_ci.cfg +++ b/scripts/in_container/airflow_ci.cfg @@ -23,12 +23,14 @@ sql_alchemy_conn = # overridden by the startup scripts #sql_engine_collation_for_ids = overridden by the startup scripts unit_test_mode = True load_examples = True -load_default_connections = True donot_pickle = False dags_are_paused_at_creation = False default_impersonation = fernet_key = af7CN0q6ag5U3g08IsPsw3K45U7Xa0axgVFhoh-3zB8= +[database] +load_default_connections = True + [hive] default_hive_mapred_queue = airflow diff --git a/scripts/in_container/check_environment.sh b/scripts/in_container/check_environment.sh index 2ed2a4261047e..3d9247e0953aa 100755 --- a/scripts/in_container/check_environment.sh +++ b/scripts/in_container/check_environment.sh @@ -129,7 +129,7 @@ function startairflow_if_requested() { echo echo "Starting Airflow" echo - export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS} + export AIRFLOW__DATABASE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS} export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES} . "$( dirname "${BASH_SOURCE[0]}" )/configure_environment.sh" From c7fafc6516af9287c5c4da6af87d15d563d63137 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sun, 19 Jun 2022 16:16:59 +0200 Subject: [PATCH 017/118] Add verification steps when releasing the images. (#24520) After the images are pushed in CI we are running the verification of the AMD image now. This cannot be really done during building and pushing the image, because we are using multi-platform images using remote builders so the image is not even available locally, so we need to actually pull the images after they are built in order to verify them. 
This PR adds those features: * ability to pull images for verification with --pull-flag * ability to verify slim images (regular tests are skipped and we only expect the preinstalled providers to be available * the steps to verify the images (both regular and slim) are added to the workflow (cherry picked from commit 2936759256273a66cf6343e451d81013a95ec47e) --- .github/workflows/release_dockerhub_image.yml | 21 +++- .../commands/ci_image_commands.py | 12 +- .../commands/production_image_commands.py | 14 +++ .../airflow_breeze/utils/common_options.py | 6 + dev/breeze/src/airflow_breeze/utils/image.py | 1 + .../src/airflow_breeze/utils/run_tests.py | 4 +- docker_tests/test_prod_image.py | 10 +- images/breeze/output-commands-hash.txt | 2 +- images/breeze/output-verify-image.svg | 108 +++++++++-------- images/breeze/output-verify-prod-image.svg | 112 ++++++++++-------- 10 files changed, 177 insertions(+), 113 deletions(-) diff --git a/.github/workflows/release_dockerhub_image.yml b/.github/workflows/release_dockerhub_image.yml index a6c3e86ddd814..cd5fe6e9ce08d 100644 --- a/.github/workflows/release_dockerhub_image.yml +++ b/.github/workflows/release_dockerhub_image.yml @@ -119,12 +119,27 @@ jobs: ${{ needs.build-info.outputs.skipLatest }} ${{ needs.build-info.outputs.limitPlatform }} --limit-python ${{ matrix.python-version }} --slim-images - - name: "Docker logout" - run: docker logout - if: always() - name: "Stop ARM instance" run: ./scripts/ci/images/ci_stop_arm_instance.sh if: always() && github.repository == 'apache/airflow' + - name: > + Verify regular AMD64 image: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }} + run: > + breeze verify-prod-image + --pull-image + --image-name + ${{github.repository}}:${{github.event.inputs.airflowVersion}}-python${{matrix.python-version}} + - name: > + Verify slim AMD64 image: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }} + run: > + breeze verify-prod-image + --pull-image + --slim-image + --image-name + ${{github.repository}}:slim-${{github.event.inputs.airflowVersion}}-python${{matrix.python-version}} + - name: "Docker logout" + run: docker logout + if: always() - name: "Fix ownership" run: breeze fix-ownership if: always() diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py index 374dac76cf6c8..9ce895578e137 100644 --- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py @@ -54,6 +54,7 @@ option_parallelism, option_platform, option_prepare_buildx_cache, + option_pull_image, option_push_image, option_python, option_python_versions, @@ -181,6 +182,7 @@ "--image-name", "--python", "--image-tag", + "--pull-image", ], } ], @@ -270,7 +272,7 @@ def run_build(ci_image_params: BuildCiParams) -> None: @option_image_tag @option_tag_as_latest @click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED) -def pull_image( +def pull_ci_image( verbose: bool, dry_run: bool, python: str, @@ -338,14 +340,16 @@ def pull_image( @option_github_repository @option_image_tag @option_image_name +@option_pull_image @click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED) -def verify_image( +def verify_ci_image( verbose: bool, dry_run: bool, python: str, github_repository: str, image_name: str, image_tag: str, + pull_image: bool, extra_pytest_args: Tuple, ): """Verify CI image.""" @@ -353,12 +357,16 @@ def verify_image( if image_name is None: 
build_params = BuildCiParams(python=python, image_tag=image_tag, github_repository=github_repository) image_name = build_params.airflow_image_name_with_tag + if pull_image: + command_to_run = ["docker", "pull", image_name] + run_command(command_to_run, verbose=verbose, dry_run=dry_run, check=True) get_console().print(f"[info]Verifying CI image: {image_name}[/]") return_code, info = verify_an_image( image_name=image_name, verbose=verbose, dry_run=dry_run, image_type='CI', + slim_image=False, extra_pytest_args=extra_pytest_args, ) sys.exit(return_code) diff --git a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py index 8ac7aacab0f2f..0b5ed2e68b06d 100644 --- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py @@ -52,6 +52,7 @@ option_parallelism, option_platform, option_prepare_buildx_cache, + option_pull_image, option_push_image, option_python, option_python_versions, @@ -184,6 +185,7 @@ "--image-name", "--python", "--image-tag", + "--pull-image", ], } ], @@ -382,6 +384,12 @@ def pull_prod_image( @option_github_repository @option_image_tag @option_image_name +@option_pull_image +@click.option( + '--slim-image', + help='The image to verify is slim and non-slim tests should be skipped.', + is_flag=True, +) @click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED) def verify_prod_image( verbose: bool, @@ -390,6 +398,8 @@ def verify_prod_image( github_repository: str, image_name: str, image_tag: str, + pull_image: bool, + slim_image: bool, extra_pytest_args: Tuple, ): """Verify Production image.""" @@ -399,6 +409,9 @@ def verify_prod_image( python=python, image_tag=image_tag, github_repository=github_repository ) image_name = build_params.airflow_image_name_with_tag + if pull_image: + command_to_run = ["docker", "pull", image_name] + run_command(command_to_run, verbose=verbose, dry_run=dry_run, check=True) get_console().print(f"[info]Verifying PROD image: {image_name}[/]") return_code, info = verify_an_image( image_name=image_name, @@ -406,6 +419,7 @@ def verify_prod_image( dry_run=dry_run, image_type='PROD', extra_pytest_args=extra_pytest_args, + slim_image=slim_image, ) sys.exit(return_code) diff --git a/dev/breeze/src/airflow_breeze/utils/common_options.py b/dev/breeze/src/airflow_breeze/utils/common_options.py index 628bb8197972b..0bc4d5af9fc86 100644 --- a/dev/breeze/src/airflow_breeze/utils/common_options.py +++ b/dev/breeze/src/airflow_breeze/utils/common_options.py @@ -434,3 +434,9 @@ show_default=True, help='Mode of constraints for PROD image building', ) +option_pull_image = click.option( + '--pull-image', + help="Pull image is missing before attempting to verify it.", + is_flag=True, + envvar='PULL_IMAGE', +) diff --git a/dev/breeze/src/airflow_breeze/utils/image.py b/dev/breeze/src/airflow_breeze/utils/image.py index 36c40cf05a3e7..57ccc2e3a129b 100644 --- a/dev/breeze/src/airflow_breeze/utils/image.py +++ b/dev/breeze/src/airflow_breeze/utils/image.py @@ -201,6 +201,7 @@ def run_pull_and_verify_image( image_type=image_params.image_type, dry_run=dry_run, verbose=verbose, + slim_image=False, extra_pytest_args=extra_pytest_args, ) diff --git a/dev/breeze/src/airflow_breeze/utils/run_tests.py b/dev/breeze/src/airflow_breeze/utils/run_tests.py index 63c507afe4c77..6cda9efa1e798 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_tests.py +++ b/dev/breeze/src/airflow_breeze/utils/run_tests.py @@ 
-26,7 +26,7 @@ def verify_an_image( - image_name: str, image_type: str, dry_run: bool, verbose: bool, extra_pytest_args: Tuple + image_name: str, image_type: str, dry_run: bool, verbose: bool, slim_image: bool, extra_pytest_args: Tuple ) -> Tuple[int, str]: command_result = run_command( ["docker", "inspect", image_name], dry_run=dry_run, verbose=verbose, check=False, stdout=DEVNULL @@ -43,6 +43,8 @@ def verify_an_image( test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / "test_ci_image.py" env = os.environ.copy() env['DOCKER_IMAGE'] = image_name + if slim_image: + env['TEST_SLIM_IMAGE'] = 'true' command_result = run_command( [sys.executable, "-m", "pytest", str(test_path), *pytest_args, *extra_pytest_args], dry_run=dry_run, diff --git a/docker_tests/test_prod_image.py b/docker_tests/test_prod_image.py index 6e47cb0a9dbcc..aae374c4d02da 100644 --- a/docker_tests/test_prod_image.py +++ b/docker_tests/test_prod_image.py @@ -16,6 +16,7 @@ # under the License. import json +import os import subprocess import tempfile from pathlib import Path @@ -30,6 +31,7 @@ run_bash_in_docker, run_python_in_docker, ) +from setup import PREINSTALLED_PROVIDERS INSTALLED_PROVIDER_PATH = SOURCE_ROOT / "scripts" / "ci" / "installed_providers.txt" @@ -74,8 +76,11 @@ def test_bash_version(self): class TestPythonPackages: def test_required_providers_are_installed(self): - lines = (d.strip() for d in INSTALLED_PROVIDER_PATH.read_text().splitlines()) - lines = (d for d in lines) + if os.environ.get("TEST_SLIM_IMAGE"): + lines = PREINSTALLED_PROVIDERS + else: + lines = (d.strip() for d in INSTALLED_PROVIDER_PATH.read_text().splitlines()) + lines = (d for d in lines) packages_to_install = {f"apache-airflow-providers-{d.replace('.', '-')}" for d in lines} assert len(packages_to_install) != 0 @@ -163,6 +168,7 @@ def test_pip_dependencies_conflict(self): "virtualenv": ["virtualenv"], } + @pytest.mark.skipif(os.environ.get("TEST_SLIM_IMAGE") == "true", reason="Skipped with slim image") @pytest.mark.parametrize("package_name,import_names", PACKAGE_IMPORTS.items()) def test_check_dependencies_imports(self, package_name, import_names): run_python_in_docker(f"import {','.join(import_names)}") diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index dacbf6c6e2e96..58b9c9981f247 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -1 +1 @@ -bdc63bfd5f0967029b8b32a8068303cf +a5442e2ce4b79e862afacff8e79e43bd diff --git a/images/breeze/output-verify-image.svg b/images/breeze/output-verify-image.svg index 9c6d1a641fc52..e2dfddcbb07c9 100644 --- a/images/breeze/output-verify-image.svg +++ b/images/breeze/output-verify-image.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + - Command: verify-image + Command: verify-image - + - - -Usage: breeze verify-image [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Verify CI image. 
- -╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ ---image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---image-tag-tTag of the image which is used to pull or run the image (implies --mount-sources=skip when using   -to run shell or tests)                                                                             -(TEXT)                                                                                             -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze verify-image [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Verify CI image. + +╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ +--image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--image-tag-tTag of the image which is used to pull or run the image (implies --mount-sources=skip when using   +to run shell or tests)                                                                             +(TEXT)                                                                                             +--pull-imagePull image is missing before attempting to verify it. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-verify-prod-image.svg b/images/breeze/output-verify-prod-image.svg index 48b61a30826b5..31c7b66b725dc 100644 --- a/images/breeze/output-verify-prod-image.svg +++ b/images/breeze/output-verify-prod-image.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + - Command: verify-prod-image + Command: verify-prod-image - + - - -Usage: breeze verify-prod-image [OPTIONS] [EXTRA_PYTEST_ARGS]... - -Verify Production image. 
- -╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ ---image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---image-tag-tTag of the image which is used to pull or run the image (implies --mount-sources=skip when using   -to run shell or tests)                                                                             -(TEXT)                                                                                             -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze verify-prod-image [OPTIONS] [EXTRA_PYTEST_ARGS]... + +Verify Production image. + +╭─ Verify image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ +--image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--image-tag-tTag of the image which is used to pull or run the image (implies --mount-sources=skip when using   +to run shell or tests)                                                                             +(TEXT)                                                                                             +--pull-imagePull image is missing before attempting to verify it. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--slim-imageThe image to verify is slim and non-slim tests should be skipped. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ From 01eae0e0f0e0b11a8358c26f0769e44d44804c2b Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Tue, 21 Jun 2022 15:59:43 +0200 Subject: [PATCH 018/118] Added instructions on what to do if your command images are regenerated (#24581) In case there are conflicting changes to breeze command in several PRs, you might get conflicting images printed. 
in such case you should run `breeze regenerate-command-images` (cherry picked from commit 864cbc9cd843db94ca8bab2187b50de714fdb070) --- .pre-commit-config.yaml | 2 +- STATIC_CODE_CHECKS.rst | 6 +- .../configuration_and_maintenance_commands.py | 9 +- .../src/airflow_breeze/pre_commit_ids.py | 4 +- images/breeze/output-commands-hash.txt | 6 +- images/breeze/output-static-checks.svg | 224 +++++++++--------- .../pre_commit/pre_commit_breeze_cmd_line.py | 16 +- 7 files changed, 140 insertions(+), 127 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 15d18dd1b096b..99db08571455e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -341,7 +341,7 @@ repos: pass_filenames: true files: ^airflow/providers/.*\.py$ additional_dependencies: ['rich>=12.4.4'] - - id: update-breeze-file + - id: update-breeze-cmd-output name: Update output of breeze commands in BREEZE.rst entry: ./scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py language: python diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst index 5b7fa05908e46..00d533250b41e 100644 --- a/STATIC_CODE_CHECKS.rst +++ b/STATIC_CODE_CHECKS.rst @@ -205,14 +205,10 @@ require Breeze Docker image to be build locally. +--------------------------------------------------------+------------------------------------------------------------------+---------+ | check-system-tests-present | Check if system tests have required segments of code | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ -| check-system-tests-tocs | Check that system tests is properly added | | -+--------------------------------------------------------+------------------------------------------------------------------+---------+ | check-xml | Check XML files with xmllint | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ | codespell | Run codespell to check for common misspellings in files | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ -| create-missing-init-py-files-tests | Create missing init.py files in tests | | -+--------------------------------------------------------+------------------------------------------------------------------+---------+ | debug-statements | Detect accidentally committed debug statements | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ | detect-private-key | Detect if private key is added to the repository | | @@ -290,7 +286,7 @@ require Breeze Docker image to be build locally. 
+--------------------------------------------------------+------------------------------------------------------------------+---------+ | trailing-whitespace | Remove trailing whitespace at end of line | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ -| update-breeze-file | Update output of breeze commands in BREEZE.rst | | +| update-breeze-cmd-output | Update output of breeze commands in BREEZE.rst | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ | update-breeze-readme-config-hash | Update Breeze README.md with config files hash | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ diff --git a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py index 116319a2efca3..490a43c5f52a2 100644 --- a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py @@ -547,7 +547,14 @@ def regenerate_command_images(verbose: bool, dry_run: bool): except FileNotFoundError: # when we go to Python 3.8+ we can add missing_ok = True instead of try/except pass - command_to_execute = [sys.executable, "-m", "pre_commit", 'run', 'update-breeze-file', '--all-files'] + command_to_execute = [ + sys.executable, + "-m", + "pre_commit", + 'run', + 'update-breeze-cmd-output', + '--all-files', + ] env = os.environ.copy() run_command( command_to_execute, diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py b/dev/breeze/src/airflow_breeze/pre_commit_ids.py index 2f8366b01ffd2..bcddfa0ec9533 100644 --- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py +++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py @@ -59,10 +59,8 @@ 'check-setup-order', 'check-start-date-not-used-in-defaults', 'check-system-tests-present', - 'check-system-tests-tocs', 'check-xml', 'codespell', - 'create-missing-init-py-files-tests', 'debug-statements', 'detect-private-key', 'doctoc', @@ -92,7 +90,7 @@ 'run-shellcheck', 'static-check-autoflake', 'trailing-whitespace', - 'update-breeze-file', + 'update-breeze-cmd-output', 'update-breeze-readme-config-hash', 'update-extras', 'update-in-the-wild-to-be-sorted', diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index 58b9c9981f247..d27f8a1a7e153 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -1 +1,5 @@ -a5442e2ce4b79e862afacff8e79e43bd + +# This file is automatically generated by pre-commit. If you have a conflict with this file +# Please do not solve it but run `breeze regenerate-command-images`. +# This command should fix the conflict and regenerate help images that you have conflict with. 
+09687e7e07a9c1d62fa6c34201532113 diff --git a/images/breeze/output-static-checks.svg b/images/breeze/output-static-checks.svg index 1e8d54ca8ec49..5bd4b35dab40c 100644 --- a/images/breeze/output-static-checks.svg +++ b/images/breeze/output-static-checks.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - Command: static-checks + Command: static-checks - + - - -Usage: breeze static-checks [OPTIONS] [PRECOMMIT_ARGS]... - -Run static checks. - -╭─ Pre-commit flags ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---type-tType(s) of the static checks to run (multiple can be added).                             -(all | black | blacken-docs | check-airflow-2-1-compatibility |                          -check-airflow-config-yaml-consistent | check-airflow-providers-have-extras |             -check-apache-license-rat | check-base-operator-usage | check-boring-cyborg-configuration -| check-breeze-top-dependencies-limited | check-builtin-literals |                       -check-changelog-has-no-duplicates | check-daysago-import-from-utils |                    -check-docstring-param-types | check-executables-have-shebangs |                          -check-extra-packages-references | check-extras-order | check-for-inclusive-language |    -check-hooks-apply | check-incorrect-use-of-LoggingMixin |                                -check-integrations-are-consistent | check-merge-conflict | check-newsfragments-are-valid -| check-no-providers-in-core-examples | check-no-relative-imports |                      -check-persist-credentials-disabled-in-github-workflows |                                 -check-pre-commit-information-consistent | check-provide-create-sessions-imports |        -check-provider-yaml-valid | check-providers-init-file-missing |                          -check-providers-subpackages-init-file-exist | check-pydevd-left-in-code |                -check-revision-heads-map | check-safe-filter-usage-in-html | check-setup-order |         -check-start-date-not-used-in-defaults | check-system-tests-present |                     -check-system-tests-tocs | check-xml | codespell | create-missing-init-py-files-tests |   -debug-statements | detect-private-key | doctoc | end-of-file-fixer | fix-encoding-pragma -| flynt | forbid-tabs | identity | insert-license | isort | lint-chart-schema | lint-css -| lint-dockerfile | lint-helm-chart | lint-javascript | lint-json-schema | lint-markdown -| lint-openapi | mixed-line-ending | pretty-format-json | pydocstyle |                   -python-no-log-warn | pyupgrade | rst-backticks | run-flake8 | run-mypy | run-shellcheck  -| static-check-autoflake | trailing-whitespace | update-breeze-file |                    -update-breeze-readme-config-hash | update-extras | update-in-the-wild-to-be-sorted |     -update-inlined-dockerfile-scripts | update-local-yml-file | update-migration-references  -| update-providers-dependencies | update-setup-cfg-file |                                -update-spelling-wordlist-to-be-sorted | update-supported-versions |                      -update-vendored-in-k8s-json-schema | update-version | yamllint | yesqa)                  ---file-fList of files to run the checks on.(PATH) ---all-files-aRun checks on all files. ---show-diff-on-failure-sShow diff for files modified by the checks. 
[remainder of the regenerated help screenshot for `breeze static-checks`: the old and new SVG text show the
usage line, the full list of `--type` check ids (black ... yamllint, yesqa) and the `--file`, `--all-files`,
`--show-diff-on-failure`, `--last-commit`, `--commit-ref`, `--verbose`, `--dry-run`, `--github-repository`
and `--help` options; the garbled SVG text diff is omitted here.]
diff --git a/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py b/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py
index c01c6bfbf6b17..81b9fedf52f0e 100755
--- a/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py
+++ b/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py
@@ -30,6 +30,12 @@
 SCREENSHOT_WIDTH = "120"
 
+PREAMBLE = """
+# This file is automatically generated by pre-commit. If you have a conflict with this file
+# Please do not solve it but run `breeze regenerate-command-images`.
+# This command should fix the conflict and regenerate help images that you have conflict with.
+"""
+
 
 def get_command_list():
     comp_env = os.environ.copy()
@@ -61,7 +67,7 @@ def print_help_for_all_commands():
     )
     hash_file_path = BREEZE_IMAGES_DIR / "output-commands-hash.txt"
     try:
-        old_hash = hash_file_path.read_text()
+        old_hash = hash_file_path.read_text()[len(PREAMBLE) :]
     except FileNotFoundError:
         old_hash = ""
     if old_hash == new_hash:
@@ -83,7 +89,7 @@ def print_help_for_all_commands():
         env['RECORD_BREEZE_OUTPUT_FILE'] = str(BREEZE_IMAGES_DIR / f"output-{command}.svg")
         env['TERM'] = "xterm-256color"
         check_call(["breeze", command, "--help"], env=env)
-    hash_file_path.write_text(new_hash)
+    hash_file_path.write_text(PREAMBLE + new_hash)
 
 
 def verify_all_commands_described_in_docs():
@@ -114,3 +120,9 @@ def verify_all_commands_described_in_docs():
 if __name__ == '__main__':
     print_help_for_all_commands()
     verify_all_commands_described_in_docs()
+    console = Console(width=int(SCREENSHOT_WIDTH), color_system="standard")
+    if os.environ.get('CI'):
+        console.print(
+            "\n\n[yellow] If you see that images got re-generatead in CI, please run:[/]"
+            "\n\n        breeze regenerate-command-images\n\n\n"
+        )

From 9c57cf3baf0ce623a65730e0240d0551d86034e8 Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Tue, 21 Jun 2022 17:58:54 +0200
Subject: [PATCH 019/118] Switch to building images in parallel (#24580)

In the new Breeze, switching to using parallelism is a ... breeze.

This PR adds the capability of building the images in parallel in Breeze
locally - for the breeze command - and it also uses this capability to
build the images in parallel in our CI.
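To make the approach concrete: the helpers this patch adds below (`run_build_in_parallel` in the Breeze
command modules) are built on a plain `multiprocessing` pool with `apply_async`. The following is a
minimal, self-contained sketch of that pattern only - `build_one_image`, the hard-coded version list and
the `parallelism` default are illustrative assumptions, not code from this patch:

```python
import multiprocessing as mp
from typing import List, Tuple


def build_one_image(python_version: str) -> Tuple[int, str]:
    """Stand-in for the real per-version build (e.g. running `breeze build-image --python X.Y`)."""
    print(f"Building CI image for Python {python_version}")
    return 0, f"Image build: {python_version}"


def run_builds_in_parallel(python_versions: List[str], parallelism: int = 4) -> None:
    # One pool of worker processes, capped at `parallelism`, instead of one
    # GitHub Actions matrix job per Python version.
    with mp.Pool(parallelism) as pool:
        async_results = [pool.apply_async(build_one_image, (version,)) for version in python_versions]
        results = [result.get() for result in async_results]
    # All builds either succeed together or the whole step fails.
    failed = [message for return_code, message in results if return_code != 0]
    if failed:
        raise SystemExit(f"Builds failed: {failed}")


if __name__ == "__main__":
    run_builds_in_parallel(["3.7", "3.8", "3.9", "3.10"])
```

Running all versions through one pool on a single large machine is what replaces the per-version matrix
jobs; the trade-off, noted below, is that the output of the parallel builds is interleaved in one CI log.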
Our builds are always executed on powerful, big machines with lots of CPUs
and docker running in a memory filesystem with 32GB RAM, so it should be
possible to run all builds in parallel on a single machine rather than spin
off parallel machines to run the builds using the matrix strategy of GitHub
Actions.

Generally speaking - this will either speed up or give a 4x cost saving for
the build steps for all the "full test needed" PRs as well as all the main
builds.

There are a number of savings and improvements we can achieve this way:

1) less overhead for starting and running the machines
2) it seems that with the new buildkit the parallel builds no longer suffer
   from the sequential locks they used to, so we basically do the same job
   using 25% of the resources for building the images
3) we will stop having random "one image failed to build" cases - they will
   all either fail or succeed
4) fewer checks in the output
5) production builds will additionally gain from a single CI image being
   pulled to perform the preparation of the packages, and a single package
   preparation step - it will save 4-5 minutes per image

The disadvantage is a less clear output of such a parallel build, where the
outputs from multiple builds are interleaved in one CI log.

(cherry picked from commit 893d935ff01212085351b7ad13b19fca1d53bb57)
---
 .github/workflows/build-images.yml            |  42 +--
 .github/workflows/ci.yml                      |  42 ++-
 dev/REFRESHING_CI_CACHE.md                    |   2 +-
 .../commands/ci_image_commands.py             |  85 +++--
 .../commands/production_image_commands.py     |  90 +++--
 .../airflow_breeze/utils/common_options.py    |   6 -
 dev/refresh_images.sh                         |   4 +-
 images/breeze/output-build-image.svg          | 276 +++++++-------
 images/breeze/output-build-prod-image.svg     | 344 +++++++++---------
 images/breeze/output-commands-hash.txt        |   2 +-
 scripts/ci/selective_ci_checks.sh             |   2 +
 11 files changed, 489 insertions(+), 406 deletions(-)

diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
index 9970a82e6c213..541c72165997b 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -57,6 +57,7 @@ jobs:
       pythonVersions: "${{ steps.selective-checks.python-versions }}"
       upgradeToNewerDependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }}
       allPythonVersions: ${{ steps.selective-checks.outputs.all-python-versions }}
+      allPythonVersionsListAsString: ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }}
       defaultPythonVersion: ${{ steps.selective-checks.outputs.default-python-version }}
       run-tests: ${{ steps.selective-checks.outputs.run-tests }}
       run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }}
@@ -155,19 +156,14 @@ jobs:
     permissions:
       packages: write
     timeout-minutes: 80
-    name: "Build CI image ${{matrix.python-version}}"
+    name: "Build CI images ${{ needs.build-info.outputs.allPythonVersionsListAsString }}"
     runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info]
-    strategy:
-      matrix:
-        python-version: ${{ fromJson(needs.build-info.outputs.allPythonVersions) }}
-      fail-fast: true
     if: |
       needs.build-info.outputs.image-build == 'true' &&
       github.event.pull_request.head.repo.full_name != 'apache/airflow'
     env:
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }}
-      PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       BACKEND: sqlite
     outputs: ${{toJSON(needs.build-info.outputs) }}
     steps:
@@ -226,20 +222,25 @@ jobs:
       - run: ./scripts/ci/install_breeze.sh
       - name: "Free space"
         run: breeze free-space
-      - name: Build & Push CI image 
${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} - run: breeze build-image --push-image --tag-as-latest + - name: > + Build & Push CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + run: breeze build-image --push-image --tag-as-latest --run-in-parallel env: UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} - name: Push empty CI image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} if: failure() || cancelled() - run: breeze build-image --push-image --empty-image + run: breeze build-image --push-image --empty-image --run-in-parallel env: IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - - name: "Candidates for pip resolver backtrack triggers: ${{ matrix.python-version }}" + - name: "Candidates for pip resolver backtrack triggers" if: failure() || cancelled() - run: breeze find-newer-dependencies --max-age 1 --python "${{ matrix.python-version }}" + run: > + breeze find-newer-dependencies --max-age 1 + --python "${{ needs.build-info.outputs.defaultPythonVersion }}" - name: "Fix ownership" run: breeze fix-ownership if: always() @@ -248,19 +249,14 @@ jobs: permissions: packages: write timeout-minutes: 80 - name: "Build PROD image ${{matrix.python-version}}" + name: "Build PROD images ${{ needs.build-info.outputs.allPythonVersionsListAsString }}" runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} needs: [build-info, build-ci-images] - strategy: - matrix: - python-version: ${{ fromJson(needs.build-info.outputs.allPythonVersions) }} - fail-fast: true if: | needs.build-info.outputs.image-build == 'true' && github.event.pull_request.head.repo.full_name != 'apache/airflow' env: RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} - PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} BACKEND: sqlite steps: - name: Cleanup repo @@ -320,7 +316,7 @@ jobs: run: breeze free-space - name: > Pull CI image for PROD build: - ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} + ${{ needs.build-info.outputs.defaultPythonVersion }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} run: breeze pull-image --tag-as-latest env: # Always use default Python version of CI image for preparing packages @@ -338,9 +334,12 @@ jobs: run: breeze prepare-airflow-package --package-format wheel --version-suffix-for-pypi dev0 - name: "Move dist packages to docker-context files" run: mv -v ./dist/*.whl ./docker-context-files - - name: Build & Push PROD image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} + - name: > + Build & Push PROD images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + ${{ needs.build-info.outputs.allPythonVersionsListAsString }} run: > breeze build-prod-image + --run-in-parallel --tag-as-latest --push-image --install-packages-from-context @@ -350,9 +349,10 @@ jobs: UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - - name: Push empty PROD image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + - name: Push empty PROD images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} if: failure() || cancelled() - 
run: breeze build-prod-image --cleanup-context --push-image --empty-image + run: breeze build-prod-image --cleanup-context --push-image --empty-image --run-in-parallel env: IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Fix ownership" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bde6fd15e62b3..c0cbb9f09bc33 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -132,6 +132,7 @@ jobs: upgradeToNewerDependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} pythonVersions: ${{ steps.selective-checks.outputs.python-versions }} pythonVersionsListAsString: ${{ steps.selective-checks.outputs.python-versions-list-as-string }} + allPythonVersionsListAsString: ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }} defaultPythonVersion: ${{ steps.selective-checks.outputs.default-python-version }} kubernetesVersions: ${{ steps.selective-checks.outputs.kubernetes-versions }} kubernetesVersionsListAsString: ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }} @@ -285,13 +286,11 @@ jobs: permissions: packages: write timeout-minutes: 80 - name: "${{needs.build-info.outputs.buildJobDescription}} CI image ${{matrix.python-version}}" + name: > + ${{needs.build-info.outputs.buildJobDescription}} CI images + ${{ needs.build-info.outputs.allPythonVersionsListAsString }} runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} needs: [build-info] - strategy: - matrix: - python-version: ${{ fromJson(needs.build-info.outputs.allPythonVersions) }} - fail-fast: true env: RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} steps: @@ -311,7 +310,7 @@ jobs: if: needs.build-info.outputs.inWorkflowBuild == 'true' - name: "Retrieve DEFAULTS from the _initialization.sh" # We cannot "source" the script here because that would be a security problem (we cannot run - # any code that comes from the sources coming from the PR. Therefore we extract the + # any code that comes from the sources coming from the PR. 
Therefore, we extract the # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands id: defaults run: | @@ -331,17 +330,21 @@ jobs: - name: "Free space" run: breeze free-space if: needs.build-info.outputs.inWorkflowBuild == 'true' - - name: Build & Push CI image ${{ matrix.python-version }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} - run: breeze build-image --push-image --tag-as-latest + - name: > + Build & Push CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + run: breeze build-image --push-image --tag-as-latest --run-in-parallel env: - PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} if: needs.build-info.outputs.inWorkflowBuild == 'true' - - name: "Candidates for pip resolver backtrack triggers: ${{ matrix.python-version }}" + - name: "Candidates for pip resolver backtrack triggers" if: failure() || cancelled() - run: breeze find-newer-dependencies --max-age 1 --python "${{ matrix.python-version }}" + run: > + breeze find-newer-dependencies --max-age 1 + --python "${{ needs.build-info.outputs.defaultPythonVersion }}" - name: "Fix ownership" run: breeze fix-ownership if: always() && needs.build-info.outputs.inWorkflowBuild == 'true' @@ -350,17 +353,14 @@ jobs: permissions: packages: write timeout-minutes: 80 - name: "${{needs.build-info.outputs.buildJobDescription}} PROD image ${{matrix.python-version}}" + name: > + ${{needs.build-info.outputs.buildJobDescription}} PROD images + ${{ needs.build-info.outputs.allPythonVersionsListAsString }} runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} needs: [build-info, build-ci-images] - strategy: - matrix: - python-version: ${{ fromJson(needs.build-info.outputs.allPythonVersions) }} - fail-fast: true env: RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} BACKEND: sqlite - PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} VERSION_SUFFIX_FOR_PYPI: "dev0" steps: @@ -424,10 +424,13 @@ jobs: - name: "Move dist packages to docker-context files" run: mv -v ./dist/*.whl ./docker-context-files if: needs.build-info.outputs.inWorkflowBuild == 'true' - - name: Build & Push PROD image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} + - name: > + Build & Push PROD images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + ${{ needs.build-info.outputs.allPythonVersionsListAsString }} run: > breeze build-prod-image --tag-as-latest + --run-in-parallel --push-image --install-packages-from-context --disable-airflow-repo-cache @@ -436,6 +439,7 @@ jobs: UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} if: needs.build-info.outputs.inWorkflowBuild == 'true' - name: "Fix ownership" run: breeze fix-ownership @@ -1385,7 +1389,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" key: ${{ runner.os }}-docker-venv-${{ hashFiles('scripts/ci/images/ci_run_docker_tests.py') }} - name: Wait for PROD images ${{ env.PYTHON_VERSIONS }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} # We wait for the 
images to be available either from "build-images.yml' run as pull_request_target - # or from build-prod-image above. + # or from build-prod-images above. # We are utilising single job to wait for all images because this job merely waits # For the images to be available and test them. run: breeze pull-prod-image --verify-image --wait-for-image --run-in-parallel diff --git a/dev/REFRESHING_CI_CACHE.md b/dev/REFRESHING_CI_CACHE.md index 85194845dc9b2..5fd458bf34a43 100644 --- a/dev/REFRESHING_CI_CACHE.md +++ b/dev/REFRESHING_CI_CACHE.md @@ -51,7 +51,7 @@ manual refresh might be needed. # Manually generating constraint files ```bash -breeze build-image --build-multiple-images --upgrade-to-newer-dependencies --answer yes +breeze build-image --run-in-parallel --upgrade-to-newer-dependencies --answer yes breeze generate-constraints --airflow-constraints-mode constraints --run-in-parallel --answer yes breeze generate-constraints --airflow-constraints-mode constraints-source-providers --run-in-parallel --answer yes breeze generate-constraints --airflow-constraints-mode constraints-no-providers --run-in-parallel --answer yes diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py index 9ce895578e137..ce38e8372468d 100644 --- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py @@ -37,7 +37,6 @@ option_airflow_constraints_mode_ci, option_airflow_constraints_reference_build, option_answer, - option_build_multiple_images, option_debian_version, option_dev_apt_command, option_dev_apt_deps, @@ -115,9 +114,10 @@ ], }, { - "name": "Building multiple images", + "name": "Building images in parallel", "options": [ - "--build-multiple-images", + "--run-in-parallel", + "--parallelism", "--python-versions", ], }, @@ -189,13 +189,46 @@ } +def start_building(ci_image_params: BuildCiParams, dry_run: bool, verbose: bool) -> bool: + """Starts building attempt. Returns false if we should not continue""" + if not ci_image_params.force_build and not ci_image_params.upgrade_to_newer_dependencies: + if not should_we_run_the_build(build_ci_params=ci_image_params): + return False + if ci_image_params.prepare_buildx_cache or ci_image_params.push_image: + login_to_github_docker_registry(image_params=ci_image_params, dry_run=dry_run, verbose=verbose) + return True + + +def run_build_in_parallel( + image_params_list: List[BuildCiParams], + python_version_list: List[str], + parallelism: int, + dry_run: bool, + verbose: bool, +) -> None: + get_console().print( + f"\n[info]Building with parallelism = {parallelism} for the images: {python_version_list}:" + ) + pool = mp.Pool(parallelism) + results = [ + pool.apply_async( + run_build_ci_image, + args=(verbose, dry_run, image_param, True), + ) + for image_param in image_params_list + ] + check_async_run_results(results) + pool.close() + + @main.command(name='build-image') @option_github_repository @option_verbose @option_dry_run @option_answer @option_python -@option_build_multiple_images +@option_run_in_parallel +@option_parallelism @option_python_versions @option_upgrade_to_newer_dependencies @option_platform @@ -229,7 +262,8 @@ def build_image( verbose: bool, dry_run: bool, - build_multiple_images: bool, + run_in_parallel: bool, + parallelism: int, python_versions: str, answer: str, **kwargs, @@ -237,7 +271,7 @@ def build_image( """Build CI image. 
Include building multiple images for all python versions (sequentially).""" def run_build(ci_image_params: BuildCiParams) -> None: - return_code, info = build_ci_image( + return_code, info = run_build_ci_image( verbose=verbose, dry_run=dry_run, ci_image_params=ci_image_params, parallel=False ) if return_code != 0: @@ -246,15 +280,27 @@ def run_build(ci_image_params: BuildCiParams) -> None: perform_environment_checks(verbose=verbose) parameters_passed = filter_out_none(**kwargs) - if build_multiple_images: + parameters_passed['force_build'] = True + fix_group_permissions(verbose=verbose) + if run_in_parallel: python_version_list = get_python_version_list(python_versions) + params_list: List[BuildCiParams] = [] for python in python_version_list: params = BuildCiParams(**parameters_passed) params.python = python params.answer = answer - run_build(ci_image_params=params) + params_list.append(params) + start_building(params_list[0], dry_run=dry_run, verbose=verbose) + run_build_in_parallel( + image_params_list=params_list, + python_version_list=python_version_list, + parallelism=parallelism, + dry_run=dry_run, + verbose=verbose, + ) else: params = BuildCiParams(**parameters_passed) + start_building(params, dry_run=dry_run, verbose=verbose) run_build(ci_image_params=params) @@ -436,7 +482,7 @@ def should_we_run_the_build(build_ci_params: BuildCiParams, verbose: bool) -> bo sys.exit(1) -def build_ci_image( +def run_build_ci_image( verbose: bool, dry_run: bool, ci_image_params: BuildCiParams, parallel: bool ) -> Tuple[int, str]: """ @@ -467,17 +513,11 @@ def build_ci_image( "preparing buildx cache![/]\n" ) return 1, "Error: building multi-platform image without --push-image." - fix_group_permissions(verbose=verbose) if verbose or dry_run: get_console().print( f"\n[info]Building CI image of airflow from {AIRFLOW_SOURCES_ROOT} " f"python version: {ci_image_params.python}[/]\n" ) - if not ci_image_params.force_build and not ci_image_params.upgrade_to_newer_dependencies: - if not should_we_run_the_build(build_ci_params=ci_image_params, verbose=verbose): - return 0, f"Image build: {ci_image_params.python}" - if ci_image_params.prepare_buildx_cache or ci_image_params.push_image: - login_to_github_docker_registry(image_params=ci_image_params, dry_run=dry_run, verbose=verbose) if ci_image_params.prepare_buildx_cache: build_command_result = build_cache( image_params=ci_image_params, dry_run=dry_run, verbose=verbose, parallel=parallel @@ -524,19 +564,6 @@ def build_ci_image( return build_command_result.returncode, f"Image build: {ci_image_params.python}" -def build_ci_image_in_parallel( - verbose: bool, dry_run: bool, parallelism: int, python_version_list: List[str], **kwargs -): - """Run CI image builds in parallel.""" - get_console().print( - f"\n[info]Running with parallelism = {parallelism} for the images: {python_version_list}:" - ) - pool = mp.Pool(parallelism) - results = [pool.apply_async(build_ci_image, args=(verbose, dry_run, False), kwds=kwargs)] - check_async_run_results(results) - pool.close() - - def rebuild_or_pull_ci_image_if_needed( command_params: Union[ShellParams, BuildCiParams], dry_run: bool, verbose: bool ) -> None: @@ -575,4 +602,4 @@ def rebuild_or_pull_ci_image_if_needed( 'Forcing build.[/]' ) ci_image_params.force_build = True - build_ci_image(verbose, dry_run=dry_run, ci_image_params=ci_image_params, parallel=False) + run_build_ci_image(verbose, dry_run=dry_run, ci_image_params=ci_image_params, parallel=False) diff --git 
a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py index 0b5ed2e68b06d..9ae3791e31264 100644 --- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py @@ -15,9 +15,10 @@ # specific language governing permissions and limitations # under the License. import contextlib +import multiprocessing as mp import os import sys -from typing import Optional, Tuple +from typing import List, Optional, Tuple import click @@ -36,7 +37,6 @@ option_airflow_constraints_mode_prod, option_airflow_constraints_reference_build, option_answer, - option_build_multiple_images, option_debian_version, option_dev_apt_command, option_dev_apt_deps, @@ -74,6 +74,7 @@ prepare_docker_build_from_input, ) from airflow_breeze.utils.image import run_pull_image, run_pull_in_parallel, tag_image_as_latest +from airflow_breeze.utils.parallel import check_async_run_results from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, DOCKER_CONTEXT_DIR from airflow_breeze.utils.python_versions import get_python_version_list from airflow_breeze.utils.registry import login_to_github_docker_registry @@ -103,9 +104,10 @@ ], }, { - "name": "Building multiple images", + "name": "Building images in parallel", "options": [ - "--build-multiple-images", + "--run-in-parallel", + "--parallelism", "--python-versions", ], }, @@ -192,12 +194,48 @@ } +def start_building(prod_image_params: BuildProdParams, dry_run: bool, verbose: bool): + if prod_image_params.cleanup_context: + clean_docker_context_files(verbose=verbose, dry_run=dry_run) + check_docker_context_files(prod_image_params.install_packages_from_context) + if prod_image_params.prepare_buildx_cache or prod_image_params.push_image: + login_to_github_docker_registry(image_params=prod_image_params, dry_run=dry_run, verbose=verbose) + + +def run_build_in_parallel( + image_params_list: List[BuildProdParams], + python_version_list: List[str], + parallelism: int, + dry_run: bool, + verbose: bool, +) -> None: + get_console().print( + f"\n[info]Building with parallelism = {parallelism} for the images: {python_version_list}:" + ) + pool = mp.Pool(parallelism) + results = [ + pool.apply_async( + run_build_production_image, + args=( + verbose, + dry_run, + image_param, + True, + ), + ) + for image_param in image_params_list + ] + check_async_run_results(results) + pool.close() + + @option_verbose @option_dry_run @option_answer @main.command(name='build-prod-image') @option_python -@option_build_multiple_images +@option_run_in_parallel +@option_parallelism @option_python_versions @option_upgrade_to_newer_dependencies @option_platform @@ -269,7 +307,8 @@ def build_prod_image( verbose: bool, dry_run: bool, - build_multiple_images: bool, + run_in_parallel: bool, + parallelism: int, python_versions: str, answer: Optional[str], **kwargs, @@ -279,8 +318,8 @@ def build_prod_image( """ def run_build(prod_image_params: BuildProdParams) -> None: - return_code, info = build_production_image( - verbose=verbose, dry_run=dry_run, prod_image_params=prod_image_params + return_code, info = run_build_production_image( + verbose=verbose, dry_run=dry_run, prod_image_params=prod_image_params, parallel=False ) if return_code != 0: get_console().print(f"[error]Error when building image! 
{info}") @@ -288,15 +327,27 @@ def run_build(prod_image_params: BuildProdParams) -> None: perform_environment_checks(verbose=verbose) parameters_passed = filter_out_none(**kwargs) - if build_multiple_images: + + fix_group_permissions(verbose=verbose) + if run_in_parallel: python_version_list = get_python_version_list(python_versions) + params_list: List[BuildProdParams] = [] for python in python_version_list: params = BuildProdParams(**parameters_passed) params.python = python params.answer = answer - run_build(prod_image_params=params) + params_list.append(params) + start_building(prod_image_params=params_list[0], dry_run=dry_run, verbose=verbose) + run_build_in_parallel( + image_params_list=params_list, + python_version_list=python_version_list, + parallelism=parallelism, + dry_run=dry_run, + verbose=verbose, + ) else: params = BuildProdParams(**parameters_passed) + start_building(prod_image_params=params, dry_run=dry_run, verbose=verbose) run_build(prod_image_params=params) @@ -473,8 +524,8 @@ def check_docker_context_files(install_packages_from_context: bool): sys.exit(1) -def build_production_image( - verbose: bool, dry_run: bool, prod_image_params: BuildProdParams +def run_build_production_image( + verbose: bool, dry_run: bool, prod_image_params: BuildProdParams, parallel: bool ) -> Tuple[int, str]: """ Builds PROD image: @@ -505,21 +556,10 @@ def build_production_image( " or preparing buildx cache![/]\n" ) return 1, "Error: building multi-platform image without --push-image." - fix_group_permissions(verbose=verbose) - if verbose or dry_run: - get_console().print( - f"\n[info]Building PROD image of airflow from {AIRFLOW_SOURCES_ROOT} " - f"python version: {prod_image_params.python}[/]\n" - ) - if prod_image_params.cleanup_context: - clean_docker_context_files(verbose=verbose, dry_run=dry_run) - check_docker_context_files(prod_image_params.install_packages_from_context) - if prod_image_params.prepare_buildx_cache or prod_image_params.push_image: - login_to_github_docker_registry(image_params=prod_image_params, dry_run=dry_run, verbose=verbose) get_console().print(f"\n[info]Building PROD Image for Python {prod_image_params.python}\n") if prod_image_params.prepare_buildx_cache: build_command_result = build_cache( - image_params=prod_image_params, dry_run=dry_run, verbose=verbose, parallel=False + image_params=prod_image_params, dry_run=dry_run, verbose=verbose, parallel=parallel ) else: if prod_image_params.empty_image: @@ -547,7 +587,7 @@ def build_production_image( cwd=AIRFLOW_SOURCES_ROOT, check=False, text=True, - enabled_output_group=True, + enabled_output_group=not parallel, ) if build_command_result.returncode == 0: if prod_image_params.tag_as_latest: diff --git a/dev/breeze/src/airflow_breeze/utils/common_options.py b/dev/breeze/src/airflow_breeze/utils/common_options.py index 0bc4d5af9fc86..292e74891dbfa 100644 --- a/dev/breeze/src/airflow_breeze/utils/common_options.py +++ b/dev/breeze/src/airflow_breeze/utils/common_options.py @@ -377,12 +377,6 @@ envvar='PARALLELISM', show_default=True, ) -option_build_multiple_images = click.option( - '--build-multiple-images', - help="Run the operation sequentially on all or selected subset of Python versions.", - is_flag=True, - envvar='BUILD_MULTIPLE_IMAGES', -) argument_packages = click.argument( "packages", nargs=-1, diff --git a/dev/refresh_images.sh b/dev/refresh_images.sh index ed5591585aeb0..4a11212b2c82c 100755 --- a/dev/refresh_images.sh +++ b/dev/refresh_images.sh @@ -26,7 +26,7 @@ export GITHUB_TOKEN="" breeze self-upgrade 
--force
 
 breeze build-image \
-    --build-multiple-images \
+    --run-in-parallel \
     --prepare-buildx-cache \
     --force-build \
     --platform linux/amd64,linux/arm64 \
@@ -44,7 +44,7 @@ breeze prepare-airflow-package --package-format wheel --version-suffix-for-pypi
 mv -v ./dist/*.whl ./docker-context-files
 
 breeze build-prod-image \
-    --build-multiple-images \
+    --run-in-parallel \
     --airflow-is-in-context \
     --install-packages-from-context \
     --prepare-buildx-cache \
diff --git a/images/breeze/output-build-image.svg b/images/breeze/output-build-image.svg
index 81703735d0155..389c973d0e273 100644
[regenerated help screenshot for `breeze build-image`: the "Building multiple images" option group
(--build-multiple-images, --python-versions) becomes "Building images in parallel" (--run-in-parallel,
--parallelism with default 4 and range 1<=x<=8, --python-versions); the remaining options are unchanged.
The garbled SVG text diff is omitted here.]
diff --git a/images/breeze/output-build-prod-image.svg b/images/breeze/output-build-prod-image.svg
index dcbd32a96662c..331cd3808ba34 100644
[regenerated help screenshot for `breeze build-prod-image`: the same replacement of the
"Building multiple images" option group with "Building images in parallel" (--run-in-parallel,
--parallelism, --python-versions). The garbled SVG text diff is omitted here.]
diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt
index d27f8a1a7e153..0ea56c33a726b 100644
--- a/images/breeze/output-commands-hash.txt
+++ b/images/breeze/output-commands-hash.txt
@@ -2,4 +2,4 @@
 # This file is automatically generated by pre-commit. If you have a conflict with this file
 # Please do not solve it but run `breeze regenerate-command-images`.
 # This command should fix the conflict and regenerate help images that you have conflict with.
-09687e7e07a9c1d62fa6c34201532113
+9139ef44b7f1ba24ddee50b71d3867c2
diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh
index c9f215c53c251..26fbf13f66ed9 100755
--- a/scripts/ci/selective_ci_checks.sh
+++ b/scripts/ci/selective_ci_checks.sh
@@ -55,6 +55,7 @@ function output_all_basic_variables() {
             "$(initialization::parameters_to_json "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS[@]}")"
         initialization::ga_output all-python-versions \
             "$(initialization::parameters_to_json "${ALL_PYTHON_MAJOR_MINOR_VERSIONS[@]}")"
+        initialization::ga_output all-python-versions-list-as-string "${ALL_PYTHON_MAJOR_MINOR_VERSIONS[*]}"
         initialization::ga_output python-versions-list-as-string "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS[*]}"
         initialization::ga_output kubernetes-versions-list-as-string "${CURRENT_KUBERNETES_VERSIONS[*]}"
     else
@@ -64,6 +65,7 @@ function output_all_basic_variables() {
         # all-python-versions are used in BuildImage Workflow
         initialization::ga_output all-python-versions \
             "$(initialization::parameters_to_json "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}")"
+        initialization::ga_output all-python-versions-list-as-string "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}"
         initialization::ga_output python-versions-list-as-string "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}"
         initialization::ga_output kubernetes-versions-list-as-string "${DEFAULT_KUBERNETES_VERSION}"
     fi

From 7e08c3a3df816cad6eb20f4d8a141a211525b0d1 Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Wed, 22 Jun 2022 23:26:28 +0200
Subject: [PATCH 020/118] Upgrade FAB to 4.1.1 (#24399)

* Upgrade FAB to 4.1.1

The Flask Application Builder has been updated recently to support a
number of newer dependencies. This PR is an attempt to migrate FAB to
the newer version.

This includes:

* update setup.py and setup.cfg upper and lower bounds to account for
  the proper versions of dependencies that FAB < 4.0.0 was blocking
  from upgrade
* added typed Flask application retrieval with custom application
  fields available for MyPy typing checks
* fix typing to account for typing hints added in multiple upgraded
  libraries - optional values and the content of requests returned as
  Mapping
* switch to PyJWT 2.* by using the non-deprecated "required" claim as
  a list rather than separate fields
* add the possibility to install providers without constraints so that
  we can avoid errors on conflicting constraints when
  upgrade-to-newer-dependencies is used
* add a pre-commit check that the 2.4+-only get_airflow_app is not
  used in providers
* avoid Bad Request in case the request sent to Flask 2.0 is not JSON
  content type
* switch imports of internal classes to the direct packages where the
  classes are available, rather than from "airflow.models", to satisfy
  MyPy
* synchronize changes of FAB Security Manager 4.1.1 with our copy of
  the Security Manager
* add error handling for a few "None" cases detected by MyPy
* corrected test cases that were broken by immutability of Flask 2
  objects and better escaping done by Flask 2
* updated test cases to account for redirection to "path" rather than
  the full URL by Flask 2

Fixes: #22397

* fixup! 
Upgrade FAB to 4.1.1 (cherry picked from commit e2f19505bf3622935480e80bee55bf5b6d80097b) --- .github/workflows/ci.yml | 4 + .pre-commit-config.yaml | 2 +- Dockerfile.ci | 36 +- airflow/api/auth/backend/basic_auth.py | 5 +- .../api_connexion/endpoints/dag_endpoint.py | 9 +- .../endpoints/dag_run_endpoint.py | 20 +- .../endpoints/extra_link_endpoint.py | 4 +- .../api_connexion/endpoints/log_endpoint.py | 11 +- .../api_connexion/endpoints/pool_endpoint.py | 14 +- .../api_connexion/endpoints/request_dict.py | 24 ++ .../endpoints/role_and_permission_endpoint.py | 15 +- .../api_connexion/endpoints/task_endpoint.py | 7 +- .../endpoints/task_instance_endpoint.py | 17 +- .../api_connexion/endpoints/user_endpoint.py | 13 +- .../endpoints/variable_endpoint.py | 7 +- .../api_connexion/endpoints/xcom_endpoint.py | 5 +- airflow/api_connexion/schemas/dag_schema.py | 2 +- .../schemas/task_instance_schema.py | 2 +- airflow/api_connexion/security.py | 7 +- airflow/models/abstractoperator.py | 1 - airflow/operators/trigger_dagrun.py | 5 +- .../common/auth_backend/google_openid.py | 2 +- airflow/sensors/external_task.py | 6 +- airflow/utils/airflow_flask_app.py | 37 ++ airflow/utils/jwt_signer.py | 4 +- airflow/www/api/experimental/endpoints.py | 3 +- airflow/www/auth.py | 5 +- .../www/extensions/init_wsgi_middlewares.py | 2 +- airflow/www/fab_security/manager.py | 37 +- airflow/www/views.py | 118 ++++--- dev/breeze/README.md | 2 +- dev/breeze/setup.cfg | 2 +- .../commands/release_management_commands.py | 9 + .../src/airflow_breeze/params/shell_params.py | 1 + .../utils/docker_command_utils.py | 1 + .../src/airflow_breeze/utils/recording.py | 4 +- dev/send_email.py | 3 +- images/breeze/output-commands-hash.txt | 5 - .../output-verify-provider-packages.svg | 140 ++++---- newsfragments/24399.significant.rst | 31 ++ scripts/ci/docker-compose/_docker.env | 1 + scripts/ci/docker-compose/base.yml | 1 + scripts/ci/docker-compose/devcontainer.env | 1 + .../pre_commit_check_2_1_compatibility.py | 44 ++- scripts/docker/entrypoint_ci.sh | 36 +- scripts/in_container/_in_container_utils.sh | 29 +- setup.cfg | 47 +-- setup.py | 12 +- .../endpoints/test_dag_endpoint.py | 97 +++--- .../endpoints/test_dag_source_endpoint.py | 27 +- .../endpoints/test_xcom_endpoint.py | 6 +- .../api_connexion/schemas/test_dag_schema.py | 321 +++++++++--------- tests/conftest.py | 24 ++ .../remote_user_api_auth_backend.py | 6 +- tests/utils/test_serve_logs.py | 13 +- tests/www/views/test_views.py | 25 +- tests/www/views/test_views_decorators.py | 6 +- tests/www/views/test_views_log.py | 2 +- tests/www/views/test_views_mount.py | 4 +- 59 files changed, 765 insertions(+), 559 deletions(-) create mode 100644 airflow/api_connexion/endpoints/request_dict.py create mode 100644 airflow/utils/airflow_flask_app.py delete mode 100644 images/breeze/output-commands-hash.txt create mode 100644 newsfragments/24399.significant.rst diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c0cbb9f09bc33..57b396f6ace7b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -805,6 +805,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: > breeze verify-provider-packages --use-airflow-version wheel --use-packages-from-dist --package-format wheel + env: + SKIP_CONSTRAINTS: "${{ needs.build-info.outputs.upgradeToNewerDependencies }}" - name: "Remove airflow package and replace providers with 2.1-compliant versions" run: | rm -vf dist/apache_airflow-*.whl \ @@ -882,6 +884,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: > 
breeze verify-provider-packages --use-airflow-version sdist --use-packages-from-dist --package-format sdist + env: + SKIP_CONSTRAINTS: "${{ needs.build-info.outputs.upgradeToNewerDependencies }}" - name: "Fix ownership" run: breeze fix-ownership if: always() diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 99db08571455e..af0c2b0e1c84d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -347,7 +347,7 @@ repos: language: python files: ^BREEZE\.rst$|^dev/breeze/.*$ pass_filenames: false - additional_dependencies: ['rich>=12.4.4', 'rich-click'] + additional_dependencies: ['rich>=12.4.4', 'rich-click>=1.5'] - id: update-local-yml-file name: Update mounts in the local yml file entry: ./scripts/ci/pre_commit/pre_commit_local_yml_mounts.py diff --git a/Dockerfile.ci b/Dockerfile.ci index 537f84a71f2d2..337901d4a817b 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -686,9 +686,15 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then echo "${COLOR_BLUE}Uninstalling airflow and providers" echo uninstall_airflow_and_providers - echo "${COLOR_BLUE}Install airflow from wheel package with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" - echo - install_airflow_from_wheel "${AIRFLOW_EXTRAS}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" + if [[ ${SKIP_CONSTRAINTS,,=} == "true" ]]; then + echo "${COLOR_BLUE}Install airflow from wheel package with extras: '${AIRFLOW_EXTRAS}' with no constraints.${COLOR_RESET}" + echo + install_airflow_from_wheel "${AIRFLOW_EXTRAS}" "none" + else + echo "${COLOR_BLUE}Install airflow from wheel package with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" + echo + install_airflow_from_wheel "${AIRFLOW_EXTRAS}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" + fi uninstall_providers elif [[ ${USE_AIRFLOW_VERSION} == "sdist" ]]; then echo @@ -696,9 +702,15 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then echo uninstall_airflow_and_providers echo - echo "${COLOR_BLUE}Install airflow from sdist package with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" - echo - install_airflow_from_sdist "${AIRFLOW_EXTRAS}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" + if [[ ${SKIP_CONSTRAINTS,,=} == "true" ]]; then + echo "${COLOR_BLUE}Install airflow from sdist package with extras: '${AIRFLOW_EXTRAS}' with no constraints.${COLOR_RESET}" + echo + install_airflow_from_sdist "${AIRFLOW_EXTRAS}" "none" + else + echo "${COLOR_BLUE}Install airflow from sdist package with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" + echo + install_airflow_from_sdist "${AIRFLOW_EXTRAS}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" + fi uninstall_providers else echo @@ -706,9 +718,15 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then echo uninstall_airflow_and_providers echo - echo "${COLOR_BLUE}Install released airflow from PyPI with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" - echo - install_released_airflow_version "${USE_AIRFLOW_VERSION}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" + if [[ ${SKIP_CONSTRAINTS,,=} == "true" ]]; then + echo "${COLOR_BLUE}Install released airflow from PyPI with extras: '${AIRFLOW_EXTRAS}' with no constraints.${COLOR_RESET}" + echo + install_released_airflow_version "${USE_AIRFLOW_VERSION}" "none" + else + echo "${COLOR_BLUE}Install released airflow from PyPI with extras: 
'${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" + echo + install_released_airflow_version "${USE_AIRFLOW_VERSION}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" + fi fi if [[ ${USE_PACKAGES_FROM_DIST=} == "true" ]]; then echo diff --git a/airflow/api/auth/backend/basic_auth.py b/airflow/api/auth/backend/basic_auth.py index 397a722a98cf2..12f00b435fe11 100644 --- a/airflow/api/auth/backend/basic_auth.py +++ b/airflow/api/auth/backend/basic_auth.py @@ -18,10 +18,11 @@ from functools import wraps from typing import Any, Callable, Optional, Tuple, TypeVar, Union, cast -from flask import Response, current_app, request +from flask import Response, request from flask_appbuilder.const import AUTH_LDAP from flask_login import login_user +from airflow.utils.airflow_flask_app import get_airflow_app from airflow.www.fab_security.sqla.models import User CLIENT_AUTH: Optional[Union[Tuple[str, str], Any]] = None @@ -40,7 +41,7 @@ def auth_current_user() -> Optional[User]: if auth is None or not auth.username or not auth.password: return None - ab_security_manager = current_app.appbuilder.sm + ab_security_manager = get_airflow_app().appbuilder.sm user = None if ab_security_manager.auth_type == AUTH_LDAP: user = ab_security_manager.auth_user_ldap(auth.username, auth.password) diff --git a/airflow/api_connexion/endpoints/dag_endpoint.py b/airflow/api_connexion/endpoints/dag_endpoint.py index e94707b127a69..40113021cfad6 100644 --- a/airflow/api_connexion/endpoints/dag_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_endpoint.py @@ -18,7 +18,7 @@ from typing import Collection, Optional from connexion import NoContent -from flask import current_app, g, request +from flask import g, request from marshmallow import ValidationError from sqlalchemy.orm import Session from sqlalchemy.sql.expression import or_ @@ -37,6 +37,7 @@ from airflow.exceptions import AirflowException, DagNotFound from airflow.models.dag import DagModel, DagTag from airflow.security import permissions +from airflow.utils.airflow_flask_app import get_airflow_app from airflow.utils.session import NEW_SESSION, provide_session @@ -55,7 +56,7 @@ def get_dag(*, dag_id: str, session: Session = NEW_SESSION) -> APIResponse: @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG)]) def get_dag_details(*, dag_id: str) -> APIResponse: """Get details of DAG.""" - dag: DAG = current_app.dag_bag.get_dag(dag_id) + dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: raise NotFound("DAG not found", detail=f"The DAG with dag_id: {dag_id} was not found") return dag_detail_schema.dump(dag) @@ -82,7 +83,7 @@ def get_dags( if dag_id_pattern: dags_query = dags_query.filter(DagModel.dag_id.ilike(f'%{dag_id_pattern}%')) - readable_dags = current_app.appbuilder.sm.get_accessible_dag_ids(g.user) + readable_dags = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) dags_query = dags_query.filter(DagModel.dag_id.in_(readable_dags)) if tags: @@ -142,7 +143,7 @@ def patch_dags(limit, session, offset=0, only_active=True, tags=None, dag_id_pat if dag_id_pattern == '~': dag_id_pattern = '%' dags_query = dags_query.filter(DagModel.dag_id.ilike(f'%{dag_id_pattern}%')) - editable_dags = current_app.appbuilder.sm.get_editable_dag_ids(g.user) + editable_dags = get_airflow_app().appbuilder.sm.get_editable_dag_ids(g.user) dags_query = dags_query.filter(DagModel.dag_id.in_(editable_dags)) if tags: diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py 
b/airflow/api_connexion/endpoints/dag_run_endpoint.py index a83ca223b07ac..1fad48f7b6fe7 100644 --- a/airflow/api_connexion/endpoints/dag_run_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py @@ -18,13 +18,14 @@ import pendulum from connexion import NoContent -from flask import current_app, g, request +from flask import g from marshmallow import ValidationError from sqlalchemy import or_ from sqlalchemy.orm import Query, Session from airflow.api.common.mark_tasks import set_dag_run_state_to_failed, set_dag_run_state_to_success from airflow.api_connexion import security +from airflow.api_connexion.endpoints.request_dict import get_json_request_dict from airflow.api_connexion.exceptions import AlreadyExists, BadRequest, NotFound from airflow.api_connexion.parameters import apply_sorting, check_limit, format_datetime, format_parameters from airflow.api_connexion.schemas.dag_run_schema import ( @@ -37,6 +38,7 @@ from airflow.api_connexion.types import APIResponse from airflow.models import DagModel, DagRun from airflow.security import permissions +from airflow.utils.airflow_flask_app import get_airflow_app from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType @@ -157,7 +159,7 @@ def get_dag_runs( # This endpoint allows specifying ~ as the dag_id to retrieve DAG Runs for all DAGs. if dag_id == "~": - appbuilder = current_app.appbuilder + appbuilder = get_airflow_app().appbuilder query = query.filter(DagRun.dag_id.in_(appbuilder.sm.get_readable_dag_ids(g.user))) else: query = query.filter(DagRun.dag_id == dag_id) @@ -189,13 +191,13 @@ def get_dag_runs( @provide_session def get_dag_runs_batch(*, session: Session = NEW_SESSION) -> APIResponse: """Get list of DAG Runs""" - body = request.get_json() + body = get_json_request_dict() try: data = dagruns_batch_form_schema.load(body) except ValidationError as err: raise BadRequest(detail=str(err.messages)) - appbuilder = current_app.appbuilder + appbuilder = get_airflow_app().appbuilder readable_dag_ids = appbuilder.sm.get_readable_dag_ids(g.user) query = session.query(DagRun) if data.get("dag_ids"): @@ -242,7 +244,7 @@ def post_dag_run(*, dag_id: str, session: Session = NEW_SESSION) -> APIResponse: detail=f"DAG with dag_id: '{dag_id}' has import errors", ) try: - post_body = dagrun_schema.load(request.json, session=session) + post_body = dagrun_schema.load(get_json_request_dict(), session=session) except ValidationError as err: raise BadRequest(detail=str(err)) @@ -258,7 +260,7 @@ def post_dag_run(*, dag_id: str, session: Session = NEW_SESSION) -> APIResponse: ) if not dagrun_instance: try: - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) dag_run = dag.create_dagrun( run_type=DagRunType.MANUAL, run_id=run_id, @@ -267,7 +269,7 @@ def post_dag_run(*, dag_id: str, session: Session = NEW_SESSION) -> APIResponse: state=DagRunState.QUEUED, conf=post_body.get("conf"), external_trigger=True, - dag_hash=current_app.dag_bag.dags_hash.get(dag_id), + dag_hash=get_airflow_app().dag_bag.dags_hash.get(dag_id), ) return dagrun_schema.dump(dag_run) except ValueError as ve: @@ -300,12 +302,12 @@ def update_dag_run_state(*, dag_id: str, dag_run_id: str, session: Session = NEW error_message = f'Dag Run id {dag_run_id} not found in dag {dag_id}' raise NotFound(error_message) try: - post_body = set_dagrun_state_form_schema.load(request.json) + post_body = set_dagrun_state_form_schema.load(get_json_request_dict()) 
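A quick note on the pattern in the hunks above: the request.json / request.get_json() reads are funnelled through get_json_request_dict(), a small helper this patch adds (its full definition appears a bit further down, in airflow/api_connexion/endpoints/request_dict.py). Flask 2 types request.get_json() loosely, so narrowing the result once to Mapping[str, Any] lets every schema.load() call site pass MyPy. A minimal sketch of the call-site pattern; only the helper mirrors the patch, the endpoint function below is hypothetical:

    from typing import Any, Mapping, cast

    from flask import request


    def get_json_request_dict() -> Mapping[str, Any]:
        # Narrow the loosely typed request.get_json() to a Mapping in one place.
        return cast(Mapping[str, Any], request.get_json())


    def patch_example_resource() -> Mapping[str, Any]:
        # Hypothetical endpoint body, shown only to illustrate how call sites use the helper.
        body = get_json_request_dict()
        missing = {"name", "slots"} - set(body.keys())
        if missing:
            raise ValueError(f"Missing required property(ies): {sorted(missing)}")
        return body
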
except ValidationError as err: raise BadRequest(detail=str(err)) state = post_body['state'] - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if state == DagRunState.SUCCESS: set_dag_run_state_to_success(dag=dag, run_id=dag_run.run_id, commit=True) else: diff --git a/airflow/api_connexion/endpoints/extra_link_endpoint.py b/airflow/api_connexion/endpoints/extra_link_endpoint.py index 3e9535603bda3..94b36928bfd0c 100644 --- a/airflow/api_connexion/endpoints/extra_link_endpoint.py +++ b/airflow/api_connexion/endpoints/extra_link_endpoint.py @@ -15,7 +15,6 @@ # specific language governing permissions and limitations # under the License. -from flask import current_app from sqlalchemy.orm.session import Session from airflow import DAG @@ -25,6 +24,7 @@ from airflow.exceptions import TaskNotFound from airflow.models.dagbag import DagBag from airflow.security import permissions +from airflow.utils.airflow_flask_app import get_airflow_app from airflow.utils.session import NEW_SESSION, provide_session @@ -46,7 +46,7 @@ def get_extra_links( """Get extra links for task instance""" from airflow.models.taskinstance import TaskInstance - dagbag: DagBag = current_app.dag_bag + dagbag: DagBag = get_airflow_app().dag_bag dag: DAG = dagbag.get_dag(dag_id) if not dag: raise NotFound("DAG not found", detail=f'DAG with ID = "{dag_id}" not found') diff --git a/airflow/api_connexion/endpoints/log_endpoint.py b/airflow/api_connexion/endpoints/log_endpoint.py index f1335fe527451..171cacb076e7c 100644 --- a/airflow/api_connexion/endpoints/log_endpoint.py +++ b/airflow/api_connexion/endpoints/log_endpoint.py @@ -14,10 +14,9 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. - from typing import Any, Optional -from flask import Response, current_app, request +from flask import Response, request from itsdangerous.exc import BadSignature from itsdangerous.url_safe import URLSafeSerializer from sqlalchemy.orm.session import Session @@ -29,6 +28,7 @@ from airflow.exceptions import TaskNotFound from airflow.models import TaskInstance from airflow.security import permissions +from airflow.utils.airflow_flask_app import get_airflow_app from airflow.utils.log.log_reader import TaskLogReader from airflow.utils.session import NEW_SESSION, provide_session @@ -52,7 +52,7 @@ def get_log( session: Session = NEW_SESSION, ) -> APIResponse: """Get logs for specific task instance""" - key = current_app.config["SECRET_KEY"] + key = get_airflow_app().config["SECRET_KEY"] if not token: metadata = {} else: @@ -87,7 +87,7 @@ def get_log( metadata['end_of_log'] = True raise NotFound(title="TaskInstance not found") - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if dag: try: ti.task = dag.get_task(ti.task_id) @@ -101,7 +101,8 @@ def get_log( if return_type == 'application/json' or return_type is None: # default logs, metadata = task_log_reader.read_log_chunks(ti, task_try_number, metadata) logs = logs[0] if task_try_number is not None else logs - token = URLSafeSerializer(key).dumps(metadata) + # we must have token here, so we can safely ignore it + token = URLSafeSerializer(key).dumps(metadata) # type: ignore[assignment] return logs_schema.dump(LogResponseObject(continuation_token=token, content=logs)) # text/plain. 
Stream logs = task_log_reader.read_log_stream(ti, task_try_number, metadata) diff --git a/airflow/api_connexion/endpoints/pool_endpoint.py b/airflow/api_connexion/endpoints/pool_endpoint.py index e9c8aee252bec..8c3d3f3b86d38 100644 --- a/airflow/api_connexion/endpoints/pool_endpoint.py +++ b/airflow/api_connexion/endpoints/pool_endpoint.py @@ -16,13 +16,14 @@ # under the License. from typing import Optional -from flask import Response, request +from flask import Response from marshmallow import ValidationError from sqlalchemy import func from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session from airflow.api_connexion import security +from airflow.api_connexion.endpoints.request_dict import get_json_request_dict from airflow.api_connexion.exceptions import AlreadyExists, BadRequest, NotFound from airflow.api_connexion.parameters import apply_sorting, check_limit, format_parameters from airflow.api_connexion.schemas.pool_schema import PoolCollection, pool_collection_schema, pool_schema @@ -83,9 +84,10 @@ def patch_pool( session: Session = NEW_SESSION, ) -> APIResponse: """Update a pool""" + request_dict = get_json_request_dict() # Only slots can be modified in 'default_pool' try: - if pool_name == Pool.DEFAULT_POOL_NAME and request.json["name"] != Pool.DEFAULT_POOL_NAME: + if pool_name == Pool.DEFAULT_POOL_NAME and request_dict["name"] != Pool.DEFAULT_POOL_NAME: if update_mask and len(update_mask) == 1 and update_mask[0].strip() == "slots": pass else: @@ -98,7 +100,7 @@ def patch_pool( raise NotFound(detail=f"Pool with name:'{pool_name}' not found") try: - patch_body = pool_schema.load(request.json) + patch_body = pool_schema.load(request_dict) except ValidationError as err: raise BadRequest(detail=str(err.messages)) @@ -119,7 +121,7 @@ def patch_pool( else: required_fields = {"name", "slots"} - fields_diff = required_fields - set(request.json.keys()) + fields_diff = required_fields - set(get_json_request_dict().keys()) if fields_diff: raise BadRequest(detail=f"Missing required property(ies): {sorted(fields_diff)}") @@ -134,12 +136,12 @@ def patch_pool( def post_pool(*, session: Session = NEW_SESSION) -> APIResponse: """Create a pool""" required_fields = {"name", "slots"} # Pool would require both fields in the post request - fields_diff = required_fields - set(request.json.keys()) + fields_diff = required_fields - set(get_json_request_dict().keys()) if fields_diff: raise BadRequest(detail=f"Missing required property(ies): {sorted(fields_diff)}") try: - post_body = pool_schema.load(request.json, session=session) + post_body = pool_schema.load(get_json_request_dict(), session=session) except ValidationError as err: raise BadRequest(detail=str(err.messages)) diff --git a/airflow/api_connexion/endpoints/request_dict.py b/airflow/api_connexion/endpoints/request_dict.py new file mode 100644 index 0000000000000..4d7ad21250586 --- /dev/null +++ b/airflow/api_connexion/endpoints/request_dict.py @@ -0,0 +1,24 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from typing import Any, Mapping, cast + + +def get_json_request_dict() -> Mapping[str, Any]: + from flask import request + + return cast(Mapping[str, Any], request.get_json()) diff --git a/airflow/api_connexion/endpoints/role_and_permission_endpoint.py b/airflow/api_connexion/endpoints/role_and_permission_endpoint.py index a25856e111b2c..25419066d20fa 100644 --- a/airflow/api_connexion/endpoints/role_and_permission_endpoint.py +++ b/airflow/api_connexion/endpoints/role_and_permission_endpoint.py @@ -18,7 +18,7 @@ from typing import List, Optional, Tuple from connexion import NoContent -from flask import current_app, request +from flask import request from marshmallow import ValidationError from sqlalchemy import asc, desc, func @@ -34,6 +34,7 @@ ) from airflow.api_connexion.types import APIResponse, UpdateMask from airflow.security import permissions +from airflow.utils.airflow_flask_app import get_airflow_app from airflow.www.fab_security.sqla.models import Action, Role from airflow.www.security import AirflowSecurityManager @@ -54,7 +55,7 @@ def _check_action_and_resource(sm: AirflowSecurityManager, perms: List[Tuple[str @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_ROLE)]) def get_role(*, role_name: str) -> APIResponse: """Get role""" - ab_security_manager = current_app.appbuilder.sm + ab_security_manager = get_airflow_app().appbuilder.sm role = ab_security_manager.find_role(name=role_name) if not role: raise NotFound(title="Role not found", detail=f"Role with name {role_name!r} was not found") @@ -65,7 +66,7 @@ def get_role(*, role_name: str) -> APIResponse: @format_parameters({"limit": check_limit}) def get_roles(*, order_by: str = "name", limit: int, offset: Optional[int] = None) -> APIResponse: """Get roles""" - appbuilder = current_app.appbuilder + appbuilder = get_airflow_app().appbuilder session = appbuilder.get_session total_entries = session.query(func.count(Role.id)).scalar() direction = desc if order_by.startswith("-") else asc @@ -89,7 +90,7 @@ def get_roles(*, order_by: str = "name", limit: int, offset: Optional[int] = Non @format_parameters({'limit': check_limit}) def get_permissions(*, limit: int, offset: Optional[int] = None) -> APIResponse: """Get permissions""" - session = current_app.appbuilder.get_session + session = get_airflow_app().appbuilder.get_session total_entries = session.query(func.count(Action.id)).scalar() query = session.query(Action) actions = query.offset(offset).limit(limit).all() @@ -99,7 +100,7 @@ def get_permissions(*, limit: int, offset: Optional[int] = None) -> APIResponse: @security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_ROLE)]) def delete_role(*, role_name: str) -> APIResponse: """Delete a role""" - ab_security_manager = current_app.appbuilder.sm + ab_security_manager = get_airflow_app().appbuilder.sm role = ab_security_manager.find_role(name=role_name) if not role: raise NotFound(title="Role not found", detail=f"Role with name {role_name!r} was not found") @@ -110,7 +111,7 @@ def delete_role(*, role_name: str) -> APIResponse: 
@security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_ROLE)]) def patch_role(*, role_name: str, update_mask: UpdateMask = None) -> APIResponse: """Update a role""" - appbuilder = current_app.appbuilder + appbuilder = get_airflow_app().appbuilder security_manager = appbuilder.sm body = request.json try: @@ -144,7 +145,7 @@ def patch_role(*, role_name: str, update_mask: UpdateMask = None) -> APIResponse @security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_ROLE)]) def post_role() -> APIResponse: """Create a new role""" - appbuilder = current_app.appbuilder + appbuilder = get_airflow_app().appbuilder security_manager = appbuilder.sm body = request.json try: diff --git a/airflow/api_connexion/endpoints/task_endpoint.py b/airflow/api_connexion/endpoints/task_endpoint.py index 28c39b000c28d..74b6e7e9ee8ed 100644 --- a/airflow/api_connexion/endpoints/task_endpoint.py +++ b/airflow/api_connexion/endpoints/task_endpoint.py @@ -16,8 +16,6 @@ # under the License. from operator import attrgetter -from flask import current_app - from airflow import DAG from airflow.api_connexion import security from airflow.api_connexion.exceptions import BadRequest, NotFound @@ -25,6 +23,7 @@ from airflow.api_connexion.types import APIResponse from airflow.exceptions import TaskNotFound from airflow.security import permissions +from airflow.utils.airflow_flask_app import get_airflow_app @security.requires_access( @@ -35,7 +34,7 @@ ) def get_task(*, dag_id: str, task_id: str) -> APIResponse: """Get simplified representation of a task.""" - dag: DAG = current_app.dag_bag.get_dag(dag_id) + dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: raise NotFound("DAG not found") @@ -54,7 +53,7 @@ def get_task(*, dag_id: str, task_id: str) -> APIResponse: ) def get_tasks(*, dag_id: str, order_by: str = "task_id") -> APIResponse: """Get tasks for DAG""" - dag: DAG = current_app.dag_bag.get_dag(dag_id) + dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: raise NotFound("DAG not found") tasks = dag.tasks diff --git a/airflow/api_connexion/endpoints/task_instance_endpoint.py b/airflow/api_connexion/endpoints/task_instance_endpoint.py index c2416ab0d9d44..6cc3e784e62a3 100644 --- a/airflow/api_connexion/endpoints/task_instance_endpoint.py +++ b/airflow/api_connexion/endpoints/task_instance_endpoint.py @@ -16,7 +16,6 @@ # under the License. 
from typing import Any, Iterable, List, Optional, Tuple, TypeVar -from flask import current_app, request from marshmallow import ValidationError from sqlalchemy import and_, func, or_ from sqlalchemy.exc import MultipleResultsFound @@ -25,6 +24,7 @@ from sqlalchemy.sql import ClauseElement from airflow.api_connexion import security +from airflow.api_connexion.endpoints.request_dict import get_json_request_dict from airflow.api_connexion.exceptions import BadRequest, NotFound from airflow.api_connexion.parameters import format_datetime, format_parameters from airflow.api_connexion.schemas.task_instance_schema import ( @@ -42,6 +42,7 @@ from airflow.models.dagrun import DagRun as DR from airflow.models.taskinstance import TaskInstance as TI, clear_task_instances from airflow.security import permissions +from airflow.utils.airflow_flask_app import get_airflow_app from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.state import DagRunState, State @@ -188,7 +189,7 @@ def get_mapped_task_instances( # 0 can mean a mapped TI that expanded to an empty list, so it is not an automatic 404 if base_query.with_entities(func.count('*')).scalar() == 0: - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: error_message = f"DAG {dag_id} not found" raise NotFound(error_message) @@ -364,7 +365,7 @@ def get_task_instances( @provide_session def get_task_instances_batch(session: Session = NEW_SESSION) -> APIResponse: """Get list of task instances.""" - body = request.get_json() + body = get_json_request_dict() try: data = task_instance_batch_form.load(body) except ValidationError as err: @@ -423,20 +424,20 @@ def get_task_instances_batch(session: Session = NEW_SESSION) -> APIResponse: @provide_session def post_clear_task_instances(*, dag_id: str, session: Session = NEW_SESSION) -> APIResponse: """Clear task instances.""" - body = request.get_json() + body = get_json_request_dict() try: data = clear_task_instance_form.load(body) except ValidationError as err: raise BadRequest(detail=str(err.messages)) - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: error_message = f"Dag id {dag_id} not found" raise NotFound(error_message) reset_dag_runs = data.pop('reset_dag_runs') dry_run = data.pop('dry_run') # We always pass dry_run here, otherwise this would try to confirm on the terminal! 
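Similarly, the current_app.dag_bag / current_app.appbuilder accesses that these hunks rewrite to get_airflow_app() rely on the typed accessor added later in this patch as airflow/utils/airflow_flask_app.py. The gist, as a rough sketch (attribute types simplified to Any here; the real module declares AirflowAppBuilder and DagBag):

    from typing import Any, List, cast

    from flask import Flask, current_app


    class AirflowApp(Flask):
        """Flask subclass that only declares the attributes Airflow attaches at start-up."""

        appbuilder: Any
        dag_bag: Any
        api_auth: List[Any]


    def get_airflow_app() -> AirflowApp:
        # cast() is free at runtime; it only tells MyPy that these attributes exist,
        # so call sites like get_airflow_app().dag_bag.get_dag(dag_id) type-check cleanly.
        return cast(AirflowApp, current_app)
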
- task_instances = dag.clear(dry_run=True, dag_bag=current_app.dag_bag, **data) + task_instances = dag.clear(dry_run=True, dag_bag=get_airflow_app().dag_bag, **data) if not dry_run: clear_task_instances( task_instances.all(), @@ -460,14 +461,14 @@ def post_clear_task_instances(*, dag_id: str, session: Session = NEW_SESSION) -> @provide_session def post_set_task_instances_state(*, dag_id: str, session: Session = NEW_SESSION) -> APIResponse: """Set a state of task instances.""" - body = request.get_json() + body = get_json_request_dict() try: data = set_task_instance_state_form.load(body) except ValidationError as err: raise BadRequest(detail=str(err.messages)) error_message = f"Dag ID {dag_id} not found" - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: raise NotFound(error_message) diff --git a/airflow/api_connexion/endpoints/user_endpoint.py b/airflow/api_connexion/endpoints/user_endpoint.py index 82375cebcaf16..2ed0db2aae864 100644 --- a/airflow/api_connexion/endpoints/user_endpoint.py +++ b/airflow/api_connexion/endpoints/user_endpoint.py @@ -17,7 +17,7 @@ from typing import List, Optional from connexion import NoContent -from flask import current_app, request +from flask import request from marshmallow import ValidationError from sqlalchemy import asc, desc, func from werkzeug.security import generate_password_hash @@ -33,13 +33,14 @@ ) from airflow.api_connexion.types import APIResponse, UpdateMask from airflow.security import permissions +from airflow.utils.airflow_flask_app import get_airflow_app from airflow.www.fab_security.sqla.models import Role, User @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_USER)]) def get_user(*, username: str) -> APIResponse: """Get a user""" - ab_security_manager = current_app.appbuilder.sm + ab_security_manager = get_airflow_app().appbuilder.sm user = ab_security_manager.find_user(username=username) if not user: raise NotFound(title="User not found", detail=f"The User with username `{username}` was not found") @@ -50,7 +51,7 @@ def get_user(*, username: str) -> APIResponse: @format_parameters({"limit": check_limit}) def get_users(*, limit: int, order_by: str = "id", offset: Optional[str] = None) -> APIResponse: """Get users""" - appbuilder = current_app.appbuilder + appbuilder = get_airflow_app().appbuilder session = appbuilder.get_session total_entries = session.query(func.count(User.id)).scalar() direction = desc if order_by.startswith("-") else asc @@ -86,7 +87,7 @@ def post_user() -> APIResponse: except ValidationError as e: raise BadRequest(detail=str(e.messages)) - security_manager = current_app.appbuilder.sm + security_manager = get_airflow_app().appbuilder.sm username = data["username"] email = data["email"] @@ -129,7 +130,7 @@ def patch_user(*, username: str, update_mask: UpdateMask = None) -> APIResponse: except ValidationError as e: raise BadRequest(detail=str(e.messages)) - security_manager = current_app.appbuilder.sm + security_manager = get_airflow_app().appbuilder.sm user = security_manager.find_user(username=username) if user is None: @@ -193,7 +194,7 @@ def patch_user(*, username: str, update_mask: UpdateMask = None) -> APIResponse: @security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_USER)]) def delete_user(*, username: str) -> APIResponse: """Delete a user""" - security_manager = current_app.appbuilder.sm + security_manager = get_airflow_app().appbuilder.sm user = security_manager.find_user(username=username) if user is 
None: diff --git a/airflow/api_connexion/endpoints/variable_endpoint.py b/airflow/api_connexion/endpoints/variable_endpoint.py index 067d163401454..4dfc0803c5c62 100644 --- a/airflow/api_connexion/endpoints/variable_endpoint.py +++ b/airflow/api_connexion/endpoints/variable_endpoint.py @@ -16,12 +16,13 @@ # under the License. from typing import Optional -from flask import Response, request +from flask import Response from marshmallow import ValidationError from sqlalchemy import func from sqlalchemy.orm import Session from airflow.api_connexion import security +from airflow.api_connexion.endpoints.request_dict import get_json_request_dict from airflow.api_connexion.exceptions import BadRequest, NotFound from airflow.api_connexion.parameters import apply_sorting, check_limit, format_parameters from airflow.api_connexion.schemas.variable_schema import variable_collection_schema, variable_schema @@ -78,7 +79,7 @@ def get_variables( def patch_variable(*, variable_key: str, update_mask: UpdateMask = None) -> Response: """Update a variable by key""" try: - data = variable_schema.load(request.json) + data = variable_schema.load(get_json_request_dict()) except ValidationError as err: raise BadRequest("Invalid Variable schema", detail=str(err.messages)) @@ -99,7 +100,7 @@ def patch_variable(*, variable_key: str, update_mask: UpdateMask = None) -> Resp def post_variables() -> Response: """Create a variable""" try: - data = variable_schema.load(request.json) + data = variable_schema.load(get_json_request_dict()) except ValidationError as err: raise BadRequest("Invalid Variable schema", detail=str(err.messages)) diff --git a/airflow/api_connexion/endpoints/xcom_endpoint.py b/airflow/api_connexion/endpoints/xcom_endpoint.py index 9cc6b6d79a933..62c7262f7ed2c 100644 --- a/airflow/api_connexion/endpoints/xcom_endpoint.py +++ b/airflow/api_connexion/endpoints/xcom_endpoint.py @@ -16,7 +16,7 @@ # under the License. 
from typing import Optional -from flask import current_app, g +from flask import g from sqlalchemy import and_ from sqlalchemy.orm import Session @@ -27,6 +27,7 @@ from airflow.api_connexion.types import APIResponse from airflow.models import DagRun as DR, XCom from airflow.security import permissions +from airflow.utils.airflow_flask_app import get_airflow_app from airflow.utils.session import NEW_SESSION, provide_session @@ -52,7 +53,7 @@ def get_xcom_entries( """Get all XCom values""" query = session.query(XCom) if dag_id == '~': - appbuilder = current_app.appbuilder + appbuilder = get_airflow_app().appbuilder readable_dag_ids = appbuilder.sm.get_readable_dag_ids(g.user) query = query.filter(XCom.dag_id.in_(readable_dag_ids)) query = query.join(DR, and_(XCom.dag_id == DR.dag_id, XCom.run_id == DR.run_id)) diff --git a/airflow/api_connexion/schemas/dag_schema.py b/airflow/api_connexion/schemas/dag_schema.py index 2f369113290d9..6e7410dc4f2ef 100644 --- a/airflow/api_connexion/schemas/dag_schema.py +++ b/airflow/api_connexion/schemas/dag_schema.py @@ -83,7 +83,7 @@ def get_owners(obj: DagModel): @staticmethod def get_token(obj: DagModel): """Return file token""" - serializer = URLSafeSerializer(conf.get('webserver', 'secret_key')) + serializer = URLSafeSerializer(conf.get_mandatory_value('webserver', 'secret_key')) return serializer.dumps(obj.fileloc) diff --git a/airflow/api_connexion/schemas/task_instance_schema.py b/airflow/api_connexion/schemas/task_instance_schema.py index 37005256f6cdc..74824dbaf87c6 100644 --- a/airflow/api_connexion/schemas/task_instance_schema.py +++ b/airflow/api_connexion/schemas/task_instance_schema.py @@ -60,7 +60,7 @@ class Meta: pid = auto_field() executor_config = auto_field() sla_miss = fields.Nested(SlaMissSchema, dump_default=None) - rendered_fields = JsonObjectField(default={}) + rendered_fields = JsonObjectField(dump_default={}) def get_attribute(self, obj, attr, default): if attr == "sla_miss": diff --git a/airflow/api_connexion/security.py b/airflow/api_connexion/security.py index 3562c98eb4b35..6c84181f91bd3 100644 --- a/airflow/api_connexion/security.py +++ b/airflow/api_connexion/security.py @@ -18,16 +18,17 @@ from functools import wraps from typing import Callable, Optional, Sequence, Tuple, TypeVar, cast -from flask import Response, current_app +from flask import Response from airflow.api_connexion.exceptions import PermissionDenied, Unauthenticated +from airflow.utils.airflow_flask_app import get_airflow_app T = TypeVar("T", bound=Callable) def check_authentication() -> None: """Checks that the request has valid authorization information.""" - for auth in current_app.api_auth: + for auth in get_airflow_app().api_auth: response = auth.requires_authentication(Response)() if response.status_code == 200: return @@ -38,7 +39,7 @@ def check_authentication() -> None: def requires_access(permissions: Optional[Sequence[Tuple[str, str]]] = None) -> Callable[[T], T]: """Factory for decorator that checks current user's permissions against required permissions.""" - appbuilder = current_app.appbuilder + appbuilder = get_airflow_app().appbuilder appbuilder.sm.sync_resource_permissions(permissions) def requires_access_decorator(func: T): diff --git a/airflow/models/abstractoperator.py b/airflow/models/abstractoperator.py index 8d2e06442a2e5..4d50288673be0 100644 --- a/airflow/models/abstractoperator.py +++ b/airflow/models/abstractoperator.py @@ -302,7 +302,6 @@ def get_extra_links(self, ti: "TaskInstance", link_name: str) -> Optional[str]: return 
link.get_link(self, ti_key=ti.key) else: return link.get_link(self, ti.dag_run.logical_date) # type: ignore[misc] - return None def render_template_fields( self, diff --git a/airflow/operators/trigger_dagrun.py b/airflow/operators/trigger_dagrun.py index 0689f14c56261..4578fd2df818b 100644 --- a/airflow/operators/trigger_dagrun.py +++ b/airflow/operators/trigger_dagrun.py @@ -23,7 +23,10 @@ from airflow.api.common.trigger_dag import trigger_dag from airflow.exceptions import AirflowException, DagNotFound, DagRunAlreadyExists -from airflow.models import BaseOperator, BaseOperatorLink, DagBag, DagModel, DagRun +from airflow.models.baseoperator import BaseOperator, BaseOperatorLink +from airflow.models.dag import DagModel +from airflow.models.dagbag import DagBag +from airflow.models.dagrun import DagRun from airflow.models.xcom import XCom from airflow.utils import timezone from airflow.utils.context import Context diff --git a/airflow/providers/google/common/auth_backend/google_openid.py b/airflow/providers/google/common/auth_backend/google_openid.py index 496ac29616686..a267c0e63a1ca 100644 --- a/airflow/providers/google/common/auth_backend/google_openid.py +++ b/airflow/providers/google/common/auth_backend/google_openid.py @@ -88,7 +88,7 @@ def _verify_id_token(id_token: str) -> Optional[str]: def _lookup_user(user_email: str): - security_manager = current_app.appbuilder.sm + security_manager = current_app.appbuilder.sm # type: ignore[attr-defined] user = security_manager.find_user(email=user_email) if not user: diff --git a/airflow/sensors/external_task.py b/airflow/sensors/external_task.py index 40c0a7a5665b7..30c27c7214dc7 100644 --- a/airflow/sensors/external_task.py +++ b/airflow/sensors/external_task.py @@ -23,7 +23,11 @@ from sqlalchemy import func from airflow.exceptions import AirflowException -from airflow.models import BaseOperatorLink, DagBag, DagModel, DagRun, TaskInstance +from airflow.models.baseoperator import BaseOperatorLink +from airflow.models.dag import DagModel +from airflow.models.dagbag import DagBag +from airflow.models.dagrun import DagRun +from airflow.models.taskinstance import TaskInstance from airflow.operators.empty import EmptyOperator from airflow.sensors.base import BaseSensorOperator from airflow.utils.helpers import build_airflow_url_with_query diff --git a/airflow/utils/airflow_flask_app.py b/airflow/utils/airflow_flask_app.py new file mode 100644 index 0000000000000..a14ff99398d21 --- /dev/null +++ b/airflow/utils/airflow_flask_app.py @@ -0,0 +1,37 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from typing import Any, List, cast + +from flask import Flask + +from airflow.models.dagbag import DagBag +from airflow.www.extensions.init_appbuilder import AirflowAppBuilder + + +class AirflowApp(Flask): + """Airflow Flask Application""" + + appbuilder: AirflowAppBuilder + dag_bag: DagBag + api_auth: List[Any] + + +def get_airflow_app() -> AirflowApp: + from flask import current_app + + return cast(AirflowApp, current_app) diff --git a/airflow/utils/jwt_signer.py b/airflow/utils/jwt_signer.py index 941a3d05981ce..e767997ebeb78 100644 --- a/airflow/utils/jwt_signer.py +++ b/airflow/utils/jwt_signer.py @@ -73,9 +73,7 @@ def verify_token(self, token: str) -> Dict[str, Any]: algorithms=[self._algorithm], options={ "verify_signature": True, - "require_exp": True, - "require_iat": True, - "require_nbf": True, + "require": ["exp", "iat", "nbf"], }, audience=self._audience, ) diff --git a/airflow/www/api/experimental/endpoints.py b/airflow/www/api/experimental/endpoints.py index 898988db81c50..75256f13736fd 100644 --- a/airflow/www/api/experimental/endpoints.py +++ b/airflow/www/api/experimental/endpoints.py @@ -70,7 +70,8 @@ def add_deprecation_headers(response: Response): return response -api_experimental.after_request(add_deprecation_headers) +# This API is deprecated. We do not care too much about typing here +api_experimental.after_request(add_deprecation_headers) # type: ignore[arg-type] @api_experimental.route('/dags//dag_runs', methods=['POST']) diff --git a/airflow/www/auth.py b/airflow/www/auth.py index 9d40c00a5cf10..9d36cda883c92 100644 --- a/airflow/www/auth.py +++ b/airflow/www/auth.py @@ -37,7 +37,10 @@ def decorated(*args, **kwargs): appbuilder = current_app.appbuilder dag_id = ( - request.args.get("dag_id") or request.form.get("dag_id") or (request.json or {}).get("dag_id") + request.args.get("dag_id") + or request.form.get("dag_id") + or (request.is_json and request.json.get("dag_id")) + or None ) if appbuilder.sm.check_authorization(permissions, dag_id): return func(*args, **kwargs) diff --git a/airflow/www/extensions/init_wsgi_middlewares.py b/airflow/www/extensions/init_wsgi_middlewares.py index 0ed78073e92f5..00c04006ff68e 100644 --- a/airflow/www/extensions/init_wsgi_middlewares.py +++ b/airflow/www/extensions/init_wsgi_middlewares.py @@ -37,7 +37,7 @@ def init_wsgi_middleware(flask_app: Flask): base_url = "" if base_url: flask_app.wsgi_app = DispatcherMiddleware( # type: ignore - _root_app, mounts={base_url: flask_app.wsgi_app} + _root_app, mounts={base_url: flask_app.wsgi_app} # type: ignore ) # Apply ProxyFix middleware diff --git a/airflow/www/fab_security/manager.py b/airflow/www/fab_security/manager.py index 8381f7b08cdc7..2010e58c348bc 100644 --- a/airflow/www/fab_security/manager.py +++ b/airflow/www/fab_security/manager.py @@ -291,7 +291,7 @@ def create_jwt_manager(self, app) -> JWTManager: """ jwt_manager = JWTManager() jwt_manager.init_app(app) - jwt_manager.user_loader_callback_loader(self.load_user_jwt) + jwt_manager.user_lookup_loader(self.load_user_jwt) return jwt_manager def create_builtin_roles(self): @@ -654,6 +654,18 @@ def get_oauth_user_info(self, provider, resp): "email": data.get("email", ""), "role_keys": data.get("groups", []), } + # for Keycloak + if provider in ["keycloak", "keycloak_before_17"]: + me = self.appbuilder.sm.oauth_remotes[provider].get("openid-connect/userinfo") + me.raise_for_status() + data = me.json() + log.debug("User info from Keycloak: %s", data) + return { + "username": data.get("preferred_username", ""), + "first_name": 
data.get("given_name", ""), + "last_name": data.get("family_name", ""), + "email": data.get("email", ""), + } else: return {} @@ -1027,12 +1039,6 @@ def auth_user_ldap(self, username, password): try: # LDAP certificate settings - if self.auth_ldap_allow_self_signed: - ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_ALLOW) - ldap.set_option(ldap.OPT_X_TLS_NEWCTX, 0) - elif self.auth_ldap_tls_demand: - ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_DEMAND) - ldap.set_option(ldap.OPT_X_TLS_NEWCTX, 0) if self.auth_ldap_tls_cacertdir: ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, self.auth_ldap_tls_cacertdir) if self.auth_ldap_tls_cacertfile: @@ -1041,6 +1047,12 @@ def auth_user_ldap(self, username, password): ldap.set_option(ldap.OPT_X_TLS_CERTFILE, self.auth_ldap_tls_certfile) if self.auth_ldap_tls_keyfile: ldap.set_option(ldap.OPT_X_TLS_KEYFILE, self.auth_ldap_tls_keyfile) + if self.auth_ldap_allow_self_signed: + ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_ALLOW) + ldap.set_option(ldap.OPT_X_TLS_NEWCTX, 0) + elif self.auth_ldap_tls_demand: + ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_DEMAND) + ldap.set_option(ldap.OPT_X_TLS_NEWCTX, 0) # Initialise LDAP connection con = ldap.initialize(self.auth_ldap_server) @@ -1354,7 +1366,10 @@ def get_user_menu_access(self, menu_names: Optional[List[str]] = None) -> Set[st return self._get_user_permission_resources(g.user, "menu_access", resource_names=menu_names) elif current_user_jwt: return self._get_user_permission_resources( - current_user_jwt, "menu_access", resource_names=menu_names + # the current_user_jwt is a lazy proxy, so we need to ignore type checking + current_user_jwt, # type: ignore[arg-type] + "menu_access", + resource_names=menu_names, ) else: return self._get_user_permission_resources(None, "menu_access", resource_names=menu_names) @@ -1660,9 +1675,9 @@ def load_user(self, user_id): """Load user by ID""" return self.get_user_by_id(int(user_id)) - def load_user_jwt(self, user_id): - """Load user JWT""" - user = self.load_user(user_id) + def load_user_jwt(self, _jwt_header, jwt_data): + identity = jwt_data["sub"] + user = self.load_user(identity) # Set flask g.user to JWT user, we can't do it on before request g.user = user return user diff --git a/airflow/www/views.py b/airflow/www/views.py index e9a52611fcd65..fbebef3ecf5d6 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -43,7 +43,6 @@ Response, abort, before_render_template, - current_app, flash, g, jsonify, @@ -118,6 +117,7 @@ from airflow.ti_deps.dependencies_deps import RUNNING_DEPS, SCHEDULER_QUEUED_DEPS from airflow.timetables.base import DataInterval, TimeRestriction from airflow.utils import json as utils_json, timezone, yaml +from airflow.utils.airflow_flask_app import get_airflow_app from airflow.utils.dates import infer_time_unit, scale_time_units from airflow.utils.docs import get_doc_url_for_provider, get_docs_url from airflow.utils.helpers import alchemy_to_dict @@ -622,13 +622,13 @@ def add_user_permissions_to_dag(sender, template, context, **extra): """ if 'dag' in context: dag = context['dag'] - can_create_dag_run = current_app.appbuilder.sm.has_access( + can_create_dag_run = get_airflow_app().appbuilder.sm.has_access( permissions.ACTION_CAN_CREATE, permissions.RESOURCE_DAG_RUN ) - dag.can_edit = current_app.appbuilder.sm.can_edit_dag(dag.dag_id) + dag.can_edit = get_airflow_app().appbuilder.sm.can_edit_dag(dag.dag_id) dag.can_trigger = dag.can_edit and can_create_dag_run - dag.can_delete = 
current_app.appbuilder.sm.can_delete_dag(dag.dag_id) + dag.can_delete = get_airflow_app().appbuilder.sm.can_delete_dag(dag.dag_id) context['dag'] = dag @@ -715,7 +715,7 @@ def index(self): end = start + dags_per_page # Get all the dag id the user could access - filter_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user) + filter_dag_ids = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) with create_session() as session: # read orm_dags from the db @@ -824,7 +824,7 @@ def index(self): ) dashboard_alerts = [ - fm for fm in settings.DASHBOARD_UIALERTS if fm.should_show(current_app.appbuilder.sm) + fm for fm in settings.DASHBOARD_UIALERTS if fm.should_show(get_airflow_app().appbuilder.sm) ] def _iter_parsed_moved_data_table_names(): @@ -904,7 +904,7 @@ def dag_stats(self, session=None): """Dag statistics.""" dr = models.DagRun - allowed_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user) + allowed_dag_ids = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) dag_state_stats = session.query(dr.dag_id, dr.state, sqla.func.count(dr.state)).group_by( dr.dag_id, dr.state @@ -949,7 +949,7 @@ def dag_stats(self, session=None): @provide_session def task_stats(self, session=None): """Task Statistics""" - allowed_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user) + allowed_dag_ids = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) if not allowed_dag_ids: return wwwutils.json_response({}) @@ -1058,7 +1058,7 @@ def task_stats(self, session=None): @provide_session def last_dagruns(self, session=None): """Last DAG runs""" - allowed_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user) + allowed_dag_ids = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) # Filter by post parameters selected_dag_ids = {unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id} @@ -1182,7 +1182,7 @@ def legacy_dag_details(self): @provide_session def dag_details(self, dag_id, session=None): """Get Dag details.""" - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) dag_model = DagModel.get_dagmodel(dag_id, session=session) title = "DAG Details" @@ -1258,7 +1258,7 @@ def rendered_templates(self, session): root = request.args.get('root', '') logging.info("Retrieving rendered templates.") - dag: DAG = current_app.dag_bag.get_dag(dag_id) + dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id) dag_run = dag.get_dagrun(execution_date=dttm, session=session) raw_task = dag.get_task(task_id).prepare_for_execution() @@ -1353,15 +1353,17 @@ def rendered_k8s(self, session: Session = NEW_SESSION): abort(404) dag_id = request.args.get('dag_id') task_id = request.args.get('task_id') + if task_id is None: + logging.warning("Task id not passed in the request") + abort(400) execution_date = request.args.get('execution_date') dttm = _safe_parse_datetime(execution_date) - form = DateTimeForm(data={'execution_date': dttm}) root = request.args.get('root', '') map_index = request.args.get('map_index', -1, type=int) logging.info("Retrieving rendered templates.") - dag: DAG = current_app.dag_bag.get_dag(dag_id) + dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id) task = dag.get_task(task_id) dag_run = dag.get_dagrun(execution_date=dttm, session=session) ti = dag_run.get_task_instance(task_id=task.task_id, map_index=map_index, session=session) @@ -1466,7 +1468,7 @@ def get_logs_with_metadata(self, session=None): ) try: - dag = current_app.dag_bag.get_dag(dag_id) + dag = 
get_airflow_app().dag_bag.get_dag(dag_id) if dag: ti.task = dag.get_task(ti.task_id) @@ -1597,7 +1599,7 @@ def task(self, session): map_index = request.args.get('map_index', -1, type=int) form = DateTimeForm(data={'execution_date': dttm}) root = request.args.get('root', '') - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if not dag or task_id not in dag.task_ids: flash(f"Task [{dag_id}.{task_id}] doesn't seem to exist at the moment", "error") @@ -1776,7 +1778,7 @@ def run(self, session=None): dag_run_id = request.form.get('dag_run_id') map_index = request.args.get('map_index', -1, type=int) origin = get_safe_url(request.form.get('origin')) - dag: DAG = current_app.dag_bag.get_dag(dag_id) + dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id) task = dag.get_task(task_id) ignore_all_deps = request.form.get('ignore_all_deps') == "true" @@ -1877,7 +1879,7 @@ def trigger(self, session=None): request_conf = request.values.get('conf') request_execution_date = request.values.get('execution_date', default=timezone.utcnow().isoformat()) is_dag_run_conf_overrides_params = conf.getboolean('core', 'dag_run_conf_overrides_params') - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) dag_orm = session.query(models.DagModel).filter(models.DagModel.dag_id == dag_id).first() if not dag_orm: flash(f"Cannot find dag {dag_id}") @@ -1978,7 +1980,7 @@ def trigger(self, session=None): state=State.QUEUED, conf=run_conf, external_trigger=True, - dag_hash=current_app.dag_bag.dags_hash.get(dag_id), + dag_hash=get_airflow_app().dag_bag.dags_hash.get(dag_id), run_id=run_id, ) except (ValueError, ParamValidationError) as ve: @@ -2060,7 +2062,7 @@ def clear(self): dag_id = request.form.get('dag_id') task_id = request.form.get('task_id') origin = get_safe_url(request.form.get('origin')) - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if 'map_index' not in request.form: map_indexes: Optional[List[int]] = None @@ -2121,7 +2123,7 @@ def dagrun_clear(self): dag_run_id = request.form.get('dag_run_id') confirmed = request.form.get('confirmed') == "true" - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) dr = dag.get_dagrun(run_id=dag_run_id) start_date = dr.logical_date end_date = dr.logical_date @@ -2145,7 +2147,7 @@ def dagrun_clear(self): @provide_session def blocked(self, session=None): """Mark Dag Blocked.""" - allowed_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user) + allowed_dag_ids = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) # Filter by post parameters selected_dag_ids = {unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id} @@ -2168,7 +2170,7 @@ def blocked(self, session=None): payload = [] for dag_id, active_dag_runs in dags: max_active_runs = 0 - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if dag: # TODO: Make max_active_runs a column so we can query for it directly max_active_runs = dag.max_active_runs @@ -2185,7 +2187,7 @@ def _mark_dagrun_state_as_failed(self, dag_id, dag_run_id, confirmed): if not dag_run_id: return {'status': 'error', 'message': 'Invalid dag_run_id'} - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: return {'status': 'error', 'message': f'Cannot find DAG: {dag_id}'} @@ -2203,7 +2205,7 @@ def _mark_dagrun_state_as_success(self, dag_id, dag_run_id, confirmed): if 
not dag_run_id: return {'status': 'error', 'message': 'Invalid dag_run_id'} - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: return {'status': 'error', 'message': f'Cannot find DAG: {dag_id}'} @@ -2221,7 +2223,7 @@ def _mark_dagrun_state_as_queued(self, dag_id: str, dag_run_id: str, confirmed: if not dag_run_id: return {'status': 'error', 'message': 'Invalid dag_run_id'} - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: return {'status': 'error', 'message': f'Cannot find DAG: {dag_id}'} @@ -2295,7 +2297,7 @@ def dagrun_details(self, session=None): dag_id = request.args.get("dag_id") run_id = request.args.get("run_id") - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) dag_run: Optional[DagRun] = ( session.query(DagRun).filter(DagRun.dag_id == dag_id, DagRun.run_id == run_id).one_or_none() ) @@ -2346,7 +2348,7 @@ def _mark_task_instance_state( past: bool, state: TaskInstanceState, ): - dag: DAG = current_app.dag_bag.get_dag(dag_id) + dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id) if not run_id: flash(f"Cannot mark tasks as {state}, seem that DAG {dag_id} has never run", "error") @@ -2394,7 +2396,7 @@ def confirm(self): past = to_boolean(args.get('past')) origin = origin or url_for('Airflow.index') - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: msg = f'DAG {dag_id} not found' return redirect_or_json(origin, msg, status='error', status_code=404) @@ -2583,7 +2585,7 @@ def tree(self): @provide_session def grid(self, dag_id, session=None): """Get Dag's grid view.""" - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) dag_model = DagModel.get_dagmodel(dag_id) if not dag: flash(f'DAG "{dag_id}" seems to be missing from DagBag.', "error") @@ -2703,7 +2705,7 @@ def _convert_to_date(session, column): else: return func.date(column) - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) dag_model = DagModel.get_dagmodel(dag_id) if not dag: flash(f'DAG "{dag_id}" seems to be missing from DagBag.', "error") @@ -2817,7 +2819,7 @@ def legacy_graph(self): @provide_session def graph(self, dag_id, session=None): """Get DAG as Graph.""" - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) dag_model = DagModel.get_dagmodel(dag_id) if not dag: flash(f'DAG "{dag_id}" seems to be missing.', "error") @@ -2929,7 +2931,7 @@ def duration(self, dag_id, session=None): default_dag_run = conf.getint('webserver', 'default_dag_run_display_number') dag_model = DagModel.get_dagmodel(dag_id) - dag: Optional[DAG] = current_app.dag_bag.get_dag(dag_id) + dag: Optional[DAG] = get_airflow_app().dag_bag.get_dag(dag_id) if dag is None: flash(f'DAG "{dag_id}" seems to be missing.', "error") return redirect(url_for('Airflow.index')) @@ -3081,7 +3083,7 @@ def legacy_tries(self): def tries(self, dag_id, session=None): """Shows all tries.""" default_dag_run = conf.getint('webserver', 'default_dag_run_display_number') - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) dag_model = DagModel.get_dagmodel(dag_id) base_date = request.args.get('base_date') num_runs = request.args.get('num_runs', default=default_dag_run, type=int) @@ -3171,7 +3173,7 @@ def legacy_landing_times(self): def landing_times(self, dag_id, session=None): """Shows landing times.""" 
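Looking back at the airflow/utils/jwt_signer.py hunk earlier in this patch: PyJWT 2 replaces the separate require_exp / require_iat / require_nbf switches with a single "require" list passed in options, which is why the verification options were rewritten. A small self-contained sketch of a token round trip under PyJWT 2; the key, audience and extra claim below are made up for illustration:

    from datetime import datetime, timedelta, timezone

    import jwt

    secret, audience = "not-a-real-secret", "example-audience"
    now = datetime.now(tz=timezone.utc)
    token = jwt.encode(
        {"aud": audience, "iat": now, "nbf": now, "exp": now + timedelta(seconds=30), "filename": "x.log"},
        secret,
        algorithm="HS512",
    )
    payload = jwt.decode(
        token,
        secret,
        algorithms=["HS512"],
        audience=audience,
        leeway=5,  # tolerate a small clock skew between the two parties
        # PyJWT 2 style: one list of required claims instead of require_exp/require_iat/require_nbf
        options={"verify_signature": True, "require": ["exp", "iat", "nbf"]},
    )
    assert payload["aud"] == audience
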
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number') - dag: DAG = current_app.dag_bag.get_dag(dag_id) + dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id) dag_model = DagModel.get_dagmodel(dag_id) base_date = request.args.get('base_date') num_runs = request.args.get('num_runs', default=default_dag_run, type=int) @@ -3288,7 +3290,7 @@ def legacy_gantt(self): @provide_session def gantt(self, dag_id, session=None): """Show GANTT chart.""" - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) dag_model = DagModel.get_dagmodel(dag_id) root = request.args.get('root') @@ -3414,9 +3416,8 @@ def extra_links(self, session: "Session" = NEW_SESSION): task_id = request.args.get('task_id') map_index = request.args.get('map_index', -1, type=int) execution_date = request.args.get('execution_date') - link_name = request.args.get('link_name') dttm = _safe_parse_datetime(execution_date) - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if not dag or task_id not in dag.task_ids: response = jsonify( @@ -3429,6 +3430,11 @@ def extra_links(self, session: "Session" = NEW_SESSION): return response task: "AbstractOperator" = dag.get_task(task_id) + link_name = request.args.get('link_name') + if link_name is None: + response = jsonify({'url': None, 'error': 'Link name not passed'}) + response.status_code = 400 + return response ti = ( session.query(TaskInstance) @@ -3466,7 +3472,7 @@ def extra_links(self, session: "Session" = NEW_SESSION): def task_instances(self): """Shows task instances.""" dag_id = request.args.get('dag_id') - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) dttm = request.args.get('execution_date') if dttm: @@ -3494,7 +3500,7 @@ def task_instances(self): def grid_data(self): """Returns grid data""" dag_id = request.args.get('dag_id') - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: response = jsonify({'error': f"can't find dag {dag_id}"}) @@ -3546,7 +3552,7 @@ def robots(self): of the risk associated with exposing Airflow to the public internet, however it does not address the real security risks associated with such a deployment. 
""" - return send_from_directory(current_app.static_folder, 'robots.txt') + return send_from_directory(get_airflow_app().static_folder, 'robots.txt') @expose('/audit_log') @auth.has_access( @@ -3558,7 +3564,7 @@ def robots(self): @provide_session def audit_log(self, session=None): dag_id = request.args.get('dag_id') - dag = current_app.dag_bag.get_dag(dag_id) + dag = get_airflow_app().dag_bag.get_dag(dag_id) included_events = conf.get('webserver', 'audit_view_included_events', fallback=None) excluded_events = conf.get('webserver', 'audit_view_excluded_events', fallback=None) @@ -3663,9 +3669,9 @@ class DagFilter(BaseFilter): """Filter using DagIDs""" def apply(self, query, func): - if current_app.appbuilder.sm.has_all_dags_access(g.user): + if get_airflow_app().appbuilder.sm.has_all_dags_access(g.user): return query - filter_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user) + filter_dag_ids = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) return query.filter(self.model.dag_id.in_(filter_dag_ids)) @@ -3688,7 +3694,7 @@ class AirflowPrivilegeVerifierModelView(AirflowModelView): @staticmethod def validate_dag_edit_access(item: Union[DagRun, TaskInstance]): """Validates whether the user has 'can_edit' access for this specific DAG.""" - if not current_app.appbuilder.sm.can_edit_dag(item.dag_id): + if not get_airflow_app().appbuilder.sm.can_edit_dag(item.dag_id): raise AirflowException(f"Access denied for dag_id {item.dag_id}") def pre_add(self, item: Union[DagRun, TaskInstance]): @@ -3719,7 +3725,7 @@ def check_dag_edit_acl_for_actions( items: Optional[Union[List[TaskInstance], List[DagRun], TaskInstance, DagRun]], *args, **kwargs, - ) -> None: + ) -> Callable: if items is None: dag_ids: Set[str] = set() elif isinstance(items, list): @@ -3734,7 +3740,7 @@ def check_dag_edit_acl_for_actions( ) for dag_id in dag_ids: - if not current_app.appbuilder.sm.can_edit_dag(dag_id): + if not get_airflow_app().appbuilder.sm.can_edit_dag(dag_id): flash(f"Access denied for dag_id {dag_id}", "danger") logging.warning("User %s tried to modify %s without having access.", g.user.username, dag_id) return redirect(self.get_default_url()) @@ -4337,7 +4343,9 @@ def fqueued_slots(self): def _can_create_variable() -> bool: - return current_app.appbuilder.sm.has_access(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_VARIABLE) + return get_airflow_app().appbuilder.sm.has_access( + permissions.ACTION_CAN_CREATE, permissions.RESOURCE_VARIABLE + ) class VariableModelView(AirflowModelView): @@ -4681,7 +4689,10 @@ def action_set_failed(self, drs: List[DagRun], session=None): for dr in session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all(): count += 1 altered_tis += set_dag_run_state_to_failed( - dag=current_app.dag_bag.get_dag(dr.dag_id), run_id=dr.run_id, commit=True, session=session + dag=get_airflow_app().dag_bag.get_dag(dr.dag_id), + run_id=dr.run_id, + commit=True, + session=session, ) altered_ti_count = len(altered_tis) flash(f"{count} dag runs and {altered_ti_count} task instances were set to failed") @@ -4706,7 +4717,10 @@ def action_set_success(self, drs: List[DagRun], session=None): for dr in session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all(): count += 1 altered_tis += set_dag_run_state_to_success( - dag=current_app.dag_bag.get_dag(dr.dag_id), run_id=dr.run_id, commit=True, session=session + dag=get_airflow_app().dag_bag.get_dag(dr.dag_id), + run_id=dr.run_id, + commit=True, + session=session, ) altered_ti_count = 
len(altered_tis) flash(f"{count} dag runs and {altered_ti_count} task instances were set to success") @@ -4726,7 +4740,7 @@ def action_clear(self, drs: List[DagRun], session=None): dag_to_tis: Dict[DAG, List[TaskInstance]] = {} for dr in session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all(): count += 1 - dag = current_app.dag_bag.get_dag(dr.dag_id) + dag = get_airflow_app().dag_bag.get_dag(dr.dag_id) tis_to_clear = dag_to_tis.setdefault(dag, []) tis_to_clear += dr.get_task_instances() @@ -5019,7 +5033,7 @@ def action_clear(self, task_instances, session=None): dag_to_tis = collections.defaultdict(list) for ti in task_instances: - dag = current_app.dag_bag.get_dag(ti.dag_id) + dag = get_airflow_app().dag_bag.get_dag(ti.dag_id) dag_to_tis[dag].append(ti) for dag, task_instances_list in dag_to_tis.items(): @@ -5135,7 +5149,7 @@ def autocomplete(self, session=None): dag_ids_query = dag_ids_query.filter(DagModel.is_paused) owners_query = owners_query.filter(DagModel.is_paused) - filter_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user) + filter_dag_ids = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) dag_ids_query = dag_ids_query.filter(DagModel.dag_id.in_(filter_dag_ids)) owners_query = owners_query.filter(DagModel.dag_id.in_(filter_dag_ids)) diff --git a/dev/breeze/README.md b/dev/breeze/README.md index 7794f25e4e8eb..14a9f089a4834 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -52,6 +52,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. --------------------------------------------------------------------------------------------------------- -Package config hash: 40b9b6908905e94c93809cca70c68c632731242798dba9cbe62473e965cb4e5d44eaaa817c5ce9334397f3794a350bc00e3cf319631a25c461a935a389191e7b +Package config hash: a80a853b2c32c284a68ccd6d468804b892a69f14d2ad1886bdaa892755cf6262660e2b9fc582bcae27ae478910055267a76edea2df658196198a0365150e93e5 --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/setup.cfg b/dev/breeze/setup.cfg index 9c7154ce52f2a..c974560561053 100644 --- a/dev/breeze/setup.cfg +++ b/dev/breeze/setup.cfg @@ -64,7 +64,7 @@ install_requires = pyyaml requests rich>=12.4.4 - rich_click + rich-click>=1.5 [options.packages.find] where=src diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index a5333a08ab0b6..caf43d47a409b 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -84,6 +84,7 @@ "--airflow-extras", "--use-packages-from-dist", "--package-format", + "--skip-constraints", "--debug", ], } @@ -511,6 +512,12 @@ def generate_constraints( @option_use_airflow_version @option_airflow_extras @option_airflow_constraints_reference +@click.option( + "--skip-constraints", + is_flag=True, + help="Do not use constraints when installing providers.", + envvar='SKIP_CONSTRAINTS', +) @option_use_packages_from_dist @option_installation_package_format @option_verbose @@ -522,6 +529,7 @@ def verify_provider_packages( dry_run: bool, use_airflow_version: Optional[str], airflow_constraints_reference: str, + skip_constraints: bool, airflow_extras: str, use_packages_from_dist: bool, debug: bool, @@ -538,6 +546,7 @@ def verify_provider_packages( airflow_extras=airflow_extras, 
airflow_constraints_reference=airflow_constraints_reference, use_packages_from_dist=use_packages_from_dist, + skip_constraints=skip_constraints, package_format=package_format, ) rebuild_or_pull_ci_image_if_needed(command_params=shell_params, dry_run=dry_run, verbose=verbose) diff --git a/dev/breeze/src/airflow_breeze/params/shell_params.py b/dev/breeze/src/airflow_breeze/params/shell_params.py index bb4ad3393ead3..b67d362186653 100644 --- a/dev/breeze/src/airflow_breeze/params/shell_params.py +++ b/dev/breeze/src/airflow_breeze/params/shell_params.py @@ -81,6 +81,7 @@ class ShellParams: postgres_version: str = ALLOWED_POSTGRES_VERSIONS[0] python: str = ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS[0] skip_environment_initialization: bool = False + skip_constraints: bool = False start_airflow: str = "false" use_airflow_version: Optional[str] = None use_packages_from_dist: bool = False diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py index 96b83b5ed7945..8af6010c40403 100644 --- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py @@ -571,6 +571,7 @@ def update_expected_environment_variables(env: Dict[str, str]) -> None: "POSTGRES_VERSION": "postgres_version", "SQLITE_URL": "sqlite_url", "START_AIRFLOW": "start_airflow", + "SKIP_CONSTRAINTS": "skip_constraints", "SKIP_ENVIRONMENT_INITIALIZATION": "skip_environment_initialization", "USE_AIRFLOW_VERSION": "use_airflow_version", "USE_PACKAGES_FROM_DIST": "use_packages_from_dist", diff --git a/dev/breeze/src/airflow_breeze/utils/recording.py b/dev/breeze/src/airflow_breeze/utils/recording.py index 0ec34edccac94..2fe9f5b5558de 100644 --- a/dev/breeze/src/airflow_breeze/utils/recording.py +++ b/dev/breeze/src/airflow_breeze/utils/recording.py @@ -53,10 +53,10 @@ def __init__(self, **kwargs): atexit.register(save_ouput_as_svg) click.rich_click.MAX_WIDTH = width_int - click.formatting.FORCED_WIDTH = width_int - 2 + click.formatting.FORCED_WIDTH = width_int - 2 # type: ignore[attr-defined] click.rich_click.COLOR_SYSTEM = "standard" # monkeypatch rich_click console to record help (rich_click does not allow passing extra args to console) - click.rich_click.Console = RecordingConsole + click.rich_click.Console = RecordingConsole # type: ignore[misc] if output_file_for_recording and not in_autocomplete(): diff --git a/dev/send_email.py b/dev/send_email.py index 91a35b97cc425..2d796eb80066b 100755 --- a/dev/send_email.py +++ b/dev/send_email.py @@ -83,8 +83,7 @@ def show_message(entity: str, message: str): """ Show message on the Command Line """ - width, _ = click.get_terminal_size() - + width, _ = click.get_terminal_size() # type: ignore[attr-defined] click.secho("-" * width, fg="blue") click.secho(f"{entity} Message:", fg="bright_red", bold=True) click.secho("-" * width, fg="blue") diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt deleted file mode 100644 index 0ea56c33a726b..0000000000000 --- a/images/breeze/output-commands-hash.txt +++ /dev/null @@ -1,5 +0,0 @@ - -# This file is automatically generated by pre-commit. If you have a conflict with this file -# Please do not solve it but run `breeze regenerate-command-images`. -# This command should fix the conflict and regenerate help images that you have conflict with. 
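The ``# type: ignore[attr-defined]`` comments added to ``dev/breeze/src/airflow_breeze/utils/recording.py`` and ``dev/send_email.py`` above silence mypy on Click attributes that newer releases stop declaring in their public typing (for example, ``click.get_terminal_size`` is deprecated in Click 8.0 and removed in 8.1). If those call sites were ever rewritten rather than silenced, the standard-library equivalent would be roughly the following (a hedged suggestion, not part of this patch):

    import shutil

    # shutil.get_terminal_size() yields (columns, lines), the same tuple shape
    # the old click.get_terminal_size() call in dev/send_email.py relied on.
    width, _ = shutil.get_terminal_size()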
-9139ef44b7f1ba24ddee50b71d3867c2 diff --git a/images/breeze/output-verify-provider-packages.svg b/images/breeze/output-verify-provider-packages.svg index 12853b46a203f..fbfd468ffafd1 100644 --- a/images/breeze/output-verify-provider-packages.svg +++ b/images/breeze/output-verify-provider-packages.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + - Command: verify-provider-packages + Command: verify-provider-packages - + - - -Usage: breeze verify-provider-packages [OPTIONS] - -Verifies if all provider code is following expectations for providers. - -╭─ Provider verification flags ────────────────────────────────────────────────────────────────────────────────────────╮ ---use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It can also be `none`,        -`wheel`, or `sdist` if Airflow should be removed, installed from wheel packages   -or sdist packages available in dist folder respectively. Implies --mount-sources -`remove`.                                                                         -(none | wheel | sdist | <airflow_version>)                                        ---airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to       -specify constraints for the installed version and to find newer dependencies      -(TEXT)                                                                            ---airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT) ---use-packages-from-distInstall all found packages (--package-format determines type) from 'dist' folder  -when entering breeze.                                                             ---package-formatFormat of packages that should be installed from dist.(wheel | sdist) -[default: wheel]                                       ---debugDrop user in shell instead of running the command. Useful for debugging. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze verify-provider-packages [OPTIONS] + +Verifies if all provider code is following expectations for providers. + +╭─ Provider verification flags ────────────────────────────────────────────────────────────────────────────────────────╮ +--use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It can also be `none`,        +`wheel`, or `sdist` if Airflow should be removed, installed from wheel packages   +or sdist packages available in dist folder respectively. Implies --mount-sources +`remove`.                                                                         +(none | wheel | sdist | <airflow_version>)                                        +--airflow-constraints-referenceConstraint reference to use. 
Useful with --use-airflow-version parameter to       +specify constraints for the installed version and to find newer dependencies      +(TEXT)                                                                            +--airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT) +--use-packages-from-distInstall all found packages (--package-format determines type) from 'dist' folder  +when entering breeze.                                                             +--package-formatFormat of packages that should be installed from dist.(wheel | sdist) +[default: wheel]                                       +--skip-constraintsDo not use constraints when installing providers. +--debugDrop user in shell instead of running the command. Useful for debugging. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/newsfragments/24399.significant.rst b/newsfragments/24399.significant.rst new file mode 100644 index 0000000000000..7f1833a8e8c7e --- /dev/null +++ b/newsfragments/24399.significant.rst @@ -0,0 +1,31 @@ +We've upgraded Flask Application Builder to a major version 4.*. + +Flask Application Builder is one of the important components of Airflow Webserver, as +it uses a lof of dependencies that are essential to run the webserver and integrate it +in enterprise environments - especially authentication. + +The FAB 4.* upgrades a number of dependencies to major releases, which upgrades them to versions +that have a number of security issues fixed. A lot of tests were performed to bring the dependencies +in a backwards-compatible way, however the dependencies themselves implement breaking changes in their +internals so it might be that some of those changes might impact the users in case they are using the +libraries for their onw purposes. + +One important change that you likely will need to apply to Oauth configuration is to add +``server_metadata_url`` or ``jwks_uri`` and you can read about it more +in `this issue `_. 
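As a concrete illustration of the ``server_metadata_url`` / ``jwks_uri`` note above, an OAuth provider entry in ``webserver_config.py`` might be extended roughly as follows under FAB 4 (the provider name, endpoints and tenant placeholder are illustrative assumptions, not values prescribed by this change):

    from flask_appbuilder.security.manager import AUTH_OAUTH

    AUTH_TYPE = AUTH_OAUTH
    OAUTH_PROVIDERS = [
        {
            "name": "azure",
            "icon": "fa-windows",
            "token_key": "access_token",
            "remote_app": {
                "client_id": "<client-id>",          # placeholder
                "client_secret": "<client-secret>",  # placeholder
                "request_token_url": None,
                # Newer authlib (pulled in by FAB 4) can discover endpoints and
                # signing keys from the provider's OIDC metadata document; an
                # explicit jwks_uri entry is the alternative.
                "server_metadata_url": (
                    "https://login.microsoftonline.com/<tenant>"
                    "/v2.0/.well-known/openid-configuration"
                ),
                "client_kwargs": {"scope": "openid profile email"},
            },
        },
    ]

Whether ``server_metadata_url`` or an explicit ``jwks_uri`` is the right addition depends on the identity provider; the issue referenced in the paragraph above discusses both options.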
+ +Here is the list of breaking changes in dependencies that comes together with FAB 4: + +* Flask from 1.X to 2.X `breaking changes `_ + +* flask-jwt-extended 3.X to 4.X `breaking changes: `_ + +* Jinja2 2.X to 3.X `breaking changes: `_ + +* Werkzeug 1.X to 2.X `breaking changes `_ + +* pyJWT 1.X to 2.X `breaking changes: `_ + +* Click 7.X to 8.X `breaking changes: `_ + +* itsdangerous 1.X to 2.X `breaking changes `_ diff --git a/scripts/ci/docker-compose/_docker.env b/scripts/ci/docker-compose/_docker.env index 4edc849b57b93..b33cfea3602e5 100644 --- a/scripts/ci/docker-compose/_docker.env +++ b/scripts/ci/docker-compose/_docker.env @@ -59,6 +59,7 @@ RUN_TESTS LIST_OF_INTEGRATION_TESTS_TO_RUN RUN_SYSTEM_TESTS START_AIRFLOW +SKIP_CONSTRAINTS SKIP_ENVIRONMENT_INITIALIZATION SKIP_SSH_SETUP TEST_TYPE diff --git a/scripts/ci/docker-compose/base.yml b/scripts/ci/docker-compose/base.yml index 48e4d3df9606e..c1285eda88193 100644 --- a/scripts/ci/docker-compose/base.yml +++ b/scripts/ci/docker-compose/base.yml @@ -72,6 +72,7 @@ services: - LIST_OF_INTEGRATION_TESTS_TO_RUN=${LIST_OF_INTEGRATION_TESTS_TO_RUN} - RUN_SYSTEM_TESTS=${RUN_SYSTEM_TESTS} - START_AIRFLOW=${START_AIRFLOW} + - SKIP_CONSTRAINTS=${SKIP_CONSTRAINTS} - SKIP_ENVIRONMENT_INITIALIZATION=${SKIP_ENVIRONMENT_INITIALIZATION} - SKIP_SSH_SETUP=${SKIP_SSH_SETUP} - TEST_TYPE=${TEST_TYPE} diff --git a/scripts/ci/docker-compose/devcontainer.env b/scripts/ci/docker-compose/devcontainer.env index 1c4b27b36af67..ae51b204436ac 100644 --- a/scripts/ci/docker-compose/devcontainer.env +++ b/scripts/ci/docker-compose/devcontainer.env @@ -57,6 +57,7 @@ RUN_TESTS="false" LIST_OF_INTEGRATION_TESTS_TO_RUN="" RUN_SYSTEM_TESTS="" START_AIRFLOW="false" +SKIP_CONSTRAINTS="false" SKIP_SSH_SETUP="true" SKIP_ENVIRONMENT_INITIALIZATION="false" TEST_TYPE= diff --git a/scripts/ci/pre_commit/pre_commit_check_2_1_compatibility.py b/scripts/ci/pre_commit/pre_commit_check_2_1_compatibility.py index 0d43959ba5332..c28d29d76dcdb 100755 --- a/scripts/ci/pre_commit/pre_commit_check_2_1_compatibility.py +++ b/scripts/ci/pre_commit/pre_commit_check_2_1_compatibility.py @@ -36,6 +36,8 @@ GET_ATTR_MATCHER = re.compile(r".*getattr\((ti|TI), ['\"]run_id['\"]\).*") TI_RUN_ID_MATCHER = re.compile(r".*(ti|TI)\.run_id.*") TRY_NUM_MATCHER = re.compile(r".*context.*\[[\"']try_number[\"']].*") +GET_MANDATORY_MATCHER = re.compile(r".*conf\.get_mandatory_value") +GET_AIRFLOW_APP_MATCHER = re.compile(r".*get_airflow_app\(\)") def _check_file(_file: Path): @@ -57,13 +59,13 @@ def _check_file(_file: Path): if "if ti_key is not None:" not in lines[index - 1]: errors.append( f"[red]In {_file}:{index} there is a forbidden construct " - f"(Airflow 2.3.0 only):[/]\n\n" + "(Airflow 2.3.0 only):[/]\n\n" f"{lines[index-1]}\n{lines[index]}\n\n" - f"[yellow]When you use XCom.get_value( in providers, it should be in the form:[/]\n\n" - f"if ti_key is not None:\n" - f" value = XCom.get_value(...., ti_key=ti_key)\n\n" - f"See: https://airflow.apache.org/docs/apache-airflow-providers/" - f"howto/create-update-providers.html#using-providers-with-dynamic-task-mapping\n" + "[yellow]When you use XCom.get_value( in providers, it should be in the form:[/]\n\n" + "if ti_key is not None:\n" + " value = XCom.get_value(...., ti_key=ti_key)\n\n" + "See: https://airflow.apache.org/docs/apache-airflow-providers/" + "howto/create-update-providers.html#using-providers-with-dynamic-task-mapping\n" ) if "timezone.coerce_datetime" in line: errors.append( @@ -76,19 +78,37 @@ def _check_file(_file: Path): if 
"ti.map_index" in line: errors.append( f"[red]In {_file}:{index} there is a forbidden construct " - f"(Airflow 2.3+ only):[/]\n\n" + "(Airflow 2.3+ only):[/]\n\n" f"{lines[index]}\n\n" - f"[yellow]You should not use map_index field in providers " - f"as it is not available in Airflow 2.2[/]" + "[yellow]You should not use map_index field in providers " + "as it is only available in Airflow 2.3+[/]" ) if TRY_NUM_MATCHER.match(line): errors.append( f"[red]In {_file}:{index} there is a forbidden construct " - f"(Airflow 2.3+ only):[/]\n\n" + "(Airflow 2.3+ only):[/]\n\n" f"{lines[index]}\n\n" - f"[yellow]You should not expect try_number field for context in providers " - f"as it is not available in Airflow 2.2[/]" + "[yellow]You should not expect try_number field for context in providers " + "as it is only available in Airflow 2.3+[/]" + ) + + if GET_MANDATORY_MATCHER.match(line): + errors.append( + f"[red]In {_file}:{index} there is a forbidden construct " + "(Airflow 2.3+ only):[/]\n\n" + f"{lines[index]}\n\n" + "[yellow]You should not use conf.get_mandatory_value in providers " + "as it is only available in Airflow 2.3+[/]" + ) + + if GET_AIRFLOW_APP_MATCHER.match(line): + errors.append( + f"[red]In {_file}:{index} there is a forbidden construct " + "(Airflow 2.4+ only):[/]\n\n" + f"{lines[index]}\n\n" + "[yellow]You should not use airflow.utils.airflow_flask_app.get_airflow_app() in providers " + "as it is not available in Airflow 2.4+. Use current_app instead.[/]" ) diff --git a/scripts/docker/entrypoint_ci.sh b/scripts/docker/entrypoint_ci.sh index f5198a556c21c..2994604fc9274 100755 --- a/scripts/docker/entrypoint_ci.sh +++ b/scripts/docker/entrypoint_ci.sh @@ -94,9 +94,15 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then echo "${COLOR_BLUE}Uninstalling airflow and providers" echo uninstall_airflow_and_providers - echo "${COLOR_BLUE}Install airflow from wheel package with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" - echo - install_airflow_from_wheel "${AIRFLOW_EXTRAS}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" + if [[ ${SKIP_CONSTRAINTS,,=} == "true" ]]; then + echo "${COLOR_BLUE}Install airflow from wheel package with extras: '${AIRFLOW_EXTRAS}' with no constraints.${COLOR_RESET}" + echo + install_airflow_from_wheel "${AIRFLOW_EXTRAS}" "none" + else + echo "${COLOR_BLUE}Install airflow from wheel package with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" + echo + install_airflow_from_wheel "${AIRFLOW_EXTRAS}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" + fi uninstall_providers elif [[ ${USE_AIRFLOW_VERSION} == "sdist" ]]; then echo @@ -104,9 +110,15 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then echo uninstall_airflow_and_providers echo - echo "${COLOR_BLUE}Install airflow from sdist package with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" - echo - install_airflow_from_sdist "${AIRFLOW_EXTRAS}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" + if [[ ${SKIP_CONSTRAINTS,,=} == "true" ]]; then + echo "${COLOR_BLUE}Install airflow from sdist package with extras: '${AIRFLOW_EXTRAS}' with no constraints.${COLOR_RESET}" + echo + install_airflow_from_sdist "${AIRFLOW_EXTRAS}" "none" + else + echo "${COLOR_BLUE}Install airflow from sdist package with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" + echo + install_airflow_from_sdist "${AIRFLOW_EXTRAS}" 
"${AIRFLOW_CONSTRAINTS_REFERENCE}" + fi uninstall_providers else echo @@ -114,9 +126,15 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then echo uninstall_airflow_and_providers echo - echo "${COLOR_BLUE}Install released airflow from PyPI with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" - echo - install_released_airflow_version "${USE_AIRFLOW_VERSION}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" + if [[ ${SKIP_CONSTRAINTS,,=} == "true" ]]; then + echo "${COLOR_BLUE}Install released airflow from PyPI with extras: '${AIRFLOW_EXTRAS}' with no constraints.${COLOR_RESET}" + echo + install_released_airflow_version "${USE_AIRFLOW_VERSION}" "none" + else + echo "${COLOR_BLUE}Install released airflow from PyPI with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" + echo + install_released_airflow_version "${USE_AIRFLOW_VERSION}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" + fi fi if [[ ${USE_PACKAGES_FROM_DIST=} == "true" ]]; then echo diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh index d6a637e5c348b..66f2e6b083499 100644 --- a/scripts/in_container/_in_container_utils.sh +++ b/scripts/in_container/_in_container_utils.sh @@ -224,8 +224,12 @@ function install_airflow_from_wheel() { >&2 echo exit 4 fi - pip install "${airflow_package}${extras}" --constraint \ - "https://raw.githubusercontent.com/apache/airflow/${constraints_reference}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" + if [[ ${constraints_reference} == "none" ]]; then + pip install "${airflow_package}${extras}" + else + pip install "${airflow_package}${extras}" --constraint \ + "https://raw.githubusercontent.com/apache/airflow/${constraints_reference}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" + fi } function install_airflow_from_sdist() { @@ -250,8 +254,12 @@ function install_airflow_from_sdist() { >&2 echo exit 4 fi - pip install "${airflow_package}${extras}" --constraint \ - "https://raw.githubusercontent.com/apache/airflow/${constraints_reference}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" + if [[ ${constraints_reference} == "none" ]]; then + pip install "${airflow_package}${extras}" + else + pip install "${airflow_package}${extras}" --constraint \ + "https://raw.githubusercontent.com/apache/airflow/${constraints_reference}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" + fi } function uninstall_airflow() { @@ -278,17 +286,20 @@ function uninstall_airflow_and_providers() { function install_released_airflow_version() { local version="${1}" - echo - echo "Installing released ${version} version of airflow with extras: ${AIRFLOW_EXTRAS} and constraints constraints-${version}" - echo + local constraints_reference + constraints_reference="${2:-}" rm -rf "${AIRFLOW_SOURCES}"/*.egg-info if [[ ${AIRFLOW_EXTRAS} != "" ]]; then BRACKETED_AIRFLOW_EXTRAS="[${AIRFLOW_EXTRAS}]" else BRACKETED_AIRFLOW_EXTRAS="" fi - pip install "apache-airflow${BRACKETED_AIRFLOW_EXTRAS}==${version}" \ - --constraint "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/constraints-${version}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" + if [[ ${constraints_reference} == "none" ]]; then + pip install "${airflow_package}${extras}" + else + pip install "apache-airflow${BRACKETED_AIRFLOW_EXTRAS}==${version}" \ + --constraint "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/constraints-${version}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" + fi } function 
install_local_airflow_with_eager_upgrade() { diff --git a/setup.cfg b/setup.cfg index 1512a6201c46e..a32326cee29de 100644 --- a/setup.cfg +++ b/setup.cfg @@ -101,50 +101,31 @@ install_requires = cryptography>=0.9.3 deprecated>=1.2.13 dill>=0.2.2 - # Flask and all related libraries are limited to below 2.0.0 because we expect it to introduce - # Serious breaking changes. Flask 2.0 has been introduced in May 2021 and 2.0.2 version is available - # now (Feb 2022): TODO: we should attempt to migrate to Flask 2 and all below flask libraries soon. - flask>=1.1.0, <2.0 + flask>=2.0 # We are tightly coupled with FAB version because we vendored in part of FAB code related to security manager # This is done as part of preparation to removing FAB as dependency, but we are not ready for it yet # Every time we update FAB version here, please make sure that you review the classes and models in # `airflow/www/fab_security` with their upstream counterparts. In particular, make sure any breaking changes, # for example any new methods, are accounted for. - flask-appbuilder==3.4.5 - flask-caching>=1.5.0, <2.0.0 - flask-login>=0.3, <0.5 - # Strict upper-bound on the latest release of flask-session, - # as any schema changes will require a migration. - flask-session>=0.3.1, <=0.4.0 - flask-wtf>=0.14.3, <0.15 + flask-appbuilder==4.1.1 + flask-caching>=1.5.0 + flask-login>=0.5 + flask-session>=0.4.0 + flask-wtf>=0.14.3 graphviz>=0.12 gunicorn>=20.1.0 httpx importlib_metadata>=1.7;python_version<"3.9" importlib_resources>=5.2;python_version<"3.9" - # Logging is broken with itsdangerous > 2 - likely due to changed serializing support - # https://itsdangerous.palletsprojects.com/en/2.0.x/changes/#version-2-0-0 - # itsdangerous 2 has been released in May 2020 - # TODO: we should attempt to upgrade to line 2 of itsdangerous - itsdangerous>=1.1.0, <2.0 - # Jinja2 3.1 will remove the 'autoescape' and 'with' extensions, which would - # break Flask 1.x, so we limit this for future compatibility. Remove this - # when bumping Flask to >=2. - jinja2>=2.10.1,<3.1 - # Because connexion upper-bound is 5.0.0 and we depend on connexion, - # we pin to the same upper-bound as connexion. - jsonschema>=3.2.0, <5.0 + itsdangerous>=2.0 + jinja2>=2.10.1 + jsonschema>=3.2.0 lazy-object-proxy linkify-it-py>=2.0.0 lockfile>=0.12.2 markdown>=3.0 - # Markupsafe 2.1.0 breaks with error: import name 'soft_unicode' from 'markupsafe'. - # This should be removed when either this issue is closed: - # https://github.com/pallets/markupsafe/issues/284 - # or when we will be able to upgrade JINJA to newer version (currently limited due to Flask and - # Flask Application Builder) markdown-it-py>=2.1.0 - markupsafe>=1.1.1,<2.1.0 + markupsafe>=1.1.1 marshmallow-oneofschema>=2.0.1 mdit-py-plugins>=0.3.0 packaging>=14.0 @@ -153,8 +134,7 @@ install_requires = pluggy>=1.0 psutil>=4.2.0 pygments>=2.0.1 - # python daemon crashes with 'socket operation on non-socket' for python 3.8+ in version < 2.2.4 - # https://pagure.io/python-daemon/issue/34 + pyjwt>=2.0.0 python-daemon>=2.2.4 python-dateutil>=2.3 python-nvd3>=0.15.0 @@ -172,10 +152,7 @@ install_requires = termcolor>=1.1.0 typing-extensions>=3.7.4 unicodecsv>=0.14.1 - # Werkzeug is known to cause breaking changes and it is very closely tied with FlaskAppBuilder and other - # Flask dependencies and the limit to 1.* line should be reviewed when we upgrade Flask and remove - # FlaskAppBuilder. 
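The new ``pyjwt>=2.0.0`` pin above corresponds to the pyJWT 1.X to 2.X entry in the newsfragment's breaking-changes list: ``jwt.encode`` now returns ``str`` instead of ``bytes``, and ``jwt.decode`` requires an explicit ``algorithms`` argument. A minimal illustration (secret and payload are made up for the example):

    import jwt

    # pyJWT 2.x returns a str here; 1.x returned bytes
    token = jwt.encode({"sub": "airflow"}, "not-a-real-secret", algorithm="HS512")

    # pyJWT 2.x makes algorithms=[...] mandatory; 1.x would fall back to the header
    payload = jwt.decode(token, "not-a-real-secret", algorithms=["HS512"])
    assert payload == {"sub": "airflow"}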
- werkzeug~=1.0, >=1.0.1 + werkzeug>=2.0 [options.packages.find] include = diff --git a/setup.py b/setup.py index f2cce10ed150e..16fcb73102544 100644 --- a/setup.py +++ b/setup.py @@ -617,16 +617,6 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'flake8-implicit-str-concat', 'flaky', 'freezegun', - # Github3 version 3.1.2 requires PyJWT>=2.3.0 which clashes with Flask App Builder where PyJWT is <2.0.0 - # Actually GitHub3.1.0 already introduced PyJWT>=2.3.0 but so far `pip` was able to resolve it without - # getting into a long backtracking loop and figure out that github3 3.0.0 version is the right version - # similarly limiting it to 3.1.2 causes pip not to enter the backtracking loop. Apparently when there - # are 3 versions with PyJWT>=2.3.0 (3.1.0, 3.1.1 an 3.1.2) pip enters into backtrack loop and fails - # to resolve that github3 3.0.0 is the right version to use. - # This limitation could be removed if PyJWT limitation < 2.0.0 is dropped from FAB or when - # pip resolution is improved to handle the case. The issue which describes this PIP behaviour - # and hopefully allowing to improve it is tracked in https://github.com/pypa/pip/issues/10924 - 'github3.py<3.1.0', 'gitpython', 'ipdb', 'jira', @@ -660,7 +650,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'qds-sdk>=1.9.6', 'pytest-httpx', 'requests_mock', - 'rich_click', + 'rich-click>=1.5', 'semver', 'towncrier', 'twine', diff --git a/tests/api_connexion/endpoints/test_dag_endpoint.py b/tests/api_connexion/endpoints/test_dag_endpoint.py index d95d4c38549df..09ef3c28ae050 100644 --- a/tests/api_connexion/endpoints/test_dag_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_endpoint.py @@ -19,12 +19,10 @@ from datetime import datetime import pytest -from itsdangerous import URLSafeSerializer from parameterized import parameterized from airflow import DAG from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP -from airflow.configuration import conf from airflow.models import DagBag, DagModel from airflow.models.serialized_dag import SerializedDagModel from airflow.operators.empty import EmptyOperator @@ -34,8 +32,12 @@ from tests.test_utils.config import conf_vars from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags -SERIALIZER = URLSafeSerializer(conf.get('webserver', 'secret_key')) -FILE_TOKEN = SERIALIZER.dumps(__file__) + +@pytest.fixture() +def current_file_token(url_safe_serializer) -> str: + return url_safe_serializer.dumps(__file__) + + DAG_ID = "test_dag" TASK_ID = "op1" DAG2_ID = "test_dag2" @@ -246,7 +248,7 @@ def test_should_respond_403_with_granular_access_for_different_dag(self): class TestGetDagDetails(TestDagEndpoint): - def test_should_respond_200(self): + def test_should_respond_200(self, current_file_token): response = self.client.get( f"/api/v1/dags/{self.dag_id}/details", environ_overrides={'REMOTE_USER': "test"} ) @@ -262,7 +264,7 @@ def test_should_respond_200(self): "description": None, "doc_md": "details", "fileloc": __file__, - "file_token": FILE_TOKEN, + "file_token": current_file_token, "is_paused": None, "is_active": None, "is_subdag": False, @@ -294,7 +296,7 @@ def test_should_respond_200(self): } assert response.json == expected - def test_should_response_200_with_doc_md_none(self): + def test_should_response_200_with_doc_md_none(self, current_file_token): response = self.client.get( f"/api/v1/dags/{self.dag2_id}/details", environ_overrides={'REMOTE_USER': "test"} ) @@ -310,7 +312,7 
@@ def test_should_response_200_with_doc_md_none(self): "description": None, "doc_md": None, "fileloc": __file__, - "file_token": FILE_TOKEN, + "file_token": current_file_token, "is_paused": None, "is_active": None, "is_subdag": False, @@ -335,7 +337,7 @@ def test_should_response_200_with_doc_md_none(self): } assert response.json == expected - def test_should_response_200_for_null_start_date(self): + def test_should_response_200_for_null_start_date(self, current_file_token): response = self.client.get( f"/api/v1/dags/{self.dag3_id}/details", environ_overrides={'REMOTE_USER': "test"} ) @@ -351,7 +353,7 @@ def test_should_response_200_for_null_start_date(self): "description": None, "doc_md": None, "fileloc": __file__, - "file_token": FILE_TOKEN, + "file_token": current_file_token, "is_paused": None, "is_active": None, "is_subdag": False, @@ -376,7 +378,7 @@ def test_should_response_200_for_null_start_date(self): } assert response.json == expected - def test_should_respond_200_serialized(self): + def test_should_respond_200_serialized(self, current_file_token): # Get the dag out of the dagbag before we patch it to an empty one SerializedDagModel.write_dag(self.app.dag_bag.get_dag(self.dag_id)) @@ -395,7 +397,7 @@ def test_should_respond_200_serialized(self): "description": None, "doc_md": "details", "fileloc": __file__, - "file_token": FILE_TOKEN, + "file_token": current_file_token, "is_paused": None, "is_active": None, "is_subdag": False, @@ -449,7 +451,7 @@ def test_should_respond_200_serialized(self): 'description': None, 'doc_md': 'details', 'fileloc': __file__, - "file_token": FILE_TOKEN, + "file_token": current_file_token, 'is_paused': None, "is_active": None, 'is_subdag': False, @@ -496,7 +498,7 @@ def test_should_raise_404_when_dag_is_not_found(self): class TestGetDags(TestDagEndpoint): @provide_session - def test_should_respond_200(self, session): + def test_should_respond_200(self, session, url_safe_serializer): self._create_dag_models(2) self._create_deactivated_dag() @@ -504,8 +506,8 @@ def test_should_respond_200(self, session): assert len(dags_query.all()) == 3 response = self.client.get("api/v1/dags", environ_overrides={'REMOTE_USER': "test"}) - file_token = SERIALIZER.dumps("/tmp/dag_1.py") - file_token2 = SERIALIZER.dumps("/tmp/dag_2.py") + file_token = url_safe_serializer.dumps("/tmp/dag_1.py") + file_token2 = url_safe_serializer.dumps("/tmp/dag_2.py") assert response.status_code == 200 assert { @@ -576,11 +578,11 @@ def test_should_respond_200(self, session): "total_entries": 2, } == response.json - def test_only_active_true_returns_active_dags(self): + def test_only_active_true_returns_active_dags(self, url_safe_serializer): self._create_dag_models(1) self._create_deactivated_dag() response = self.client.get("api/v1/dags?only_active=True", environ_overrides={'REMOTE_USER': "test"}) - file_token = SERIALIZER.dumps("/tmp/dag_1.py") + file_token = url_safe_serializer.dumps("/tmp/dag_1.py") assert response.status_code == 200 assert { "dags": [ @@ -619,12 +621,12 @@ def test_only_active_true_returns_active_dags(self): "total_entries": 1, } == response.json - def test_only_active_false_returns_all_dags(self): + def test_only_active_false_returns_all_dags(self, url_safe_serializer): self._create_dag_models(1) self._create_deactivated_dag() response = self.client.get("api/v1/dags?only_active=False", environ_overrides={'REMOTE_USER': "test"}) - file_token = SERIALIZER.dumps("/tmp/dag_1.py") - file_token_2 = SERIALIZER.dumps("/tmp/dag_del_1.py") + file_token = 
url_safe_serializer.dumps("/tmp/dag_1.py") + file_token_2 = url_safe_serializer.dumps("/tmp/dag_del_1.py") assert response.status_code == 200 assert { "dags": [ @@ -819,10 +821,8 @@ def test_should_respond_403_unauthorized(self): class TestPatchDag(TestDagEndpoint): - - file_token = SERIALIZER.dumps("/tmp/dag_1.py") - - def test_should_respond_200_on_patch_is_paused(self): + def test_should_respond_200_on_patch_is_paused(self, url_safe_serializer): + file_token = url_safe_serializer.dumps("/tmp/dag_1.py") dag_model = self._create_dag_model() response = self.client.patch( f"/api/v1/dags/{dag_model.dag_id}", @@ -832,12 +832,11 @@ def test_should_respond_200_on_patch_is_paused(self): environ_overrides={'REMOTE_USER': "test"}, ) assert response.status_code == 200 - expected_response = { "dag_id": "TEST_DAG_1", "description": None, "fileloc": "/tmp/dag_1.py", - "file_token": self.file_token, + "file_token": file_token, "is_paused": False, "is_active": False, "is_subdag": False, @@ -918,7 +917,8 @@ def test_should_raises_401_unauthenticated(self): assert_401(response) - def test_should_respond_200_with_update_mask(self): + def test_should_respond_200_with_update_mask(self, url_safe_serializer): + file_token = url_safe_serializer.dumps("/tmp/dag_1.py") dag_model = self._create_dag_model() payload = { "is_paused": False, @@ -934,7 +934,7 @@ def test_should_respond_200_with_update_mask(self): "dag_id": "TEST_DAG_1", "description": None, "fileloc": "/tmp/dag_1.py", - "file_token": self.file_token, + "file_token": file_token, "is_paused": False, "is_active": False, "is_subdag": False, @@ -1006,12 +1006,10 @@ def test_should_respond_403_unauthorized(self): class TestPatchDags(TestDagEndpoint): - - file_token = SERIALIZER.dumps("/tmp/dag_1.py") - file_token2 = SERIALIZER.dumps("/tmp/dag_2.py") - @provide_session - def test_should_respond_200_on_patch_is_paused(self, session): + def test_should_respond_200_on_patch_is_paused(self, session, url_safe_serializer): + file_token = url_safe_serializer.dumps("/tmp/dag_1.py") + file_token2 = url_safe_serializer.dumps("/tmp/dag_2.py") self._create_dag_models(2) self._create_deactivated_dag() @@ -1033,7 +1031,7 @@ def test_should_respond_200_on_patch_is_paused(self, session): "dag_id": "TEST_DAG_1", "description": None, "fileloc": "/tmp/dag_1.py", - "file_token": self.file_token, + "file_token": file_token, "is_paused": False, "is_active": True, "is_subdag": False, @@ -1064,7 +1062,7 @@ def test_should_respond_200_on_patch_is_paused(self, session): "dag_id": "TEST_DAG_2", "description": None, "fileloc": "/tmp/dag_2.py", - "file_token": self.file_token2, + "file_token": file_token2, "is_paused": False, "is_active": True, "is_subdag": False, @@ -1095,7 +1093,8 @@ def test_should_respond_200_on_patch_is_paused(self, session): "total_entries": 2, } == response.json - def test_only_active_true_returns_active_dags(self): + def test_only_active_true_returns_active_dags(self, url_safe_serializer): + file_token = url_safe_serializer.dumps("/tmp/dag_1.py") self._create_dag_models(1) self._create_deactivated_dag() response = self.client.patch( @@ -1112,7 +1111,7 @@ def test_only_active_true_returns_active_dags(self): "dag_id": "TEST_DAG_1", "description": None, "fileloc": "/tmp/dag_1.py", - "file_token": self.file_token, + "file_token": file_token, "is_paused": False, "is_active": True, "is_subdag": False, @@ -1143,7 +1142,8 @@ def test_only_active_true_returns_active_dags(self): "total_entries": 1, } == response.json - def test_only_active_false_returns_all_dags(self): + 
def test_only_active_false_returns_all_dags(self, url_safe_serializer): + file_token = url_safe_serializer.dumps("/tmp/dag_1.py") self._create_dag_models(1) self._create_deactivated_dag() response = self.client.patch( @@ -1154,7 +1154,7 @@ def test_only_active_false_returns_all_dags(self): environ_overrides={'REMOTE_USER': "test"}, ) - file_token_2 = SERIALIZER.dumps("/tmp/dag_del_1.py") + file_token_2 = url_safe_serializer.dumps("/tmp/dag_del_1.py") assert response.status_code == 200 assert { "dags": [ @@ -1162,7 +1162,7 @@ def test_only_active_false_returns_all_dags(self): "dag_id": "TEST_DAG_1", "description": None, "fileloc": "/tmp/dag_1.py", - "file_token": self.file_token, + "file_token": file_token, "is_paused": False, "is_active": True, "is_subdag": False, @@ -1399,7 +1399,9 @@ def test_should_respond_403_unauthorized(self): assert response.status_code == 403 - def test_should_respond_200_and_pause_dags(self): + def test_should_respond_200_and_pause_dags(self, url_safe_serializer): + file_token = url_safe_serializer.dumps("/tmp/dag_1.py") + file_token2 = url_safe_serializer.dumps("/tmp/dag_2.py") self._create_dag_models(2) response = self.client.patch( @@ -1417,7 +1419,7 @@ def test_should_respond_200_and_pause_dags(self): "dag_id": "TEST_DAG_1", "description": None, "fileloc": "/tmp/dag_1.py", - "file_token": self.file_token, + "file_token": file_token, "is_paused": True, "is_active": True, "is_subdag": False, @@ -1448,7 +1450,7 @@ def test_should_respond_200_and_pause_dags(self): "dag_id": "TEST_DAG_2", "description": None, "fileloc": "/tmp/dag_2.py", - "file_token": self.file_token2, + "file_token": file_token2, "is_paused": True, "is_active": True, "is_subdag": False, @@ -1480,9 +1482,10 @@ def test_should_respond_200_and_pause_dags(self): } == response.json @provide_session - def test_should_respond_200_and_pause_dag_pattern(self, session): + def test_should_respond_200_and_pause_dag_pattern(self, session, url_safe_serializer): + file_token = url_safe_serializer.dumps("/tmp/dag_1.py") self._create_dag_models(10) - file_token10 = SERIALIZER.dumps("/tmp/dag_10.py") + file_token10 = url_safe_serializer.dumps("/tmp/dag_10.py") response = self.client.patch( "/api/v1/dags?dag_id_pattern=TEST_DAG_1", @@ -1499,7 +1502,7 @@ def test_should_respond_200_and_pause_dag_pattern(self, session): "dag_id": "TEST_DAG_1", "description": None, "fileloc": "/tmp/dag_1.py", - "file_token": self.file_token, + "file_token": file_token, "is_paused": True, "is_active": True, "is_subdag": False, diff --git a/tests/api_connexion/endpoints/test_dag_source_endpoint.py b/tests/api_connexion/endpoints/test_dag_source_endpoint.py index 5fec9f27989c9..cb8b5a9c11e57 100644 --- a/tests/api_connexion/endpoints/test_dag_source_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_source_endpoint.py @@ -19,10 +19,8 @@ from typing import Optional import pytest -from itsdangerous import URLSafeSerializer from airflow import DAG -from airflow.configuration import conf from airflow.models import DagBag from airflow.security import permissions from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user @@ -73,15 +71,14 @@ def _get_dag_file_docstring(fileloc: str) -> Optional[str]: docstring = ast.get_docstring(module) return docstring - def test_should_respond_200_text(self): - serializer = URLSafeSerializer(conf.get('webserver', 'SECRET_KEY')) + def test_should_respond_200_text(self, url_safe_serializer): dagbag = DagBag(dag_folder=EXAMPLE_DAG_FILE) dagbag.sync_to_db() first_dag: DAG = 
next(iter(dagbag.dags.values())) dag_docstring = self._get_dag_file_docstring(first_dag.fileloc) - url = f"/api/v1/dagSources/{serializer.dumps(first_dag.fileloc)}" + url = f"/api/v1/dagSources/{url_safe_serializer.dumps(first_dag.fileloc)}" response = self.client.get( url, headers={"Accept": "text/plain"}, environ_overrides={'REMOTE_USER': "test"} ) @@ -90,14 +87,13 @@ def test_should_respond_200_text(self): assert dag_docstring in response.data.decode() assert 'text/plain' == response.headers['Content-Type'] - def test_should_respond_200_json(self): - serializer = URLSafeSerializer(conf.get('webserver', 'SECRET_KEY')) + def test_should_respond_200_json(self, url_safe_serializer): dagbag = DagBag(dag_folder=EXAMPLE_DAG_FILE) dagbag.sync_to_db() first_dag: DAG = next(iter(dagbag.dags.values())) dag_docstring = self._get_dag_file_docstring(first_dag.fileloc) - url = f"/api/v1/dagSources/{serializer.dumps(first_dag.fileloc)}" + url = f"/api/v1/dagSources/{url_safe_serializer.dumps(first_dag.fileloc)}" response = self.client.get( url, headers={"Accept": 'application/json'}, environ_overrides={'REMOTE_USER': "test"} ) @@ -106,13 +102,12 @@ def test_should_respond_200_json(self): assert dag_docstring in response.json['content'] assert 'application/json' == response.headers['Content-Type'] - def test_should_respond_406(self): - serializer = URLSafeSerializer(conf.get('webserver', 'SECRET_KEY')) + def test_should_respond_406(self, url_safe_serializer): dagbag = DagBag(dag_folder=EXAMPLE_DAG_FILE) dagbag.sync_to_db() first_dag: DAG = next(iter(dagbag.dags.values())) - url = f"/api/v1/dagSources/{serializer.dumps(first_dag.fileloc)}" + url = f"/api/v1/dagSources/{url_safe_serializer.dumps(first_dag.fileloc)}" response = self.client.get( url, headers={"Accept": 'image/webp'}, environ_overrides={'REMOTE_USER': "test"} ) @@ -128,27 +123,25 @@ def test_should_respond_404(self): assert 404 == response.status_code - def test_should_raises_401_unauthenticated(self): - serializer = URLSafeSerializer(conf.get('webserver', 'SECRET_KEY')) + def test_should_raises_401_unauthenticated(self, url_safe_serializer): dagbag = DagBag(dag_folder=EXAMPLE_DAG_FILE) dagbag.sync_to_db() first_dag: DAG = next(iter(dagbag.dags.values())) response = self.client.get( - f"/api/v1/dagSources/{serializer.dumps(first_dag.fileloc)}", + f"/api/v1/dagSources/{url_safe_serializer.dumps(first_dag.fileloc)}", headers={"Accept": "text/plain"}, ) assert_401(response) - def test_should_raise_403_forbidden(self): - serializer = URLSafeSerializer(conf.get('webserver', 'SECRET_KEY')) + def test_should_raise_403_forbidden(self, url_safe_serializer): dagbag = DagBag(dag_folder=EXAMPLE_DAG_FILE) dagbag.sync_to_db() first_dag: DAG = next(iter(dagbag.dags.values())) response = self.client.get( - f"/api/v1/dagSources/{serializer.dumps(first_dag.fileloc)}", + f"/api/v1/dagSources/{url_safe_serializer.dumps(first_dag.fileloc)}", headers={"Accept": "text/plain"}, environ_overrides={'REMOTE_USER': "test_no_permissions"}, ) diff --git a/tests/api_connexion/endpoints/test_xcom_endpoint.py b/tests/api_connexion/endpoints/test_xcom_endpoint.py index 7c4452dc2bb63..efcba3271188d 100644 --- a/tests/api_connexion/endpoints/test_xcom_endpoint.py +++ b/tests/api_connexion/endpoints/test_xcom_endpoint.py @@ -185,7 +185,7 @@ def test_should_respond_200(self): response_data = response.json for xcom_entry in response_data['xcom_entries']: xcom_entry['timestamp'] = "TIMESTAMP" - assert response.json == { + assert response_data == { 'xcom_entries': [ { 'dag_id': 
dag_id, @@ -227,7 +227,7 @@ def test_should_respond_200_with_tilde_and_access_to_all_dags(self): response_data = response.json for xcom_entry in response_data['xcom_entries']: xcom_entry['timestamp'] = "TIMESTAMP" - assert response.json == { + assert response_data == { 'xcom_entries': [ { 'dag_id': dag_id_1, @@ -283,7 +283,7 @@ def test_should_respond_200_with_tilde_and_granular_dag_access(self): response_data = response.json for xcom_entry in response_data['xcom_entries']: xcom_entry['timestamp'] = "TIMESTAMP" - assert response.json == { + assert response_data == { 'xcom_entries': [ { 'dag_id': dag_id_1, diff --git a/tests/api_connexion/schemas/test_dag_schema.py b/tests/api_connexion/schemas/test_dag_schema.py index ca7e04ae89226..040ce41f1d066 100644 --- a/tests/api_connexion/schemas/test_dag_schema.py +++ b/tests/api_connexion/schemas/test_dag_schema.py @@ -15,11 +15,8 @@ # specific language governing permissions and limitations # under the License. -import unittest from datetime import datetime -from itsdangerous import URLSafeSerializer - from airflow import DAG from airflow.api_connexion.schemas.dag_schema import ( DAGCollection, @@ -27,174 +24,168 @@ DAGDetailSchema, DAGSchema, ) -from airflow.configuration import conf from airflow.models import DagModel, DagTag -SERIALIZER = URLSafeSerializer(conf.get('webserver', 'SECRET_KEY')) - -class TestDagSchema(unittest.TestCase): - def test_serialize(self): - dag_model = DagModel( - dag_id="test_dag_id", - root_dag_id="test_root_dag_id", - is_paused=True, - is_active=True, - is_subdag=False, - fileloc="/root/airflow/dags/my_dag.py", - owners="airflow1,airflow2", - description="The description", - schedule_interval="5 4 * * *", - tags=[DagTag(name="tag-1"), DagTag(name="tag-2")], - ) - serialized_dag = DAGSchema().dump(dag_model) +def test_serialize_test_dag_schema(url_safe_serializer): + dag_model = DagModel( + dag_id="test_dag_id", + root_dag_id="test_root_dag_id", + is_paused=True, + is_active=True, + is_subdag=False, + fileloc="/root/airflow/dags/my_dag.py", + owners="airflow1,airflow2", + description="The description", + schedule_interval="5 4 * * *", + tags=[DagTag(name="tag-1"), DagTag(name="tag-2")], + ) + serialized_dag = DAGSchema().dump(dag_model) - assert { - "dag_id": "test_dag_id", - "description": "The description", - "fileloc": "/root/airflow/dags/my_dag.py", - "file_token": SERIALIZER.dumps("/root/airflow/dags/my_dag.py"), - "is_paused": True, - "is_active": True, - "is_subdag": False, - "owners": ["airflow1", "airflow2"], - "root_dag_id": "test_root_dag_id", - "schedule_interval": {"__type": "CronExpression", "value": "5 4 * * *"}, - "tags": [{"name": "tag-1"}, {"name": "tag-2"}], - 'next_dagrun': None, - 'has_task_concurrency_limits': True, - 'next_dagrun_data_interval_start': None, - 'next_dagrun_data_interval_end': None, - 'max_active_runs': 16, - 'next_dagrun_create_after': None, - 'last_expired': None, - 'max_active_tasks': 16, - 'last_pickled': None, - 'default_view': None, - 'last_parsed_time': None, - 'scheduler_lock': None, - 'timetable_description': None, - 'has_import_errors': None, - 'pickle_id': None, - } == serialized_dag + assert { + "dag_id": "test_dag_id", + "description": "The description", + "fileloc": "/root/airflow/dags/my_dag.py", + "file_token": url_safe_serializer.dumps("/root/airflow/dags/my_dag.py"), + "is_paused": True, + "is_active": True, + "is_subdag": False, + "owners": ["airflow1", "airflow2"], + "root_dag_id": "test_root_dag_id", + "schedule_interval": {"__type": "CronExpression", "value": 
"5 4 * * *"}, + "tags": [{"name": "tag-1"}, {"name": "tag-2"}], + 'next_dagrun': None, + 'has_task_concurrency_limits': True, + 'next_dagrun_data_interval_start': None, + 'next_dagrun_data_interval_end': None, + 'max_active_runs': 16, + 'next_dagrun_create_after': None, + 'last_expired': None, + 'max_active_tasks': 16, + 'last_pickled': None, + 'default_view': None, + 'last_parsed_time': None, + 'scheduler_lock': None, + 'timetable_description': None, + 'has_import_errors': None, + 'pickle_id': None, + } == serialized_dag -class TestDAGCollectionSchema(unittest.TestCase): - def test_serialize(self): - dag_model_a = DagModel(dag_id="test_dag_id_a", fileloc="/tmp/a.py") - dag_model_b = DagModel(dag_id="test_dag_id_b", fileloc="/tmp/a.py") - schema = DAGCollectionSchema() - instance = DAGCollection(dags=[dag_model_a, dag_model_b], total_entries=2) - assert { - "dags": [ - { - "dag_id": "test_dag_id_a", - "description": None, - "fileloc": "/tmp/a.py", - "file_token": SERIALIZER.dumps("/tmp/a.py"), - "is_paused": None, - "is_subdag": None, - "is_active": None, - "owners": [], - "root_dag_id": None, - "schedule_interval": None, - "tags": [], - 'next_dagrun': None, - 'has_task_concurrency_limits': True, - 'next_dagrun_data_interval_start': None, - 'next_dagrun_data_interval_end': None, - 'max_active_runs': 16, - 'next_dagrun_create_after': None, - 'last_expired': None, - 'max_active_tasks': 16, - 'last_pickled': None, - 'default_view': None, - 'last_parsed_time': None, - 'scheduler_lock': None, - 'timetable_description': None, - 'has_import_errors': None, - 'pickle_id': None, - }, - { - "dag_id": "test_dag_id_b", - "description": None, - "fileloc": "/tmp/a.py", - "file_token": SERIALIZER.dumps("/tmp/a.py"), - "is_active": None, - "is_paused": None, - "is_subdag": None, - "owners": [], - "root_dag_id": None, - "schedule_interval": None, - "tags": [], - 'next_dagrun': None, - 'has_task_concurrency_limits': True, - 'next_dagrun_data_interval_start': None, - 'next_dagrun_data_interval_end': None, - 'max_active_runs': 16, - 'next_dagrun_create_after': None, - 'last_expired': None, - 'max_active_tasks': 16, - 'last_pickled': None, - 'default_view': None, - 'last_parsed_time': None, - 'scheduler_lock': None, - 'timetable_description': None, - 'has_import_errors': None, - 'pickle_id': None, - }, - ], - "total_entries": 2, - } == schema.dump(instance) +def test_serialize_test_dag_collection_schema(url_safe_serializer): + dag_model_a = DagModel(dag_id="test_dag_id_a", fileloc="/tmp/a.py") + dag_model_b = DagModel(dag_id="test_dag_id_b", fileloc="/tmp/a.py") + schema = DAGCollectionSchema() + instance = DAGCollection(dags=[dag_model_a, dag_model_b], total_entries=2) + assert { + "dags": [ + { + "dag_id": "test_dag_id_a", + "description": None, + "fileloc": "/tmp/a.py", + "file_token": url_safe_serializer.dumps("/tmp/a.py"), + "is_paused": None, + "is_subdag": None, + "is_active": None, + "owners": [], + "root_dag_id": None, + "schedule_interval": None, + "tags": [], + 'next_dagrun': None, + 'has_task_concurrency_limits': True, + 'next_dagrun_data_interval_start': None, + 'next_dagrun_data_interval_end': None, + 'max_active_runs': 16, + 'next_dagrun_create_after': None, + 'last_expired': None, + 'max_active_tasks': 16, + 'last_pickled': None, + 'default_view': None, + 'last_parsed_time': None, + 'scheduler_lock': None, + 'timetable_description': None, + 'has_import_errors': None, + 'pickle_id': None, + }, + { + "dag_id": "test_dag_id_b", + "description": None, + "fileloc": "/tmp/a.py", + "file_token": 
url_safe_serializer.dumps("/tmp/a.py"), + "is_active": None, + "is_paused": None, + "is_subdag": None, + "owners": [], + "root_dag_id": None, + "schedule_interval": None, + "tags": [], + 'next_dagrun': None, + 'has_task_concurrency_limits': True, + 'next_dagrun_data_interval_start': None, + 'next_dagrun_data_interval_end': None, + 'max_active_runs': 16, + 'next_dagrun_create_after': None, + 'last_expired': None, + 'max_active_tasks': 16, + 'last_pickled': None, + 'default_view': None, + 'last_parsed_time': None, + 'scheduler_lock': None, + 'timetable_description': None, + 'has_import_errors': None, + 'pickle_id': None, + }, + ], + "total_entries": 2, + } == schema.dump(instance) -class TestDAGDetailSchema: - def test_serialize(self): - dag = DAG( - dag_id="test_dag", - start_date=datetime(2020, 6, 19), - doc_md="docs", - orientation="LR", - default_view="duration", - params={"foo": 1}, - tags=['example1', 'example2'], - ) - schema = DAGDetailSchema() +def test_serialize_test_dag_detail_schema(url_safe_serializer): + dag = DAG( + dag_id="test_dag", + start_date=datetime(2020, 6, 19), + doc_md="docs", + orientation="LR", + default_view="duration", + params={"foo": 1}, + tags=['example1', 'example2'], + ) + schema = DAGDetailSchema() - expected = { - 'catchup': True, - 'concurrency': 16, - 'max_active_tasks': 16, - 'dag_id': 'test_dag', - 'dag_run_timeout': None, - 'default_view': 'duration', - 'description': None, - 'doc_md': 'docs', - 'fileloc': __file__, - "file_token": SERIALIZER.dumps(__file__), - "is_active": None, - 'is_paused': None, - 'is_subdag': False, - 'orientation': 'LR', - 'owners': [], - 'params': { - 'foo': { - '__class': 'airflow.models.param.Param', - 'value': 1, - 'description': None, - 'schema': {}, - } - }, - 'schedule_interval': {'__type': 'TimeDelta', 'days': 1, 'seconds': 0, 'microseconds': 0}, - 'start_date': '2020-06-19T00:00:00+00:00', - 'tags': [{'name': "example1"}, {'name': "example2"}], - 'timezone': "Timezone('UTC')", - 'max_active_runs': 16, - 'pickle_id': None, - "end_date": None, - 'is_paused_upon_creation': None, - 'render_template_as_native_obj': False, - } - obj = schema.dump(dag) - expected.update({'last_parsed': obj['last_parsed']}) - assert obj == expected + expected = { + 'catchup': True, + 'concurrency': 16, + 'max_active_tasks': 16, + 'dag_id': 'test_dag', + 'dag_run_timeout': None, + 'default_view': 'duration', + 'description': None, + 'doc_md': 'docs', + 'fileloc': __file__, + "file_token": url_safe_serializer.dumps(__file__), + "is_active": None, + 'is_paused': None, + 'is_subdag': False, + 'orientation': 'LR', + 'owners': [], + 'params': { + 'foo': { + '__class': 'airflow.models.param.Param', + 'value': 1, + 'description': None, + 'schema': {}, + } + }, + 'schedule_interval': {'__type': 'TimeDelta', 'days': 1, 'seconds': 0, 'microseconds': 0}, + 'start_date': '2020-06-19T00:00:00+00:00', + 'tags': [{'name': "example1"}, {'name': "example2"}], + 'timezone': "Timezone('UTC')", + 'max_active_runs': 16, + 'pickle_id': None, + "end_date": None, + 'is_paused_upon_creation': None, + 'render_template_as_native_obj': False, + } + obj = schema.dump(dag) + expected.update({'last_parsed': obj['last_parsed']}) + assert obj == expected diff --git a/tests/conftest.py b/tests/conftest.py index 468da514b0feb..68d318e13c50a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -26,6 +26,8 @@ # We should set these before loading _any_ of the rest of airflow so that the # unit test mode config is set as early as possible. 
+from itsdangerous import URLSafeSerializer + assert "airflow" not in sys.modules, "No airflow module can be imported before these lines" tests_directory = os.path.dirname(os.path.realpath(__file__)) @@ -55,6 +57,28 @@ def reset_environment(): os.environ[key] = init_env[key] +@pytest.fixture() +def secret_key() -> str: + """ + Return secret key configured. + :return: + """ + from airflow.configuration import conf + + the_key = conf.get('webserver', 'SECRET_KEY') + if the_key is None: + raise RuntimeError( + "The secret key SHOULD be configured as `[webserver] secret_key` in the " + "configuration/environment at this stage! " + ) + return the_key + + +@pytest.fixture() +def url_safe_serializer(secret_key) -> URLSafeSerializer: + return URLSafeSerializer(secret_key) + + @pytest.fixture() def reset_db(): """ diff --git a/tests/test_utils/remote_user_api_auth_backend.py b/tests/test_utils/remote_user_api_auth_backend.py index 1e6a0c70adf6d..187f57a7fd114 100644 --- a/tests/test_utils/remote_user_api_auth_backend.py +++ b/tests/test_utils/remote_user_api_auth_backend.py @@ -20,10 +20,12 @@ from functools import wraps from typing import Callable, Optional, Tuple, TypeVar, Union, cast -from flask import Response, current_app, request +from flask import Response, request from flask_login import login_user from requests.auth import AuthBase +from airflow.utils.airflow_flask_app import get_airflow_app + log = logging.getLogger(__name__) CLIENT_AUTH: Optional[Union[Tuple[str, str], AuthBase]] = None @@ -37,7 +39,7 @@ def init_app(_): def _lookup_user(user_email_or_username: str): - security_manager = current_app.appbuilder.sm + security_manager = get_airflow_app().appbuilder.sm user = security_manager.find_user(email=user_email_or_username) or security_manager.find_user( username=user_email_or_username ) diff --git a/tests/utils/test_serve_logs.py b/tests/utils/test_serve_logs.py index f8d38817592b8..ebbc663a2cb0d 100644 --- a/tests/utils/test_serve_logs.py +++ b/tests/utils/test_serve_logs.py @@ -21,7 +21,6 @@ import pytest from freezegun import freeze_time -from airflow.configuration import conf from airflow.utils.jwt_signer import JWTSigner from airflow.utils.serve_logs import create_app from tests.test_utils.config import conf_vars @@ -49,18 +48,18 @@ def sample_log(tmpdir): @pytest.fixture -def signer(): +def signer(secret_key): return JWTSigner( - secret_key=conf.get('webserver', 'secret_key'), + secret_key=secret_key, expiration_time_in_seconds=30, audience="task-instance-logs", ) @pytest.fixture -def different_audience(): +def different_audience(secret_key): return JWTSigner( - secret_key=conf.get('webserver', 'secret_key'), + secret_key=secret_key, expiration_time_in_seconds=30, audience="different-audience", ) @@ -180,7 +179,7 @@ def test_wrong_audience(self, client: "FlaskClient", different_audience): ) @pytest.mark.parametrize("claim_to_remove", ["iat", "exp", "nbf", "aud"]) - def test_missing_claims(self, claim_to_remove: str, client: "FlaskClient"): + def test_missing_claims(self, claim_to_remove: str, client: "FlaskClient", secret_key): jwt_dict = { "aud": "task-instance-logs", "iat": datetime.datetime.utcnow(), @@ -191,7 +190,7 @@ def test_missing_claims(self, claim_to_remove: str, client: "FlaskClient"): jwt_dict.update({"filename": 'sample.log'}) token = jwt.encode( jwt_dict, - conf.get('webserver', 'secret_key'), + secret_key, algorithm="HS512", ) assert ( diff --git a/tests/www/views/test_views.py b/tests/www/views/test_views.py index 887bd4898a0a6..fa79e145cba6c 100644 --- 
a/tests/www/views/test_views.py +++ b/tests/www/views/test_views.py @@ -375,52 +375,55 @@ def test_get_task_stats_from_query(): assert data == expected_data +INVALID_DATETIME_RESPONSE = "Invalid datetime: 'invalid'" + + @pytest.mark.parametrize( "url, content", [ ( '/rendered-templates?execution_date=invalid', - "Invalid datetime: 'invalid'", + INVALID_DATETIME_RESPONSE, ), ( '/log?execution_date=invalid', - "Invalid datetime: 'invalid'", + INVALID_DATETIME_RESPONSE, ), ( '/redirect_to_external_log?execution_date=invalid', - "Invalid datetime: 'invalid'", + INVALID_DATETIME_RESPONSE, ), ( '/task?execution_date=invalid', - "Invalid datetime: 'invalid'", + INVALID_DATETIME_RESPONSE, ), ( 'dags/example_bash_operator/graph?execution_date=invalid', - "Invalid datetime: 'invalid'", + INVALID_DATETIME_RESPONSE, ), ( 'dags/example_bash_operator/graph?execution_date=invalid', - "Invalid datetime: 'invalid'", + INVALID_DATETIME_RESPONSE, ), ( 'dags/example_bash_operator/duration?base_date=invalid', - "Invalid datetime: 'invalid'", + INVALID_DATETIME_RESPONSE, ), ( 'dags/example_bash_operator/tries?base_date=invalid', - "Invalid datetime: 'invalid'", + INVALID_DATETIME_RESPONSE, ), ( 'dags/example_bash_operator/landing-times?base_date=invalid', - "Invalid datetime: 'invalid'", + INVALID_DATETIME_RESPONSE, ), ( 'dags/example_bash_operator/gantt?execution_date=invalid', - "Invalid datetime: 'invalid'", + INVALID_DATETIME_RESPONSE, ), ( 'extra_links?execution_date=invalid', - "Invalid datetime: 'invalid'", + INVALID_DATETIME_RESPONSE, ), ], ) diff --git a/tests/www/views/test_views_decorators.py b/tests/www/views/test_views_decorators.py index 0e4fc12857a8f..1de80c1214a28 100644 --- a/tests/www/views/test_views_decorators.py +++ b/tests/www/views/test_views_decorators.py @@ -213,9 +213,9 @@ def test_action_has_dag_edit_access(create_task_instance, class_type, no_instanc else: test_items = tis if class_type == TaskInstance else [ti.get_dagrun() for ti in tis] test_items = test_items[0] if len(test_items) == 1 else test_items - - with app.create_app(testing=True).app_context(): - with mock.patch("airflow.www.views.current_app.appbuilder.sm.can_edit_dag") as mocked_can_edit: + application = app.create_app(testing=True) + with application.app_context(): + with mock.patch.object(application.appbuilder.sm, "can_edit_dag") as mocked_can_edit: mocked_can_edit.return_value = True assert not isinstance(test_items, list) or len(test_items) == no_instances assert some_view_action_which_requires_dag_edit_access(None, test_items) is True diff --git a/tests/www/views/test_views_log.py b/tests/www/views/test_views_log.py index f697cd3772c28..988d28593649c 100644 --- a/tests/www/views/test_views_log.py +++ b/tests/www/views/test_views_log.py @@ -464,7 +464,7 @@ def test_redirect_to_external_log_with_local_log_handler(log_admin_client, task_ ) response = log_admin_client.get(url) assert 302 == response.status_code - assert 'http://localhost/home' == response.headers['Location'] + assert '/home' == response.headers['Location'] class _ExternalHandler(ExternalLoggingMixin): diff --git a/tests/www/views/test_views_mount.py b/tests/www/views/test_views_mount.py index a9fb8746657df..3f504e9b0f168 100644 --- a/tests/www/views/test_views_mount.py +++ b/tests/www/views/test_views_mount.py @@ -36,7 +36,7 @@ def factory(): @pytest.fixture() def client(app): - return werkzeug.test.Client(app, werkzeug.wrappers.BaseResponse) + return werkzeug.test.Client(app, werkzeug.wrappers.response.Response) def test_mount(client): @@ -54,4 +54,4 
@@ def test_not_found(client): def test_index(client): resp = client.get('/test/') assert resp.status_code == 302 - assert resp.headers['Location'] == 'http://localhost/test/home' + assert resp.headers['Location'] == '/test/home' From 839d67da27250689dc4580649ed3693ed5cb2d0b Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Thu, 23 Jun 2022 01:06:23 +0200 Subject: [PATCH 021/118] Fix recording console for new rich-click 1.5 (#24611) (cherry picked from commit 8231add0a0d12a1a5f2de13de7e4eb5ac785f839) --- dev/breeze/src/airflow_breeze/utils/recording.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dev/breeze/src/airflow_breeze/utils/recording.py b/dev/breeze/src/airflow_breeze/utils/recording.py index 2fe9f5b5558de..41ca767a7f237 100644 --- a/dev/breeze/src/airflow_breeze/utils/recording.py +++ b/dev/breeze/src/airflow_breeze/utils/recording.py @@ -47,7 +47,9 @@ def save_ouput_as_svg(): class RecordingConsole(rich.console.Console): def __init__(self, **kwargs): - super().__init__(record=True, width=width_int, force_terminal=True, **kwargs) + kwargs["force_terminal"] = True + kwargs["width"] = width_int + super().__init__(record=True, **kwargs) global help_console help_console = self From 5c7b7993b8338b465e658adc18651675134d52e7 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Thu, 23 Jun 2022 02:37:40 +0200 Subject: [PATCH 022/118] Fix errors revealed on autoupgrade of breeze (#24612) Recent changes to Breeze cause it to fail in certain situations, especially at self-upgrade (which was generated by today's upgrade with rich-click). There were two problems: * docker volume inspect missed 'volume' and it caused sometimes failures in CI * inputimeout dependency was missing after recent update to pre-commit venvs (cherry picked from commit 0905e386f17e34d96f6ee575404c62b13242c75d) --- .pre-commit-config.yaml | 16 ++++++++-------- .../src/airflow_breeze/utils/path_utils.py | 4 +++- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index af0c2b0e1c84d..d91d2bf332218 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -778,7 +778,7 @@ repos: entry: ./scripts/ci/pre_commit/pre_commit_mypy.py files: ^dev/.*\.py$ require_serial: true - additional_dependencies: ['rich>=12.4.4'] + additional_dependencies: ['rich>=12.4.4', 'inputimeout'] - id: run-mypy name: Run mypy for core language: python @@ -786,14 +786,14 @@ repos: files: \.py$ exclude: ^provider_packages|^docs|^airflow/_vendor/|^airflow/providers|^airflow/migrations|^dev require_serial: true - additional_dependencies: ['rich>=12.4.4'] + additional_dependencies: ['rich>=12.4.4', 'inputimeout'] - id: run-mypy name: Run mypy for providers language: python entry: ./scripts/ci/pre_commit/pre_commit_mypy.py --namespace-packages files: ^airflow/providers/.*\.py$ require_serial: true - additional_dependencies: ['rich>=12.4.4'] + additional_dependencies: ['rich>=12.4.4', 'inputimeout'] - id: run-mypy name: Run mypy for /docs/ folder language: python @@ -801,7 +801,7 @@ repos: files: ^docs/.*\.py$ exclude: ^docs/rtd-deprecation require_serial: true - additional_dependencies: ['rich>=12.4.4'] + additional_dependencies: ['rich>=12.4.4', 'inputimeout'] - id: run-flake8 name: Run flake8 language: python @@ -809,7 +809,7 @@ repos: files: \.py$ pass_filenames: true exclude: ^airflow/_vendor/ - additional_dependencies: ['rich>=12.4.4'] + additional_dependencies: ['rich>=12.4.4', 'inputimeout'] - id: lint-javascript name: ESLint against airflow/ui language: 
python @@ -817,7 +817,7 @@ repos: files: ^airflow/ui/ entry: ./scripts/ci/pre_commit/pre_commit_ui_lint.py pass_filenames: false - additional_dependencies: ['rich>=12.4.4'] + additional_dependencies: ['rich>=12.4.4', 'inputimeout'] - id: lint-javascript name: ESLint against current UI JavaScript files language: python @@ -825,12 +825,12 @@ repos: files: ^airflow/www/static/js/ entry: ./scripts/ci/pre_commit/pre_commit_www_lint.py pass_filenames: false - additional_dependencies: ['rich>=12.4.4'] + additional_dependencies: ['rich>=12.4.4', 'inputimeout'] - id: update-migration-references name: Update migration ref doc language: python entry: ./scripts/ci/pre_commit/pre_commit_migration_reference.py pass_filenames: false files: ^airflow/migrations/versions/.*\.py$|^docs/apache-airflow/migrations-ref\.rst$ - additional_dependencies: ['rich>=12.4.4'] + additional_dependencies: ['rich>=12.4.4', 'inputimeout'] ## ONLY ADD PRE-COMMITS HERE THAT REQUIRE CI IMAGE diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py index e4e79d55202ae..c7ee6d980236f 100644 --- a/dev/breeze/src/airflow_breeze/utils/path_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py @@ -253,7 +253,9 @@ def find_airflow_sources_root_to_operate_on() -> Path: def create_volume_if_missing(volume_name: str): from airflow_breeze.utils.run_utils import run_command - res_inspect = run_command(cmd=["docker", "inspect", volume_name], stdout=subprocess.DEVNULL, check=False) + res_inspect = run_command( + cmd=["docker", "volume", "inspect", volume_name], stdout=subprocess.DEVNULL, check=False + ) if res_inspect.returncode != 0: run_command(cmd=["docker", "volume", "create", volume_name], check=True) From 4e43f976a3a007c9d167352b726191478d91edcd Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Thu, 23 Jun 2022 13:57:36 +0100 Subject: [PATCH 023/118] Fix usage of `click.get_terminal_size()` (#24616) We were ignoring mypy error instead of fixing it. 
click had removed `get_terminal_size` and recommend using `shutil.get_terminal_size` (cherry picked from commit 5f67cc0747ea661b703e4c44c77e7cd005cb9588) --- dev/send_email.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dev/send_email.py b/dev/send_email.py index 2d796eb80066b..1ec2199a9c730 100755 --- a/dev/send_email.py +++ b/dev/send_email.py @@ -20,6 +20,7 @@ # This tool is based on the Superset send_email script: # https://github.com/apache/incubator-superset/blob/master/RELEASING/send_email.py import os +import shutil import smtplib import ssl import sys @@ -83,7 +84,7 @@ def show_message(entity: str, message: str): """ Show message on the Command Line """ - width, _ = click.get_terminal_size() # type: ignore[attr-defined] + width, _ = shutil.get_terminal_size() click.secho("-" * width, fg="blue") click.secho(f"{entity} Message:", fg="bright_red", bold=True) click.secho("-" * width, fg="blue") From d72b745712f0809c0f4e794c13b83de5ac212ce4 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Thu, 23 Jun 2022 21:56:28 +0200 Subject: [PATCH 024/118] Upgrade FAB to 4.1.2 (#24619) (cherry picked from commit b8a4ac51f68a7d1c1f6ac5434cdf027e812553c7) --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index a32326cee29de..21d5912150a63 100644 --- a/setup.cfg +++ b/setup.cfg @@ -107,7 +107,7 @@ install_requires = # Every time we update FAB version here, please make sure that you review the classes and models in # `airflow/www/fab_security` with their upstream counterparts. In particular, make sure any breaking changes, # for example any new methods, are accounted for. - flask-appbuilder==4.1.1 + flask-appbuilder==4.1.2 flask-caching>=1.5.0 flask-login>=0.5 flask-session>=0.4.0 From 4e29345bf24f9aff240f89c2796bbe3645fd4a78 Mon Sep 17 00:00:00 2001 From: Ash Berlin-Taylor Date: Thu, 23 Jun 2022 20:58:07 +0100 Subject: [PATCH 025/118] We now need at least Flask-WTF 0.15 (#24621) We upgraded flask and werkzeug in #24399, and updated the constraints, but not everyone uses them (such as me in my local virtual environment when developing) so the min version in setup.cfg has to match as well (cherry picked from commit 030169dc3e5c17d8cade9c9a431afeee954abc3e) --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 21d5912150a63..323e21b234183 100644 --- a/setup.cfg +++ b/setup.cfg @@ -111,7 +111,7 @@ install_requires = flask-caching>=1.5.0 flask-login>=0.5 flask-session>=0.4.0 - flask-wtf>=0.14.3 + flask-wtf>=0.15 graphviz>=0.12 gunicorn>=20.1.0 httpx From 761b2d0f7f84d9d62355d9f211fe334f4994b900 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Fri, 24 Jun 2022 11:39:10 +0200 Subject: [PATCH 026/118] Limit azure-servicebus to not be used on ARM (#24635) Azure service bus uses uamqp which does not build for ARM architecture and we need to disable it as a dependency for ARM. (cherry picked from commit 477f907b347511a1b14053a418ba258db6c3ed99) --- images/breeze/output-commands-hash.txt | 5 +++++ setup.py | 12 +++++------- 2 files changed, 10 insertions(+), 7 deletions(-) create mode 100644 images/breeze/output-commands-hash.txt diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt new file mode 100644 index 0000000000000..1b791a7afbca8 --- /dev/null +++ b/images/breeze/output-commands-hash.txt @@ -0,0 +1,5 @@ + +# This file is automatically generated by pre-commit. 
If you have a conflict with this file +# Please do not solve it but run `breeze regenerate-command-images`. +# This command should fix the conflict and regenerate help images that you have conflict with. +518a158901cde1a5a889d37086eca87e diff --git a/setup.py b/setup.py index 16fcb73102544..20f3d004325c1 100644 --- a/setup.py +++ b/setup.py @@ -202,7 +202,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'mypy-boto3-redshift-data>=1.21.0', ] apache_beam = [ - 'apache-beam>=2.33.0', + 'apache-beam>=2.39.0', ] arangodb = ['python-arango>=7.3.2'] asana = ['asana>=0.10'] @@ -231,6 +231,8 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'azure-storage-blob>=12.7.0,<12.9.0', 'azure-storage-common>=2.1.0', 'azure-storage-file>=2.1.0', + # Limited due to https://github.com/Azure/azure-uamqp-python/issues/191 + 'azure-servicebus>=7.6.1; platform_machine != "aarch64"', ] cassandra = [ 'cassandra-driver>=3.13.0', @@ -488,9 +490,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'psycopg2-binary>=2.7.4', ] presto = [ - # The limit to Presto 0.8 for unknown reason - # TODO: Remove the limit - 'presto-python-client>=0.7.0,<0.8', + 'presto-python-client>=0.8.2', pandas_requirement, ] psrp = [ @@ -622,9 +622,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'jira', 'jsondiff', 'mongomock', - # Version 3.1.10 is breaking main bump to 3.1.11 when released. - # Fix already merged but was not released https://github.com/spulec/moto/pull/5165 - 'moto[glue]>=3.1.6, <3.1.10', + 'moto[cloudformation, glue]>=3.1.12', 'parameterized', 'paramiko', 'pipdeptree', From de17f27d30dcefff98e06648be8827a418a3edfd Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sat, 25 Jun 2022 00:02:57 +0200 Subject: [PATCH 027/118] Allows to specify different Python base image when building images (#24634) Today Python released new images that seems to break some of our dependencies (at least on M1/ARM). This PR adds a workaround possibility to add --python-image option to override the default, latest image with any other version released previously until we diagnose and fix the real issue. 
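For illustration, a minimal standalone sketch of the intended resolution logic, using a hypothetical `BuildParamsSketch` dataclass in place of the real build-parameters class: an explicit `--python-image` value wins, otherwise the base image is derived from the Python and Debian versions:

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class BuildParamsSketch:
        # Hypothetical, trimmed-down stand-in for the breeze build parameters (illustration only).
        python: str = "3.7"
        debian_version: str = "bullseye"
        python_image: Optional[str] = None  # would be set from --python-image / PYTHON_IMAGE

        @property
        def python_base_image(self) -> str:
            # An explicit override takes precedence; otherwise derive the default slim image.
            if self.python_image is not None:
                return self.python_image
            return f"python:{self.python}-slim-{self.debian_version}"

    print(BuildParamsSketch().python_base_image)
    # python:3.7-slim-bullseye
    print(BuildParamsSketch(python_image="python:3.7.12-slim-bullseye").python_base_image)
    # python:3.7.12-slim-bullseye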
(cherry picked from commit d6e6e7d2979e7a3c71d9b9a036630def0f9432f5) --- .../commands/ci_image_commands.py | 3 + .../commands/production_image_commands.py | 3 + .../params/common_build_params.py | 4 +- .../airflow_breeze/utils/common_options.py | 7 + images/breeze/output-build-image.svg | 288 +++++++------- images/breeze/output-build-prod-image.svg | 356 +++++++++--------- images/breeze/output-commands-hash.txt | 2 +- 7 files changed, 351 insertions(+), 312 deletions(-) diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py index ce38e8372468d..b9cfa2895767d 100644 --- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py @@ -56,6 +56,7 @@ option_pull_image, option_push_image, option_python, + option_python_image, option_python_versions, option_run_in_parallel, option_runtime_apt_command, @@ -127,6 +128,7 @@ "--install-providers-from-sources", "--airflow-constraints-mode", "--airflow-constraints-reference", + "--python-image", "--additional-python-deps", "--runtime-apt-deps", "--runtime-apt-command", @@ -253,6 +255,7 @@ def run_build_in_parallel( @option_dev_apt_command @option_dev_apt_deps @option_force_build +@option_python_image @option_runtime_apt_command @option_runtime_apt_deps @option_force_build diff --git a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py index 9ae3791e31264..c8dca4cae1cd2 100644 --- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py @@ -55,6 +55,7 @@ option_pull_image, option_push_image, option_python, + option_python_image, option_python_versions, option_run_in_parallel, option_runtime_apt_command, @@ -118,6 +119,7 @@ "--airflow-extras", "--airflow-constraints-mode", "--airflow-constraints-reference", + "--python-image", "--additional-python-deps", "--additional-extras", "--additional-runtime-apt-deps", @@ -301,6 +303,7 @@ def run_build_in_parallel( @option_additional_runtime_apt_command @option_dev_apt_command @option_dev_apt_deps +@option_python_image @option_runtime_apt_command @option_runtime_apt_deps @option_tag_as_latest diff --git a/dev/breeze/src/airflow_breeze/params/common_build_params.py b/dev/breeze/src/airflow_breeze/params/common_build_params.py index fd8a2b82e8c8c..68947186d68e8 100644 --- a/dev/breeze/src/airflow_breeze/params/common_build_params.py +++ b/dev/breeze/src/airflow_breeze/params/common_build_params.py @@ -58,6 +58,7 @@ class CommonBuildParams: install_providers_from_sources: bool = False platform: str = f"linux/{os.uname().machine}" prepare_buildx_cache: bool = False + python_image: Optional[str] = None push_image: bool = False python: str = "3.7" runtime_apt_command: str = "" @@ -110,7 +111,8 @@ def docker_cache_directive(self) -> List[str]: @property def python_base_image(self): """Construct Python Base Image""" - # ghcr.io/apache/airflow/main/python:3.8-slim-bullseye + if self.python_image is not None: + return self.python_image return f'python:{self.python}-slim-{self.debian_version}' @property diff --git a/dev/breeze/src/airflow_breeze/utils/common_options.py b/dev/breeze/src/airflow_breeze/utils/common_options.py index 292e74891dbfa..65f0040f89bfe 100644 --- a/dev/breeze/src/airflow_breeze/utils/common_options.py +++ b/dev/breeze/src/airflow_breeze/utils/common_options.py @@ -434,3 +434,10 @@ 
is_flag=True, envvar='PULL_IMAGE', ) + +option_python_image = click.option( + '--python-image', + help="If specified this is the base python image used to build the image. " + "Should be something like: python:VERSION-slim-bullseye", + envvar='PYTHON_IMAGE', +) diff --git a/images/breeze/output-build-image.svg b/images/breeze/output-build-image.svg index 389c973d0e273..082e97adefffa 100644 --- a/images/breeze/output-build-image.svg +++ b/images/breeze/output-build-image.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + + + + - Command: build-image + Command: build-image - + - - -Usage: breeze build-image [OPTIONS] - -Build CI image. Include building multiple images for all python versions (sequentially). - -╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.7< | 3.8 | 3.9 | 3.10)                                   -[default: 3.7]                                               ---upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. ---debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] ---image-tag-tTag of the image which is used to pull or run the image (implies                ---mount-sources=skip when using to run shell or tests)                          -(TEXT)                                                                          ---tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful -when you build or pull image with --image-tag.                                  ---docker-cache-cCache option for image used during the build.(registry | local | disabled) -[default: registry]                           ---force-buildForce image build no matter if it is determined as needed. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of Python versions. ---parallelismMaximum number of processes to use while running the operation in parallel.(INTEGER RANGE) -[default: 4; 1<=x<=8]                                                       ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced options (for power users) ─────────────────────────────────────────────────────────────────────────────────╮ ---install-providers-from-sourcesInstall providers from sources when installing. 
---airflow-constraints-modeMode of constraints for CI image building                               -(constraints-source-providers | constraints | constraints-no-providers) -[default: constraints-source-providers]                                 ---airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) ---additional-python-depsAdditional python dependencies to use when building the images.(TEXT) ---runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) ---runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) ---additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) ---additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) ---additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) ---additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) ---additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) ---additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) ---additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) ---dev-apt-depsApt dev dependencies to use when building the images.(TEXT) ---dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ ---github-tokenThe token used to authenticate to GitHub.(TEXT) ---github-usernameThe user name used to authenticate to GitHub.(TEXT) ---platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) ---push-imagePush image after building it. ---empty-imagePrepare empty image tagged with the same name as the Airflow image. ---prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  -image).                                                                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze build-image [OPTIONS] + +Build CI image. Include building multiple images for all python versions (sequentially). + +╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. +(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. 
+--debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] +--image-tag-tTag of the image which is used to pull or run the image (implies                +--mount-sources=skip when using to run shell or tests)                          +(TEXT)                                                                          +--tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful +when you build or pull image with --image-tag.                                  +--docker-cache-cCache option for image used during the build.(registry | local | disabled) +[default: registry]                           +--force-buildForce image build no matter if it is determined as needed. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of Python versions. +--parallelismMaximum number of processes to use while running the operation in parallel.(INTEGER RANGE) +[default: 4; 1<=x<=8]                                                       +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.7 3.8 3.9 3.10]                                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced options (for power users) ─────────────────────────────────────────────────────────────────────────────────╮ +--install-providers-from-sourcesInstall providers from sources when installing. +--airflow-constraints-modeMode of constraints for CI image building                               +(constraints-source-providers | constraints | constraints-no-providers) +[default: constraints-source-providers]                                 +--airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) +--python-imageIf specified this is the base python image used to build the image. 
Should be    +something like: python:VERSION-slim-bullseye                                     +(TEXT)                                                                           +--additional-python-depsAdditional python dependencies to use when building the images.(TEXT) +--runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) +--runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) +--additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) +--additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) +--additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) +--additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) +--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) +--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) +--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) +--dev-apt-depsApt dev dependencies to use when building the images.(TEXT) +--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ +--github-tokenThe token used to authenticate to GitHub.(TEXT) +--github-usernameThe user name used to authenticate to GitHub.(TEXT) +--platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) +--push-imagePush image after building it. +--empty-imagePrepare empty image tagged with the same name as the Airflow image. +--prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  +image).                                                                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-build-prod-image.svg b/images/breeze/output-build-prod-image.svg index 331cd3808ba34..0a18dfecae9a4 100644 --- a/images/breeze/output-build-prod-image.svg +++ b/images/breeze/output-build-prod-image.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + + + + - Command: build-prod-image + Command: build-prod-image - + - - -Usage: breeze build-prod-image [OPTIONS] - -Build Production image. 
Include building multiple images for all or selected Python versions sequentially. - -╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.7< | 3.8 | 3.9 | 3.10)                                   -[default: 3.7]                                               ---install-airflow-version-VInstall version of Airflow from PyPI.(TEXT) ---upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. ---debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] ---image-tag-tTag of the image which is used to pull or run the image (implies                ---mount-sources=skip when using to run shell or tests)                          -(TEXT)                                                                          ---tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful -when you build or pull image with --image-tag.                                  ---docker-cache-cCache option for image used during the build.(registry | local | disabled) -[default: registry]                           -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of Python versions. ---parallelismMaximum number of processes to use while running the operation in parallel.(INTEGER RANGE) -[default: 4; 1<=x<=8]                                                       ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options for customizing images ─────────────────────────────────────────────────────────────────────────────────────╮ ---install-providers-from-sourcesInstall providers from sources when installing. ---airflow-extrasExtras to install by default.                                                    
-(TEXT)                                                                           -[default:                                                                        -amazon,async,celery,cncf.kubernetes,dask,docker,elasticsearch,ftp,google,google… ---airflow-constraints-modeMode of constraints for PROD image building                             -(constraints | constraints-no-providers | constraints-source-providers) -[default: constraints]                                                  ---airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) ---additional-python-depsAdditional python dependencies to use when building the images.(TEXT) ---additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) ---additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) ---additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) ---additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) ---additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) ---additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) ---additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) ---runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) ---runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) ---dev-apt-depsApt dev dependencies to use when building the images.(TEXT) ---dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Customization options (for specific customization needs) ───────────────────────────────────────────────────────────╮ ---install-packages-from-contextInstall wheels from local docker-context-files when building image. ---airflow-is-in-contextIf set Airflow is installed from docker-context-files only rather than     -from PyPI or sources.                                                      ---cleanup-contextClean up docker context files before running build (cannot be used         -together with --install-packages-from-context).                            ---disable-mysql-client-installationDo not install MySQL client. ---disable-mssql-client-installationDo not install MsSQl client. ---disable-postgres-client-installationDo not install Postgres client. ---disable-airflow-repo-cacheDisable cache from Airflow repository during building. ---install-airflow-referenceInstall Airflow using GitHub tag or branch.(TEXT) ---installation-methodInstall Airflow from: sources or PyPI.(. | apache-airflow) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ ---github-tokenThe token used to authenticate to GitHub.(TEXT) ---github-usernameThe user name used to authenticate to GitHub.(TEXT) ---platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) ---push-imagePush image after building it. ---empty-imagePrepare empty image tagged with the same name as the Airflow image. ---prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  -image).            
                                                                        -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze build-prod-image [OPTIONS] + +Build Production image. Include building multiple images for all or selected Python versions sequentially. + +╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. +(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--install-airflow-version-VInstall version of Airflow from PyPI.(TEXT) +--upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. +--debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] +--image-tag-tTag of the image which is used to pull or run the image (implies                +--mount-sources=skip when using to run shell or tests)                          +(TEXT)                                                                          +--tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful +when you build or pull image with --image-tag.                                  +--docker-cache-cCache option for image used during the build.(registry | local | disabled) +[default: registry]                           +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of Python versions. +--parallelismMaximum number of processes to use while running the operation in parallel.(INTEGER RANGE) +[default: 4; 1<=x<=8]                                                       +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.7 3.8 3.9 3.10]                                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options for customizing images ─────────────────────────────────────────────────────────────────────────────────────╮ +--install-providers-from-sourcesInstall providers from sources when installing. +--airflow-extrasExtras to install by default.                                                    
+(TEXT)                                                                           +[default:                                                                        +amazon,async,celery,cncf.kubernetes,dask,docker,elasticsearch,ftp,google,google… +--airflow-constraints-modeMode of constraints for PROD image building                             +(constraints | constraints-no-providers | constraints-source-providers) +[default: constraints]                                                  +--airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) +--python-imageIf specified this is the base python image used to build the image. Should be    +something like: python:VERSION-slim-bullseye                                     +(TEXT)                                                                           +--additional-python-depsAdditional python dependencies to use when building the images.(TEXT) +--additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) +--additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) +--additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) +--additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) +--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) +--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) +--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) +--runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) +--runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) +--dev-apt-depsApt dev dependencies to use when building the images.(TEXT) +--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Customization options (for specific customization needs) ───────────────────────────────────────────────────────────╮ +--install-packages-from-contextInstall wheels from local docker-context-files when building image. +--airflow-is-in-contextIf set Airflow is installed from docker-context-files only rather than     +from PyPI or sources.                                                      +--cleanup-contextClean up docker context files before running build (cannot be used         +together with --install-packages-from-context).                            +--disable-mysql-client-installationDo not install MySQL client. +--disable-mssql-client-installationDo not install MsSQl client. +--disable-postgres-client-installationDo not install Postgres client. +--disable-airflow-repo-cacheDisable cache from Airflow repository during building. +--install-airflow-referenceInstall Airflow using GitHub tag or branch.(TEXT) +--installation-methodInstall Airflow from: sources or PyPI.(. 
| apache-airflow) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ +--github-tokenThe token used to authenticate to GitHub.(TEXT) +--github-usernameThe user name used to authenticate to GitHub.(TEXT) +--platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) +--push-imagePush image after building it. +--empty-imagePrepare empty image tagged with the same name as the Airflow image. +--prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  +image).                                                                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index 1b791a7afbca8..5ddc9626a325e 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -2,4 +2,4 @@ # This file is automatically generated by pre-commit. If you have a conflict with this file # Please do not solve it but run `breeze regenerate-command-images`. # This command should fix the conflict and regenerate help images that you have conflict with. -518a158901cde1a5a889d37086eca87e +b48267467e8d9a9a4af99df180177b43 From d52eeaf022033b3e8e38238342c466fd6d29999c Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sat, 25 Jun 2022 09:46:13 +0200 Subject: [PATCH 028/118] Convert selective checks to Breeze Python (#24610) Instead of bash-based, complex logic script to perform PR selective checks we now integrated the whole logic into Breeze Python code. It is now much simplified, when it comes to algorithm. We've implemented simple rule-based decision tree. The rules describing the decision tree are now are now much easier to reason about and they correspond one-to-one with the rules that are implemented in the code in rather straightforward way. The code is much simpler and diagnostics of the selective checks has also been vastly improved: * The rule engine displays status of applying each rule and explains (with yellow warning message what decision was made and why. Informative messages are printed showing the resulting output * List of files impacting the decision are also displayed * The names of "ci file group" and "test type" were aligned * Unit tests covering wide range of cases are added. Each test describes what is the case they demonstrate * `breeze selective-checks` command that is used in CI can also be used locally by just providing commit-ish reference of the commit to check. 
This way you can very easily debug problems and fix them Fixes: #19971 (cherry picked from commit d7bd72f494e7debec11672eeddf2e6ba5ef75fac) --- .github/workflows/build-images.yml | 20 +- .github/workflows/ci.yml | 18 +- .github/workflows/codeql-analysis.yml | 20 +- BREEZE.rst | 36 +- SELECTIVE_CHECKS.md | 144 ---- dev/breeze/README.md | 2 +- dev/breeze/SELECTIVE_CHECKS.md | 98 +++ dev/breeze/setup.cfg | 1 + .../airflow_breeze/commands/ci_commands.py | 237 ++++++ .../configuration_and_maintenance_commands.py | 64 +- .../commands/release_management_commands.py | 35 - .../commands/testing_commands.py | 4 +- .../airflow_breeze/configure_rich_click.py | 5 +- .../src/airflow_breeze/global_constants.py | 75 +- .../airflow_breeze/utils/selective_checks.py | 480 +++++++++++ dev/breeze/tests/test_selective_checks.py | 464 +++++++++++ images/breeze/output-commands-hash.txt | 2 +- images/breeze/output-commands.svg | 380 ++++----- images/breeze/output-selective-check.svg | 132 +++ images/breeze/output-tests.svg | 160 ++-- scripts/ci/selective_ci_checks.sh | 768 ------------------ .../ci_run_single_airflow_test_in_docker.sh | 6 +- 22 files changed, 1814 insertions(+), 1337 deletions(-) delete mode 100644 SELECTIVE_CHECKS.md create mode 100644 dev/breeze/SELECTIVE_CHECKS.md create mode 100644 dev/breeze/src/airflow_breeze/commands/ci_commands.py create mode 100644 dev/breeze/src/airflow_breeze/utils/selective_checks.py create mode 100644 dev/breeze/tests/test_selective_checks.py create mode 100644 images/breeze/output-selective-check.svg delete mode 100755 scripts/ci/selective_ci_checks.sh diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 541c72165997b..e28a9d7619c5a 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -109,18 +109,20 @@ jobs: with: persist-credentials: false submodules: recursive + - name: "Setup python" + uses: actions/setup-python@v2 + with: + # We do not have output from selective checks yet, so we need to hardcode python + python-version: 3.7 + cache: 'pip' + cache-dependency-path: ./dev/breeze/setup* + - run: ./scripts/ci/install_breeze.sh - name: Selective checks id: selective-checks env: - PR_LABELS: ${{ steps.get-latest-pr-labels.outputs.pullRequestLabels }} - run: | - if [[ ${GITHUB_EVENT_NAME} == "pull_request_target" ]]; then - # Run selective checks - ./scripts/ci/selective_ci_checks.sh "${TARGET_COMMIT_SHA}" - else - # Run all checks - ./scripts/ci/selective_ci_checks.sh - fi + PR_LABELS: "$${{ steps.get-latest-pr-labels.outputs.pullRequestLabels }}" + COMMIT_REF: "${{ github.sha }}" + run: breeze selective-check - name: Compute dynamic outputs id: dynamic-outputs run: | diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 57b396f6ace7b..3606036f5a37e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -192,18 +192,20 @@ jobs: fetch-depth: 2 persist-credentials: false if: github.event_name == 'pull_request' + - name: "Setup python" + uses: actions/setup-python@v2 + with: + # We do not have output from selective checks yet, so we need to hardcode python + python-version: 3.7 + cache: 'pip' + cache-dependency-path: ./dev/breeze/setup* + - run: ./scripts/ci/install_breeze.sh - name: Selective checks id: selective-checks env: PR_LABELS: "${{ steps.source-run-info.outputs.pullRequestLabels }}" - run: | - if [[ ${GITHUB_EVENT_NAME} == "pull_request" ]]; then - # Run selective checks - ./scripts/ci/selective_ci_checks.sh "${GITHUB_SHA}" - else - # Run all checks - 
./scripts/ci/selective_ci_checks.sh - fi + COMMIT_REF: "${{ github.sha }}" + run: breeze selective-check # Avoid having to specify the runs-on logic every time. We use the custom # env var AIRFLOW_SELF_HOSTED_RUNNER set only on our runners, but never # on the public runners diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 6d6f4d02562d5..4e6c7c83f4dc4 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -43,19 +43,19 @@ jobs: with: fetch-depth: 2 persist-credentials: false + - name: "Setup python" + uses: actions/setup-python@v2 + with: + # We do not have output from selective checks yet, so we need to hardcode python + python-version: 3.7 + cache: 'pip' + cache-dependency-path: ./dev/breeze/setup* + - run: ./scripts/ci/install_breeze.sh - name: Selective checks id: selective-checks env: - EVENT_NAME: ${{ github.event_name }} - TARGET_COMMIT_SHA: ${{ github.sha }} - run: | - if [[ ${EVENT_NAME} == "pull_request" ]]; then - # Run selective checks - ./scripts/ci/selective_ci_checks.sh "${TARGET_COMMIT_SHA}" - else - # Run all checks - ./scripts/ci/selective_ci_checks.sh - fi + COMMIT_REF: "${{ github.sha }}" + run: breeze selective-check analyze: name: Analyze diff --git a/BREEZE.rst b/BREEZE.rst index 36b10759d3444..78dfd74621458 100644 --- a/BREEZE.rst +++ b/BREEZE.rst @@ -556,13 +556,17 @@ Configuration and maintenance * Cleanup breeze with ``breeze cleanup`` command * Self-upgrade breeze with ``breeze self-upgrade`` command * Setup autocomplete for Breeze with ``breeze setup-autocomplete`` command -* Checking available resources for docker with ``breeze resource-check`` command -* Freeing space needed to run CI tests with ``breeze free-space`` command -* Fixing ownership of files in your repository with ``breeze fix-ownership`` command * Print Breeze version with ``breeze version`` command * Outputs hash of commands defined by ``breeze`` with ``command-hash-export`` (useful to avoid needless regeneration of Breeze images) +CI tasks +-------- +* Freeing space needed to run CI tests with ``breeze free-space`` command +* Fixing ownership of files in your repository with ``breeze fix-ownership`` command +* Checking available resources for docker with ``breeze resource-check`` command +* Deciding which tests should be run with ``breeze selective-check`` command + Release tasks ------------- @@ -1286,8 +1290,8 @@ command but it is very similar to current ``breeze`` command): -Resource check -============== +Running resource check +---------------------- Breeze requires certain resources to be available - disk, memory, CPU. When you enter Breeze's shell, the resources are checked and information if there is enough resources is displayed. However you can @@ -1301,7 +1305,7 @@ Those are all available flags of ``resource-check`` command: Freeing the space -================= +----------------- When our CI runs a job, it needs all memory and disk it can have. We have a Breeze command that frees the memory and disk space used. You can also use it clear space locally but it performs a few operations @@ -1314,8 +1318,26 @@ Those are all available flags of ``free-space`` command: :alt: Breeze free-space +Selective check +--------------- + +When our CI runs a job, it needs to decide which tests to run, whether to build images and how much the test +should be run on multiple combinations of Python, Kubernetes, Backend versions. In order to optimize time +needed to run the CI Builds. 
You can also use the tool to test what tests will be run when you provide +a specific commit that Breeze should run the tests on. + +More details about the algorithm used to pick the right tests can be +found in `Selective Checks `_. + +Those are all available flags of ``selective-check`` command: + +.. image:: ./images/breeze/output-selective-check.svg + :width: 100% + :alt: Breeze selective-check + + Tracking backtracking issues for CI builds -========================================== +------------------------------------------ When our CI runs a job, we automatically upgrade our dependencies in the ``main`` build. However, this might lead to conflicts and ``pip`` backtracking for a long time (possibly forever) for dependency resolution. diff --git a/SELECTIVE_CHECKS.md b/SELECTIVE_CHECKS.md deleted file mode 100644 index 3a92d9c817987..0000000000000 --- a/SELECTIVE_CHECKS.md +++ /dev/null @@ -1,144 +0,0 @@ - - -# Selective CI Checks - -In order to optimise our CI jobs, we've implemented optimisations to only run selected checks for some -kind of changes. The logic implemented reflects the internal architecture of Airflow 2.0 packages -and it helps to keep down both the usage of jobs in GitHub Actions as well as CI feedback time to -contributors in case of simpler changes. - -We have the following test types (separated by packages in which they are): - -* Always - those are tests that should be always executed (always folder) -* Core - for the core Airflow functionality (core folder) -* API - Tests for the Airflow API (api and api_connexion folders) -* CLI - Tests for the Airflow CLI (cli folder) -* WWW - Tests for the Airflow webserver (www folder) -* Providers - Tests for all Providers of Airflow (providers folder) -* Other - all other tests (all other folders that are not part of any of the above) - -We also have several special kinds of tests that are not separated by packages but they are marked with -pytest markers. They can be found in any of those packages and they can be selected by the appropriate -pytest custom command line options. See `TESTING.rst `_ for details but those are: - -* Integration - tests that require external integration images running in docker-compose -* Quarantined - tests that are flaky and need to be fixed -* Postgres - tests that require Postgres database. They are only run when backend is Postgres -* MySQL - tests that require MySQL database. They are only run when backend is MySQL - -Even if the types are separated, In case they share the same backend version/python version, they are -run sequentially in the same job, on the same CI machine. Each of them in a separate `docker run` command -and with additional docker cleaning between the steps to not fall into the trap of exceeding resource -usage in one big test run, but also not to increase the number of jobs per each Pull Request. - -The logic implemented for the changes works as follows: - -1) In case of direct push (so when PR gets merged) or scheduled run, we always run all tests and checks. - This is in order to make sure that the merge did not miss anything important. The remainder of the logic - is executed only in case of Pull Requests. We do not add providers tests in case DEFAULT_BRANCH is - different than main, because providers are only important in main branch and PRs to main branch. 
- -2) We retrieve which files have changed in the incoming Merge Commit (github.sha is a merge commit - automatically prepared by GitHub in case of Pull Request, so we can retrieve the list of changed - files from that commit directly). - -3) If any of the important, environment files changed (Dockerfile, ci scripts, setup.py, GitHub workflow - files), then we again run all tests and checks. Those are cases where the logic of the checks changed - or the environment for the checks changed so we want to make sure to check everything. We do not add - providers tests in case DEFAULT_BRANCH is different than main, because providers are only - important in main branch and PRs to main branch. - -4) If any of py files changed: we need to have CI image and run full static checks so we enable image building - -5) If any of docs changed: we need to have CI image so we enable image building - -6) If any of chart files changed, we need to run helm tests so we enable helm unit tests - -7) If any of API files changed, we need to run API tests so we enable them - -8) If any of the relevant source files that trigger the tests have changed at all. Those are airflow - sources, chart, tests and kubernetes_tests. If any of those files changed, we enable tests and we - enable image building, because the CI images are needed to run tests. - -9) Then we determine which types of the tests should be run. We count all the changed files in the - relevant airflow sources (airflow, chart, tests, kubernetes_tests) first and then we count how many - files changed in different packages: - - * in any case tests in `Always` folder are run. Those are special tests that should be run any time - modifications to any Python code occurs. Example test of this type is verifying proper structure of - the project including proper naming of all files. - * if any of the Airflow API files changed we enable `API` test type - * if any of the Airflow CLI files changed we enable `CLI` test type and Kubernetes tests (the - K8S tests depend on CLI changes as helm chart uses CLI to run Airflow). - * if this is a main branch and if any of the Provider files changed we enable `Providers` test type - * if any of the WWW files changed we enable `WWW` test type - * if any of the Kubernetes files changed we enable `Kubernetes` test type - * Then we subtract count of all the `specific` above per-type changed files from the count of - all changed files. In case there are any files changed, then we assume that some unknown files - changed (likely from the core of airflow) and in this case we enable all test types above and the - Core test types - simply because we do not want to risk to miss anything. - * In all cases where tests are enabled we also add Integration and - depending on - the backend used = Postgres or MySQL types of tests. - -10) Quarantined tests are always run when tests are run - we need to run them often to observe how - often they fail so that we can decide to move them out of quarantine. Details about the - Quarantined tests are described in `TESTING.rst `_ - -11) There is a special case of static checks. In case the above logic determines that the CI image - needs to be built, we run long and more comprehensive version of static checks - including - Mypy, Flake8. And those tests are run on all files, no matter how many files changed. 
- In case the image is not built, we run only simpler set of changes - the longer static checks - that require CI image are skipped, and we only run the tests on the files that changed in the incoming - commit - unlike flake8/mypy, those static checks are per-file based and they should not miss any - important change. - -Similarly to selective tests we also run selective security scans. In Pull requests, -the Python scan will only run when there is a python code change and JavaScript scan will only run if -there is a JavaScript or `yarn.lock` file change. For main builds, all scans are always executed. - -The selective check algorithm is shown here: - - -````mermaid -flowchart TD -A(PR arrives)-->B[Selective Check] -B-->C{Direct push merge?} -C-->|Yes| N[Enable images] -N-->D(Run Full Test
<br>+Quarantined<br>Run full static checks)
-C-->|No| E[Retrieve changed files]
-E-->F{Environment files changed?}
-F-->|Yes| N
-F-->|No| G{Docs changed}
-G-->|Yes| O[Enable images building]
-O-->I{Chart files changed?}
-G-->|No| I
-I-->|Yes| P[Enable helm tests]
-P-->J{API files changed}
-I-->|No| J
-J-->|Yes| Q[Enable API tests]
-Q-->H{Sources changed?}
-J-->|No| H
-H-->|Yes| R[Enable Pytests]
-R-->K[Determine test type]
-K-->S{Core files changed}
-S-->|Yes| N
-S-->|No| M(Run selected test+<br>Integration, Quarantined<br>Full static checks)
-H-->|No| L[Skip running test<br>
Run subset of static checks] -``` diff --git a/dev/breeze/README.md b/dev/breeze/README.md index 14a9f089a4834..380eb402b83f4 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -52,6 +52,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. --------------------------------------------------------------------------------------------------------- -Package config hash: a80a853b2c32c284a68ccd6d468804b892a69f14d2ad1886bdaa892755cf6262660e2b9fc582bcae27ae478910055267a76edea2df658196198a0365150e93e5 +Package config hash: 7279229e03b197f2bbd10ebb7b313f67bba3a704735d3688652efc5bdc1b3a60f2d1e0a144c89a2ecd11268b06888c5302a8774a8f392dc383bb940c99521db3 --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/SELECTIVE_CHECKS.md b/dev/breeze/SELECTIVE_CHECKS.md new file mode 100644 index 0000000000000..4504f8d9cd9cd --- /dev/null +++ b/dev/breeze/SELECTIVE_CHECKS.md @@ -0,0 +1,98 @@ + + + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Selective CI Checks](#selective-ci-checks) + +# Selective CI Checks + +In order to optimise our CI jobs, we've implemented optimisations to only run selected checks for some +kind of changes. The logic implemented reflects the internal architecture of Airflow 2.0 packages, +and it helps to keep down both the usage of jobs in GitHub Actions and CI feedback time to +contributors in case of simpler changes. + +We have the following Groups of files for CI that determine which tests are run: + +* `Environment files` - if any of those changes, that forces 'run everything' mode, because changes there might + simply change the whole environment of what is going on in CI (Container image, dependencies) +* `Python and Javascript production files` - this area is useful in CodeQL Security scanning - if any of + the python or javascript files for airflow "production" changed, this means that the security scans should run +* `API tests and codegen files` - those are OpenAPI definition files that impact Open API specification and + determine that we should run dedicated API tests. +* `Helm files` - change in those files impacts helm "rendering" tests +* `Setup files` - change in the setup files indicates that we should run `upgrade to newer dependencies` +* `DOCs files` - change in those files indicate that we should run documentation builds +* `UI and WWW files` - those are files for the UI and WWW part of our UI (useful to determine if UI + tests should run) +* `Kubernetes files` - determine if any of Kubernetes related tests should be run +* `All Python files` - if none of the Python file changed, that indicates that we should not run unit tests +* `All source files` - if none of the sources change, that indicates that we should probably not build + an image and run any image-based static checks + +We have the following unit test types that can be selectively disabled/enabled based on the +content of the incoming PR: + +* Always - those are tests that should be always executed (always folder) +* Core - for the core Airflow functionality (core folder) +* API - Tests for the Airflow API (api and api_connexion folders) +* CLI - Tests for the Airflow CLI (cli folder) +* WWW - Tests for the Airflow webserver (www folder) +* Providers - Tests for all Providers of Airflow (providers folder) + +We also have several special kinds of tests that are not separated by packages, but they are marked with +pytest markers. 
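To make the group definitions above more concrete, here is a small, self-contained sketch (not the Breeze implementation itself, which lives in `dev/breeze/src/airflow_breeze/utils/selective_checks.py` later in this patch) of how a changed file is matched against such regexp-based groups:

```python
# Simplified stand-in for the regexp-based file grouping used by selective checks.
# The patterns below are copied from CI_FILE_GROUP_MATCHES in this patch; the helper
# itself is only a sketch, not the real implementation.
import re

FILE_GROUPS = {
    "api_test_files": [r"^airflow/api"],
    "www_files": [r"^airflow/www/.*\.js[x]?$", r"^airflow/www/[^/]+\.json$"],
}


def matched_groups(changed_file: str) -> list:
    """Return the names of all groups whose patterns match the changed file."""
    return [
        group
        for group, patterns in FILE_GROUPS.items()
        if any(re.match(pattern, changed_file) for pattern in patterns)
    ]


print(matched_groups("airflow/api/common/mark_tasks.py"))  # ['api_test_files']
print(matched_groups("airflow/www/static/js/graph.jsx"))   # ['www_files']
```

The Integration, Quarantined, Postgres and MySQL kinds of tests remain pytest-marker based rather than path based.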
They can be found in any of those packages and they can be selected by the appropriate +pytest custom command line options. See `TESTING.rst `_ for details but those are: + +* Integration - tests that require external integration images running in docker-compose +* Quarantined - tests that are flaky and need to be fixed +* Postgres - tests that require Postgres database. They are only run when backend is Postgres +* MySQL - tests that require MySQL database. They are only run when backend is MySQL + +Even if the types are separated, In case they share the same backend version/python version, they are +run sequentially in the same job, on the same CI machine. Each of them in a separate `docker run` command +and with additional docker cleaning between the steps to not fall into the trap of exceeding resource +usage in one big test run, but also not to increase the number of jobs per each Pull Request. + +The logic implements the following rules: + +* `Full tests` mode is enabled when the event is PUSH, or SCHEDULE or when "full tests needed" label is set. + That enables all matrix combinations of variables, and all possible tests +* Python, Kubernetes, Backend, Kind, Helm versions are limited to "defaults" only unless `Full tests` mode + is enabled. +* If "Commit" to work on cannot be determined, or `Full Test` mode is enabled or some of the important + environment files (setup.py, setup.cfg, Dockerfile, build scripts) changed - all unit tests are + executed - this is `run everything` mode. No further checks are performed. +* `Python scans`, `Javascript scans`, `API tests/codegen`, `UI`, `WWW`, `Kubernetes` tests and `DOC builds` + are enabled if any of the relevant files have been changed. +* `Helm` tests are run only if relevant files have been changed and if current branch is `main`. +* If no Source files are changed - no tests are run and no further rules below are checked. +* `Image building` is enabled if either test are run, docs are build or kubernetes tests are run. All those + need `CI` or `PROD` images to be built. +* The specific unit test type is enabled only if changed files match the expected patterns for each type + (`API`, `CLI`, `WWW`, `Providers`). The `Always` test type is added always if any unit tests are run. + `Providers` tests are removed if current branch is different than `main` +* If there are no files left in sources after matching the test types and Kubernetes files, + then apparently some Core/Other files have been changed. This automatically adds all test + types to execute. This is done because changes in core might impact all the other test types. +* if `Image building` is disabled, only basic pre-commits are enabled - no 'image-depending` pre-commits + are enabled. +* If there are some setup files changed, `upgrade to newer dependencies` is enabled. diff --git a/dev/breeze/setup.cfg b/dev/breeze/setup.cfg index c974560561053..7db0782695ebd 100644 --- a/dev/breeze/setup.cfg +++ b/dev/breeze/setup.cfg @@ -53,6 +53,7 @@ package_dir= =src packages = find: install_requires = + cached_property>=1.5.0;python_version<="3.7" click inputimeout importlib-metadata>=4.4; python_version < "3.8" diff --git a/dev/breeze/src/airflow_breeze/commands/ci_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_commands.py new file mode 100644 index 0000000000000..c8260698d71aa --- /dev/null +++ b/dev/breeze/src/airflow_breeze/commands/ci_commands.py @@ -0,0 +1,237 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import sys +from typing import Optional, Tuple + +import click + +from airflow_breeze.commands.main_command import main +from airflow_breeze.global_constants import ( + DEFAULT_PYTHON_MAJOR_MINOR_VERSION, + MOUNT_ALL, + GithubEvents, + github_events, +) +from airflow_breeze.params.shell_params import ShellParams +from airflow_breeze.utils.common_options import ( + option_airflow_constraints_reference, + option_answer, + option_dry_run, + option_github_repository, + option_max_age, + option_python, + option_timezone, + option_updated_on_or_after, + option_verbose, +) +from airflow_breeze.utils.confirm import Answer, user_confirm +from airflow_breeze.utils.console import get_console +from airflow_breeze.utils.custom_param_types import BetterChoice +from airflow_breeze.utils.docker_command_utils import ( + check_docker_resources, + get_env_variables_for_docker_commands, + get_extra_docker_flags, + perform_environment_checks, +) +from airflow_breeze.utils.find_newer_dependencies import find_newer_dependencies +from airflow_breeze.utils.image import find_available_ci_image +from airflow_breeze.utils.run_utils import run_command + +CI_COMMANDS = { + "name": "CI commands", + "commands": [ + "fix-ownership", + "free-space", + "resource-check", + "selective-check", + "find-newer-dependencies", + ], +} + +CI_PARAMETERS = { + "breeze selective-check": [ + { + "name": "Selective check flags", + "options": [ + "--commit-ref", + "--pr-labels", + "--default-branch", + "--github-event-name", + ], + } + ], + "breeze find-newer-dependencies": [ + { + "name": "Find newer dependencies flags", + "options": [ + "--python", + "--timezone", + "--constraints-branch", + "--updated-on-or-after", + "--max-age", + ], + } + ], +} + + +@main.command(name="free-space", help="Free space for jobs run in CI.") +@option_verbose +@option_dry_run +@option_answer +def free_space(verbose: bool, dry_run: bool, answer: str): + if user_confirm("Are you sure to run free-space and perform cleanup?") == Answer.YES: + run_command(["sudo", "swapoff", "-a"], verbose=verbose, dry_run=dry_run) + run_command(["sudo", "rm", "-f", "/swapfile"], verbose=verbose, dry_run=dry_run) + run_command(["sudo", "apt-get", "clean"], verbose=verbose, dry_run=dry_run, check=False) + run_command( + ["docker", "system", "prune", "--all", "--force", "--volumes"], verbose=verbose, dry_run=dry_run + ) + run_command(["df", "-h"], verbose=verbose, dry_run=dry_run) + run_command(["docker", "logout", "ghcr.io"], verbose=verbose, dry_run=dry_run, check=False) + + +@main.command(name="resource-check", help="Check if available docker resources are enough.") +@option_verbose +@option_dry_run +def resource_check(verbose: bool, dry_run: bool): + perform_environment_checks(verbose=verbose) + shell_params = ShellParams(verbose=verbose, python=DEFAULT_PYTHON_MAJOR_MINOR_VERSION) + 
check_docker_resources(shell_params.airflow_image_name, verbose=verbose, dry_run=dry_run) + + +@main.command(name="fix-ownership", help="Fix ownership of source files to be same as host user.") +@option_github_repository +@option_verbose +@option_dry_run +def fix_ownership(github_repository: str, verbose: bool, dry_run: bool): + perform_environment_checks(verbose=verbose) + shell_params = find_available_ci_image(github_repository, dry_run, verbose) + extra_docker_flags = get_extra_docker_flags(MOUNT_ALL) + env = get_env_variables_for_docker_commands(shell_params) + cmd = [ + "docker", + "run", + "-t", + *extra_docker_flags, + "--pull", + "never", + shell_params.airflow_image_name_with_tag, + "/opt/airflow/scripts/in_container/run_fix_ownership.sh", + ] + run_command( + cmd, verbose=verbose, dry_run=dry_run, text=True, env=env, check=False, enabled_output_group=True + ) + # Always succeed + sys.exit(0) + + +def get_changed_files(commit_ref: Optional[str], dry_run: bool, verbose: bool) -> Tuple[str, ...]: + if commit_ref is None: + return () + cmd = [ + "git", + "diff-tree", + "--no-commit-id", + "--name-only", + "-r", + commit_ref + "^", + commit_ref, + ] + result = run_command(cmd, dry_run=dry_run, verbose=verbose, check=False, capture_output=True, text=True) + if result.returncode != 0: + get_console().print( + f"[warning] Error when running diff-tree command [/]\n{result.stdout}\n{result.stderr}" + ) + return () + changed_files = tuple(result.stdout.splitlines()) if result.stdout else () + get_console().print("\n[info]Changed files:[/]\n") + get_console().print(changed_files) + get_console().print() + return changed_files + + +@main.command(name="selective-check", help="Checks what kind of tests should be run for an incoming commit.") +@click.option( + '--commit-ref', + help="Commit-ish reference to the commit that should be checked", + envvar='COMMIT_REF', +) +@click.option( + '--pr-labels', + help="Space-separate list of labels which are valid for the PR", + default="", + envvar="PR_LABELS", +) +@click.option( + '--default-branch', + help="Branch against which the PR should be run", + default="main", + envvar="DEFAULT_BRANCH", + show_default=True, +) +@click.option( + '--github-event-name', + type=BetterChoice(github_events()), + default=github_events()[0], + help="Name of the GitHub event that triggered the check", + envvar="GITHUB_EVENT_NAME", + show_default=True, +) +@option_verbose +@option_dry_run +def selective_check( + commit_ref: Optional[str], + pr_labels: str, + default_branch: str, + github_event_name: str, + verbose: bool, + dry_run: bool, +): + from airflow_breeze.utils.selective_checks import SelectiveChecks + + github_event = GithubEvents(github_event_name) + if github_event == GithubEvents.PULL_REQUEST: + changed_files = get_changed_files(commit_ref=commit_ref, dry_run=dry_run, verbose=verbose) + else: + changed_files = () + sc = SelectiveChecks( + commit_ref=commit_ref, + files=changed_files, + default_branch=default_branch, + pr_labels=tuple(" ".split(pr_labels)) if pr_labels else (), + github_event=github_event, + ) + print(str(sc)) + + +@main.command(name="find-newer-dependencies", help="Finds which dependencies are being upgraded.") +@option_timezone +@option_airflow_constraints_reference +@option_python +@option_updated_on_or_after +@option_max_age +def breeze_find_newer_dependencies( + airflow_constraints_reference: str, python: str, timezone: str, updated_on_or_after: str, max_age: int +): + return find_newer_dependencies( + 
constraints_branch=airflow_constraints_reference, + python=python, + timezone=timezone, + updated_on_or_after=updated_on_or_after, + max_age=max_age, + ) diff --git a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py index 490a43c5f52a2..546f52ba5acdd 100644 --- a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py @@ -28,8 +28,6 @@ from airflow_breeze import NAME, VERSION from airflow_breeze.commands.main_command import main -from airflow_breeze.global_constants import DEFAULT_PYTHON_MAJOR_MINOR_VERSION, MOUNT_ALL -from airflow_breeze.params.shell_params import ShellParams from airflow_breeze.utils.cache import check_if_cache_exists, delete_cache, touch_cache_file from airflow_breeze.utils.common_options import ( option_answer, @@ -44,13 +42,7 @@ ) from airflow_breeze.utils.confirm import STANDARD_TIMEOUT, Answer, user_confirm from airflow_breeze.utils.console import get_console -from airflow_breeze.utils.docker_command_utils import ( - check_docker_resources, - get_env_variables_for_docker_commands, - get_extra_docker_flags, - perform_environment_checks, -) -from airflow_breeze.utils.image import find_available_ci_image +from airflow_breeze.utils.docker_command_utils import perform_environment_checks from airflow_breeze.utils.path_utils import ( AIRFLOW_SOURCES_ROOT, BUILD_CACHE_DIR, @@ -72,9 +64,6 @@ "self-upgrade", "setup-autocomplete", "config", - "resource-check", - "free-space", - "fix-ownership", "regenerate-command-images", "command-hash-export", "version", @@ -395,31 +384,6 @@ def get_status(file: str): get_console().print() -@main.command(name="free-space", help="Free space for jobs run in CI.") -@option_verbose -@option_dry_run -@option_answer -def free_space(verbose: bool, dry_run: bool, answer: str): - if user_confirm("Are you sure to run free-space and perform cleanup?") == Answer.YES: - run_command(["sudo", "swapoff", "-a"], verbose=verbose, dry_run=dry_run) - run_command(["sudo", "rm", "-f", "/swapfile"], verbose=verbose, dry_run=dry_run) - run_command(["sudo", "apt-get", "clean"], verbose=verbose, dry_run=dry_run, check=False) - run_command( - ["docker", "system", "prune", "--all", "--force", "--volumes"], verbose=verbose, dry_run=dry_run - ) - run_command(["df", "-h"], verbose=verbose, dry_run=dry_run) - run_command(["docker", "logout", "ghcr.io"], verbose=verbose, dry_run=dry_run, check=False) - - -@main.command(name="resource-check", help="Check if available docker resources are enough.") -@option_verbose -@option_dry_run -def resource_check(verbose: bool, dry_run: bool): - perform_environment_checks(verbose=verbose) - shell_params = ShellParams(verbose=verbose, python=DEFAULT_PYTHON_MAJOR_MINOR_VERSION) - check_docker_resources(shell_params.airflow_image_name, verbose=verbose, dry_run=dry_run) - - def dict_hash(dictionary: Dict[str, Any]) -> str: """MD5 hash of a dictionary. 
Sorted and dumped via json to account for random sequence)""" dhash = hashlib.md5() @@ -443,32 +407,6 @@ def command_hash_export(verbose: bool, output: IO): output.write(dict_hash(the_context_dict) + "\n") -@main.command(name="fix-ownership", help="Fix ownership of source files to be same as host user.") -@option_github_repository -@option_verbose -@option_dry_run -def fix_ownership(github_repository: str, verbose: bool, dry_run: bool): - perform_environment_checks(verbose=verbose) - shell_params = find_available_ci_image(github_repository, dry_run, verbose) - extra_docker_flags = get_extra_docker_flags(MOUNT_ALL) - env = get_env_variables_for_docker_commands(shell_params) - cmd = [ - "docker", - "run", - "-t", - *extra_docker_flags, - "--pull", - "never", - shell_params.airflow_image_name_with_tag, - "/opt/airflow/scripts/in_container/run_fix_ownership.sh", - ] - run_command( - cmd, verbose=verbose, dry_run=dry_run, text=True, env=env, check=False, enabled_output_group=True - ) - # Always succeed - sys.exit(0) - - def write_to_shell(command_to_execute: str, dry_run: bool, script_path: str, force_setup: bool) -> bool: skip_check = False script_path_file = Path(script_path) diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index caf43d47a409b..17bf70b3af9d1 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -45,14 +45,11 @@ option_github_repository, option_image_tag, option_installation_package_format, - option_max_age, option_package_format, option_parallelism, option_python, option_python_versions, option_run_in_parallel, - option_timezone, - option_updated_on_or_after, option_use_airflow_version, option_use_packages_from_dist, option_verbose, @@ -66,7 +63,6 @@ get_extra_docker_flags, perform_environment_checks, ) -from airflow_breeze.utils.find_newer_dependencies import find_newer_dependencies from airflow_breeze.utils.parallel import check_async_run_results from airflow_breeze.utils.python_versions import get_python_version_list from airflow_breeze.utils.run_utils import RunCommandResult, run_command @@ -140,18 +136,6 @@ ], } ], - "breeze find-newer-dependencies": [ - { - "name": "Find newer dependencies flags", - "options": [ - "--python", - "--timezone", - "--constraints-branch", - "--updated-on-or-after", - "--max-age", - ], - } - ], } RELEASE_MANAGEMENT_COMMANDS = { @@ -163,7 +147,6 @@ "prepare-airflow-package", "release-prod-images", "generate-constraints", - "find-newer-dependencies", ], } @@ -767,21 +750,3 @@ def release_prod_images( verbose=verbose, dry_run=dry_run, ) - - -@main.command(name="find-newer-dependencies", help="Finds which dependencies are being upgraded.") -@option_timezone -@option_airflow_constraints_reference -@option_python -@option_updated_on_or_after -@option_max_age -def breeze_find_newer_dependencies( - airflow_constraints_reference: str, python: str, timezone: str, updated_on_or_after: str, max_age: int -): - return find_newer_dependencies( - constraints_branch=airflow_constraints_reference, - python=python, - timezone=timezone, - updated_on_or_after=updated_on_or_after, - max_age=max_age, - ) diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py index 3946acd2ab3a9..45df368afa89c 100644 --- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py +++ 
b/dev/breeze/src/airflow_breeze/commands/testing_commands.py @@ -28,7 +28,7 @@ import click from airflow_breeze.commands.main_command import main -from airflow_breeze.global_constants import ALLOWED_TEST_TYPES +from airflow_breeze.global_constants import ALLOWED_TEST_TYPE_CHOICES from airflow_breeze.params.build_prod_params import BuildProdParams from airflow_breeze.params.shell_params import ShellParams from airflow_breeze.utils.ci_group import ci_group @@ -249,7 +249,7 @@ def run_with_progress( "--test-type", help="Type of test to run.", default="All", - type=BetterChoice(ALLOWED_TEST_TYPES), + type=BetterChoice(ALLOWED_TEST_TYPE_CHOICES), ) @option_db_reset @click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED) diff --git a/dev/breeze/src/airflow_breeze/configure_rich_click.py b/dev/breeze/src/airflow_breeze/configure_rich_click.py index bc1963684a8a4..8933d57dbeea3 100644 --- a/dev/breeze/src/airflow_breeze/configure_rich_click.py +++ b/dev/breeze/src/airflow_breeze/configure_rich_click.py @@ -14,14 +14,13 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. - - from airflow_breeze.utils import recording # isort:skip # noqa try: # We handle ImportError so that click autocomplete works import rich_click as click + from airflow_breeze.commands.ci_commands import CI_COMMANDS, CI_PARAMETERS from airflow_breeze.commands.ci_image_commands import CI_IMAGE_TOOLS_COMMANDS, CI_IMAGE_TOOLS_PARAMETERS from airflow_breeze.commands.configuration_and_maintenance_commands import ( CONFIGURATION_AND_MAINTENANCE_COMMANDS, @@ -52,6 +51,7 @@ **CONFIGURATION_AND_MAINTENANCE_PARAMETERS, **CI_IMAGE_TOOLS_PARAMETERS, **PRODUCTION_IMAGE_TOOLS_PARAMETERS, + **CI_PARAMETERS, **RELEASE_MANAGEMENT_PARAMETERS, } click.rich_click.COMMAND_GROUPS = { @@ -61,6 +61,7 @@ CONFIGURATION_AND_MAINTENANCE_COMMANDS, CI_IMAGE_TOOLS_COMMANDS, PRODUCTION_IMAGE_TOOLS_COMMANDS, + CI_COMMANDS, RELEASE_MANAGEMENT_COMMANDS, ] } diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index 0b8b220987c6c..41acd12e458de 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -17,8 +17,11 @@ """ Global constants that are used by all other Breeze components. 
""" +from __future__ import annotations + import os -from typing import List +from enum import Enum +from functools import lru_cache from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT @@ -67,21 +70,36 @@ ALLOWED_POSTGRES_VERSIONS = ['10', '11', '12', '13', '14'] ALLOWED_MYSQL_VERSIONS = ['5.7', '8'] ALLOWED_MSSQL_VERSIONS = ['2017-latest', '2019-latest'] -ALLOWED_TEST_TYPES = [ - 'All', - 'Always', - 'Core', - 'Providers', - 'API', - 'CLI', - 'Integration', - 'Other', - 'WWW', - 'Postgres', - 'MySQL', - 'Helm', - 'Quarantined', + + +@lru_cache(maxsize=None) +def all_selective_test_types() -> tuple[str, ...]: + return tuple(sorted(e.value for e in SelectiveUnitTestTypes)) + + +class SelectiveUnitTestTypes(Enum): + ALWAYS = 'Always' + API = 'API' + CLI = 'CLI' + CORE = 'Core' + OTHER = 'Other' + INTEGRATION = 'Integration' + PROVIDERS = 'Providers' + WWW = 'WWW' + + +ALLOWED_TEST_TYPE_CHOICES = [ + "All", + "Always", + *all_selective_test_types(), + "Helm", + "Postgres", + "MySQL", + "Integration", + "Other", + "Quarantine", ] + ALLOWED_PACKAGE_FORMATS = ['wheel', 'sdist', 'both'] ALLOWED_INSTALLATION_PACKAGE_FORMATS = ['wheel', 'sdist'] ALLOWED_INSTALLATION_METHODS = ['.', 'apache-airflow'] @@ -113,7 +131,7 @@ ] -def get_available_packages(short_version=False) -> List[str]: +def get_available_packages(short_version=False) -> list[str]: docs_path_content = (AIRFLOW_SOURCES_ROOT / 'docs').glob('*/') available_packages = [x.name for x in docs_path_content if x.is_dir()] package_list = list(set(available_packages) - set(EXCLUDE_DOCS_PACKAGE_FOLDER)) @@ -145,8 +163,12 @@ def get_available_packages(short_version=False) -> List[str]: ALL_PYTHON_MAJOR_MINOR_VERSIONS = ['3.7', '3.8', '3.9', '3.10'] CURRENT_PYTHON_MAJOR_MINOR_VERSIONS = ['3.7', '3.8', '3.9', '3.10'] CURRENT_POSTGRES_VERSIONS = ['10', '11', '12', '13', '14'] +DEFAULT_POSTGRES_VERSION = CURRENT_POSTGRES_VERSIONS[0] CURRENT_MYSQL_VERSIONS = ['5.7', '8'] +DEFAULT_MYSQL_VERSION = CURRENT_MYSQL_VERSIONS[0] CURRENT_MSSQL_VERSIONS = ['2017-latest', '2019-latest'] +DEFAULT_MSSQL_VERSION = CURRENT_MSSQL_VERSIONS[0] + DB_RESET = False START_AIRFLOW = "false" LOAD_EXAMPLES = False @@ -217,10 +239,10 @@ def get_airflow_extras(): CURRENT_HELM_VERSIONS = ['v3.6.3'] CURRENT_EXECUTORS = ['KubernetesExecutor'] -DEFAULT_KUBERNETES_MODES = CURRENT_KUBERNETES_MODES[0] -DEFAULT_KUBERNETES_VERSIONS = CURRENT_KUBERNETES_VERSIONS[0] -DEFAULT_KIND_VERSIONS = CURRENT_KIND_VERSIONS[0] -DEFAULT_HELM_VERSIONS = CURRENT_HELM_VERSIONS[0] +DEFAULT_KUBERNETES_MODE = CURRENT_KUBERNETES_MODES[0] +DEFAULT_KUBERNETES_VERSION = CURRENT_KUBERNETES_VERSIONS[0] +DEFAULT_KIND_VERSION = CURRENT_KIND_VERSIONS[0] +DEFAULT_HELM_VERSION = CURRENT_HELM_VERSIONS[0] DEFAULT_EXECUTOR = CURRENT_EXECUTORS[0] # Initialize image build variables - Have to check if this has to go to ci dataclass @@ -268,3 +290,16 @@ def get_airflow_extras(): "virtualenv", # END OF EXTRAS LIST UPDATED BY PRE COMMIT ] + + +class GithubEvents(Enum): + PULL_REQUEST = "pull_request" + PULL_REQUEST_REVIEW = "pull_request_review" + PULL_REQUEST_TARGET = "pull_request_target" + PUSH = "push" + SCHEDULE = "schedule" + + +@lru_cache(maxsize=None) +def github_events() -> list[str]: + return [e.value for e in GithubEvents] diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py new file mode 100644 index 0000000000000..22e74f455301f --- /dev/null +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -0,0 +1,480 @@ +# 
Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +import sys +from enum import Enum + +if sys.version_info >= (3, 8): + from functools import cached_property +else: + from cached_property import cached_property + +from functools import lru_cache +from re import match +from typing import Any, Dict, List, TypeVar + +from airflow_breeze.global_constants import ( + ALL_PYTHON_MAJOR_MINOR_VERSIONS, + CURRENT_HELM_VERSIONS, + CURRENT_KIND_VERSIONS, + CURRENT_KUBERNETES_MODES, + CURRENT_KUBERNETES_VERSIONS, + CURRENT_MSSQL_VERSIONS, + CURRENT_MYSQL_VERSIONS, + CURRENT_POSTGRES_VERSIONS, + CURRENT_PYTHON_MAJOR_MINOR_VERSIONS, + DEFAULT_HELM_VERSION, + DEFAULT_KIND_VERSION, + DEFAULT_KUBERNETES_MODE, + DEFAULT_KUBERNETES_VERSION, + DEFAULT_MSSQL_VERSION, + DEFAULT_MYSQL_VERSION, + DEFAULT_POSTGRES_VERSION, + DEFAULT_PYTHON_MAJOR_MINOR_VERSION, + GithubEvents, + SelectiveUnitTestTypes, + all_selective_test_types, +) +from airflow_breeze.utils.console import get_console + +FULL_TESTS_NEEDED_LABEL = "full tests needed" + + +def get_ga_output(name: str, value: Any) -> str: + output_name = name.replace('_', '-') + printed_value = str(value).lower() if isinstance(value, bool) else value + get_console().print(f"[info]{output_name}[/] = [green]{printed_value}[/]") + return f"::set-output name={output_name}::{printed_value}" + + +class FileGroupForCi(Enum): + ENVIRONMENT_FILES = "environment_files" + PYTHON_PRODUCTION_FILES = "python_scans" + JAVASCRIPT_PRODUCTION_FILES = "javascript_scans" + API_TEST_FILES = "api_test_files" + API_CODEGEN_FILES = "api_codegen_files" + HELM_FILES = "helm_files" + SETUP_FILES = "setup_files" + DOC_FILES = "doc_files" + UI_FILES = "ui_files" + WWW_FILES = "www_files" + KUBERNETES_FILES = "kubernetes_files" + ALL_PYTHON_FILES = "all_python_files" + ALL_SOURCE_FILES = "all_sources_for_tests" + + +T = TypeVar('T', FileGroupForCi, SelectiveUnitTestTypes) + + +class HashableDict(Dict[T, List[str]]): + def __hash__(self): + return hash(frozenset(self)) + + +CI_FILE_GROUP_MATCHES = HashableDict( + { + FileGroupForCi.ENVIRONMENT_FILES: [ + r"^.github/workflows", + r"^dev/breeze", + r"^Dockerfile", + r"^scripts", + r"^setup.py", + r"^setup.cfg", + ], + FileGroupForCi.PYTHON_PRODUCTION_FILES: [ + r"^airflow/.*\.py", + r"^setup.py", + ], + FileGroupForCi.JAVASCRIPT_PRODUCTION_FILES: [ + r"^airflow/.*\.[jt]sx?", + r"^airflow/.*\.lock", + ], + FileGroupForCi.API_TEST_FILES: [ + r"^airflow/api", + ], + FileGroupForCi.API_CODEGEN_FILES: [ + "^airflow/api_connexion/openapi/v1.yaml", + "^clients/gen", + ], + FileGroupForCi.HELM_FILES: [ + "^chart", + ], + FileGroupForCi.SETUP_FILES: [ + r"^pyproject.toml", + r"^setup.cfg", + r"^setup.py", + ], + FileGroupForCi.DOC_FILES: [ + r"^docs", + r"^airflow/.*\.py$", + r"^chart", + 
r"^providers", + r"^CHANGELOG\.txt", + r"^airflow/config_templates/config\.yml", + r"^chart/RELEASE_NOTES\.txt", + r"^chart/values\.schema\.json", + r"^chart/values\.json", + ], + FileGroupForCi.UI_FILES: [ + r"^airflow/ui/.*\.[tj]sx?$", + r"^airflow/ui/[^/]+\.json$", + r"^airflow/ui/.*\.lock$", + ], + FileGroupForCi.WWW_FILES: [ + r"^airflow/www/.*\.js[x]?$", + r"^airflow/www/[^/]+\.json$", + r"^airflow/www/.*\.lock$", + ], + FileGroupForCi.KUBERNETES_FILES: [ + r"^chart", + r"^kubernetes_tests", + r"^airflow/providers/cncf/kubernetes/", + r"^tests/providers/cncf/kubernetes/", + ], + FileGroupForCi.ALL_PYTHON_FILES: [ + r"\.py$", + ], + FileGroupForCi.ALL_SOURCE_FILES: [ + "^.pre-commit-config.yaml$", + "^airflow", + "^chart", + "^tests", + "^kubernetes_tests", + ], + } +) + + +TEST_TYPE_MATCHES = HashableDict( + { + SelectiveUnitTestTypes.API: [ + r"^airflow/api", + r"^airflow/api_connexion", + r"^tests/api", + r"^tests/api_connexion", + ], + SelectiveUnitTestTypes.CLI: [ + r"^airflow/cli", + r"^tests/cli", + ], + SelectiveUnitTestTypes.PROVIDERS: [ + "^airflow/providers/", + "^tests/providers/", + ], + SelectiveUnitTestTypes.WWW: ["^airflow/www", "^tests/www", "^airflow/ui"], + } +) + + +class SelectiveChecks: + __HASHABLE_FIELDS = {'_files', '_default_branch', '_commit_ref', "_pr_labels", "_github_event"} + + def __init__( + self, + files: tuple[str, ...] = (), + default_branch="main", + commit_ref: str | None = None, + pr_labels: tuple[str, ...] = (), + github_event: GithubEvents = GithubEvents.PULL_REQUEST, + ): + self._files = files + self._default_branch = default_branch + self._commit_ref = commit_ref + self._pr_labels = pr_labels + self._github_event = github_event + + def __important_attributes(self) -> tuple[Any, ...]: + return tuple(getattr(self, f) for f in self.__HASHABLE_FIELDS) + + def __hash__(self): + return hash(self.__important_attributes()) + + def __eq__(self, other): + return isinstance(other, SelectiveChecks) and all( + [getattr(other, f) == getattr(self, f) for f in self.__HASHABLE_FIELDS] + ) + + def __str__(self) -> str: + output = [] + for field_name in dir(self): + if not field_name.startswith('_'): + output.append(get_ga_output(field_name, getattr(self, field_name))) + return "\n".join(output) + + default_python_version = DEFAULT_PYTHON_MAJOR_MINOR_VERSION + default_postgres_version = DEFAULT_POSTGRES_VERSION + default_mysql_version = DEFAULT_MYSQL_VERSION + default_mssql_version = DEFAULT_MSSQL_VERSION + + default_kubernetes_version = DEFAULT_KUBERNETES_VERSION + default_kind_version = DEFAULT_KIND_VERSION + default_helm_version = DEFAULT_HELM_VERSION + + @cached_property + def default_branch(self) -> str: + return self._default_branch + + @cached_property + def _full_tests_needed(self) -> bool: + if self._github_event in [GithubEvents.PUSH, GithubEvents.SCHEDULE]: + get_console().print(f"[warning]Full tests needed because event is {self._github_event}[/]") + return True + if FULL_TESTS_NEEDED_LABEL in self._pr_labels: + get_console().print(f"[warning]Full tests needed because labels are {self._pr_labels}[/]") + return True + return False + + @cached_property + def python_versions(self) -> list[str]: + return ( + CURRENT_PYTHON_MAJOR_MINOR_VERSIONS + if self._full_tests_needed + else [DEFAULT_PYTHON_MAJOR_MINOR_VERSION] + ) + + @cached_property + def python_versions_list_as_string(self) -> str: + return " ".join(self.python_versions) + + @cached_property + def all_python_versions(self) -> list[str]: + return ( + ALL_PYTHON_MAJOR_MINOR_VERSIONS + if 
self._run_everything or self._full_tests_needed + else [DEFAULT_PYTHON_MAJOR_MINOR_VERSION] + ) + + @cached_property + def all_python_versions_list_as_string(self) -> str: + return " ".join(self.all_python_versions) + + @cached_property + def kubernetes_modes(self): + return CURRENT_KUBERNETES_MODES if self._full_tests_needed else [DEFAULT_KUBERNETES_MODE] + + @cached_property + def postgres_versions(self) -> list[str]: + return CURRENT_POSTGRES_VERSIONS if self._full_tests_needed else [DEFAULT_POSTGRES_VERSION] + + @cached_property + def mysql_versions(self) -> list[str]: + return CURRENT_MYSQL_VERSIONS if self._full_tests_needed else [DEFAULT_MYSQL_VERSION] + + @cached_property + def mssql_versions(self) -> list[str]: + return CURRENT_MSSQL_VERSIONS if self._full_tests_needed else [DEFAULT_MSSQL_VERSION] + + @cached_property + def kind_versions(self) -> list[str]: + return CURRENT_KIND_VERSIONS + + @cached_property + def helm_versions(self) -> list[str]: + return CURRENT_HELM_VERSIONS + + @cached_property + def postgres_exclude(self) -> list[dict[str, str]]: + return [{"python-version": "3.7"}] if self._full_tests_needed else [] + + @cached_property + def mssql_exclude(self) -> list[dict[str, str]]: + return [{"python-version": "3.8"}] if self._full_tests_needed else [] + + @cached_property + def mysql_exclude(self) -> list[dict[str, str]]: + return [{"python-version": "3.10"}] if self._full_tests_needed else [] + + @cached_property + def sqlite_exclude(self) -> list[dict[str, str]]: + return [{"python-version": "3.9"}] if self._full_tests_needed else [] + + @cached_property + def kubernetes_versions(self) -> list[str]: + return CURRENT_KUBERNETES_VERSIONS if self._full_tests_needed else [DEFAULT_KUBERNETES_VERSION] + + @cached_property + def kubernetes_versions_list_as_string(self) -> str: + return " ".join(self.kubernetes_versions) + + def _match_files_with_regexps(self, matched_files, regexps): + for file in self._files: + for regexp in regexps: + if match(regexp, file): + matched_files.append(file) + break + + @lru_cache(maxsize=None) + def _matching_files(self, match_group: T, match_dict: dict[T, list[str]]) -> list[str]: + matched_files: list[str] = [] + regexps = match_dict[match_group] + self._match_files_with_regexps(matched_files, regexps) + count = len(matched_files) + if count > 0: + get_console().print(f"[warning]{match_group} matched {count} files.[/]") + get_console().print(matched_files) + else: + get_console().print(f"[warning]{match_group} did not match any file.[/]") + return matched_files + + @cached_property + def _run_everything(self) -> bool: + if not self._commit_ref: + get_console().print("[warning]Running everything as commit is missing[/]") + return True + if self._full_tests_needed: + get_console().print("[warning]Running everything as full tests are needed[/]") + return True + if len(self._matching_files(FileGroupForCi.ENVIRONMENT_FILES, CI_FILE_GROUP_MATCHES)) > 0: + get_console().print("[warning]Running everything because env files changed[/]") + return True + return False + + def _should_be_run(self, source_area: FileGroupForCi) -> bool: + if self._run_everything: + get_console().print(f"[warning]{source_area} enabled because we are running everything[/]") + return True + matched_files = self._matching_files(source_area, CI_FILE_GROUP_MATCHES) + if len(matched_files) > 0: + get_console().print( + f"[warning]{source_area} enabled because it matched {len(matched_files)} changed files[/]" + ) + return True + else: + get_console().print( + 
f"[warning]{source_area} disabled because it did not match any changed files[/]" + ) + return False + + @cached_property + def needs_python_scans(self) -> bool: + return self._should_be_run(FileGroupForCi.PYTHON_PRODUCTION_FILES) + + @cached_property + def needs_javascript_scans(self) -> bool: + return self._should_be_run(FileGroupForCi.JAVASCRIPT_PRODUCTION_FILES) + + @cached_property + def needs_api_tests(self) -> bool: + return self._should_be_run(FileGroupForCi.API_TEST_FILES) + + @cached_property + def needs_api_codegen(self) -> bool: + return self._should_be_run(FileGroupForCi.API_CODEGEN_FILES) + + @cached_property + def run_ui_tests(self) -> bool: + return self._should_be_run(FileGroupForCi.UI_FILES) + + @cached_property + def run_www_tests(self) -> bool: + return self._should_be_run(FileGroupForCi.WWW_FILES) + + @cached_property + def run_kubernetes_tests(self) -> bool: + return self._should_be_run(FileGroupForCi.KUBERNETES_FILES) + + @cached_property + def docs_build(self) -> bool: + return self._should_be_run(FileGroupForCi.DOC_FILES) + + @cached_property + def needs_helm_tests(self) -> bool: + return self._should_be_run(FileGroupForCi.HELM_FILES) and self._default_branch == "main" + + @cached_property + def run_tests(self) -> bool: + return self._should_be_run(FileGroupForCi.ALL_SOURCE_FILES) + + @cached_property + def image_build(self) -> bool: + return self.run_tests or self.docs_build or self.run_kubernetes_tests + + def _select_test_type_if_matching( + self, test_types: set[str], test_type: SelectiveUnitTestTypes + ) -> list[str]: + matched_files = self._matching_files(test_type, TEST_TYPE_MATCHES) + count = len(matched_files) + if count > 0: + test_types.add(test_type.value) + get_console().print(f"[warning]{test_type} added because it matched {count} files[/]") + return matched_files + + def _get_test_types_to_run(self) -> list[str]: + candidate_test_types: set[str] = {"Always"} + matched_files: set[str] = set() + matched_files.update( + self._select_test_type_if_matching(candidate_test_types, SelectiveUnitTestTypes.WWW) + ) + matched_files.update( + self._select_test_type_if_matching(candidate_test_types, SelectiveUnitTestTypes.PROVIDERS) + ) + matched_files.update( + self._select_test_type_if_matching(candidate_test_types, SelectiveUnitTestTypes.CLI) + ) + matched_files.update( + self._select_test_type_if_matching(candidate_test_types, SelectiveUnitTestTypes.API) + ) + + kubernetes_files = self._matching_files(FileGroupForCi.KUBERNETES_FILES, CI_FILE_GROUP_MATCHES) + all_source_files = self._matching_files(FileGroupForCi.ALL_SOURCE_FILES, CI_FILE_GROUP_MATCHES) + + remaining_files = set(all_source_files) - set(matched_files) - set(kubernetes_files) + count_remaining_files = len(remaining_files) + if count_remaining_files > 0: + get_console().print( + f"[warning]We should run all tests. There are {count_remaining_files} changed " + "files that seems to fall into Core/Other category[/]" + ) + get_console().print(remaining_files) + candidate_test_types.update(all_selective_test_types()) + else: + get_console().print( + "[warning]There are no core/other files. 
Only tests relevant to the changed files are run.[/]" + ) + sorted_candidate_test_types = list(sorted(candidate_test_types)) + get_console().print("[warning]Selected test type candidates to run:[/]") + get_console().print(sorted_candidate_test_types) + return sorted_candidate_test_types + + @cached_property + def test_types(self) -> str: + if not self.run_tests: + return "" + if self._run_everything: + current_test_types = list(all_selective_test_types()) + else: + current_test_types = self._get_test_types_to_run() + if self._default_branch != "main": + if "Providers" in current_test_types: + get_console().print( + "[warning]Removing 'Providers' because the target branch " + f"is {self._default_branch} and not main[/]" + ) + current_test_types.remove("Providers") + return " ".join(sorted(current_test_types)) + + @cached_property + def basic_checks_only(self) -> bool: + return not self.image_build + + @cached_property + def upgrade_to_newer_dependencies(self) -> bool: + return len( + self._matching_files(FileGroupForCi.SETUP_FILES, CI_FILE_GROUP_MATCHES) + ) > 0 or self._github_event in [GithubEvents.PUSH, GithubEvents.SCHEDULE] diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py new file mode 100644 index 0000000000000..492135ebd38b0 --- /dev/null +++ b/dev/breeze/tests/test_selective_checks.py @@ -0,0 +1,464 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
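To connect the class above with the tests that follow: printing a ``SelectiveChecks`` instance emits one ``::set-output`` line per public property, which is exactly what the workflows consume and what ``assert_outputs_are_printed`` checks below. A hedged sketch of the shape of that output for a docs-only change, with the values taken from the ``docs/file.rst`` test case below and the output list abridged:

```python
# Illustration only - mirrors the "Only docs builds should run" case in the tests below.
from airflow_breeze.global_constants import GithubEvents
from airflow_breeze.utils.selective_checks import SelectiveChecks

sc = SelectiveChecks(
    files=("docs/file.rst",),
    commit_ref="HEAD",
    github_event=GithubEvents.PULL_REQUEST,
    default_branch="main",
)
print(str(sc))
# Among other outputs, this prints lines such as:
# ::set-output name=docs-build::true
# ::set-output name=image-build::true
# ::set-output name=run-tests::false
# ::set-output name=test-types::
```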
+ +from typing import Dict, Tuple + +import pytest + +from airflow_breeze.global_constants import GithubEvents +from airflow_breeze.utils.selective_checks import SelectiveChecks + + +def assert_outputs_are_printed(expected_outputs: Dict[str, str], output: str): + for name, value in expected_outputs.items(): + assert f"::set-output name={name}::{value}" in output + + +@pytest.mark.parametrize( + "files, expected_outputs,", + [ + ( + pytest.param( + ("INTHEWILD.md",), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "false", + "needs-helm-tests": "false", + "run-tests": "false", + "docs-build": "false", + "upgrade-to-newer-dependencies": "false", + "test-types": "", + }, + id="No tests on simple change", + ) + ), + ( + pytest.param( + ("airflow/api/file.py",), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "true", + "needs-helm-tests": "false", + "run-tests": "true", + "docs-build": "true", + "upgrade-to-newer-dependencies": "false", + "test-types": "API Always", + }, + id="Only API tests and DOCS should run", + ) + ), + ( + pytest.param( + ( + "airflow/api/file.py", + "tests/providers/google/file.py", + ), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "true", + "needs-helm-tests": "false", + "run-tests": "true", + "docs-build": "true", + "upgrade-to-newer-dependencies": "false", + "test-types": "API Always Providers", + }, + id="API and providers tests and docs should run", + ) + ), + ( + pytest.param( + ("tests/providers/google/file.py",), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "true", + "needs-helm-tests": "false", + "run-tests": "true", + "docs-build": "false", + "run-kubernetes-tests": "false", + "upgrade-to-newer-dependencies": "false", + "test-types": "Always Providers", + }, + id="Providers and docs should run", + ) + ), + ( + pytest.param( + ("docs/file.rst",), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "true", + "needs-helm-tests": "false", + "run-tests": "false", + "docs-build": "true", + "run-kubernetes-tests": "false", + "upgrade-to-newer-dependencies": "false", + "test-types": "", + }, + id="Only docs builds should run - no tests needed", + ) + ), + ( + pytest.param( + ( + "chart/aaaa.txt", + "tests/providers/google/file.py", + ), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "true", + "needs-helm-tests": "true", + "run-tests": "true", + "docs-build": "true", + "run-kubernetes-tests": "true", + "upgrade-to-newer-dependencies": "false", + "test-types": "Always Providers", + }, + id="Helm tests, providers, kubernetes tests and docs should run", + ) + ), + ( + pytest.param( + ( + "INTHEWILD.md", + "chart/aaaa.txt", + "tests/providers/google/file.py", + ), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "true", + "needs-helm-tests": "true", + "run-tests": "true", + "docs-build": "true", + "run-kubernetes-tests": "true", + "upgrade-to-newer-dependencies": "false", + "test-types": "Always Providers", + }, + id="Helm tests, providers, kubernetes tests and docs should run even if " + "unimportant files were added", + ) + ), + ( + pytest.param( + ("setup.py",), + { + "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + 
"image-build": "true", + "needs-helm-tests": "true", + "run-tests": "true", + "docs-build": "true", + "upgrade-to-newer-dependencies": "true", + "test-types": "API Always CLI Core Integration Other Providers WWW", + }, + id="Everything should run and upgrading to newer requirements as setup.py changed", + ) + ), + ], +) +def test_expected_output_pull_request_main( + files: Tuple[str, ...], + expected_outputs: Dict[str, str], +): + sc = SelectiveChecks( + files=files, + commit_ref="HEAD", + github_event=GithubEvents.PULL_REQUEST, + pr_labels=(), + default_branch="main", + ) + assert_outputs_are_printed(expected_outputs, str(sc)) + + +@pytest.mark.parametrize( + "files, pr_labels, default_branch, expected_outputs,", + [ + ( + pytest.param( + ("INTHEWILD.md",), + ("full tests needed",), + "main", + { + "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "image-build": "true", + "run-tests": "true", + "docs-build": "true", + "upgrade-to-newer-dependencies": "false", + "test-types": "API Always CLI Core Integration Other Providers WWW", + }, + id="Everything should run when full tests are needed", + ) + ), + ( + pytest.param( + ("INTHEWILD.md",), + ( + "another label", + "full tests needed", + ), + "main", + { + "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "image-build": "true", + "run-tests": "true", + "docs-build": "true", + "upgrade-to-newer-dependencies": "false", + "test-types": "API Always CLI Core Integration Other Providers WWW", + }, + id="Everything should run when full tests are needed even with different label set as well", + ) + ), + ( + pytest.param( + (), + ("full tests needed",), + "main", + { + "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "image-build": "true", + "run-tests": "true", + "docs-build": "true", + "upgrade-to-newer-dependencies": "false", + "test-types": "API Always CLI Core Integration Other Providers WWW", + }, + id="Everything should run when full tests are needed even if no files are changed", + ) + ), + ( + pytest.param( + ("INTHEWILD.md",), + ("full tests needed",), + "v2-3-stable", + { + "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "image-build": "true", + "run-tests": "true", + "docs-build": "true", + "upgrade-to-newer-dependencies": "false", + "test-types": "API Always CLI Core Integration Other WWW", + }, + id="Everything should run except Providers when full tests are needed for non-main branch", + ) + ), + ], +) +def test_expected_output_full_tests_needed( + files: Tuple[str, ...], + pr_labels: Tuple[str, ...], + default_branch: str, + expected_outputs: Dict[str, str], +): + sc = SelectiveChecks( + files=files, + commit_ref="HEAD", + github_event=GithubEvents.PULL_REQUEST, + pr_labels=pr_labels, + default_branch=default_branch, + ) + output = str(sc) + assert_outputs_are_printed(expected_outputs, output) + + +@pytest.mark.parametrize( + "files, expected_outputs,", + [ + pytest.param( + ("INTHEWILD.md",), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "false", + "needs-helm-tests": "false", + "run-tests": "false", + "docs-build": "false", + "upgrade-to-newer-dependencies": "false", + "test-types": "", + }, + id="Everything should run when full tests are needed even if no files are changed", + ), + pytest.param( + ( + 
"chart/aaaa.txt", + "tests/providers/google/file.py", + ), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "needs-helm-tests": "false", + "image-build": "true", + "run-tests": "true", + "docs-build": "true", + "run-kubernetes-tests": "true", + "upgrade-to-newer-dependencies": "false", + "test-types": "Always", + }, + id="No Helm tests, No providers should run if only chart/providers changed in non-main", + ), + pytest.param( + ( + "airflow/cli/test.py", + "chart/aaaa.txt", + "tests/providers/google/file.py", + ), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "true", + "needs-helm-tests": "false", + "run-tests": "true", + "docs-build": "true", + "run-kubernetes-tests": "true", + "upgrade-to-newer-dependencies": "false", + "test-types": "Always CLI", + }, + id="Only CLI tests and Kubernetes tests should run if cli/chart files changed in non-main branch", + ), + pytest.param( + ( + "airflow/file.py", + "tests/providers/google/file.py", + ), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "true", + "needs-helm-tests": "false", + "run-tests": "true", + "docs-build": "true", + "run-kubernetes-tests": "false", + "upgrade-to-newer-dependencies": "false", + "test-types": "API Always CLI Core Integration Other WWW", + }, + id="All tests except providers should run if core file changed in non-main branch", + ), + ], +) +def test_expected_output_pull_request_v2_3( + files: Tuple[str, ...], + expected_outputs: Dict[str, str], +): + sc = SelectiveChecks( + files=files, + commit_ref="HEAD", + github_event=GithubEvents.PULL_REQUEST, + pr_labels=(), + default_branch="v2-3-stable", + ) + assert_outputs_are_printed(expected_outputs, str(sc)) + + +@pytest.mark.parametrize( + "files, pr_labels, default_branch, expected_outputs,", + [ + pytest.param( + ("INTHEWILD.md",), + (), + "main", + { + "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "image-build": "true", + "needs-helm-tests": "true", + "run-tests": "true", + "docs-build": "true", + "upgrade-to-newer-dependencies": "true", + "test-types": "API Always CLI Core Integration Other Providers WWW", + }, + id="All tests run on push even if unimportant file changed", + ), + pytest.param( + ("INTHEWILD.md",), + (), + "v2-3-stable", + { + "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "image-build": "true", + "needs-helm-tests": "false", + "run-tests": "true", + "docs-build": "true", + "upgrade-to-newer-dependencies": "true", + "test-types": "API Always CLI Core Integration Other WWW", + }, + id="All tests except Providers and Helm run on push" + " even if unimportant file changed in non-main branch", + ), + pytest.param( + ("airflow/api.py",), + (), + "main", + { + "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "image-build": "true", + "needs-helm-tests": "true", + "run-tests": "true", + "docs-build": "true", + "upgrade-to-newer-dependencies": "true", + "test-types": "API Always CLI Core Integration Other Providers WWW", + }, + id="All tests run on push if core file changed", + ), + ], +) +def test_expected_output_push( + files: Tuple[str, ...], + pr_labels: Tuple[str, ...], + default_branch: str, + expected_outputs: Dict[str, str], +): + sc = SelectiveChecks( + files=files, + commit_ref="HEAD", + 
github_event=GithubEvents.PUSH,
+        pr_labels=pr_labels,
+        default_branch=default_branch,
+    )
+    assert_outputs_are_printed(expected_outputs, str(sc))
+
+
+def test_no_commit_provided():
+    sc = SelectiveChecks(
+        files=(),
+        commit_ref="",
+        github_event=GithubEvents.PULL_REQUEST,
+        pr_labels=(),
+        default_branch="main",
+    )
+    assert_outputs_are_printed(
+        {
+            "all-python-versions": "['3.7', '3.8', '3.9', '3.10']",
+            "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10",
+            "image-build": "true",
+            "needs-helm-tests": "true",
+            "run-tests": "true",
+            "docs-build": "true",
+            "upgrade-to-newer-dependencies": "false",
+            "test-types": "API Always CLI Core Integration Other Providers WWW",
+        },
+        str(sc),
+    )
diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt
index 5ddc9626a325e..194d9690d835a 100644
--- a/images/breeze/output-commands-hash.txt
+++ b/images/breeze/output-commands-hash.txt
@@ -2,4 +2,4 @@
 # This file is automatically generated by pre-commit. If you have a conflict with this file
 # Please do not solve it but run `breeze regenerate-command-images`.
 # This command should fix the conflict and regenerate help images that you have conflict with.
-b48267467e8d9a9a4af99df180177b43
+b5c4ae62f5ac472a83af3a8eca75ed9b
diff --git a/images/breeze/output-commands.svg b/images/breeze/output-commands.svg
index e9dd89b22e4dc..2e21ac9804afa 100644
--- a/images/breeze/output-commands.svg
+++ b/images/breeze/output-commands.svg
[auto-generated SVG screenshot of the top-level "Breeze commands" help, regenerated. Visible change:
 "fix-ownership", "free-space" and "resource-check" move out of the "Configuration & maintenance" panel
 and "find-newer-dependencies" moves out of "Release management" into a new "CI commands" panel, which
 also lists the new "selective-check" command.]
diff --git a/images/breeze/output-selective-check.svg b/images/breeze/output-selective-check.svg
new file mode 100644
index 0000000000000..3ea08a5bf7a29
--- /dev/null
+++ b/images/breeze/output-selective-check.svg
[new auto-generated SVG screenshot: "Command: selective-check" help. It shows
 "Usage: breeze selective-check [OPTIONS]" - "Checks what kind of tests should be run for an incoming
 commit." - with the flags --commit-ref, --pr-labels, --default-branch (default: main) and
 --github-event-name (pull_request | pull_request_review | pull_request_target | push | schedule,
 default: pull_request), plus the common --verbose, --dry-run and --help options.]
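For illustration only (not part of this patch): the unit tests above drive the SelectiveChecks class
directly, and stringifying the object emits the ::set-output lines that the GitHub Actions workflows
consume. A minimal sketch, with imports and constructor arguments taken from the tests and the expected
output lines taken from the "Only API tests and DOCS should run" case:

    from airflow_breeze.global_constants import GithubEvents
    from airflow_breeze.utils.selective_checks import SelectiveChecks

    sc = SelectiveChecks(
        files=("airflow/api/file.py",),
        commit_ref="HEAD",
        github_event=GithubEvents.PULL_REQUEST,
        pr_labels=(),
        default_branch="main",
    )
    # Rendering the object produces the GitHub Actions outputs, among them:
    # ::set-output name=image-build::true
    # ::set-output name=docs-build::true
    # ::set-output name=test-types::API Always
    print(str(sc))

The breeze selective-check command documented in the new help image exposes the same checks on the
command line via the --commit-ref, --pr-labels, --default-branch and --github-event-name flags.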
diff --git a/images/breeze/output-tests.svg b/images/breeze/output-tests.svg
index 1d819816f2596..0bb6680bef9ae 100644
--- a/images/breeze/output-tests.svg
+++ b/images/breeze/output-tests.svg
[auto-generated SVG screenshot of the "Command: tests" help, regenerated. The visible change is the list
 of allowed values for --test-type, which now reads
 (All | Always | API | Always | CLI | Core | Integration | Other | Providers | WWW | Helm | Postgres |
 MySQL | Integration | Other | Quarantine) instead of
 (All | Always | Core | Providers | API | CLI | Integration | Other | WWW | Postgres | MySQL | Helm |
 Quarantined).]
diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh
deleted file mode 100755
index 26fbf13f66ed9..0000000000000
--- a/scripts/ci/selective_ci_checks.sh
+++ /dev/null
@@ -1,768 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# shellcheck source=scripts/ci/libraries/_script_init.sh
-. ./scripts/ci/libraries/_script_init.sh
-
-# Parameter:
-#
-# $1 - COMMIT SHA of the incoming commit. If this parameter is missing, this script does not check anything,
-# it simply sets all the version outputs that determine that all tests should be run.
-# This happens in case the even triggering the workflow is 'schedule' or 'push'.
-# -# The logic of retrieving changes works by comparing the incoming commit with the target branch -# The commit addresses. -# -# -declare -a pattern_array - -if [[ ${PR_LABELS=} == *"full tests needed"* ]]; then - echo - echo "Found the right PR labels in '${PR_LABELS=}': 'full tests needed''" - echo - FULL_TESTS_NEEDED_LABEL="true" -else - echo - echo "Did not find the right PR labels in '${PR_LABELS=}': 'full tests needed'" - echo - FULL_TESTS_NEEDED_LABEL="false" -fi - -function check_upgrade_to_newer_dependencies_needed() { - if [[ ${GITHUB_EVENT_NAME=} == 'push' || ${GITHUB_EVENT_NAME=} == "scheduled" ]]; then - # Trigger upgrading to latest constraints when we are in push or schedule event - upgrade_to_newer_dependencies="true" - fi -} - -function output_all_basic_variables() { - if [[ ${FULL_TESTS_NEEDED_LABEL} == "true" ]]; then - initialization::ga_output python-versions \ - "$(initialization::parameters_to_json "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS[@]}")" - initialization::ga_output all-python-versions \ - "$(initialization::parameters_to_json "${ALL_PYTHON_MAJOR_MINOR_VERSIONS[@]}")" - initialization::ga_output all-python-versions-list-as-string "${ALL_PYTHON_MAJOR_MINOR_VERSIONS[*]}" - initialization::ga_output python-versions-list-as-string "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS[*]}" - initialization::ga_output kubernetes-versions-list-as-string "${CURRENT_KUBERNETES_VERSIONS[*]}" - else - initialization::ga_output python-versions \ - "$(initialization::parameters_to_json "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}")" - # this will work as long as DEFAULT_PYTHON_MAJOR_VERSION is the same on HEAD - # all-python-versions are used in BuildImage Workflow - initialization::ga_output all-python-versions \ - "$(initialization::parameters_to_json "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}")" - initialization::ga_output all-python-versions-list-as-string "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}" - initialization::ga_output python-versions-list-as-string "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}" - initialization::ga_output kubernetes-versions-list-as-string "${DEFAULT_KUBERNETES_VERSION}" - fi - initialization::ga_output default-python-version "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}" - - if [[ ${FULL_TESTS_NEEDED_LABEL} == "true" ]]; then - initialization::ga_output kubernetes-versions \ - "$(initialization::parameters_to_json "${CURRENT_KUBERNETES_VERSIONS[@]}")" - else - initialization::ga_output kubernetes-versions \ - "$(initialization::parameters_to_json "${KUBERNETES_VERSION}")" - fi - initialization::ga_output default-kubernetes-version "${KUBERNETES_VERSION}" - - initialization::ga_output kubernetes-modes \ - "$(initialization::parameters_to_json "${CURRENT_KUBERNETES_MODES[@]}")" - initialization::ga_output default-kubernetes-mode "${KUBERNETES_MODE}" - - if [[ ${FULL_TESTS_NEEDED_LABEL} == "true" ]]; then - initialization::ga_output postgres-versions \ - "$(initialization::parameters_to_json "${CURRENT_POSTGRES_VERSIONS[@]}")" - else - initialization::ga_output postgres-versions \ - "$(initialization::parameters_to_json "${POSTGRES_VERSION}")" - fi - initialization::ga_output default-postgres-version "${POSTGRES_VERSION}" - - if [[ ${FULL_TESTS_NEEDED_LABEL} == "true" ]]; then - initialization::ga_output mysql-versions \ - "$(initialization::parameters_to_json "${CURRENT_MYSQL_VERSIONS[@]}")" - else - initialization::ga_output mysql-versions \ - "$(initialization::parameters_to_json "${MYSQL_VERSION}")" - fi - initialization::ga_output default-mysql-version "${MYSQL_VERSION}" - - if [[ 
${FULL_TESTS_NEEDED_LABEL} == "true" ]]; then - initialization::ga_output mssql-versions \ - "$(initialization::parameters_to_json "${CURRENT_MSSQL_VERSIONS[@]}")" - else - initialization::ga_output mssql-versions \ - "$(initialization::parameters_to_json "${MSSQL_VERSION}")" - fi - initialization::ga_output default-mssql-version "${MSSQL_VERSION}" - - - - initialization::ga_output kind-versions \ - "$(initialization::parameters_to_json "${CURRENT_KIND_VERSIONS[@]}")" - initialization::ga_output default-kind-version "${KIND_VERSION}" - - initialization::ga_output helm-versions \ - "$(initialization::parameters_to_json "${CURRENT_HELM_VERSIONS[@]}")" - initialization::ga_output default-helm-version "${HELM_VERSION}" - - if [[ ${FULL_TESTS_NEEDED_LABEL} == "true" ]]; then - initialization::ga_output postgres-exclude '[{ "python-version": "3.7" }]' - initialization::ga_output mssql-exclude '[{ "python-version": "3.8" }]' - initialization::ga_output mysql-exclude '[{ "python-version": "3.10" }]' - initialization::ga_output sqlite-exclude '[{ "python-version": "3.9" }]' - else - initialization::ga_output postgres-exclude '[]' - initialization::ga_output mysql-exclude '[]' - initialization::ga_output mssql-exclude '[]' - initialization::ga_output sqlite-exclude '[]' - fi - - - initialization::ga_output default-helm-version "${HELM_VERSION}" - initialization::ga_output kubernetes-exclude '[]' - - initialization::ga_output default-branch "${DEFAULT_BRANCH}" - -} - -function get_changed_files() { - start_end::group_start "Get changed files" - echo - echo "Incoming commit SHA: ${INCOMING_COMMIT_SHA}" - echo - echo "Changed files from ${INCOMING_COMMIT_SHA} vs it's first parent" - echo - CHANGED_FILES=$(git diff-tree --no-commit-id --name-only \ - -r "${INCOMING_COMMIT_SHA}^" "${INCOMING_COMMIT_SHA}" || true) - if [[ -z "${CHANGED_FILES}" ]]; then - echo - echo "${COLOR_YELLOW}WARNING: Could not find any changed files ${COLOR_RESET}" - echo Assuming that we should run all tests in this case - echo - set_outputs_run_everything_and_exit - fi - echo - echo "Changed files:" - echo - echo "${CHANGED_FILES}" - echo - readonly CHANGED_FILES - start_end::group_end -} - -function run_tests() { - initialization::ga_output run-tests "${@}" -} - -function run_kubernetes_tests() { - initialization::ga_output run-kubernetes-tests "${@}" -} - -function needs_helm_tests() { - initialization::ga_output needs-helm-tests "${@}" -} - -function needs_api_tests() { - initialization::ga_output needs-api-tests "${@}" -} - -function needs_api_codegen() { - initialization::ga_output needs-api-codegen "${@}" -} - -function needs_javascript_scans() { - initialization::ga_output needs-javascript-scans "${@}" -} - -function needs_python_scans() { - initialization::ga_output needs-python-scans "${@}" -} - -function set_test_types() { - initialization::ga_output test-types "${@}" -} - -function set_docs_build() { - initialization::ga_output docs-build "${@}" -} - -function set_image_build() { - initialization::ga_output image-build "${@}" -} - -function set_basic_checks_only() { - initialization::ga_output basic-checks-only "${@}" -} - -function set_upgrade_to_newer_dependencies() { - initialization::ga_output upgrade-to-newer-dependencies "${@}" -} - -function needs_ui_tests() { - initialization::ga_output run-ui-tests "${@}" -} - -function needs_www_tests() { - initialization::ga_output run-www-tests "${@}" -} - -if [[ ${DEFAULT_BRANCH} == "main" ]]; then - ALL_TESTS="Always API Core Other CLI Providers WWW Integration" -else - # 
Skips Provider tests in case current default branch is not main - ALL_TESTS="Always API Core Other CLI WWW Integration" -fi -readonly ALL_TESTS - -function set_outputs_run_everything_and_exit() { - needs_api_tests "true" - needs_api_codegen "true" - needs_helm_tests "true" - needs_javascript_scans "true" - needs_python_scans "true" - run_tests "true" - run_kubernetes_tests "true" - set_test_types "${ALL_TESTS}" - set_basic_checks_only "false" - set_docs_build "true" - set_image_build "true" - set_upgrade_to_newer_dependencies "${upgrade_to_newer_dependencies}" - needs_ui_tests "true" - needs_www_tests "true" - exit -} - -function set_outputs_run_all_python_tests() { - run_tests "true" - run_kubernetes_tests "true" - set_test_types "${ALL_TESTS}" - set_basic_checks_only "false" - set_image_build "true" - kubernetes_tests_needed="true" -} - -function set_output_skip_all_tests_and_docs_and_exit() { - needs_api_tests "false" - needs_api_codegen "false" - needs_helm_tests "false" - needs_javascript_scans "false" - needs_python_scans "false" - run_tests "false" - run_kubernetes_tests "false" - set_test_types "" - set_basic_checks_only "true" - set_docs_build "false" - set_image_build "false" - set_upgrade_to_newer_dependencies "false" - needs_ui_tests "false" - needs_www_tests "false" - exit -} - -function set_output_skip_tests_but_build_images_and_exit() { - needs_api_tests "false" - needs_api_codegen "false" - needs_helm_tests "false" - needs_javascript_scans "false" - needs_python_scans "false" - run_tests "false" - run_kubernetes_tests "false" - set_test_types "" - set_basic_checks_only "false" - set_docs_build "true" - set_image_build "true" - set_upgrade_to_newer_dependencies "${upgrade_to_newer_dependencies}" - needs_ui_tests "false" - needs_www_tests "false" - exit -} - -# Converts array of patterns into single | pattern string -# pattern_array - array storing regexp patterns -# Outputs - pattern string -function get_regexp_from_patterns() { - local test_triggering_regexp="" - local separator="" - local pattern - for pattern in "${pattern_array[@]}"; do - test_triggering_regexp="${test_triggering_regexp}${separator}${pattern}" - separator="|" - done - echo "${test_triggering_regexp}" -} - -# Shows changed files in the commit vs. the target. -# Input: -# pattern_array - array storing regexp patterns -function show_changed_files() { - local the_regexp - the_regexp=$(get_regexp_from_patterns) - echo - echo "Changed files matching the ${the_regexp} pattern:" - echo - echo "${CHANGED_FILES}" | grep -E "${the_regexp}" || true - echo -} - -# Counts changed files in the commit vs. 
the target -# Input: -# pattern_array - array storing regexp patterns -# Output: -# Count of changed files matching the patterns -function count_changed_files() { - echo "${CHANGED_FILES}" | grep -c -E "$(get_regexp_from_patterns)" || true -} - -function check_if_python_security_scans_should_be_run() { - start_end::group_start "Check Python security scans" - local pattern_array=( - "^airflow/.*\.py" - "^setup.py" - ) - show_changed_files - - if [[ $(count_changed_files) == "0" ]]; then - needs_python_scans "false" - else - needs_python_scans "true" - fi - start_end::group_end -} - -function check_if_setup_files_changed() { - start_end::group_start "Check setup.py/cfg changed" - local pattern_array=( - "^setup.cfg" - "^setup.py" - ) - show_changed_files - - if [[ $(count_changed_files) != "0" ]]; then - # In case the setup files changed, we automatically force upgrading to newer dependencies - # no matter what was set before. - upgrade_to_newer_dependencies="true" - fi - start_end::group_end -} - - -function check_if_javascript_security_scans_should_be_run() { - start_end::group_start "Check JavaScript security scans" - local pattern_array=( - "^airflow/.*\.[jt]sx?" - "^airflow/.*\.lock" - ) - show_changed_files - - if [[ $(count_changed_files) == "0" ]]; then - needs_javascript_scans "false" - else - needs_javascript_scans "true" - fi - start_end::group_end -} - -function check_if_api_tests_should_be_run() { - start_end::group_start "Check API tests" - local pattern_array=( - "^airflow/api" - ) - show_changed_files - - if [[ $(count_changed_files) == "0" ]]; then - needs_api_tests "false" - else - needs_api_tests "true" - fi - start_end::group_end -} - -function check_if_api_codegen_should_be_run() { - start_end::group_start "Check API codegen" - local pattern_array=( - "^airflow/api_connexion/openapi/v1.yaml" - "^clients/gen" - ) - show_changed_files - - if [[ $(count_changed_files) == "0" ]]; then - needs_api_codegen "false" - else - needs_api_codegen "true" - fi - start_end::group_end -} - -function check_if_helm_tests_should_be_run() { - start_end::group_start "Check helm tests" - local pattern_array=( - "^chart" - ) - show_changed_files - - if [[ $(count_changed_files) == "0" ]]; then - needs_helm_tests "false" - else - needs_helm_tests "true" - fi - start_end::group_end -} - -function check_if_docs_should_be_generated() { - start_end::group_start "Check docs" - local pattern_array=( - "^docs" - "^airflow/.*\.py$" - "^CHANGELOG\.txt" - "^airflow/config_templates/config\.yml" - "^chart/UPDATING\.rst" - "^chart/CHANGELOG\.txt" - "^chart/values\.schema\.json" - ) - show_changed_files - - if [[ $(count_changed_files) == "0" ]]; then - echo "None of the docs changed" - else - image_build_needed="true" - docs_build_needed="true" - fi - start_end::group_end -} - -function check_if_ui_tests_should_be_run() { - start_end::group_start "Check UI" - local pattern_array=( - "^airflow/ui/.*\.[tj]sx?$" - # tsconfig.json, package.json, etc. - "^airflow/ui/[^/]+\.json$" - "^airflow/ui/.*\.lock$" - ) - show_changed_files - - if [[ $(count_changed_files) == "0" ]]; then - needs_ui_tests "false" - else - needs_ui_tests "true" - fi - start_end::group_end -} - -function check_if_www_tests_should_be_run() { - start_end::group_start "Check WWW" - local pattern_array=( - "^airflow/www/.*\.js[x]?$" - # tsconfig.json, package.json, etc. 
- "^airflow/www/[^/]+\.json$" - "^airflow/www/.*\.lock$" - ) - show_changed_files - - if [[ $(count_changed_files) == "0" ]]; then - needs_www_tests "false" - else - needs_www_tests "true" - fi - start_end::group_end -} - - -ANY_PY_FILES_CHANGED=( - "\.py$" -) -readonly ANY_PY_FILES_CHANGED - -function check_if_any_py_files_changed() { - start_end::group_start "Check if any Python files changed" - local pattern_array=("${ANY_PY_FILES_CHANGED[@]}") - show_changed_files - - if [[ $(count_changed_files) != "0" ]]; then - image_build_needed="true" - fi - start_end::group_end -} - - -AIRFLOW_SOURCES_TRIGGERING_TESTS=( - "^.pre-commit-config.yaml$" - "^airflow" - "^chart" - "^tests" - "^kubernetes_tests" -) -readonly AIRFLOW_SOURCES_TRIGGERING_TESTS - -function check_if_tests_are_needed_at_all() { - start_end::group_start "Check tests are needed" - local pattern_array=("${AIRFLOW_SOURCES_TRIGGERING_TESTS[@]}") - show_changed_files - - if [[ $(count_changed_files) == "0" ]]; then - if [[ ${image_build_needed} == "true" ]]; then - echo "No tests needed, Skipping tests but building images." - set_output_skip_tests_but_build_images_and_exit - else - echo "None of the important files changed, Skipping tests" - set_output_skip_all_tests_and_docs_and_exit - fi - else - image_build_needed="true" - tests_needed="true" - fi - start_end::group_end -} - -function run_all_tests_if_environment_files_changed() { - start_end::group_start "Check if everything should be run" - local pattern_array=( - "^.github/workflows/" - "^Dockerfile" - "^scripts" - "^setup.py" - "^setup.cfg" - ) - show_changed_files - - if [[ $(count_changed_files) != "0" ]]; then - echo "Important environment files changed. Running everything" - set_outputs_run_everything_and_exit - fi - if [[ ${FULL_TESTS_NEEDED_LABEL} == "true" ]]; then - echo "Full tests requested by label on PR. 
Running everything" - set_outputs_run_everything_and_exit - fi - start_end::group_end -} - -function get_count_all_files() { - start_end::group_start "Count all airflow source files" - local pattern_array=("${AIRFLOW_SOURCES_TRIGGERING_TESTS[@]}") - show_changed_files - COUNT_ALL_CHANGED_FILES=$(count_changed_files) - echo "Files count: ${COUNT_ALL_CHANGED_FILES}" - readonly COUNT_ALL_CHANGED_FILES - start_end::group_end -} - -function get_count_api_files() { - start_end::group_start "Count API files" - local pattern_array=( - "^airflow/api" - "^airflow/api_connexion" - "^tests/api" - "^tests/api_connexion" - ) - show_changed_files - COUNT_API_CHANGED_FILES=$(count_changed_files) - echo "Files count: ${COUNT_API_CHANGED_FILES}" - readonly COUNT_API_CHANGED_FILES - start_end::group_end -} - -function get_count_cli_files() { - start_end::group_start "Count CLI files" - local pattern_array=( - "^airflow/cli" - "^tests/cli" - ) - show_changed_files - COUNT_CLI_CHANGED_FILES=$(count_changed_files) - echo "Files count: ${COUNT_CLI_CHANGED_FILES}" - readonly COUNT_CLI_CHANGED_FILES - start_end::group_end -} - -function get_count_providers_files() { - start_end::group_start "Count providers files" - local pattern_array=( - "^airflow/providers/" - "^tests/providers/" - ) - show_changed_files - COUNT_PROVIDERS_CHANGED_FILES=$(count_changed_files) - echo "Files count: ${COUNT_PROVIDERS_CHANGED_FILES}" - readonly COUNT_PROVIDERS_CHANGED_FILES - start_end::group_end -} - -function get_count_www_files() { - start_end::group_start "Count www files" - local pattern_array=( - "^airflow/www" - "^tests/www" - ) - show_changed_files - COUNT_WWW_CHANGED_FILES=$(count_changed_files) - echo "Files count: ${COUNT_WWW_CHANGED_FILES}" - readonly COUNT_WWW_CHANGED_FILES - start_end::group_end -} - -function get_count_ui_files() { - start_end::group_start "Count ui files" - local pattern_array=( - "^airflow/ui/" - ) - show_changed_files - COUNT_UI_CHANGED_FILES=$(count_changed_files) - echo "Files count: ${COUNT_UI_CHANGED_FILES}" - readonly COUNT_UI_CHANGED_FILES - start_end::group_end -} - -function get_count_kubernetes_files() { - start_end::group_start "Count kubernetes files" - local pattern_array=( - "^chart" - "^kubernetes_tests" - "^airflow/providers/cncf/kubernetes/" - "^tests/providers/cncf/kubernetes/" - ) - show_changed_files - COUNT_KUBERNETES_CHANGED_FILES=$(count_changed_files) - echo "Files count: ${COUNT_KUBERNETES_CHANGED_FILES}" - readonly COUNT_KUBERNETES_CHANGED_FILES - start_end::group_end -} - -function calculate_test_types_to_run() { - start_end::group_start "Count core/other files" - COUNT_CORE_OTHER_CHANGED_FILES=$((COUNT_ALL_CHANGED_FILES - COUNT_WWW_CHANGED_FILES - COUNT_UI_CHANGED_FILES - COUNT_PROVIDERS_CHANGED_FILES - COUNT_CLI_CHANGED_FILES - COUNT_API_CHANGED_FILES - COUNT_KUBERNETES_CHANGED_FILES)) - - readonly COUNT_CORE_OTHER_CHANGED_FILES - echo - echo "Files count: ${COUNT_CORE_OTHER_CHANGED_FILES}" - echo - if [[ ${COUNT_CORE_OTHER_CHANGED_FILES} -gt 0 ]]; then - # Running all tests because some core or other files changed - echo - echo "Looks like ${COUNT_CORE_OTHER_CHANGED_FILES} files changed in the core/other area and" - echo "We have to run all python tests. 
This will take longer than usual" - echo - set_outputs_run_all_python_tests - else - if [[ ${COUNT_KUBERNETES_CHANGED_FILES} != "0" ]]; then - kubernetes_tests_needed="true" - fi - tests_needed="true" - SELECTED_TESTS="" - if [[ ${COUNT_API_CHANGED_FILES} != "0" ]]; then - echo - echo "Adding API to selected files as ${COUNT_API_CHANGED_FILES} API files changed" - echo - SELECTED_TESTS="${SELECTED_TESTS} API" - fi - if [[ ${COUNT_CLI_CHANGED_FILES} != "0" ]]; then - echo - echo "Adding CLI and Kubernetes (they depend on CLI) to selected files as ${COUNT_CLI_CHANGED_FILES} CLI files changed" - echo - SELECTED_TESTS="${SELECTED_TESTS} CLI" - kubernetes_tests_needed="true" - fi - - if [[ ${DEFAULT_BRANCH} == "main" ]]; then - if [[ ${COUNT_PROVIDERS_CHANGED_FILES} != "0" ]]; then - echo - echo "Adding Providers to selected files as ${COUNT_PROVIDERS_CHANGED_FILES} Provider files changed" - echo - SELECTED_TESTS="${SELECTED_TESTS} Providers" - fi - else - echo - echo "Providers tests are not added because they are only run in case of main branch." - echo - fi - if [[ ${COUNT_WWW_CHANGED_FILES} != "0" ]]; then - echo - echo "Adding WWW to selected files as ${COUNT_WWW_CHANGED_FILES} WWW files changed" - echo - SELECTED_TESTS="${SELECTED_TESTS} WWW" - fi - initialization::ga_output test-types "Always Integration ${SELECTED_TESTS}" - fi - start_end::group_end -} - - - -upgrade_to_newer_dependencies="false" - -if (($# < 1)); then - echo - echo "No Commit SHA - running all tests (likely direct merge, or scheduled run)!" - echo - INCOMING_COMMIT_SHA="" - readonly INCOMING_COMMIT_SHA - # override FULL_TESTS_NEEDED_LABEL in main/scheduled run - FULL_TESTS_NEEDED_LABEL="true" - readonly FULL_TESTS_NEEDED_LABEL - output_all_basic_variables - check_upgrade_to_newer_dependencies_needed - set_outputs_run_everything_and_exit -else - INCOMING_COMMIT_SHA="${1}" - readonly INCOMING_COMMIT_SHA - echo - echo "Commit SHA passed: ${INCOMING_COMMIT_SHA}!" 
- echo - readonly FULL_TESTS_NEEDED_LABEL -fi - -check_upgrade_to_newer_dependencies_needed - -output_all_basic_variables - -image_build_needed="false" -docs_build_needed="false" -tests_needed="false" -kubernetes_tests_needed="false" - -get_changed_files -check_if_setup_files_changed -run_all_tests_if_environment_files_changed -check_if_any_py_files_changed -check_if_docs_should_be_generated -check_if_helm_tests_should_be_run -check_if_api_tests_should_be_run -check_if_api_codegen_should_be_run -check_if_javascript_security_scans_should_be_run -check_if_python_security_scans_should_be_run -check_if_ui_tests_should_be_run -check_if_www_tests_should_be_run -check_if_tests_are_needed_at_all -get_count_all_files -get_count_api_files -get_count_cli_files -get_count_providers_files -get_count_www_files -get_count_ui_files -get_count_kubernetes_files -calculate_test_types_to_run - -set_image_build "${image_build_needed}" -if [[ ${image_build_needed} == "true" ]]; then - set_basic_checks_only "false" -else - set_basic_checks_only "true" -fi -set_docs_build "${docs_build_needed}" -run_tests "${tests_needed}" -run_kubernetes_tests "${kubernetes_tests_needed}" -set_upgrade_to_newer_dependencies "${upgrade_to_newer_dependencies}" diff --git a/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh b/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh index d120b50ec75c4..bc5bc041ed17d 100755 --- a/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh +++ b/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh @@ -89,7 +89,7 @@ function run_airflow_testing_in_docker() { echo docker-compose -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ "${INTEGRATIONS[@]}" \ - --project-name "airflow-${TEST_TYPE}-${BACKEND}" \ + --project-name "airflow-${TEST_TYPE,,}-${BACKEND}" \ down --remove-orphans \ --volumes --timeout 10 docker-compose --log-level INFO \ @@ -97,7 +97,7 @@ function run_airflow_testing_in_docker() { "${BACKEND_DOCKER_COMPOSE[@]}" \ "${INTEGRATIONS[@]}" \ "${DOCKER_COMPOSE_LOCAL[@]}" \ - --project-name "airflow-${TEST_TYPE}-${BACKEND}" \ + --project-name "airflow-${TEST_TYPE,,}-${BACKEND}" \ run airflow "${@}" exit_code=$? 
docker ps @@ -112,7 +112,7 @@ function run_airflow_testing_in_docker() { docker-compose --log-level INFO -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ "${INTEGRATIONS[@]}" \ - --project-name "airflow-${TEST_TYPE}-${BACKEND}" \ + --project-name "airflow-${TEST_TYPE,,}-${BACKEND}" \ down --remove-orphans \ --volumes --timeout 10 set -u From 4fbea89f0da02fabccc9eb7b6d74bc50f0be7929 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sat, 25 Jun 2022 11:20:48 +0200 Subject: [PATCH 029/118] Switch to new selective-checks in label-when-reviewed workflow (#24651) When #24610 was implemented I missed the label-when-reviewed workflow (cherry picked from commit 2703874eb27c85c781f8c20c8a62b3a4e8d8583b) --- .../label_when_reviewed_workflow_run.yml | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/label_when_reviewed_workflow_run.yml b/.github/workflows/label_when_reviewed_workflow_run.yml index 9b11d71ad2498..e818e8411a311 100644 --- a/.github/workflows/label_when_reviewed_workflow_run.yml +++ b/.github/workflows/label_when_reviewed_workflow_run.yml @@ -83,20 +83,20 @@ jobs: with: persist-credentials: false submodules: recursive + - name: "Setup python" + uses: actions/setup-python@v2 + with: + # We do not have output from selective checks yet, so we need to hardcode python + python-version: 3.7 + cache: 'pip' + cache-dependency-path: ./dev/breeze/setup* + - run: ./scripts/ci/install_breeze.sh - name: Selective checks id: selective-checks env: - EVENT_NAME: ${{ steps.source-run-info.outputs.sourceEvent }} - TARGET_COMMIT_SHA: ${{ steps.source-run-info.outputs.targetCommitSha }} - PR_LABELS: ${{ steps.source-run-info.outputs.pullRequestLabels }} - run: | - if [[ ${EVENT_NAME} == "pull_request_review" ]]; then - # Run selective checks - ./scripts/ci/selective_ci_checks.sh "${TARGET_COMMIT_SHA}" - else - # Run all checks - ./scripts/ci/selective_ci_checks.sh - fi + PR_LABELS: "${{ steps.source-run-info.outputs.pullRequestLabels }}" + COMMIT_REF: "${{ steps.source-run-info.outputs.targetCommitSha }}" + run: breeze selective-check --github-event-name "${{ steps.source-run-info.outputs.targetCommitSha }}" - name: "Label when approved by committers for PRs that require full tests" uses: ./.github/actions/label-when-approved-action id: label-full-test-prs-when-approved-by-commiters From 44e181796c2890ffc60064c10dc20ccacf5c372a Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sat, 25 Jun 2022 11:26:52 +0200 Subject: [PATCH 030/118] Cleanup references to selective checks (#24649) Selective checks docs have been moved to breeze as part of #24610 but some of the references were still left. This PR cleans it up. (cherry picked from commit aa8cd30c46dde496423c72aa4a2c72b44d554745) --- .gitattributes | 1 - BREEZE.rst | 2 +- PULL_REQUEST_WORKFLOW.rst | 13 +++---------- dev/airflow-github | 1 - 4 files changed, 4 insertions(+), 13 deletions(-) diff --git a/.gitattributes b/.gitattributes index 497db03fbcfc5..083e1747cba4a 100644 --- a/.gitattributes +++ b/.gitattributes @@ -17,7 +17,6 @@ Dockerfile.ci export-ignore ISSUE_TRIAGE_PROCESS.rst export-ignore PULL_REQUEST_WORKFLOW.rst export-ignore -SELECTIVE_CHECKS.md export-ignore STATIC_CODE_CHECKS.rst export-ignore TESTING.rst export-ignore LOCAL_VIRTUALENV.rst export-ignore diff --git a/BREEZE.rst b/BREEZE.rst index 78dfd74621458..12bdeda0e9ffe 100644 --- a/BREEZE.rst +++ b/BREEZE.rst @@ -1327,7 +1327,7 @@ needed to run the CI Builds. 
You can also use the tool to test what tests will b a specific commit that Breeze should run the tests on. More details about the algorithm used to pick the right tests can be -found in `Selective Checks `_. +found in `Selective Checks `_. Those are all available flags of ``selective-check`` command: diff --git a/PULL_REQUEST_WORKFLOW.rst b/PULL_REQUEST_WORKFLOW.rst index d7ca2f9b93eaa..7d7a7860fdcd4 100644 --- a/PULL_REQUEST_WORKFLOW.rst +++ b/PULL_REQUEST_WORKFLOW.rst @@ -43,16 +43,9 @@ We approached the problem by: 2) Heavily decreasing strain on the GitHub Actions jobs by introducing selective checks - mechanism to control which parts of the tests are run during the tests. This is implemented by the - ``scripts/ci/selective_ci_checks.sh`` script in our repository. This script analyses which part of the - code has changed and based on that it sets the right outputs that control which tests are executed in - the ``Tests`` workflow, and whether we need to build CI images necessary to run those steps. This allowed to - heavily decrease the strain especially for the Pull Requests that were not touching code (in which case - the builds can complete in < 2 minutes) but also by limiting the number of tests executed in PRs that do - not touch the "core" of Airflow, or only touching some - standalone - parts of Airflow such as - "Providers", "WWW" or "CLI". This solution is not yet perfect as there are likely some edge cases but - it is easy to maintain and we have an escape-hatch - all the tests are always executed in main pushes, - so contributors can easily spot if there is a "missed" case and fix it - both by fixing the problem and - adding those exceptions to the code. More about it can be found in `Selective checks `_ + ``breeze selective-check`` command. It selectively chooses which tests should be run in the PR based on + type of the PR and its content. More about it can be found in + `Selective checks `_ 3) Even more optimisation came from limiting the scope of tests to only "default" matrix parameters. So far in Airflow we always run all tests for all matrix combinations. The primary matrix components are: diff --git a/dev/airflow-github b/dev/airflow-github index aa202db085b9d..5dc5f5bd9d4d9 100755 --- a/dev/airflow-github +++ b/dev/airflow-github @@ -133,7 +133,6 @@ def is_core_commit(files: List[str]) -> bool: "BREEZE.rst", "CI.rst", "CI_DIAGRAMS.md", - "SELECTIVE_CHECKS.md", "STATIC_CODE_CHECKS.rst", "images/", "TESTING.rst", From fbca47eba4c0a37a7bb669fc609124eabb3252cf Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sat, 25 Jun 2022 11:27:21 +0200 Subject: [PATCH 031/118] Remove misleading message from CI (#24650) The CI jobs do not usually have mypy_cache volume created and almost all jobs print misleading "mypy_cache_volume" error at the beginning. This is a noise - we are not interested in this stderr printed message - we are only interested in returncode from checking if it exists. 
(cherry picked from commit ac104025f45b54efff26407e460a5e4279c6495b) --- dev/breeze/src/airflow_breeze/utils/path_utils.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py index c7ee6d980236f..413401c0e8332 100644 --- a/dev/breeze/src/airflow_breeze/utils/path_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py @@ -254,10 +254,18 @@ def create_volume_if_missing(volume_name: str): from airflow_breeze.utils.run_utils import run_command res_inspect = run_command( - cmd=["docker", "volume", "inspect", volume_name], stdout=subprocess.DEVNULL, check=False + cmd=["docker", "volume", "inspect", volume_name], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=False, ) if res_inspect.returncode != 0: - run_command(cmd=["docker", "volume", "create", volume_name], check=True) + run_command( + cmd=["docker", "volume", "create", volume_name], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=True, + ) def create_static_check_volumes(): From 1bea5b0d1473c57cd2f969795919a5fc8470b0b2 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sat, 25 Jun 2022 17:45:08 +0200 Subject: [PATCH 032/118] Remove selective checks from the "release workflow" (#24655) Missed that one too :( (cherry picked from commit 47f54b609983c6cb08553bb85245f2288deaf2dc) --- .github/workflows/release_dockerhub_image.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release_dockerhub_image.yml b/.github/workflows/release_dockerhub_image.yml index cd5fe6e9ce08d..3259759cac7d4 100644 --- a/.github/workflows/release_dockerhub_image.yml +++ b/.github/workflows/release_dockerhub_image.yml @@ -51,9 +51,17 @@ jobs: with: persist-credentials: false submodules: recursive + - name: "Setup python" + uses: actions/setup-python@v2 + with: + # We do not have output from selective checks yet, so we need to hardcode python + python-version: 3.7 + cache: 'pip' + cache-dependency-path: ./dev/breeze/setup* + - run: ./scripts/ci/install_breeze.sh - name: Selective checks id: selective-checks - run: ./scripts/ci/selective_ci_checks.sh + run: breeze selective-check release-images: timeout-minutes: 120 name: "Release images: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }}" From 0c55c721f2299d27f8aa8359750a0b666e1aafe0 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sat, 25 Jun 2022 23:34:11 +0200 Subject: [PATCH 033/118] Use target commit SHA for build image workflow (#24659) The build-image workflow should use TARGET_COMMIT_SHA as the selective check COMMIT_REF otherwise it might not build image when needed (cherry picked from commit 2ffaebee255c73e7a137b55ac25b95c16e159d75) --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index e28a9d7619c5a..5c6b085b97c7c 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -121,7 +121,7 @@ jobs: id: selective-checks env: PR_LABELS: "$${{ steps.get-latest-pr-labels.outputs.pullRequestLabels }}" - COMMIT_REF: "${{ github.sha }}" + COMMIT_REF: "${{ env.TARGET_COMMIT_SHA }}" run: breeze selective-check - name: Compute dynamic outputs id: dynamic-outputs From 4baf1d569a62d43386a260e47ae222543b6c9553 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sun, 26 Jun 2022 11:07:49 +0200 Subject: [PATCH 034/118] Handle 
"workflow_run" event properly in selective-check (#24656) Unfortunately testing workflow_run is a bit difficult because the changes are only effective after merging them. Fixing (hopefully) yet another mistake in the workflow run where commit hash was passed as event name (?) We are going to handle "workflow_run" as valid event type so this should now work without passing any event. (cherry picked from commit e83e7c85c1bc3e634ad8a97224090028c0fd81b9) --- .../label_when_reviewed_workflow_run.yml | 2 +- .../src/airflow_breeze/global_constants.py | 1 + images/breeze/output-commands-hash.txt | 2 +- images/breeze/output-selective-check.svg | 96 +++++++++---------- 4 files changed, 51 insertions(+), 50 deletions(-) diff --git a/.github/workflows/label_when_reviewed_workflow_run.yml b/.github/workflows/label_when_reviewed_workflow_run.yml index e818e8411a311..b84ab34a79ea5 100644 --- a/.github/workflows/label_when_reviewed_workflow_run.yml +++ b/.github/workflows/label_when_reviewed_workflow_run.yml @@ -96,7 +96,7 @@ jobs: env: PR_LABELS: "${{ steps.source-run-info.outputs.pullRequestLabels }}" COMMIT_REF: "${{ steps.source-run-info.outputs.targetCommitSha }}" - run: breeze selective-check --github-event-name "${{ steps.source-run-info.outputs.targetCommitSha }}" + run: breeze selective-check - name: "Label when approved by committers for PRs that require full tests" uses: ./.github/actions/label-when-approved-action id: label-full-test-prs-when-approved-by-commiters diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index 41acd12e458de..8dedc191f7db6 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -298,6 +298,7 @@ class GithubEvents(Enum): PULL_REQUEST_TARGET = "pull_request_target" PUSH = "push" SCHEDULE = "schedule" + WORKFLOW_RUN = "workflow_run" @lru_cache(maxsize=None) diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index 194d9690d835a..e7e333b83e18d 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -2,4 +2,4 @@ # This file is automatically generated by pre-commit. If you have a conflict with this file # Please do not solve it but run `breeze regenerate-command-images`. # This command should fix the conflict and regenerate help images that you have conflict with. 
-b5c4ae62f5ac472a83af3a8eca75ed9b +60ea24df0a84c6739fc5f06197cdd201 diff --git a/images/breeze/output-selective-check.svg b/images/breeze/output-selective-check.svg index 3ea08a5bf7a29..67bc6c52533d9 100644 --- a/images/breeze/output-selective-check.svg +++ b/images/breeze/output-selective-check.svg @@ -19,113 +19,113 @@ font-weight: 700; } - .terminal-3258521135-matrix { + .terminal-417995322-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3258521135-title { + .terminal-417995322-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3258521135-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-3258521135-r2 { fill: #c5c8c6 } -.terminal-3258521135-r3 { fill: #d0b344;font-weight: bold } -.terminal-3258521135-r4 { fill: #868887 } -.terminal-3258521135-r5 { fill: #68a0b3;font-weight: bold } -.terminal-3258521135-r6 { fill: #8d7b39 } -.terminal-3258521135-r7 { fill: #98a84b;font-weight: bold } + .terminal-417995322-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-417995322-r2 { fill: #c5c8c6 } +.terminal-417995322-r3 { fill: #d0b344;font-weight: bold } +.terminal-417995322-r4 { fill: #868887 } +.terminal-417995322-r5 { fill: #68a0b3;font-weight: bold } +.terminal-417995322-r6 { fill: #8d7b39 } +.terminal-417995322-r7 { fill: #98a84b;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: selective-check + Command: selective-check - + - - -Usage: breeze selective-check [OPTIONS] - -Checks what kind of tests should be run for an incoming commit. - -╭─ Selective check flags ──────────────────────────────────────────────────────────────────────────────────────────────╮ ---commit-refCommit-ish reference to the commit that should be checked(TEXT) ---pr-labelsSpace-separate list of labels which are valid for the PR(TEXT) ---default-branchBranch against which the PR should be run(TEXT)[default: main] ---github-event-nameName of the GitHub event that triggered the check                            -(pull_request | pull_request_review | pull_request_target | push | schedule) -[default: pull_request]                                                      -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze selective-check [OPTIONS] + +Checks what kind of tests should be run for an incoming commit. 
+ +╭─ Selective check flags ──────────────────────────────────────────────────────────────────────────────────────────────╮ +--commit-refCommit-ish reference to the commit that should be checked(TEXT) +--pr-labelsSpace-separate list of labels which are valid for the PR(TEXT) +--default-branchBranch against which the PR should be run(TEXT)[default: main] +--github-event-nameName of the GitHub event that triggered the check                                           +(pull_request | pull_request_review | pull_request_target | push | schedule | workflow_run) +[default: pull_request]                                                                     +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ From 174ee470d50f005b4057c215a031576651e826b0 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Mon, 27 Jun 2022 03:00:10 +0200 Subject: [PATCH 035/118] Fix behaviour of build/pull after recent Breeze changes (#24657) The behaviour of Breeze after some recent changes related to pulling and building images in parallel have been slightly broken. Nothing serious but slightly annoying behaviour: * when starting breeze shell, the image was attempted to be build even if it was not needed (but cache efficiency made it fast enough to not be too annoying (unless we updated to newer python base image * breeze pull command for "latest" branch makes no sense any more - we stopped pushing "latest" image to ghcr.io, we only push cache and "tagged" images. We are now turning --image-tag as required in "pull_image" and when someone specifies latest, error and helpful message is printed * --force-build flag in "shell-related-commands" was not properly propagated to build-image so it did not actually force image building. All those problems are fixed now. 
(cherry picked from commit b678dc25f6604ddf603e42500e3e5583b6663df6) --- .../commands/ci_image_commands.py | 51 ++- .../commands/developer_commands.py | 12 +- .../commands/production_image_commands.py | 21 +- .../commands/release_management_commands.py | 6 +- .../commands/testing_commands.py | 8 +- .../airflow_breeze/utils/common_options.py | 24 +- images/breeze/output-build-image.svg | 288 +++++++------- images/breeze/output-build-prod-image.svg | 356 +++++++++--------- images/breeze/output-commands-hash.txt | 2 +- images/breeze/output-docker-compose-tests.svg | 104 +++-- images/breeze/output-generate-constraints.svg | 152 ++++---- images/breeze/output-pull-image.svg | 150 ++++---- images/breeze/output-pull-prod-image.svg | 150 ++++---- images/breeze/output-shell.svg | 224 ++++++----- images/breeze/output-start-airflow.svg | 232 ++++++------ images/breeze/output-tests.svg | 164 ++++---- images/breeze/output-verify-image.svg | 108 +++--- images/breeze/output-verify-prod-image.svg | 112 +++--- 18 files changed, 1062 insertions(+), 1102 deletions(-) diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py index b9cfa2895767d..298d264b0b975 100644 --- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py @@ -37,6 +37,7 @@ option_airflow_constraints_mode_ci, option_airflow_constraints_reference_build, option_answer, + option_builder, option_debian_version, option_dev_apt_command, option_dev_apt_deps, @@ -48,10 +49,12 @@ option_github_token, option_github_username, option_image_name, - option_image_tag, + option_image_tag_for_building, + option_image_tag_for_pulling, + option_image_tag_for_verifying, option_install_providers_from_sources, option_parallelism, - option_platform, + option_platform_multiple, option_prepare_buildx_cache, option_pull_image, option_push_image, @@ -191,7 +194,7 @@ } -def start_building(ci_image_params: BuildCiParams, dry_run: bool, verbose: bool) -> bool: +def check_if_image_building_is_needed(ci_image_params: BuildCiParams, dry_run: bool, verbose: bool) -> bool: """Starts building attempt. 
Returns false if we should not continue""" if not ci_image_params.force_build and not ci_image_params.upgrade_to_newer_dependencies: if not should_we_run_the_build(build_ci_params=ci_image_params): @@ -233,12 +236,12 @@ def run_build_in_parallel( @option_parallelism @option_python_versions @option_upgrade_to_newer_dependencies -@option_platform +@option_platform_multiple @option_debian_version @option_github_token @option_github_username @option_docker_cache -@option_image_tag +@option_image_tag_for_building @option_prepare_buildx_cache @option_push_image @option_empty_image @@ -252,13 +255,13 @@ def run_build_in_parallel( @option_additional_dev_apt_env @option_additional_runtime_apt_env @option_additional_runtime_apt_command +@option_builder @option_dev_apt_command @option_dev_apt_deps @option_force_build @option_python_image @option_runtime_apt_command @option_runtime_apt_deps -@option_force_build @option_airflow_constraints_mode_ci @option_airflow_constraints_reference_build @option_tag_as_latest @@ -293,7 +296,7 @@ def run_build(ci_image_params: BuildCiParams) -> None: params.python = python params.answer = answer params_list.append(params) - start_building(params_list[0], dry_run=dry_run, verbose=verbose) + check_if_image_building_is_needed(params_list[0], dry_run=dry_run, verbose=verbose) run_build_in_parallel( image_params_list=params_list, python_version_list=python_version_list, @@ -303,7 +306,7 @@ def run_build(ci_image_params: BuildCiParams) -> None: ) else: params = BuildCiParams(**parameters_passed) - start_building(params, dry_run=dry_run, verbose=verbose) + check_if_image_building_is_needed(params, dry_run=dry_run, verbose=verbose) run_build(ci_image_params=params) @@ -318,7 +321,7 @@ def run_build(ci_image_params: BuildCiParams) -> None: @option_github_token @option_verify_image @option_wait_for_image -@option_image_tag +@option_image_tag_for_pulling @option_tag_as_latest @click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED) def pull_ci_image( @@ -330,13 +333,20 @@ def pull_ci_image( python_versions: str, github_token: str, parallelism: int, - image_tag: Optional[str], + image_tag: str, wait_for_image: bool, tag_as_latest: bool, verify_image: bool, extra_pytest_args: Tuple, ): """Pull and optionally verify CI images - possibly in parallel for all Python versions.""" + if image_tag == "latest": + get_console().print("[red]You cannot pull latest images because they are not published any more!\n") + get_console().print( + "[yellow]You need to specify commit tag to pull and image. 
If you wish to get" + " the latest image, you need to run `breeze build-image` command\n" + ) + sys.exit(1) perform_environment_checks(verbose=verbose) if run_in_parallel: python_version_list = get_python_version_list(python_versions) @@ -387,7 +397,7 @@ def pull_ci_image( @option_dry_run @option_python @option_github_repository -@option_image_tag +@option_image_tag_for_verifying @option_image_name @option_pull_image @click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED) @@ -397,7 +407,7 @@ def verify_ci_image( python: str, github_repository: str, image_name: str, - image_tag: str, + image_tag: Optional[str], pull_image: bool, extra_pytest_args: Tuple, ): @@ -421,7 +431,7 @@ def verify_ci_image( sys.exit(return_code) -def should_we_run_the_build(build_ci_params: BuildCiParams, verbose: bool) -> bool: +def should_we_run_the_build(build_ci_params: BuildCiParams) -> bool: """ Check if we should run the build based on what files have been modified since last build and answer from the user. @@ -436,9 +446,7 @@ def should_we_run_the_build(build_ci_params: BuildCiParams, verbose: bool) -> bo # We import those locally so that click autocomplete works from inputimeout import TimeoutOccurred - if not md5sum_check_if_build_is_needed( - md5sum_cache_dir=build_ci_params.md5sum_cache_dir, verbose=verbose - ): + if not md5sum_check_if_build_is_needed(md5sum_cache_dir=build_ci_params.md5sum_cache_dir): return False try: answer = user_confirm( @@ -581,9 +589,13 @@ def rebuild_or_pull_ci_image_if_needed( BUILD_CACHE_DIR, command_params.airflow_branch, f".built_{command_params.python}" ) ci_image_params = BuildCiParams( - python=command_params.python, upgrade_to_newer_dependencies=False, image_tag=command_params.image_tag + python=command_params.python, + upgrade_to_newer_dependencies=False, + image_tag=command_params.image_tag, + platform=command_params.platform, + force_build=command_params.force_build, ) - if command_params.image_tag is not None: + if command_params.image_tag is not None and command_params.image_tag != "latest": return_code, message = run_pull_image( image_params=ci_image_params, dry_run=dry_run, @@ -605,4 +617,5 @@ def rebuild_or_pull_ci_image_if_needed( 'Forcing build.[/]' ) ci_image_params.force_build = True - run_build_ci_image(verbose, dry_run=dry_run, ci_image_params=ci_image_params, parallel=False) + if check_if_image_building_is_needed(ci_image_params=ci_image_params, dry_run=dry_run, verbose=verbose): + run_build_ci_image(verbose, dry_run=dry_run, ci_image_params=ci_image_params, parallel=False) diff --git a/dev/breeze/src/airflow_breeze/commands/developer_commands.py b/dev/breeze/src/airflow_breeze/commands/developer_commands.py index bc324bf7e3798..a81ae92745684 100644 --- a/dev/breeze/src/airflow_breeze/commands/developer_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/developer_commands.py @@ -45,7 +45,7 @@ option_force_build, option_forward_credentials, option_github_repository, - option_image_tag, + option_image_tag_for_running, option_installation_package_format, option_integration, option_load_default_connection, @@ -112,9 +112,9 @@ "--use-packages-from-dist", "--package-format", "--force-build", + "--image-tag", "--mount-sources", "--debian-version", - "--image-tag", ], }, ], @@ -141,9 +141,9 @@ "--use-packages-from-dist", "--package-format", "--force-build", + "--image-tag", "--mount-sources", "--debian-version", - "--image-tag", ], }, ], @@ -172,8 +172,8 @@ "--use-packages-from-dist", "--package-format", "--force-build", - "--mount-sources", 
"--image-tag", + "--mount-sources", ], }, ], @@ -242,7 +242,7 @@ @option_mount_sources @option_integration @option_db_reset -@option_image_tag +@option_image_tag_for_running @option_answer @click.argument('extra-args', nargs=-1, type=click.UNPROCESSED) def shell( @@ -319,7 +319,7 @@ def shell( @option_installation_package_format @option_mount_sources @option_integration -@option_image_tag +@option_image_tag_for_running @option_db_reset @option_answer @click.argument('extra-args', nargs=-1, type=click.UNPROCESSED) diff --git a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py index c8dca4cae1cd2..07bd42cab90c9 100644 --- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py @@ -47,7 +47,9 @@ option_github_token, option_github_username, option_image_name, - option_image_tag, + option_image_tag_for_building, + option_image_tag_for_pulling, + option_image_tag_for_verifying, option_install_providers_from_sources, option_parallelism, option_platform, @@ -246,7 +248,7 @@ def run_build_in_parallel( @option_github_token @option_github_username @option_docker_cache -@option_image_tag +@option_image_tag_for_building @option_prepare_buildx_cache @option_push_image @option_empty_image @@ -363,7 +365,7 @@ def run_build(prod_image_params: BuildProdParams) -> None: @option_parallelism @option_python_versions @option_github_token -@option_image_tag +@option_image_tag_for_pulling @option_wait_for_image @option_tag_as_latest @option_verify_image @@ -377,13 +379,20 @@ def pull_prod_image( parallelism: int, python_versions: str, github_token: str, - image_tag: Optional[str], + image_tag: str, wait_for_image: bool, tag_as_latest: bool, verify_image: bool, extra_pytest_args: Tuple, ): """Pull and optionally verify Production images - possibly in parallel for all Python versions.""" + if image_tag == "latest": + get_console().print("[red]You cannot pull latest images because they are not published any more!\n") + get_console().print( + "[yellow]You need to specify commit tag to pull and image. 
If you wish to get" + " the latest image, you need to run `breeze build-image` command\n" + ) + sys.exit(1) perform_environment_checks(verbose=verbose) if run_in_parallel: python_version_list = get_python_version_list(python_versions) @@ -436,7 +445,7 @@ def pull_prod_image( @option_dry_run @option_python @option_github_repository -@option_image_tag +@option_image_tag_for_verifying @option_image_name @option_pull_image @click.option( @@ -451,7 +460,7 @@ def verify_prod_image( python: str, github_repository: str, image_name: str, - image_tag: str, + image_tag: Optional[str], pull_image: bool, slim_image: bool, extra_pytest_args: Tuple, diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index 17bf70b3af9d1..6a2c331e6d706 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -43,7 +43,7 @@ option_answer, option_dry_run, option_github_repository, - option_image_tag, + option_image_tag_for_running, option_installation_package_format, option_package_format, option_parallelism, @@ -402,7 +402,7 @@ def run_generate_constraints_in_parallel( @option_run_in_parallel @option_parallelism @option_python_versions -@option_image_tag +@option_image_tag_for_running @option_answer @option_debug_release_management @option_airflow_constraints_mode_ci @@ -414,7 +414,7 @@ def generate_constraints( run_in_parallel: bool, parallelism: int, python_versions: str, - image_tag: str, + image_tag: Optional[str], answer: Optional[str], debug: bool, airflow_constraints_mode: str, diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py index 45df368afa89c..8c9200a840c6e 100644 --- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py @@ -38,7 +38,7 @@ option_dry_run, option_github_repository, option_image_name, - option_image_tag, + option_image_tag_for_running, option_integration, option_mount_sources, option_mssql_version, @@ -109,7 +109,7 @@ @option_dry_run @option_python @option_github_repository -@option_image_tag +@option_image_tag_for_running @option_image_name @click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED) def docker_compose_tests( @@ -118,7 +118,7 @@ def docker_compose_tests( python: str, github_repository: str, image_name: str, - image_tag: str, + image_tag: Optional[str], extra_pytest_args: Tuple, ): """Run docker-compose tests.""" @@ -243,7 +243,7 @@ def run_with_progress( help="Limit progress to percentage only and just show the summary when tests complete.", is_flag=True, ) -@option_image_tag +@option_image_tag_for_running @option_mount_sources @click.option( "--test-type", diff --git a/dev/breeze/src/airflow_breeze/utils/common_options.py b/dev/breeze/src/airflow_breeze/utils/common_options.py index 65f0040f89bfe..af2b6c75e023d 100644 --- a/dev/breeze/src/airflow_breeze/utils/common_options.py +++ b/dev/breeze/src/airflow_breeze/utils/common_options.py @@ -186,11 +186,29 @@ help='The user name used to authenticate to GitHub.', envvar='GITHUB_USERNAME', ) -option_image_tag = click.option( +option_image_tag_for_pulling = click.option( '-t', '--image-tag', - help='Tag of the image which is used to pull or run the image (implies --mount-sources=skip' - ' when using to run shell or tests) ', + help='Tag of the image which is used to 
pull the image', + envvar='IMAGE_TAG', + required=True, +) +option_image_tag_for_building = click.option( + '-t', + '--image-tag', + help='Tag the image after building it', + envvar='IMAGE_TAG', +) +option_image_tag_for_running = click.option( + '-t', + '--image-tag', + help='Tag of the image which is used to run the image (implies --mount-sources=skip)', + envvar='IMAGE_TAG', +) +option_image_tag_for_verifying = click.option( + '-t', + '--image-tag', + help='Tag of the image when verifying it', envvar='IMAGE_TAG', ) option_image_name = click.option( diff --git a/images/breeze/output-build-image.svg b/images/breeze/output-build-image.svg index 082e97adefffa..e84a5e189afc6 100644 --- a/images/breeze/output-build-image.svg +++ b/images/breeze/output-build-image.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - Command: build-image + Command: build-image - + - - -Usage: breeze build-image [OPTIONS] - -Build CI image. Include building multiple images for all python versions (sequentially). - -╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.7< | 3.8 | 3.9 | 3.10)                                   -[default: 3.7]                                               ---upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. ---debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] ---image-tag-tTag of the image which is used to pull or run the image (implies                ---mount-sources=skip when using to run shell or tests)                          -(TEXT)                                                                          ---tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful -when you build or pull image with --image-tag.                                  ---docker-cache-cCache option for image used during the build.(registry | local | disabled) -[default: registry]                           ---force-buildForce image build no matter if it is determined as needed. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of Python versions. ---parallelismMaximum number of processes to use while running the operation in parallel.(INTEGER RANGE) -[default: 4; 1<=x<=8]                                                       ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced options (for power users) ─────────────────────────────────────────────────────────────────────────────────╮ ---install-providers-from-sourcesInstall providers from sources when installing. 
---airflow-constraints-modeMode of constraints for CI image building                               -(constraints-source-providers | constraints | constraints-no-providers) -[default: constraints-source-providers]                                 ---airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) ---python-imageIf specified this is the base python image used to build the image. Should be    -something like: python:VERSION-slim-bullseye                                     -(TEXT)                                                                           ---additional-python-depsAdditional python dependencies to use when building the images.(TEXT) ---runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) ---runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) ---additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) ---additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) ---additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) ---additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) ---additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) ---additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) ---additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) ---dev-apt-depsApt dev dependencies to use when building the images.(TEXT) ---dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ ---github-tokenThe token used to authenticate to GitHub.(TEXT) ---github-usernameThe user name used to authenticate to GitHub.(TEXT) ---platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) ---push-imagePush image after building it. ---empty-imagePrepare empty image tagged with the same name as the Airflow image. ---prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  -image).                                                                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze build-image [OPTIONS] + +Build CI image. Include building multiple images for all python versions (sequentially). + +╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. 
+(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. +--debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] +--image-tag-tTag the image after building it(TEXT) +--tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful +when you build or pull image with --image-tag.                                  +--docker-cache-cCache option for image used during the build.(registry | local | disabled) +[default: registry]                           +--force-buildForce image build no matter if it is determined as needed. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of Python versions. +--parallelismMaximum number of processes to use while running the operation in parallel.(INTEGER RANGE) +[default: 4; 1<=x<=8]                                                       +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.7 3.8 3.9 3.10]                                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced options (for power users) ─────────────────────────────────────────────────────────────────────────────────╮ +--install-providers-from-sourcesInstall providers from sources when installing. +--airflow-constraints-modeMode of constraints for CI image building                               +(constraints-source-providers | constraints | constraints-no-providers) +[default: constraints-source-providers]                                 +--airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) +--python-imageIf specified this is the base python image used to build the image. 
Should be    +something like: python:VERSION-slim-bullseye                                     +(TEXT)                                                                           +--additional-python-depsAdditional python dependencies to use when building the images.(TEXT) +--runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) +--runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) +--additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) +--additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) +--additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) +--additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) +--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) +--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) +--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) +--dev-apt-depsApt dev dependencies to use when building the images.(TEXT) +--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ +--github-tokenThe token used to authenticate to GitHub.(TEXT) +--github-usernameThe user name used to authenticate to GitHub.(TEXT) +--platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) +--push-imagePush image after building it. +--empty-imagePrepare empty image tagged with the same name as the Airflow image. +--prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  +image).                                                                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-build-prod-image.svg b/images/breeze/output-build-prod-image.svg index 0a18dfecae9a4..9efd67fc9ce0c 100644 --- a/images/breeze/output-build-prod-image.svg +++ b/images/breeze/output-build-prod-image.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - Command: build-prod-image + Command: build-prod-image - + - - -Usage: breeze build-prod-image [OPTIONS] - -Build Production image. 
Include building multiple images for all or selected Python versions sequentially. - -╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.7< | 3.8 | 3.9 | 3.10)                                   -[default: 3.7]                                               ---install-airflow-version-VInstall version of Airflow from PyPI.(TEXT) ---upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. ---debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] ---image-tag-tTag of the image which is used to pull or run the image (implies                ---mount-sources=skip when using to run shell or tests)                          -(TEXT)                                                                          ---tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful -when you build or pull image with --image-tag.                                  ---docker-cache-cCache option for image used during the build.(registry | local | disabled) -[default: registry]                           -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of Python versions. ---parallelismMaximum number of processes to use while running the operation in parallel.(INTEGER RANGE) -[default: 4; 1<=x<=8]                                                       ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options for customizing images ─────────────────────────────────────────────────────────────────────────────────────╮ ---install-providers-from-sourcesInstall providers from sources when installing. ---airflow-extrasExtras to install by default.                                                    -(TEXT)                                                                           -[default:                                                                        -amazon,async,celery,cncf.kubernetes,dask,docker,elasticsearch,ftp,google,google… ---airflow-constraints-modeMode of constraints for PROD image building                             -(constraints | constraints-no-providers | constraints-source-providers) -[default: constraints]                                                  ---airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) ---python-imageIf specified this is the base python image used to build the image. 
Should be    -something like: python:VERSION-slim-bullseye                                     -(TEXT)                                                                           ---additional-python-depsAdditional python dependencies to use when building the images.(TEXT) ---additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) ---additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) ---additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) ---additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) ---additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) ---additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) ---additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) ---runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) ---runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) ---dev-apt-depsApt dev dependencies to use when building the images.(TEXT) ---dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Customization options (for specific customization needs) ───────────────────────────────────────────────────────────╮ ---install-packages-from-contextInstall wheels from local docker-context-files when building image. ---airflow-is-in-contextIf set Airflow is installed from docker-context-files only rather than     -from PyPI or sources.                                                      ---cleanup-contextClean up docker context files before running build (cannot be used         -together with --install-packages-from-context).                            ---disable-mysql-client-installationDo not install MySQL client. ---disable-mssql-client-installationDo not install MsSQl client. ---disable-postgres-client-installationDo not install Postgres client. ---disable-airflow-repo-cacheDisable cache from Airflow repository during building. ---install-airflow-referenceInstall Airflow using GitHub tag or branch.(TEXT) ---installation-methodInstall Airflow from: sources or PyPI.(. | apache-airflow) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮ ---github-tokenThe token used to authenticate to GitHub.(TEXT) ---github-usernameThe user name used to authenticate to GitHub.(TEXT) ---platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64) ---push-imagePush image after building it. ---empty-imagePrepare empty image tagged with the same name as the Airflow image. ---prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the  -image).                                                                                    
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze build-prod-image [OPTIONS] + +Build Production image. Include building multiple images for all or selected Python versions sequentially. + +╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. +(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--install-airflow-version-VInstall version of Airflow from PyPI.(TEXT) +--upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. +--debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] +--image-tag-tTag the image after building it(TEXT) +--tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful +when you build or pull image with --image-tag.                                  +--docker-cache-cCache option for image used during the build.(registry | local | disabled) +[default: registry]                           +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of Python versions. +--parallelismMaximum number of processes to use while running the operation in parallel.(INTEGER RANGE) +[default: 4; 1<=x<=8]                                                       +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.7 3.8 3.9 3.10]                                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options for customizing images ─────────────────────────────────────────────────────────────────────────────────────╮ +--install-providers-from-sourcesInstall providers from sources when installing. +--airflow-extrasExtras to install by default.                                                    
+(TEXT)                                                                           +[default:                                                                        +amazon,async,celery,cncf.kubernetes,dask,docker,elasticsearch,ftp,google,google… +--airflow-constraints-modeMode of constraints for PROD image building                             +(constraints | constraints-no-providers | constraints-source-providers) +[default: constraints]                                                  +--airflow-constraints-referenceConstraint reference to use when building the image.(TEXT) +--python-imageIf specified this is the base python image used to build the image. Should be    +something like: python:VERSION-slim-bullseye                                     +(TEXT)                                                                           +--additional-python-depsAdditional python dependencies to use when building the images.(TEXT) +--additional-extrasAdditional extra package while installing Airflow in the image.(TEXT) +--additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT) +--additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT) +--additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT) +--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT) +--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT) +--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT) +--runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT) +--runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT) +--dev-apt-depsApt dev dependencies to use when building the images.(TEXT) +--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Customization options (for specific customization needs) ───────────────────────────────────────────────────────────╮ +--install-packages-from-contextInstall wheels from local docker-context-files when building image. +--airflow-is-in-contextIf set Airflow is installed from docker-context-files only rather than     +from PyPI or sources.                                                      +--cleanup-contextClean up docker context files before running build (cannot be used         +together with --install-packages-from-context).                            +--disable-mysql-client-installationDo not install MySQL client. +--disable-mssql-client-installationDo not install MsSQl client. +--disable-postgres-client-installationDo not install Postgres client. +--disable-airflow-repo-cacheDisable cache from Airflow repository during building. +--install-airflow-referenceInstall Airflow using GitHub tag or branch.(TEXT) +--installation-methodInstall Airflow from: sources or PyPI.(. 
[tail of a regenerated help screenshot: the "Preparing cache and push (for maintainers and CI)" panel
(--github-token, --github-username, --platform, --push-image, --empty-image, --prepare-buildx-cache) and the common
"Options" panel (--github-repository, --answer, --dry-run, --verbose, --help); the surrounding SVG markup is omitted]
diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt
index e7e333b83e18d..044b799ce0385 100644
--- a/images/breeze/output-commands-hash.txt
+++ b/images/breeze/output-commands-hash.txt
@@ -2,4 +2,4 @@
 # This file is automatically generated by pre-commit. If you have a conflict with this file
 # Please do not solve it but run `breeze regenerate-command-images`.
 # This command should fix the conflict and regenerate help images that you have conflict with.
-60ea24df0a84c6739fc5f06197cdd201
+e1bc752aeb5e9c4095bb9d3cbb614252
diff --git a/images/breeze/output-docker-compose-tests.svg b/images/breeze/output-docker-compose-tests.svg
index c652741d47579..424d3cda29fc8 100644
[regenerated help screenshot for `breeze docker-compose-tests`: the --image-tag description now reads
"Tag of the image which is used to run the image (implies --mount-sources=skip)" instead of
"Tag of the image which is used to pull or run the image (implies --mount-sources=skip when using to run shell or tests)"]
diff --git a/images/breeze/output-generate-constraints.svg b/images/breeze/output-generate-constraints.svg
index e0b54f182ebba..24663d4bc7aea 100644
[regenerated help screenshot for `breeze generate-constraints`: same shortened --image-tag description]
diff --git a/images/breeze/output-pull-image.svg b/images/breeze/output-pull-image.svg
index edb77e062ed5a..7ebae02e45d41 100644
[regenerated help screenshot for `breeze pull-image`: --image-tag is now marked [required] and described as
"Tag of the image which is used to pull the image"]
diff --git a/images/breeze/output-pull-prod-image.svg b/images/breeze/output-pull-prod-image.svg
index e714d08acd989..6cb1051d80c49 100644
[regenerated help screenshot for `breeze pull-prod-image`: --image-tag is now marked [required] and described as
"Tag of the image which is used to pull the image"]
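[As the regenerated pull-image/pull-prod-image screenshots show, --image-tag is now a required option for the pull
commands. A minimal usage sketch - the "2.3.2" tag value is purely illustrative and not taken from this patch:
    breeze pull-image --image-tag 2.3.2 --verify-image
    breeze pull-prod-image --image-tag 2.3.2 --wait-for-image
]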
diff --git a/images/breeze/output-shell.svg b/images/breeze/output-shell.svg
index 2c29bda22ac20..415b1b09c2005 100644
[regenerated help screenshot for `breeze shell`: the --image-tag description now reads
"Tag of the image which is used to run the image (implies --mount-sources=skip)" and the flag is listed right after
--force-build in the "Advanced flag for running" panel]
diff --git a/images/breeze/output-start-airflow.svg b/images/breeze/output-start-airflow.svg
index 3e21e1f5e97fa..d7cf283cd2f48 100644
[regenerated help screenshot for `breeze start-airflow`: same shortened --image-tag description and the same
reordering of the flag within the "Advanced flag for running" panel]
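[For the run-type commands the shortened text keeps the documented behaviour: passing --image-tag implies
--mount-sources=skip. A hedged usage sketch, again with a purely illustrative tag value:
    breeze shell --image-tag 2.3.2
    breeze start-airflow --image-tag 2.3.2 --backend postgres
]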
diff --git a/images/breeze/output-tests.svg b/images/breeze/output-tests.svg
index 0bb6680bef9ae..8bdcd0c29be62 100644
[regenerated help screenshot for `breeze tests`: the --image-tag description now reads
"Tag of the image which is used to run the image (implies --mount-sources=skip)"]
diff --git a/images/breeze/output-verify-image.svg b/images/breeze/output-verify-image.svg
index e2dfddcbb07c9..44dc59c17b0fb 100644
[regenerated help screenshot for `breeze verify-image`: the --image-tag description now reads
"Tag of the image when verifying it"]
diff --git a/images/breeze/output-verify-prod-image.svg b/images/breeze/output-verify-prod-image.svg
index 31c7b66b725dc..242ba205e54d7 100644
[regenerated help screenshot for `breeze verify-prod-image`: the --image-tag description now reads
"Tag of the image when verifying it"]
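[All of the output-*.svg files above are help screenshots kept in sync via pre-commit; per the header of
output-commands-hash.txt they can be refreshed after an option change by running:
    breeze regenerate-command-images
The hash recorded in output-commands-hash.txt appears to be how pre-commit detects that the screenshots are stale.]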
From 0d659c96a982a7b64cfb4296d5a3ab797a4e416f Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Tue, 28 Jun 2022 16:23:02 +0200
Subject: [PATCH 036/118] Remove "Label when approved" workflow (#24704)

The labelling workflow has proven to be far less useful than we thought,
and some of the recent changes in selective checks have made it largely
obsolete. The committers can still add the "full tests needed" label when
they think it is needed, so there is no need to label PRs automatically
for that (or any other reason). For quite a while this workflow has been
little more than noise.
(cherry picked from commit 359700a450b9bf7afde5a2a6442dadadba36cd90) --- .gitattributes | 1 - .github/actions/checks-action | 1 - .github/actions/label-when-approved-action | 1 - .github/workflows/label_when_reviewed.yml | 28 --- .../label_when_reviewed_workflow_run.yml | 177 ------------------ .gitmodules | 6 - CI.rst | 6 - CONTRIBUTING.rst | 27 --- PULL_REQUEST_WORKFLOW.rst | 151 --------------- TESTING.rst | 2 - dev/airflow-github | 1 - .../airflow_breeze/commands/ci_commands.py | 2 +- .../configuration_and_maintenance_commands.py | 6 +- .../commands/developer_commands.py | 7 + .../commands/production_image_commands.py | 6 +- .../src/airflow_breeze/global_constants.py | 15 +- .../params/common_build_params.py | 4 +- .../src/airflow_breeze/params/shell_params.py | 6 +- .../src/airflow_breeze/pre_commit_ids.py | 6 +- .../airflow_breeze/utils/common_options.py | 16 +- .../utils/docker_command_utils.py | 7 +- .../airflow_breeze/utils/md5_build_check.py | 2 +- .../src/airflow_breeze/utils/path_utils.py | 17 +- .../src/airflow_breeze/utils/run_utils.py | 1 + dev/breeze/tests/test_selective_checks.py | 90 ++++++++- images/pr/pr-full-tests-needed.png | Bin 88512 -> 0 bytes images/pr/pr-likely-ok-to-merge.png | Bin 98362 -> 0 bytes images/pr/pr-no-tests-needed-comment.png | Bin 80852 -> 0 bytes 28 files changed, 160 insertions(+), 426 deletions(-) delete mode 160000 .github/actions/checks-action delete mode 160000 .github/actions/label-when-approved-action delete mode 100644 .github/workflows/label_when_reviewed.yml delete mode 100644 .github/workflows/label_when_reviewed_workflow_run.yml delete mode 100644 PULL_REQUEST_WORKFLOW.rst delete mode 100644 images/pr/pr-full-tests-needed.png delete mode 100644 images/pr/pr-likely-ok-to-merge.png delete mode 100644 images/pr/pr-no-tests-needed-comment.png diff --git a/.gitattributes b/.gitattributes index 083e1747cba4a..5f8117153f511 100644 --- a/.gitattributes +++ b/.gitattributes @@ -16,7 +16,6 @@ tests export-ignore Dockerfile.ci export-ignore ISSUE_TRIAGE_PROCESS.rst export-ignore -PULL_REQUEST_WORKFLOW.rst export-ignore STATIC_CODE_CHECKS.rst export-ignore TESTING.rst export-ignore LOCAL_VIRTUALENV.rst export-ignore diff --git a/.github/actions/checks-action b/.github/actions/checks-action deleted file mode 160000 index 9f02872da71b6..0000000000000 --- a/.github/actions/checks-action +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 9f02872da71b6f558c6a6f190f925dde5e4d8798 diff --git a/.github/actions/label-when-approved-action b/.github/actions/label-when-approved-action deleted file mode 160000 index 0058d0094da27..0000000000000 --- a/.github/actions/label-when-approved-action +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0058d0094da27e116fad6e0da516ebe1107f26de diff --git a/.github/workflows/label_when_reviewed.yml b/.github/workflows/label_when_reviewed.yml deleted file mode 100644 index 189a2d7343b9b..0000000000000 --- a/.github/workflows/label_when_reviewed.yml +++ /dev/null @@ -1,28 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# ---- -name: Label when reviewed -on: pull_request_review # yamllint disable-line rule:truthy -jobs: - - label-when-reviewed: - name: "Label PRs when reviewed" - runs-on: ubuntu-20.04 - steps: - - name: "Do nothing. Only trigger corresponding workflow_run event" - run: echo diff --git a/.github/workflows/label_when_reviewed_workflow_run.yml b/.github/workflows/label_when_reviewed_workflow_run.yml deleted file mode 100644 index b84ab34a79ea5..0000000000000 --- a/.github/workflows/label_when_reviewed_workflow_run.yml +++ /dev/null @@ -1,177 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# ---- -name: Label when reviewed workflow run -on: # yamllint disable-line rule:truthy - workflow_run: - workflows: ["Label when reviewed"] - types: ['requested'] -permissions: - # All other permissions are set to none - checks: write - contents: read - pull-requests: write -jobs: - - label-when-reviewed: - name: "Label PRs when reviewed workflow run" - runs-on: ubuntu-20.04 - outputs: - labelSet: ${{ steps.label-when-reviewed.outputs.labelSet }} - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 - with: - persist-credentials: false - submodules: recursive - - name: "Get information about the original trigger of the run" - uses: ./.github/actions/get-workflow-origin - id: source-run-info - with: - token: ${{ secrets.GITHUB_TOKEN }} - sourceRunId: ${{ github.event.workflow_run.id }} - - name: Initiate Selective Build check - uses: ./.github/actions/checks-action - id: selective-build-check - with: - token: ${{ secrets.GITHUB_TOKEN }} - name: "Selective build check" - status: "in_progress" - sha: ${{ steps.source-run-info.outputs.sourceHeadSha }} - details_url: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} - output: > - {"summary": - "Checking selective status of the build in - [the run](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) - "} - - name: > - Event: ${{ steps.source-run-info.outputs.sourceEvent }} - Repo: ${{ steps.source-run-info.outputs.sourceHeadRepo }} - Branch: ${{ steps.source-run-info.outputs.sourceHeadBranch }} - Run id: ${{ github.run_id }} - Source Run id: ${{ github.event.workflow_run.id }} - Sha: ${{ github.sha }} - Source Sha: ${{ steps.source-run-info.outputs.sourceHeadSha }} - Merge commit Sha: ${{ 
steps.source-run-info.outputs.mergeCommitSha }} - Target commit Sha: ${{ steps.source-run-info.outputs.targetCommitSha }} - run: printenv - - name: > - Fetch incoming commit ${{ steps.source-run-info.outputs.targetCommitSha }} with its parent - uses: actions/checkout@v2 - with: - ref: ${{ steps.source-run-info.outputs.targetCommitSha }} - fetch-depth: 2 - persist-credentials: false - # checkout the main branch again, to use the right script in main workflow - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 - with: - persist-credentials: false - submodules: recursive - - name: "Setup python" - uses: actions/setup-python@v2 - with: - # We do not have output from selective checks yet, so we need to hardcode python - python-version: 3.7 - cache: 'pip' - cache-dependency-path: ./dev/breeze/setup* - - run: ./scripts/ci/install_breeze.sh - - name: Selective checks - id: selective-checks - env: - PR_LABELS: "${{ steps.source-run-info.outputs.pullRequestLabels }}" - COMMIT_REF: "${{ steps.source-run-info.outputs.targetCommitSha }}" - run: breeze selective-check - - name: "Label when approved by committers for PRs that require full tests" - uses: ./.github/actions/label-when-approved-action - id: label-full-test-prs-when-approved-by-commiters - if: > - steps.selective-checks.outputs.run-tests == 'true' && - contains(steps.selective-checks.outputs.test-types, 'Core') - with: - token: ${{ secrets.GITHUB_TOKEN }} - label: 'full tests needed' - require_committers_approval: 'true' - remove_label_when_approval_missing: 'false' - pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }} - comment: > - The PR most likely needs to run full matrix of tests because it modifies parts of the core - of Airflow. However, committers might decide to merge it quickly and take the risk. - If they don't merge it quickly - please rebase it to the latest main at your convenience, - or amend the last commit of the PR, and push it with --force-with-lease. - - name: "Initiate GitHub Check forcing rerun of SH ${{ github.event.pull_request.head.sha }}" - uses: ./.github/actions/checks-action - id: full-test-check - if: steps.label-full-test-prs-when-approved-by-commiters.outputs.labelSet == 'true' - with: - token: ${{ secrets.GITHUB_TOKEN }} - name: "Please rebase or amend, and force push the PR to run full tests" - status: "in_progress" - sha: ${{ steps.source-run-info.outputs.sourceHeadSha }} - details_url: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} - output: > - {"summary": - "The PR likely needs to run all tests! This was determined via selective check in - [the run](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) - "} - - name: "Label when approved by committers for PRs that do not require full tests" - uses: ./.github/actions/label-when-approved-action - id: label-simple-test-prs-when-approved-by-commiters - if: > - steps.selective-checks.outputs.run-tests == 'true' && - ! contains(steps.selective-checks.outputs.test-types, 'Core') - with: - token: ${{ secrets.GITHUB_TOKEN }} - label: 'okay to merge' - require_committers_approval: 'true' - pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }} - comment: > - The PR is likely OK to be merged with just subset of tests for default Python and Database - versions without running the full matrix of tests, because it does not modify the core of - Airflow. 
If the committers decide that the full tests matrix is needed, they will add the label - 'full tests needed'. Then you should rebase to the latest main or amend the last commit - of the PR, and push it with --force-with-lease. - - name: "Label when approved by committers for PRs that do not require tests at all" - uses: ./.github/actions/label-when-approved-action - id: label-no-test-prs-when-approved-by-commiters - if: steps.selective-checks.outputs.run-tests != 'true' - with: - token: ${{ secrets.GITHUB_TOKEN }} - label: 'okay to merge' - pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }} - require_committers_approval: 'true' - comment: > - The PR is likely ready to be merged. No tests are needed as no important environment files, - nor python files were modified by it. However, committers might decide that full test matrix is - needed and add the 'full tests needed' label. Then you should rebase it to the latest main - or amend the last commit of the PR, and push it with --force-with-lease. - - name: Update Selective Build check - uses: ./.github/actions/checks-action - if: always() - with: - token: ${{ secrets.GITHUB_TOKEN }} - check_id: ${{ steps.selective-build-check.outputs.check_id }} - status: "completed" - sha: ${{ steps.source-run-info.outputs.sourceHeadSha }} - conclusion: ${{ job.status }} - details_url: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} - output: > - {"summary": - "Checking selective status of the build completed in - [the run](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) - "} diff --git a/.gitmodules b/.gitmodules index e03978e263653..aa1358f88496d 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,9 +1,6 @@ [submodule ".github/actions/get-workflow-origin"] path = .github/actions/get-workflow-origin url = https://github.com/potiuk/get-workflow-origin -[submodule ".github/actions/checks-action"] - path = .github/actions/checks-action - url = https://github.com/LouisBrunner/checks-action [submodule ".github/actions/configure-aws-credentials"] path = .github/actions/configure-aws-credentials url = https://github.com/aws-actions/configure-aws-credentials @@ -13,6 +10,3 @@ [submodule ".github/actions/github-push-action"] path = .github/actions/github-push-action url = https://github.com/ad-m/github-push-action -[submodule ".github/actions/label-when-approved-action"] - path = .github/actions/label-when-approved-action - url = https://github.com/TobKed/label-when-approved-action diff --git a/CI.rst b/CI.rst index 7798b077acf0c..c058598449299 100644 --- a/CI.rst +++ b/CI.rst @@ -426,12 +426,6 @@ CI, Production Images as well as base Python images that are also cached in the Also for those builds we only execute Python tests if important files changed (so for example if it is "no-code" change, no tests will be executed. -The workflow involved in Pull Requests review and approval is a bit more complex than simple workflows -in most of other projects because we've implemented some optimizations related to efficient use -of queue slots we share with other Apache Software Foundation projects. More details about it -can be found in `PULL_REQUEST_WORKFLOW.rst `_. - - Direct Push/Merge Run --------------------- diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f862d33b1963a..1904c15abe6e7 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -360,33 +360,6 @@ Step 4: Prepare PR PR guidelines described in `pull request guidelines <#pull-request-guidelines>`_. Create Pull Request! 
Make yourself ready for the discussion! -5. Depending on "scope" of your changes, your Pull Request might go through one of few paths after approval. - We run some non-standard workflow with high degree of automation that allows us to optimize the usage - of queue slots in GitHub Actions. Our automated workflows determine the "scope" of changes in your PR - and send it through the right path: - - * In case of a "no-code" change, approval will generate a comment that the PR can be merged and no - tests are needed. This is usually when the change modifies some non-documentation related RST - files (such as this file). No python tests are run and no CI images are built for such PR. Usually - it can be approved and merged few minutes after it is submitted (unless there is a big queue of jobs). - - * In case of change involving python code changes or documentation changes, a subset of full test matrix - will be executed. This subset of tests perform relevant tests for single combination of python, backend - version and only builds one CI image and one PROD image. Here the scope of tests depends on the - scope of your changes: - - * when your change does not change "core" of Airflow (Providers, CLI, WWW, Helm Chart) you will get the - comment that PR is likely ok to be merged without running "full matrix" of tests. However decision - for that is left to committer who approves your change. The committer might set a "full tests needed" - label for your PR and ask you to rebase your request or re-run all jobs. PRs with "full tests needed" - run full matrix of tests. - - * when your change changes the "core" of Airflow you will get the comment that PR needs full tests and - the "full tests needed" label is set for your PR. Additional check is set that prevents from - accidental merging of the request until full matrix of tests succeeds for the PR. - - More details about the PR workflow be found in `PULL_REQUEST_WORKFLOW.rst `_. - Step 5: Pass PR Review ---------------------- diff --git a/PULL_REQUEST_WORKFLOW.rst b/PULL_REQUEST_WORKFLOW.rst deleted file mode 100644 index 7d7a7860fdcd4..0000000000000 --- a/PULL_REQUEST_WORKFLOW.rst +++ /dev/null @@ -1,151 +0,0 @@ - .. Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - .. http://www.apache.org/licenses/LICENSE-2.0 - - .. Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. - -.. contents:: :local: - -Why non-standard pull request workflow? ---------------------------------------- - -This document describes the Pull Request Workflow we've implemented in Airflow. 
The workflow is slightly -more complex than regular workflow you might encounter in most of the projects because after experiencing -some huge delays in processing queues in October 2020 with GitHub Actions, we've decided to optimize the -workflow to minimize the use of GitHub Actions build time by utilising selective approach on which tests -and checks in the CI system are run depending on analysis of which files changed in the incoming PR and -allowing the Committers to control the scope of the tests during the approval/review process. - -Just to give a bit of context, we started off with the approach that we always run all tests for all the -incoming PRs, however due to our matrix of tests growing, this approach did not scale with the increasing -number of PRs and when we had to compete with other Apache Software Foundation projects for the 180 -slots that are available for the whole organization. More Apache Software Foundation projects started -to use GitHub Actions and we've started to experience long queues when our jobs waited for free slots. - -We approached the problem by: - -1) Improving mechanism of cancelling duplicate workflow runs more efficiently in case of queue conditions - (duplicate workflow runs are generated when someone pushes a fixup quickly - leading to running both - out-dated and current run to completion, taking precious slots. This has been implemented by improving - `cancel-workflow-run `_ action we are using. In version - 4.1 it got a new feature of cancelling all duplicates even if there is a long queue of builds. - -2) Heavily decreasing strain on the GitHub Actions jobs by introducing selective checks - mechanism - to control which parts of the tests are run during the tests. This is implemented by the - ``breeze selective-check`` command. It selectively chooses which tests should be run in the PR based on - type of the PR and its content. More about it can be found in - `Selective checks `_ - -3) Even more optimisation came from limiting the scope of tests to only "default" matrix parameters. So far - in Airflow we always run all tests for all matrix combinations. The primary matrix components are: - - * Python versions (currently 3.7, 3.8, 3.9, 3.10) - * Backend types (currently MySQL/Postgres) - * Backed version (currently MySQL 5.7, MySQL 8, Postgres 13 - - We've decided that instead of running all the combinations of parameters for all matrix component we will - only run default values (Python 3.7, Mysql 5.7, Postgres 13) for all PRs which are not approved yet by - the committers. This has a nice effect, that full set of tests (though with limited combinations of - the matrix) are still run in the CI for every Pull Request that needs tests at all - allowing the - contributors to make sure that their PR is "good enough" to be reviewed. - - Even after approval, the automated workflows we've implemented, check if the PR seems to need - "full test matrix" and provide helpful information to both contributors and committers in the form of - explanatory comments and labels set automatically showing the status of the PR. Committers have still - control whether they want to merge such requests automatically or ask for rebase or re-run the tests - and run "full tests" by applying the "full tests needed" label and re-running such request. 
- The "full tests needed" label is also applied automatically after approval when the change touches - the "core" of Airflow - also a separate check is added to the PR so that the "merge" button status - will indicate to the committer that full tests are still needed. The committer might still decide, - whether to merge such PR without the "full matrix". The "escape hatch" we have - i.e. running the full - matrix of tests in the "merge push" will enable committers to catch and fix such problems quickly. - More about it can be found in `Approval workflow and Matrix tests <#approval-workflow-and-matrix-tests>`_ - chapter. - -4) We've also applied (and received) funds to run self-hosted runners. They are used for ``main`` runs - and whenever the PRs are done by one of the maintainers. Maintainers can force using Public GitHub runners - by applying "use public runners" label to the PR before submitting it. - - -Approval Workflow and Matrix tests ----------------------------------- - -As explained above the approval and matrix tests workflow works according to the algorithm below: - -1) In case of "no-code" changes - so changes that do not change any of the code or environment of - the application, no test are run (this is done via selective checks). Also no CI/PROD images are - build saving extra minutes. Such build takes less than 2 minutes currently and only few jobs are run - which is a very small fraction of the "full build" time. - -2) When new PR is created, only a "default set" of matrix test are running. Only default - values for each of the parameters are used effectively limiting it to running matrix builds for only - one python version and one version of each of the backends. In this case only one CI and one PROD - image is built, saving precious job slots. This build takes around 50% less time than the "full matrix" - build. - -3) When such PR gets approved, the system further analyses the files changed in this PR and further - decision is made that should be communicated to both Committer and Reviewer. - -3a) In case of "no-code" builds, a message is communicated that the PR is ready to be merged and - no tests are needed. - -.. image:: images/pr/pr-no-tests-needed-comment.png - :align: center - :alt: No tests needed for "no-code" builds - -3b) In case of "non-core" builds a message is communicated that such PR is likely OK to be merged as is with - limited set of tests, but that the committer might decide to re-run the PR after applying - "full tests needed" label, which will trigger full matrix build for tests for this PR. The committer - might make further decision on what to do with this PR. - -.. image:: images/pr/pr-likely-ok-to-merge.png - :align: center - :alt: Likely ok to merge the PR with only small set of tests - -3c) In case of "core" builds (i. e. when the PR touches some "core" part of Airflow) a message is - communicated that this PR needs "full test matrix", the "full tests needed" label is applied - automatically and either the contributor might rebase the request to trigger full test build or the - committer might re-run the build manually to trigger such full test rebuild. Also a check "in-progress" - is added, so that the committer realises that the PR is not yet "green to merge". Pull requests with - "full tests needed" label always trigger the full matrix build when rebased or re-run so if the - PR gets rebased, it will continue triggering full matrix build. - -.. 
image:: images/pr/pr-full-tests-needed.png - :align: center - :alt: Full tests are needed for the PR - -4) If this or another committer "request changes" in a previously approved PR with "full tests needed" - label, the bot automatically removes the label, moving it back to "run only default set of parameters" - mode. For PRs touching core of airflow once the PR gets approved back, the label will be restored. - If it was manually set by the committer, it has to be restored manually. - -.. note:: Note that setting the labels and adding comments might be delayed, due to limitation of GitHub Actions, - in case of queues, processing of Pull Request reviews might take some time, so it is advised not to merge - PR immediately after approval. Luckily, the comments describing the status of the PR trigger notifications - for the PRs and they provide good "notification" for the committer to act on a PR that was recently - approved. - -The PR approval workflow is possible thanks to two custom GitHub Actions we've developed: - -* `Get workflow origin `_ -* `Label when approved `_ - - -Next steps ----------- - -We are planning to also propose the approach to other projects from Apache Software Foundation to -make it a common approach, so that our effort is not limited only to one project. - -Discussion about it in `this discussion `_ diff --git a/TESTING.rst b/TESTING.rst index 2271e73ecfd8c..4f7de58b76766 100644 --- a/TESTING.rst +++ b/TESTING.rst @@ -487,8 +487,6 @@ This is done for three reasons: 1. in order to selectively run only subset of the test types for some PRs 2. in order to allow parallel execution of the tests on Self-Hosted runners -For case 1. see `Pull Request Workflow `_ for details. - For case 2. We can utilise memory and CPUs available on both CI and local development machines to run test in parallel. This way we can decrease the time of running all tests in self-hosted runners from 60 minutes to ~15 minutes. 
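
The deleted ``PULL_REQUEST_WORKFLOW.rst`` above explains how selective checks derive the scope of tests from the files changed in a PR and from its labels. A minimal, illustrative sketch of that decision (the real logic lives in ``dev/breeze/src/airflow_breeze/utils/selective_checks.py``; the file patterns and returned test types below are simplified assumptions) could look like this::

    # Illustrative sketch only; the real SelectiveChecks class is far richer.
    from typing import Iterable, Set

    FULL_MATRIX = {"API", "Always", "CLI", "Core", "Integration", "Other", "Providers", "WWW"}

    def guess_test_types(changed_files: Iterable[str], pr_labels: Iterable[str]) -> Set[str]:
        """Return the set of test types a PR would need, based on changed paths and labels."""
        changed = list(changed_files)
        # The "full tests needed" label (or an unknown set of changes) always means the full matrix.
        if "full tests needed" in set(pr_labels) or not changed:
            return set(FULL_MATRIX)
        test_types: Set[str] = set()
        for path in changed:
            if path.startswith("airflow/cli/"):
                test_types.update({"Always", "CLI"})
            elif path.startswith(("airflow/providers/", "tests/providers/")):
                test_types.update({"Always", "Providers"})
            elif path.startswith("airflow/"):
                # Touching the "core" of Airflow widens the scope to everything.
                return set(FULL_MATRIX)
            # Documentation-only ("no-code") files add no test types at all.
        return test_types

    print(guess_test_types(["airflow/cli/test.py", "chart/aaaa.txt"], []))  # {'Always', 'CLI'}
    print(guess_test_types(["INTHEWILD.md"], []))                           # set() -> no tests run

This mirrors the expectations encoded in ``dev/breeze/tests/test_selective_checks.py`` below: a CLI change yields "Always CLI", a core file widens the scope to all test types, and a documentation-only change runs no tests.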
diff --git a/dev/airflow-github b/dev/airflow-github index 5dc5f5bd9d4d9..fcda3355d914d 100755 --- a/dev/airflow-github +++ b/dev/airflow-github @@ -126,7 +126,6 @@ def is_core_commit(files: List[str]) -> bool: "CONTRIBUTORS_QUICK_START.rst", "IMAGES.rst", "LOCAL_VIRTUALENV.rst", - "PULL_REQUEST_WORKFLOW.rst", "INTHEWILD.md", "INSTALL", "README.md", diff --git a/dev/breeze/src/airflow_breeze/commands/ci_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_commands.py index c8260698d71aa..c65753e1a65e5 100644 --- a/dev/breeze/src/airflow_breeze/commands/ci_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/ci_commands.py @@ -205,7 +205,7 @@ def selective_check( from airflow_breeze.utils.selective_checks import SelectiveChecks github_event = GithubEvents(github_event_name) - if github_event == GithubEvents.PULL_REQUEST: + if commit_ref is not None: changed_files = get_changed_files(commit_ref=commit_ref, dry_run=dry_run, verbose=verbose) else: changed_files = () diff --git a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py index 546f52ba5acdd..7ef0a11a2074b 100644 --- a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py @@ -319,7 +319,6 @@ def version(verbose: bool): @click.option('-C/-c', '--cheatsheet/--no-cheatsheet', help="Enable/disable cheatsheet.", default=None) @click.option('-A/-a', '--asciiart/--no-asciiart', help="Enable/disable ASCIIart.", default=None) @click.option( - '-B/-b', '--colour/--no-colour', help="Enable/disable Colour mode (useful for colour blind-friendly communication).", default=None, @@ -404,7 +403,10 @@ def command_hash_export(verbose: bool, output: IO): the_context_dict = ctx.to_info_dict() if verbose: get_console().print(the_context_dict) - output.write(dict_hash(the_context_dict) + "\n") + output.write(f"main:{dict_hash(the_context_dict['command']['params'])}\n") + commands_dict = the_context_dict['command']['commands'] + for command in sorted(commands_dict.keys()): + output.write(f"{command}:{dict_hash(commands_dict[command])}\n") def write_to_shell(command_to_execute: str, dry_run: bool, script_path: str, force_setup: bool) -> bool: diff --git a/dev/breeze/src/airflow_breeze/commands/developer_commands.py b/dev/breeze/src/airflow_breeze/commands/developer_commands.py index a81ae92745684..cbb2b10d06872 100644 --- a/dev/breeze/src/airflow_breeze/commands/developer_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/developer_commands.py @@ -53,6 +53,7 @@ option_mount_sources, option_mssql_version, option_mysql_version, + option_platform_single, option_postgres_version, option_python, option_use_airflow_version, @@ -226,6 +227,7 @@ @option_verbose @option_dry_run @option_python +@option_platform_single @option_backend @option_debian_version @option_github_repository @@ -267,6 +269,7 @@ def shell( db_reset: bool, answer: Optional[str], image_tag: Optional[str], + platform: Optional[str], extra_args: Tuple, ): """Enter breeze.py environment. 
this is the default command use when no other is selected.""" @@ -296,6 +299,7 @@ def shell( answer=answer, debian_version=debian_version, image_tag=image_tag, + platform=platform, ) @@ -303,6 +307,7 @@ def shell( @main.command(name='start-airflow') @option_dry_run @option_python +@option_platform_single @option_github_repository @option_backend @option_postgres_version @@ -346,6 +351,7 @@ def start_airflow( image_tag: Optional[str], db_reset: bool, answer: Optional[str], + platform: Optional[str], extra_args: Tuple, ): """Enter breeze.py environment and starts all Airflow components in the tmux session.""" @@ -372,6 +378,7 @@ def start_airflow( db_reset=db_reset, start_airflow=True, image_tag=image_tag, + platform=platform, extra_args=extra_args, answer=answer, ) diff --git a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py index 07bd42cab90c9..d08e1d89b583a 100644 --- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py @@ -37,6 +37,7 @@ option_airflow_constraints_mode_prod, option_airflow_constraints_reference_build, option_answer, + option_builder, option_debian_version, option_dev_apt_command, option_dev_apt_deps, @@ -52,7 +53,7 @@ option_image_tag_for_verifying, option_install_providers_from_sources, option_parallelism, - option_platform, + option_platform_multiple, option_prepare_buildx_cache, option_pull_image, option_push_image, @@ -242,7 +243,7 @@ def run_build_in_parallel( @option_parallelism @option_python_versions @option_upgrade_to_newer_dependencies -@option_platform +@option_platform_multiple @option_debian_version @option_github_repository @option_github_token @@ -303,6 +304,7 @@ def run_build_in_parallel( @option_additional_dev_apt_env @option_additional_runtime_apt_env @option_additional_runtime_apt_command +@option_builder @option_dev_apt_command @option_dev_apt_deps @option_python_image diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index 8dedc191f7db6..5ba825e71f67a 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -19,7 +19,7 @@ """ from __future__ import annotations -import os +import platform from enum import Enum from functools import lru_cache @@ -106,7 +106,8 @@ class SelectiveUnitTestTypes(Enum): ALLOWED_DEBIAN_VERSIONS = ['bullseye', 'buster'] ALLOWED_BUILD_CACHE = ["registry", "local", "disabled"] MULTI_PLATFORM = "linux/amd64,linux/arm64" -ALLOWED_PLATFORMS = ["linux/amd64", "linux/arm64", MULTI_PLATFORM] +SINGLE_PLATFORMS = ["linux/amd64", "linux/arm64"] +ALLOWED_PLATFORMS = [*SINGLE_PLATFORMS, MULTI_PLATFORM] ALLOWED_USE_AIRFLOW_VERSIONS = ['none', 'wheel', 'sdist'] PARAM_NAME_DESCRIPTION = { @@ -144,8 +145,15 @@ def get_available_packages(short_version=False) -> list[str]: return package_list +def get_default_platform_machine() -> str: + machine = platform.uname().machine + # Some additional conversion for various platforms... 
+ machine = {"AMD64": "x86_64"}.get(machine, machine) + return machine + + # Initialise base variables -DOCKER_DEFAULT_PLATFORM = f"linux/{os.uname().machine}" +DOCKER_DEFAULT_PLATFORM = f"linux/{get_default_platform_machine()}" DOCKER_BUILDKIT = 1 SSH_PORT = "12322" @@ -296,6 +304,7 @@ class GithubEvents(Enum): PULL_REQUEST = "pull_request" PULL_REQUEST_REVIEW = "pull_request_review" PULL_REQUEST_TARGET = "pull_request_target" + PULL_REQUEST_WORKFLOW = "pull_request_workflow" PUSH = "push" SCHEDULE = "schedule" WORKFLOW_RUN = "workflow_run" diff --git a/dev/breeze/src/airflow_breeze/params/common_build_params.py b/dev/breeze/src/airflow_breeze/params/common_build_params.py index 68947186d68e8..e6c2c70029de4 100644 --- a/dev/breeze/src/airflow_breeze/params/common_build_params.py +++ b/dev/breeze/src/airflow_breeze/params/common_build_params.py @@ -22,6 +22,7 @@ from typing import List, Optional from airflow_breeze.branch_defaults import AIRFLOW_BRANCH +from airflow_breeze.global_constants import DOCKER_DEFAULT_PLATFORM from airflow_breeze.utils.console import get_console from airflow_breeze.utils.platforms import get_real_platform @@ -44,6 +45,7 @@ class CommonBuildParams: airflow_constraints_location: str = "" answer: Optional[str] = None build_id: int = 0 + builder: str = "default" constraints_github_repository: str = "apache/airflow" debian_version: str = "bullseye" dev_apt_command: str = "" @@ -56,7 +58,7 @@ class CommonBuildParams: github_username: str = "" image_tag: Optional[str] = None install_providers_from_sources: bool = False - platform: str = f"linux/{os.uname().machine}" + platform: str = DOCKER_DEFAULT_PLATFORM prepare_buildx_cache: bool = False python_image: Optional[str] = None push_image: bool = False diff --git a/dev/breeze/src/airflow_breeze/params/shell_params.py b/dev/breeze/src/airflow_breeze/params/shell_params.py index b67d362186653..b4c1e73f5a827 100644 --- a/dev/breeze/src/airflow_breeze/params/shell_params.py +++ b/dev/breeze/src/airflow_breeze/params/shell_params.py @@ -31,6 +31,7 @@ ALLOWED_POSTGRES_VERSIONS, ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS, AVAILABLE_INTEGRATIONS, + DOCKER_DEFAULT_PLATFORM, MOUNT_ALL, MOUNT_REMOVE, MOUNT_SELECTED, @@ -78,6 +79,7 @@ class ShellParams: mysql_version: str = ALLOWED_MYSQL_VERSIONS[0] num_runs: str = "" package_format: str = ALLOWED_INSTALLATION_PACKAGE_FORMATS[0] + platform: str = DOCKER_DEFAULT_PLATFORM postgres_version: str = ALLOWED_POSTGRES_VERSIONS[0] python: str = ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS[0] skip_environment_initialization: bool = False @@ -189,6 +191,8 @@ def compose_files(self): backend_files = [] for backend in ALLOWED_BACKENDS: backend_files.extend(self.get_backend_compose_files(backend)) + compose_ci_file.append(f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-mssql-bind-volume.yml") + compose_ci_file.append(f"{str(SCRIPTS_CI_DIR)}/docker-compose/backend-mssql-docker-volume.yml") local_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/local.yml" local_all_sources_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/local-all-sources.yml" files_docker_compose_file = f"{str(SCRIPTS_CI_DIR)}/docker-compose/files.yml" @@ -239,7 +243,7 @@ def compose_files(self): if len(integrations) > 0: for integration in integrations: compose_ci_file.append(f"{str(SCRIPTS_CI_DIR)}/docker-compose/integration-{integration}.yml") - return ':'.join(compose_ci_file) + return os.pathsep.join(compose_ci_file) @property def command_passed(self): diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py 
b/dev/breeze/src/airflow_breeze/pre_commit_ids.py index bcddfa0ec9533..db839f0712685 100644 --- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py +++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py @@ -25,10 +25,11 @@ 'all', 'black', 'blacken-docs', - 'check-airflow-2-1-compatibility', + 'check-airflow-2-2-compatibility', 'check-airflow-config-yaml-consistent', 'check-airflow-providers-have-extras', 'check-apache-license-rat', + 'check-base-operator-partial-arguments', 'check-base-operator-usage', 'check-boring-cyborg-configuration', 'check-breeze-top-dependencies-limited', @@ -36,6 +37,7 @@ 'check-changelog-has-no-duplicates', 'check-daysago-import-from-utils', 'check-docstring-param-types', + 'check-example-dags-urls', 'check-executables-have-shebangs', 'check-extra-packages-references', 'check-extras-order', @@ -59,8 +61,10 @@ 'check-setup-order', 'check-start-date-not-used-in-defaults', 'check-system-tests-present', + 'check-system-tests-tocs', 'check-xml', 'codespell', + 'create-missing-init-py-files-tests', 'debug-statements', 'detect-private-key', 'doctoc', diff --git a/dev/breeze/src/airflow_breeze/utils/common_options.py b/dev/breeze/src/airflow_breeze/utils/common_options.py index af2b6c75e023d..778b34ad1c9ce 100644 --- a/dev/breeze/src/airflow_breeze/utils/common_options.py +++ b/dev/breeze/src/airflow_breeze/utils/common_options.py @@ -37,6 +37,7 @@ ALLOWED_POSTGRES_VERSIONS, ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS, ALLOWED_USE_AIRFLOW_VERSIONS, + SINGLE_PLATFORMS, get_available_packages, ) from airflow_breeze.utils.custom_param_types import ( @@ -214,12 +215,18 @@ option_image_name = click.option( '-n', '--image-name', help='Name of the image to verify (overrides --python and --image-tag).' ) -option_platform = click.option( +option_platform_multiple = click.option( '--platform', help='Platform for Airflow image.', envvar='PLATFORM', type=BetterChoice(ALLOWED_PLATFORMS), ) +option_platform_single = click.option( + '--platform', + help='Platform for Airflow image.', + envvar='PLATFORM', + type=BetterChoice(SINGLE_PLATFORMS), +) option_debian_version = click.option( '--debian-version', help='Debian version used for the image.', @@ -452,10 +459,15 @@ is_flag=True, envvar='PULL_IMAGE', ) - option_python_image = click.option( '--python-image', help="If specified this is the base python image used to build the image. 
" "Should be something like: python:VERSION-slim-bullseye", envvar='PYTHON_IMAGE', ) +option_builder = click.option( + '--builder', + help="Buildx builder used to perform `docker buildx build` commands", + envvar='BUILDER', + default='default', +) diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py index 8af6010c40403..cdb1ceb258725 100644 --- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py @@ -28,7 +28,7 @@ from airflow_breeze.params.common_build_params import CommonBuildParams from airflow_breeze.params.shell_params import ShellParams from airflow_breeze.utils.host_info_utils import get_host_group_id, get_host_os, get_host_user_id -from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT +from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, MSSQL_DATA_VOLUME try: from packaging import version @@ -352,7 +352,7 @@ def prepare_docker_build_cache_command( build_flags = image_params.extra_docker_build_flags final_command = [] final_command.extend(["docker"]) - final_command.extend(["buildx", "build", "--builder", "airflow_cache", "--progress=tty"]) + final_command.extend(["buildx", "build", "--builder", image_params.builder, "--progress=tty"]) final_command.extend(build_flags) final_command.extend(["--pull"]) final_command.extend(arguments) @@ -388,7 +388,7 @@ def prepare_base_build_command(image_params: CommonBuildParams, verbose: bool) - "buildx", "build", "--builder", - "default", + image_params.builder, "--progress=tty", "--push" if image_params.push_image else "--load", ] @@ -525,6 +525,7 @@ def update_expected_environment_variables(env: Dict[str, str]) -> None: set_value_to_default_if_not_set(env, 'LIST_OF_INTEGRATION_TESTS_TO_RUN', "") set_value_to_default_if_not_set(env, 'LOAD_DEFAULT_CONNECTIONS', "false") set_value_to_default_if_not_set(env, 'LOAD_EXAMPLES', "false") + set_value_to_default_if_not_set(env, 'MSSQL_DATA_VOLUME', str(MSSQL_DATA_VOLUME)) set_value_to_default_if_not_set(env, 'PACKAGE_FORMAT', ALLOWED_PACKAGE_FORMATS[0]) set_value_to_default_if_not_set(env, 'PRINT_INFO_FROM_SCRIPTS', "true") set_value_to_default_if_not_set(env, 'PYTHONDONTWRITEBYTECODE', "true") diff --git a/dev/breeze/src/airflow_breeze/utils/md5_build_check.py b/dev/breeze/src/airflow_breeze/utils/md5_build_check.py index 0900793a48e1f..e572c5c373424 100644 --- a/dev/breeze/src/airflow_breeze/utils/md5_build_check.py +++ b/dev/breeze/src/airflow_breeze/utils/md5_build_check.py @@ -86,7 +86,7 @@ def calculate_md5_checksum_for_files( return modified_files, not_modified_files -def md5sum_check_if_build_is_needed(md5sum_cache_dir: Path, verbose: bool) -> bool: +def md5sum_check_if_build_is_needed(md5sum_cache_dir: Path) -> bool: """ Checks if build is needed based on whether important files were modified. 
diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py index 413401c0e8332..c54b54e5c7f34 100644 --- a/dev/breeze/src/airflow_breeze/utils/path_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py @@ -238,7 +238,9 @@ def find_airflow_sources_root_to_operate_on() -> Path: AIRFLOW_SOURCES_ROOT = find_airflow_sources_root_to_operate_on().resolve() BUILD_CACHE_DIR = AIRFLOW_SOURCES_ROOT / '.build' +DAGS_DIR = AIRFLOW_SOURCES_ROOT / 'dags' FILES_DIR = AIRFLOW_SOURCES_ROOT / 'files' +HOOKS_DIR = AIRFLOW_SOURCES_ROOT / 'hooks' MSSQL_DATA_VOLUME = AIRFLOW_SOURCES_ROOT / 'tmp_mssql_volume' KUBE_DIR = AIRFLOW_SOURCES_ROOT / ".kube" LOGS_DIR = AIRFLOW_SOURCES_ROOT / 'logs' @@ -260,12 +262,17 @@ def create_volume_if_missing(volume_name: str): check=False, ) if res_inspect.returncode != 0: - run_command( + result = run_command( cmd=["docker", "volume", "create", volume_name], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - check=True, + check=False, + capture_output=True, ) + if result.returncode != 0: + get_console().print( + "[warning]\nMypy Cache volume could not be created. Continuing, but you " + "should make sure your docker works.\n\n" + f"Error: {result.stdout}\n" + ) def create_static_check_volumes(): @@ -278,7 +285,9 @@ def create_directories_and_files() -> None: Checks if setup has been updates since last time and proposes to upgrade if so. """ BUILD_CACHE_DIR.mkdir(parents=True, exist_ok=True) + DAGS_DIR.mkdir(parents=True, exist_ok=True) FILES_DIR.mkdir(parents=True, exist_ok=True) + HOOKS_DIR.mkdir(parents=True, exist_ok=True) MSSQL_DATA_VOLUME.mkdir(parents=True, exist_ok=True) KUBE_DIR.mkdir(parents=True, exist_ok=True) LOGS_DIR.mkdir(parents=True, exist_ok=True) diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py index f0a44b425f4bc..9754605f49be4 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py @@ -96,6 +96,7 @@ def run_command( return subprocess.CompletedProcess(cmd, returncode=0) try: cmd_env = os.environ.copy() + cmd_env.setdefault("HOME", str(Path.home())) if env: cmd_env.update(env) with ci_group(title=f"Output of {title}", enabled=enabled_output_group): diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index 492135ebd38b0..2d7e8fe83d88b 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -298,7 +298,7 @@ def test_expected_output_full_tests_needed( "upgrade-to-newer-dependencies": "false", "test-types": "", }, - id="Everything should run when full tests are needed even if no files are changed", + id="Nothing should run if only non-important files changed", ), pytest.param( ( @@ -371,6 +371,76 @@ def test_expected_output_pull_request_v2_3( assert_outputs_are_printed(expected_outputs, str(sc)) +@pytest.mark.parametrize( + "files, expected_outputs,", + [ + pytest.param( + ("INTHEWILD.md",), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "false", + "needs-helm-tests": "false", + "run-tests": "false", + "docs-build": "false", + "upgrade-to-newer-dependencies": "false", + "test-types": "", + }, + id="Nothing should run if only non-important files changed", + ), + pytest.param( + ( + "airflow/cli/test.py", + "chart/aaaa.txt", + "tests/providers/google/file.py", + ), + { + "all-python-versions": "['3.7']", + 
"all-python-versions-list-as-string": "3.7", + "image-build": "true", + "needs-helm-tests": "true", + "run-tests": "true", + "docs-build": "true", + "run-kubernetes-tests": "true", + "upgrade-to-newer-dependencies": "false", + "test-types": "Always CLI", + }, + id="CLI tests and Kubernetes tests should run if cli/chart files changed", + ), + pytest.param( + ( + "airflow/file.py", + "tests/providers/google/file.py", + ), + { + "all-python-versions": "['3.7']", + "all-python-versions-list-as-string": "3.7", + "image-build": "true", + "needs-helm-tests": "false", + "run-tests": "true", + "docs-build": "true", + "run-kubernetes-tests": "false", + "upgrade-to-newer-dependencies": "false", + "test-types": "API Always CLI Core Integration Other Providers WWW", + }, + id="All tests except should run if core file changed", + ), + ], +) +def test_expected_output_pull_request_target( + files: Tuple[str, ...], + expected_outputs: Dict[str, str], +): + sc = SelectiveChecks( + files=files, + commit_ref="HEAD", + github_event=GithubEvents.PULL_REQUEST_TARGET, + pr_labels=(), + default_branch="main", + ) + assert_outputs_are_printed(expected_outputs, str(sc)) + + @pytest.mark.parametrize( "files, pr_labels, default_branch, expected_outputs,", [ @@ -441,11 +511,21 @@ def test_expected_output_push( assert_outputs_are_printed(expected_outputs, str(sc)) -def test_no_commit_provided(): +@pytest.mark.parametrize( + "github_event", + [ + GithubEvents.PUSH, + GithubEvents.PULL_REQUEST, + GithubEvents.PULL_REQUEST_TARGET, + GithubEvents.PULL_REQUEST_WORKFLOW, + GithubEvents.SCHEDULE, + ], +) +def test_no_commit_provided_trigger_full_build_for_any_event_type(github_event): sc = SelectiveChecks( files=(), commit_ref="", - github_event=GithubEvents.PULL_REQUEST, + github_event=github_event, pr_labels=(), default_branch="main", ) @@ -457,7 +537,9 @@ def test_no_commit_provided(): "needs-helm-tests": "true", "run-tests": "true", "docs-build": "true", - "upgrade-to-newer-dependencies": "false", + "upgrade-to-newer-dependencies": "true" + if github_event in [GithubEvents.PUSH, GithubEvents.SCHEDULE] + else "false", "test-types": "API Always CLI Core Integration Other Providers WWW", }, str(sc), diff --git a/images/pr/pr-full-tests-needed.png b/images/pr/pr-full-tests-needed.png deleted file mode 100644 index c863153d0699f80335f2a383285643761cc30f43..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 88512 zcmeFZgNfNHMo0`Lh<57OVQx&UZ91dCBcHW1d0TAmr{yDaCevB z7UY-r?7h#~|HJpK>$#GwWM$bibI(1qCW+EiS0uux!Uq5VMDLX3wE%#}lmGzcBRov> zJC|dMYv?B|ciDG3czAeoD;mq_>I)AA0}pLyYY%U8H!FaRle420(A~n#%F4;z*4YDz z)g}c1FaX}kztQo@*k1^A0Z(OeUm-<@RW8SzzZ-HB+0A|b%%FucgY{$_U}Sp;d6q$~ zhM!?RRqvH*sO_FsA}P-Q!T;4e7@wyNKlQMfu<%R;=V#}%q7L0Zy6dy0?9;qaGveh_ zZj-;uAFF;WSIQIl4#WT!8X?Ap=R{G8b>S?JaHtvg1EjrX)Go&gG_tpR4Og!(kTj#S zv-nImv5CqsYbOm=)z{`4U;nQ-xw{pL%gFy%@eu!%@bF<|Rg5w<_Jzw}KV3HDK$hL6fb_!Xi8 zsiEoVTFMv+-$liq{%;wr;ZgcIm-y`4#FSNKji6v*9*L|yQO*gyEgty?ZG{4#j`7^D zZ>F{E*%)g`NMx!1*s?4_@go;pn7$_8z~{X<*k|{t|4TBf8E`-zO(ZgEuXN4yjm)6$ zM2`C;8^7nW+rlG4LP$7VUg1YYg)xFjI>EB+;mV35_=W`Rp?~Vr+#SRzI8`jlb%Jah z-0IgWm1F-;LB2ZSeHLg0zP2xU_?_8raZCvqh;EkO2qu+%gQXS0?k%wqtMV8`8ImCL z2o8YNjD$>%P*EWgI(hhbo47Lfn}3>j5^LPp8PkMF2$#wj&`V03OMLpJ9;{%B%~=jfG(^ut{X8NDVn77@}ouVPLJV4Atf!#sh=b z$x2H7-;=R1*u$+}yyU()zRoo>r8Mdj4(Ojb_bL*^ZtoQbSgQWnwE#4$WSx)!yhpHk zMx|HGPEx81{%hOnp5uV3E%|oV`mU;~=a2L_|a~I{bpR@M7O|T0ga<=h}${ z9KjT_AUNd74>lcT5G{MSAqRVexq)kssz@YAxho+Al&;QUA$Sow#~lIj!Y1wBrX)o2)b;C);$Aa#^;@@2S~tir`S}0s$+j@JM2)T2Dr1* 
zfuRjqO9!Q)T*g1>D3GlcnS+~F!c$iqYP+X2FU|DN>2QEL!CJe%wTr1P z>!LSAD!Btfn9)n+zEY|g*}fO^D-W+IaMvm~S7J^sJ?TJW%#uls>XXRMbeRY%c4A^B zTZ2b#yYyU0^w`yPwV_P{Di0C-S6rW#4{vFbaE?MLbY-V7uMmHr#)(6b!(ewdcgSM0 z$=OoH*toi{8>^LuD@2*)4x2c2E#enDTa`5x)34d-U3aD3T`g(H^Wu+~ApWqe(h&Ke(x)co~}tkv%pjV>?Y@dHL@k z2^P=?$HZW#H5C5*`Hm{wX=jotA$=zpc0alve)Wa0BSq^!esJ*(XvVSIHa0d^;jxX{ z))z$)dkx3^#wo583Grm%q9^oZ^xGJZ3?i}x%+El4hCAB-% z0aax-!l&&UztRqDI-If-xwLugRTU-Lqi)r-R2xsaos9-$h~P$oQ)i#u2%{obbJ`TD=?CW)7gOxX7&c^G{c?9x{xaM8Db&x-K_Bo<jjZmHRd*sCq4v;{6|A3~OwB94MnfM`p!KOiX;7%ICDB3K*)-pO^BL3OGYa zAwU3HsSbKG044?%!HlC*qpYEUAARufK@35-bdJUjuC6Of;wNWrH{hESyDUER{|p57 z%{R0M#t<-c>bHyQ(FnO;hkO8AhA=YjH{WeHV`Ak@nQWgo1BHImdE*Zm)5+sZaINPXJL}cG%a*UEp=Hq0zY18K#e^Y&I5vau`ZfP&vW0PT}KqWKg7> z+;Z?(8F*1=rjSeVMs^|T(69GS7242?67*7DQW#hYCW@4x!IAfNcTYCQXy!_L)w_LT(_(Z$)$yejS zS-w8wk@Ko4K`j+5{_G2&#zJD?8;f+^+&8f6ISJ z>{rSXXkN(8{Ed#rd)JXQC{ic+?+AyjpP%2Ic-eZ2dp~myU&^W5EpV~@Qn$CZeuyRp z0C*6xwVOOu#QH#$LKe6SS(TMz%ZD#!(4>kVU=aaE>>hl&M?Qe~kq(JOENd8WDYVhz zpB+iEI4+GL+@4nsum7eA6-(%2MvPH!tpJ7h6A|~@yf1q6!uvCjV3SF+WZU`=UW;X> zy3ZQ1jC+U7tx`J$cnM;qc8^~g_e>mz&chYfP6--~MuNYk9B&3*xT2M|1&8SYl7b|% z+qYp9C206j&F-Ah^@)DrNU-h9bzM$khsZy$1q4ulEFLdIo=@{-q>*|m+bu`l+n`3r9pwB_Fk$R%@Zii^)w$a4@r9~iz?9`aGLme%tZ331#yO1d07o!_ zLP-p2u#Lty@Smtd2iW+#P_0~ea>fFPWEq=pjx6j=-5Q%38Z! zMu6HW7ze4r9jEUXF(6Kg)LWqCeS3O+zK^q~$4CVxh797{jl_kU?B*E*U9yjIKv-{; zwtQ&MNg((akbnGUL6{iiEBjtn3s?fK8(((~4S9NyEAJSz?OMl&@jdyTe;-IO3-f5i z((D6J=?{|7GZD99UrvCkPHb!Ks6KcnFK);xyIRUguIKy^Y9X1)NZ9nd|4YIy?9=kxA<;{!{8=eXy& zx}Hc@y^2K{IEJ>BSpdB+d^PamCM`sLcVcBtla>^MS<=Te(Z4Wndp-ST zxw~TtS>*22c~>HTn~d(cRka2H4#*G~~ zb3~f)cT4vA5v&5y+n#CQP;aX+LwzjAq7f{qsEB)a`9Tb4^~~JRPiUP+k52s}_QSih{ZYm9HxG5={}2K4P+=DkkU*k80m3@X z&rcmuW*O-Z4(-?bcR@}Rm@rRkYwOIth9GFVudlDQ=e7pmdwSsTpn!O}muvUD4h!r7oQoxw|fKh9Du!jk@uGz_Bqbi z_X(tIW*0D7L4;@zpfQl$GeSdr)}0mOXHsl$o^SVwX57Eima(vZ7!4#oqNmtFSs-X^ zz#I}B{Nk1faDtK)Sj?6mN#%k7>qd&YnC}b7Rh77ZU^#USjmL;bPTa}F-ZzK9lUZI~ zRw>c;>|>jF5{dxWX&4dT6##LPc%1vqDz6l*>?0Er+yIlevbt(H2R2isw>;vvB;iW# z*RZP1s;za+e~26_njzGkg%RnYzn49G`b0+K#2I55GJmQ80dMKc1P#ne{EYCfP}&IxC|xtu_N) z>>ug&#UEWYc1FgMpFee#OY#c}%uP%_U|UxG5dAj^_=W?dkbdKthPS8@hl zGx^hZG(QAVFceaII<9d)VDF(4ovYzkMVjbAWh>>ny1Kx?Kv2FF%LoP2^Z^ScW&jY< z0nz}|DiTPu0kw2Y=k;&WQmn&Wf&hw>~ z%j@RCIMoyTLR3)j4<9IIdfdlRT~+1QIDbE_QAAR&fa>}pKsdGFzTX&c(9RET49FaN z0PtGb0;o+{MdiEoyO$5_7i~BJF+Z_wh)V&`V1EAjg*`Si-`DE$@|dmf1q;Ls{!<4T ztk2c|0$BMUCfXEB1#n_EHt!i3WoV)Up^&AIF@_VxS|IHJ zd+_2vtAS=xhJ?S81A__R?k?U!K!F%8>RSn3^om{WucRMi_Vsh&T4?`jtOpOA&mI(u zo9w&`MwXo=2vH_6T zKYVA~Kf=f=nV?5tfnZmgB%L#{9nv58pND#2^9C%h($kuooAdLxwgK0C4_mZos(X3e zf!ed?mFnIAH3c4sijLzWDq9<06GWuvs9B|0f4+RG6Rt^{Iy_e zP|#HXG z62a@8sBq7`^aP3-VEuKjrzfJsvzqaGb>G*_?McKkoSvWS)YvQ`ZkrLE#~um_k?>WZ z7xcawNj1VB*sWejNl77!D@Y8KZE`wT{C$sxOXt}8AM1zxi+;JkR~*VeZy!Y3J^FaV z*G}CKN3qSI$7%wP1<6m)OstHw0h4cU|QF2SEeSTu>2YXPS0^!~anN~j>|{X{ya92m=z5^KQhUtV5*l_{lzmlYzySf;kx@_zXTP~Vx|;(~(u{ibU$NwuORb}Ji~9|48pO{>67>r(6Y zaH0P>=gR?bx`T|sFfm}}1G;NKM}T4j-ii0lili*LS|+CHA9VoI0$>_(dDuzp-Z)>| zhjrLS^gqo(r*{7?1KRb%I$73Z6Z_U;wcR#3nlJl6f4EyFM!0~Y178RF(<=3WdGHQn zC83`f896S{x!qd)?E@x1hFJ6dG|X^Sx|9L1qqz>%a#fzn(xxgcv0d?|fuF4f;1&A>Qrn-|c~&DW~p(^aFHw7)&TE1OHo?O1Jid z5U8CYNny{Oj1do*xuU{C$s%Rz*_zVaT!3WXc6#4*T2OL1Y{=^ArFSmp6%@E%?^fSq zxuXnr{Hie_ql*9YrGK>rfKml|oe$vn1INk*j-?qe%n)JHa5{@CE-tQkn47yh4yYJ_ z-q>RDA1%Ot&i;4xfcr88IIZuQ<1N6s&p@i0JLrFFC`mlWG=24upp)L|#n8c1F3JC# z4Sv1?d3cPL_q~8-#^wq1s{}4Z)ISc{zmM@q0|$g-5EG{YSbI{AE((k;kbrCt{Xa5=G%6{HE*6f%^Xh+Y2xC{w{7BZlYFDY=SGJ?~ z9P=$5Jb%pWeeRW&G{!oX$*>>M4o1X%=NM*;gD z*^Nyb0X&X&_5ap70e1tm|LV-aV1HWQDC8IPnk?VK4A)Y zN0G!7VY`QRo|4}N=pU7>tULFu1u&JJwIhQBZl0W-7C 
z@SpZV7fc%jzkT_wAM$_fSj*FZ|M$!P)osu|kXjEq_Ol+*N!A-kk$93TCctZNzvfS9agk1*8U5j`b zP`755i9IpL$E>N*P~aflbo*ynpbt~$t$*IF?$1-tzYUMSh=BgOAMAb7o!gA)8!*p0 zil{l3sjBd*4*$P4#4#TaDr)BJr04 zM#>$mT(_2+0`p1&&Zf^GQRVobUGQkAXXLgZ$c+x+O&<;14@M!?dP&YJ#1jMA-WyT-9X&(_W> zKlzkQ7U}VPM>l!X=}P~x1>(?tUY*330WeXpac2X^&)=<$a2Ca@l|MmwvZ<|^_20>Z z#cXiG4iU)BvjTR9uTvLF#Ey4nTwr^p-$O4#=LRSF7i>n%CXS1t>nbr)N`)Dx zmnu*Hpnuta6TrI2Gh#N5A7U;eRPgl+z+x)#iV8*)oVJ7FhE3VFip-at`-le>m5}4R zgH1a)CbU3fR1 zS8&ioQO5V-1YpaAdtdJS>G+cb(VLl5c)Fk}!z-Yg$kX8UgLFt$6uNR!jRW6j&?~?1 zyZUqluQ2i~N?gSJsfP?84y9o$1}+19{WU4)BvHnb+#%(wUW~@4ob#f{KoGV#j2|8;{ME zsT0+%H_j@_yg8Y!>5_v@)D65$(~?{j^q?-tUVQ+6GKcM7N z`S7KDk2Qs{&8~-RwA6($5{y4DP4#l2E?=Kt`aF_$9?FkWNZ@(aHX~uT+|28MgFbT> zVGo-pcJ^Eu;cN4Efp_?Db$L1er50L|$^4rDQ{tb66DmW=-p4fTS1QfY{#I~JN=7M_ za>x$17--)bdK(F_k`v!*rhxTvwj`RjvRVsO==;qFM;AAJ=6SBGV6l{mU+>~i(f4RsD58)y0K%hJ<5lv7P-~N&Ggk3ZKpk1&YK!_ zVI@Gy{cEAY#m_xC(^!=~^*9OV3^@eK{O%!Xp+q-)WXs94*(L$S56bd~Ca-WH-nt?ILG_W z_No1|^O47jT6Y@`ZhK}VXJz$5=YOw-JB^Jy`f}5rv#uAiZAHs7rPgmNKA}uy%=&is zg#m(@?0y?&icw!zUv*e&Ul+YNGBsszlRm1)CJoQ}bGLULR_WD$)U|RPtti`nlxTi! zMsp0`QQgpcdc84Qf^6Hz54SGfRVumFa@kvDwDmr3p4%swYavcaN+BXf#&(K zhca}xtrgG&KMqExB!BOHY`bJeUM_*yz-nYm@Nj;!{byg{(ox|ic1A|LMC?h7aPb0d z+q&=DcpvXf%$$sP=RHefFAlpea!j zpkAAA?28@{0y5K02QxZ?VMUArTXpVP*ZPr}jz&j&9T#T4iK#r*5b@0ePp9>(fAT%vuF&3BziFonk<1sYEK`X`&ck~BnY+_#yw>eO%S>g$Q!$CVX? zQ^<^SI`lI>-lofp^>kh?IGtbfQ1) zX8P&YGFg?o-`49|Eb2WI)6hj1=Ip=<@yk{tDR^ox9!8%e#SJk!P#aIUvs>r-A{Yzq ztOr&%bJjBK8FBvJNuO`sEpL>^B4<-rO(YL(j`d!^k_asdSIr<*b7EPjYuy(0Q9OlY z#wbIF4T4ue67ABd4^T{`sa<;UW@Nb!aDX3MUz_{J4F1w*q+Sc zV*W|~6s?>g&#<;o&!BW(*@mcRIN$vgFP4DQX<4~dSx-|*oD8IZVinFFtMYRqhtKfF zZe?PXjYTRyh(g=qv~N2N*30ac34OdhOdZ`6@4Kb@sg2p88V%9Vj8tA6+-f&E0U~~| zc|RmkpMuxos1D4A(J_bCiz{<|=diYC91a%W%eUsQgqb+2E8+q-^OGzXZ#?&lzuGk~ zf8}>Om!i~FJ?v@kx)>fl$)zaTnomTn7JB67O&?tYqc0kX0dynb=1F= zn11G9W$OglLfKZ&S#51BvS>02x=7D!uT{M6?=n*iyxc8HmWi!&*d6xxq<<|!eo5>+ zi+lFAg0Jewl1LRlcl$dd_UWa@hVrXyObu0a+^{E#1vT;7@Km0@kEL*)0&AoRrw!k- z0>rrEJTj(q*1Ej@Qhl*I-xbfk^pDc^GJqeMJZQ2f8-Cz|nWU{1A7-jtAXie&>iuEl zL={3vM5w0W<-Do^C6)13>yz|N8q#bWwxIUn%M|IzhUf>h%~YQHJfwdlU&0~u2+B+l`8yiCBW{Y+|B{U#l_sKv9cSCbfhy>?ssl*kM{^YVM_PVMLDIzjroI z6C7MpY%FQKtei1lA#}L?RhAATkR0={FIwd?aX%qickqQHcB95LwZRpF}#EqG6TJMS(D?1R^9qR=GLpq5)zQ zzeg%}eaBpNn?FSq3JNwSg+eMVhLZ3S!{P(Z&1rcat16agd413R&Wcgm&w$qP$z4`B zM`dD7KiLp%TS{Z-=HVP{Q<#p3gG0N4npKFE4;QJDm{@71IVU=Kf{18Xu=t=8dmJZ% zM2X?gr+2R<7@C6dwrMp|>&)lRSEi`Mbac))sS4CIND;jCZi;zppQ7w2o&0y=v73PfgeK!j@itZ zp?NG~T!+s&dh*k`^N{H+rXqB2C!knZI9OJc3^$mOzL?CRU@VuzB8P`;xWLQYv@u3q zBbj+rIYYbp#_a>DZ*N{S+wSUc;$Hb`QQ~#VO3c~Jd8FJQ@ z1F2k};{pyB#+4VM_kvNi`jiWmGsx9H*yBx{>KqHGxzt8DxM|k+-_*@-{+_Bl-BbEy z75Ww8VcqNVK4WFYOIVrspa|dVNiOGv)Y-0B|eH^LaXtLvDtY~AFSE5<9{c&lU;m+a0GB58qJH)z@=_^5`s&0!0T zJg)mpfg?5I(Rh_lNMg-6znI~sn^}D{{l;A%HMAmeHPTpuR)J(wlXvV36MJ*MIW6t? z(y>Yh$#T3?w@=5zr07p~Sz7w3TLtfCe+pG3xpSq1C5Ol){dd6!u&ZQr75 zEgSOMi0nIYG`Ot(Ax0QllB5;uK=ye1^^xlU_ z(}{>^!;t7*)GI+@lrW1!C764@9C3*Fy#P{+>T`Y#6J4WLh_pzaiYbKyai@t4F9Wlp~1d&;ULOp z#^^n!lG45Db8^H^Z_`Zk&3C1=fG>n;CBM5!e>&)-n((q%VAs<^Sdc5-lAovm!QWx}9;zW{e`QjY^+%zlo{XcjqZgCE&Ic`; z1Ocj$w03U20@xNpzP^C)vax_Ix5<9#89t~H$~quXPbi)AlT;GDog?5;_D&hMNWeu; z9SZhD$V%X$)~p$Hf{oow=WUt?GE2C_)gEG+9~HzGoi>#1707SPw_(A8dd4saUQ==r z72#t#J-nXYdtNR^eOrS&i=-v`1Ho-$MwW9YdtG(@bT5tE;qjE*f(J#_?!;*dV{SVl z=X=Q4$cmG0YZ9hpQ=>Eip)yT!q{#I01Yg97A{&jtz-?)PJ!siv{O-QAks&=0qNlWayjh?E=t?Onlzw@CaxjOy7hLgh{=MtWim$(&1?Y{swTc4==tb2fj3(Kzur&n)t}leB=6!QW>m2myNvDaV z=6_OP5fcCLkG1U`#im;2%>G-~K9nW7@WI4)mIZshhgEheA!r96AgAYiS>a*c;;E=? 
zSU_ZYTFF~gPiG4`gKV$eiRIRo!u$wXxM_QL^lBMOy}-BZ;l`s7KP34jDW?3eu)NH{ z$jcngr42ptz3aWpk+;nRwMp{N6jtN6p*F35!SpTbg*~QYw|zU6rRvMS3000x@^xf3 zVkM+;g%L!g>81j7rzIPJ01mJ_yEtxoK?YY+tJ}N3s%0NG(P;6hf6|RTom)O#>bUcn zuY{MyGtiNrPU-CL%nck3(5WRxtgGDt)WZw9mud5%hq z%}h%ci73z`V-BREZ*B_LF&o#dWq-ozC$f6#ttq7xP9<-rs2)ni-4bscpc3)Dp$509+{Mx5nz29G2_B9Q z|3VK>67hjhn_lgVj^0@4nQPbidpIf=!aU}@b^jxLY^81}J6Vt;)9B3U2r0K=T67x7 z^m--rXeT&oy0Z~7?hNy9qDg}oA!6*{;*wOoE(gM&aW%s2Vh&kmy407TxrR^3-rcO7 z?kq0eGK0cfW$-msCXV4@_RW82$bC;+v9_doS#JB@PKB-~zpN`Nkqu?$G_fz;ohLWk zxcU^+;k9APneLNHB1Mq`aJa@bFqpGNZbpZ`_6L)5kyqepksc1_Gv+3>qBuN~z@``5Bex(7X4|NgcO%JN*yR$V?{xsI-;_5Vf6BQ?b}9wvwpOYadSFdi#|_h*=qtR*fI|V6sB$;!M+EVqdau?BNV}uB zQGy+*_BK7x|A-T4?Ytk0>XTzteGelGKm%gk&R$jRd+yF2N28Rn@5F=paZVx}e&lT@#Pv-Bp%M$uCHHg+z7 zg|E%XHOU3PrZQj@vd%560B1xaA46=$fWA( z?&a0`GW!)2J<^5|{C2$V38uDVtlywtf+*5BIoe41mKxKmTRD(?8fuE}`sD$co)WhQ zMAdR`eDhQR{j=}RSbfG*5S(Je_ILgHB;Q-|`5Ug^V|%J}NVmUY&L_Uml>lVm`Ysq} zd{_3Gd_w`lv$HlAOf|3UYZzp@tD5EmxzZNJUl$vrgmwiPvP%s*7puS5@$Ortfi=HM z5^x|8dQJD0hE}UJ{e8E*NZw)2CGQDzzZRVDrgW zgFeQUF^h(E7G6r9UK%gf#}4>_^i7YWp~6xR&&=`Z-NwG&Y#PXZotJ*0ECKNe zW8vDDrG01e24T_xxbr3hzIF4!TS)zUajmau4^4}8(AwhEtIi}$XcVPj6X5b&B3HtU zZ~9p@&TbfmN=C7?0*0;bp-+Ge#skt$y*422H6g<^OtT^mtC}2#aT5cbaG!;L{aFC4 z9K-Ch19;vN(r3DHrnf=Xo&yA$JN&fkCSy?Fkf<3olHs;DE(JIFXRsO;HT1WC+H+NU z3UfV~tT6NJ_5GC^anz5y;r4i2cdu%GCqHauZhNUHjZ(O&pt7@tpsfHVNim~}>Xkz3 z@jl>-#CEfhy^&C%3+F59R!C*L9E3h8^NZOTT%ax{cyS2ds|f-sdQ0-1Tb;CwYvR{Q z?UQ`={EnbmjBGbCd`8~lFi%bpWrUeJN5TwGZ+CC@V5z@1MBMwzn@7GUq@5TBV(A^$ z`HjaOCD>oDc@Ba5I^9=SEl~!b@(Tp09@7(UFGe?OTNy6~-Pa6=|P0#qXwK zG*dtz{Cz8bV$G*3I z7v=hrib9+R#rUSe)QytnMP}bbf}$kmV2DX-^S`+Ou7QZmi7{+Xg^kt*=SH^c>10Qs zp98t~NsVQ=GCQhV;z($n5aj!gCrgj#Ag!rF$CmfkOozqwrc8L4^3w6B^=?Mo?wt@N zM3sf2)eDMP#p0u)w8Xp98|xd4_Y2H4{>r$B1Q=&onnO7@()bh(%=?(o#O+}&41;!; zU3>KSYL&zW^yMZfDPVp1{CkC8{rs*nQ0RkUHV{eaO|Mu#Iq?jf6Vst3l8f-)O>6U8?7I zedc8Mx2(49t-C-5U|5`RsRW=aVZNLoVT`!*Zy{pcjJU9Edoj3eL7R z+qNYg3h9USVk((7LXSXImIe`{ek^DL)H{9XwZNcYsuAJ9$=jjDFLKTkmWfq)$2&c5 zoBN#r>E~>*mj??8)*q^^bvVT^lKLiL-KV#_vfSmB^S~@_)1>@XJtjwcXy0jYYgM?mM)*UR#~NsZ&5|s z;dq+9eq~|H3Nl(6%Hlo43zVimRTrH;(w-MCAb$)on~UVF>t^w^L)4j@%-~$=H#Nm1 zc#fRSxz;z<*O%_9+JcTsvo8yYP}mK3V$uxlV~($6cVe}lHkX1_m4 zQOl*>?+$6#IPhfF&z{ubjuCAj*thW}C|}(UyhuY}GVGan0CFH7hF(MgGg_8M%#Vu3 zo@R)3y?c9kiY>6%t|X+pnXC|Jc(6bK{GBm@Qd+(W3%;_*^gftpiVHd#r|O7-nJ~LC zQIfJIKvH-m?O<2fpJhY})$1qDV^Y1kQpgfBgcvk%_9fqwVJXq2wyjs*E4#b#phgSQ zzs0Dv$j#;X;zdoJbC0jS_vSY({OLEcCew~SsCsE;ek!5lvmKiLS&LP@>DF41#;f4~ z8}rR3hok(ZYVU5qPcqn(Z0*r%kI?_DmTkH7^JP)3`piDPDT&<>&iSafCqenSZhB65 zYGejZ^XQq)2!ze-aNd(#4?_Ui&SMCGr=oDo*5Kg!0#*KwuNdvnRd@#3`p|*dg^9Oq z#M{LqjuRX`G@HXXh+@2Kb1IE|)ZPp+6bG;g1bEVi$Tn8HYA>|PgX%rA~5$0UdMj1+RJ`7yLNy;Gx*ZkiaTpKT~7wI*XH)d13c_nDc{`Fe) z`>S?pS7U}TN6Z?ma=`qyBx{P_TMKFs5Om2a+tGc79K?Xf97_tSq)n>~T7};}f|b#;g`!PNZXz|- z{iD(u0U2M{8Y?ExRHJ&_lce03q>KdO&?hy%x?-FOy|ws`@1cJ6SVEeTf72>5MRWgE z<)(qyeV9-zy%xCXg|sU^{=6`nXmI^ZbJTIj?B-2iq+Y)gfqez=(FAr`lHt#qGG9o~ zVx-K2m?aBlR$HqQoe~vDohCL;M|>CpR+QJL&C17uwpKNd8ov2{Ub`A&FohS%F%azd z!l}=Xw{PRYEGh2b5LS&=;+p}OflKr$VTSsBBO5oR*QfD5rCgCSOVcm#zeV4D{Onkd z_+v!`qG@SNgQaPp!a5?a`NkLWcPL&Kq^@M?};Tsxy_ieZH zf}>2Bdb9)KoCAFtQF`Lzjn!A+PNFGQ#wW)$(7~RsVTLW3JqPAe(gQ_Sb7$^Wp;y`7 zE8>x?12ZpmSE0_}#*JgiK1%=MlEd_|O%I>y{YeO7$=uq0p<*!qlif94jhG}V3J*(G zr{S1A^wDX1?Ts{1H z^cJaP?lKBLIy@Pv{!~~}F!`B0ydW54^`qU;-KNaT`0Qug6vt$;HIpt$nHl!7MBJqU zkMw{Y*DP;?qrc!@u#>Et!_gi!g}JZ)(h0O16^8_TiS^SmUKtD&gp#&77mUk=0hB&G zrMFg6cOMqh_=0JmHH@kIYH5Jp0e6=ArtE%Rf>U@kj4%{b~}Bj1WT(^Q1U(|HQBVb7)^ zGC}_MYCzlrw#pGzJ~yPhJ;b-BV}{%|{ZA))%=NzYew%L}%+|wZ-T>(r_7`DHc)BRd 
zoOozS%*Z)s6weHJOL8}LR_d!3EoOdq#1B{IBe^Oer5LANIR!z`k09PgK#hm?nlJ>6 zDz7(~4XhD|zv3Ymi-#Z0g=&iK9P>pZ1&X=3O`%(-XYQz+N&j`TkbvKW@+r}`vd1@E zf5rA>>-esl2YEm_{WfmDR+F9X@R4P+GBURkShIxoIJgLP%573YK< zt(6xazWI*R-#oIxE}fwbPJAL@Ls4w%g7nb}a`1w-ugM3l+bx~F()ub6FEq^rxs$~m4_Q7eEoUJsC z54^Ohl8qHWFqH&bE|a-@j$GLpid%2xG3S1Gt5g^Uk>E73Q6~@ms9kWPA;>>GA~3KxdVc;EFr znKwsot;lJ^r4#f&`Sm& z=pWoI2);_O6U;^fQ(==Uh=nus)r)QW+X9+ZgThP~_S^$67i;b#L!5bgc+#I_QZp6J zxi%FZkKb;K`^VFi;vk9138bEJrBU*b%EFJ@F8z6da($F>oY|qy>a=F;=*@dRr)MsB zo!sd$e3NFiW2~xCfs+S|7Vxg`zt<1e%kuK}#zGi0upW+ZShMxa)HmM&qoul}z=O@x z#pGqDlluxdKeN1v;)C^KJZ!Aad+#uUG{f%ww7Md;OYCF+x)5oZ1ElbzPXE-9E3i8p z@cWH*|54|@N1Ku!9W==~bu1cPyZ61q{AgxJrFwb8#QyzZn<|43Y{>SJlU-h!gjiZs zKCmnw4>EU^eq`+l4dTqX-ktwxODE{jj0rmY2=4MAQaGIMh2YvmVW@^Yb)l{!pRv2Q zsZ6{wyZvWX72&4KvHGZ&weDdD3uiZ5X1K}LDw+egX=Rz+6)IAOwNBP@&(gy9jy;3y z`0d{QSUgkT9Ms&P2=Rl&?0<0Be}~-ffdobIsuW@ZcqFYaZB9v~fj0Wxuo3;U*9B*7Ay)$JTek>!wIYMkYLF ztgYC9dFw<+bif(?8zxk259Ev2?sKo+yfhQfEbDXm64Yb8paN85?`!ON)Ia2&0(j*B z(^D|+iTv^Viikb0gYOiZRvQkb!+M1h72dV|`2EHK2=?3=yYw+y6z@gxD%$Xv>Ey@Y z;dsxFpA+VIQrkJKVzE0hpJk#PFwymJ^?@Tzn_OnL%Hc#P^Wg0aUfHpm{e(zeofuH+5Q9Bo1O}irGb;iFmr&x^v5Dys`FXjLM-HO6Ym7>1vc*Efq%~fil-|@ zxfpgDE@@`n=+6fX`3<$lFo;8_Mw753y@fnAh4_J#(i(|GH|M#lx>*F#M4}Bc zRbhefW46lvuqAHC0a*xH8#6f~miw(Q1AYMzC_{m&-=Qs6N~QHCJhX)AftCU|YUf** z=P-~L(}g7&)KWrd$9(3Kf}NaVg4uYhoRJbU>!{+)(QLW=ry%s*@R3TidRn- zI_5O(XL6SdTeQu<4AGxk0G)U0wrc4;$V*e05JnNL1b89}p}G&(NW2I;w6fA>{oh>I z5{DB@&3jQAmyk`SxfP_aB)V}91t%A)!coe8kVjwo)XC3LztvW&qnMs<4UL{|7ArVUjEBd*(Y_nj zW9B=-wJE|ek;%>`b6Bi#&P|BuDK&n0kR9KmV$?kE+N)KYs20ypUoftYTG}|t)C2knxVLy@L(P&l=(p`F=4sY zGCaTI=K$*Wo}t;&*<5}Y;N;on_D5CmN!5S*%sCQo_c5V3v?t7>m$%1^VVdiSmMv8X z;K`h4Bzf({^o0cC*HE53(}Pg}GpO0JqHmrxZ0`?1)v3!kf#QSYCx`OIx=!I?VC@Pm zUbWI|ZGvjIvv=Nc&-Q3`0Z>*JskC+f?*lZN(&lP7YunFC2S@mZ0W)r_ny>M8XXGcy zxV+kx96El=RrR%T4RV)CFUq{wG(AP69W9(928v)x_iW7Qn}u3!?*~ zCwh~zj-5+f@~y{je2}z^#3gH1Q?4{az)?KAG-)&J=WH0AWAkTzH{PUW*P zRS60PwH)VB^^4TER^j0z>1Cgx3W({jf{#z-y;X z%pbkd;e5>T44}sz^v#y* zOmcm-!;+3|iR75@CEYV14{-RKHYb2x6EbUT^Ey-uCpsqvP;fNDV8FnR?Ucfm=vcEf z{Coy<=E4f$98p+scxH?@6Oc|@yLq`kGqYxIVs{EU>Ut5su(r6-GT{Gb2wG$0wIOK3z z>Phr3Q8<5QiVLtpKYj?ToC3CMVvD;Lx*bP{7XY{AuT&LpVJ4X8=E+&!L0J0L`nkGD zzJ_gF>BSD?wSCu6k4xLu7p^UN*YnklR{ZQ*6veYa29x|a_oX6huJhMoj@N+f!Jrz{ z0bSxH8VbCQNWj|?mMidN$V0x8rJE-3@#$85UunSy&%Sb{?5Ltl9mXR>C`py%;9-@T z)uP@&>?ok9WDUO_+6;uxg>y!#TQxbWa*9`e|NPOldagO9uC+oF^JOf1WBsOwwOmYh zQ9YwR@b;AjhhJU2Z5%L)~rNE6$V@t`*0w<|zkoJa|7 z?yP(K5mL2Y_oWNd_O+Hi*?cA{vC}qT8Kcys_?)w`xc)q8^qG7uZAUiN7-c#+#U;fp z_)@YLEizs<-DJeMbSVSQ#MzOFKK?~nH(JDiRrY4u>;GuyOS6hY`N76F++{Y-s7o;f=p?<7{)jvTRg)IFp68wqbv@%GaY&D+NHbuzae^ z3{Y>;MP&(}?P8Owkm0KN+74q8B9`EZ6q*Vb_e7QGipgbtPoULrX%sm|Bhqx?)$^jM z1L9t8ydX;$FxJEA_=-5QRr!&7Wg}AE)HYV( z@9ewlCZiGys}kP&|9$3Jcr0R)Zlt-E*w+_OXba?+z(n_(1J?6l7i!>K)A?*E>PHBx z{xm$^y~>&E{5wT|)v1NIY*HHiY)V$o*|N?4(g0Xl>kTAj5ZJmlM%g1PL4OxWK*jAN zqaD#>!sON7l(^NTw01{$1yGlKq*9^(%>S7q@ZfsHFH`k6dEw!y9>g|~#GsHfGR>;V z1<1;3$PWjQrC!j)WWzD-a7#j2$>#T%)<2qw5`K-b8wA&Q^!v)}AI)pA;VQPmDf8!M zYR^V?@og26{CY9D^JCE#)rMQbE{UHzi<=GE?^Bi)7^KsQamC{(`{f@4L4!l7u9E9b zvC7ZEJW^{og|zwHHZEgl2iYC>&Rb6nsws2XmJWx1osj4?7c5r0Bl6ql$oJ~ZIL1FR z6(&s_4YPT|x5d@Y4$K)ENo(8QnSy1uwK|NxLDq zvMjp?Hndhf&|U8qsJ{u#Nzmc zc@lyM-gT2qO`oEl?=_u%hIgt z(B&s0Bgrz=^zoKKqG*$vTKzKQngns*#;ZPFh0UrqN6E4_EDV4>!O*CH1Y;VCSG<(! 
z*kDn$!r^^WPKti{*yni_wq^A3PZGb@aBMY9#qh9^X^_#zzB4>Hgdw{UvqieW82$(D z6JzyS3J~93}%(>;f zh)eElC_u?Q7_Opiz~DC>pg~&3vz>A-i9qNuqKeGg?lB$7HHjX;KRP+>UvD5mV}6Ki?)>&6xI6rBUSQ2$87nrp zrwaKsS&P>A(S4FnU@$!!zErfFxoYeiKrcfD!4v%t#|e0E9j%9)E>n}dK%Bnn!2_FC zIpf@nqSlV{iD<`uQw=pASO1?jb3L7^b~4f_VXHqzmyF*P56*{9Z6IQXyX;GrM)c`D zR>#0XSq%Jy-Lopq7;+EE34-a@b$e&0d>OP@qLBRimn?ss!w%w_j!ecruKD#Ptd1=v zQA9+0(Ky1`&?ejEnr+X=2WG7f{)u@h*SnsSuSNr@W7#K3`PaMO{4r<`I4?W*EA>Mg z78zRc>j8N_3x&nswK7@qt6kta0>wFV0f1;+f!x7h?f4cG`IzTFd=~ijcTW^|*LY6L|Dcq)WBT zh~wCl%$?k}{pT#PLTB+)rmI|0#1gezO-RY#pY)W{oYis1% zwprCWH;?nJRj##=Phf)VS45R!R&rXs7ovY4=jwf89*&iq?OSy_rY@P6;uC3Xx;Odv zcU<^HrvvYtTZX^QkZPr|cOezHbcy1`!qao#Bk}UDY=GL}0&@5FKShxUh(*Z0y`}_> z7{&IS+8wM{*#2coo@cmC|Dq9ODcWaWWxRKYm*5*79vhL7E!5i_-&soA@?pIXdzR+_ zWY9X)!&VvS$gUG)V$UPyzy3aWSCpoN}OXuvUd(dZfsaW*6qSD1$QRVqf{NwFo|1Qahl%aD>Y5mv_cnPZ`6>>{ooX z=;9&0-UVa9{cQ56n$^;iyMU7!E&Ho z05p)Ej(&9MMmU&$+xp}NmEC`KqW|)F2fU$iy5m9fKU>p}DSDXS3%D;D!>7C2%xPxDbEvOOBh+P!P zznkj6n->%S@V{9TFo^89`~sf>lmh%u=N%WE0qif(?ggjEgXY?=cR(LEa3@%+U@!ke zpas5VejVH4OKJBD8G0ckZY5R31=v|kdeG^rN8-O6p zypMQ->iYN2XGMY<&0l?!C7$i21Mo56Ebo5fJGPEsn*x_==7C?FEyTzIRf;drLa+;H z1_19O;yn`oEzADPOZIGA{?WZ-fH1=e0;SU4Sn+rN-&*gmsMmo@>mE};=P0sex!0ULW z{}S;qB5-1SBTpA7|6j(0RKrDM0rwYKP(VC%As+7a$ws!P!_H64ft}gaPO#04%d`p1 z0K-Anhwjqk3P6`T18x`q<6wFJeUNut++C*^I3D0L=;nt@v>;&nWy4!Q4B7yG4~!!s zgT3>jH2?rG;TUKV055{&{deqcPn`f4Bm!>(T|xkEGSO+GF6;)B{GyxQUxvgZs!{+}Z?GSQoZ*b|- zvK|aZ+}?ghaXUCH^M7oDOX@eCtX`VogWl)aX8y0X0RG;4@c$?zBg$8gKT}1vP=_bA%jz>7P+b&xH+Vvheg@2{%s?}grNJ$eu9M2iA&c*-nS z-alFwfr>>dUUhUwLEpS-1}cTzz3cw4t!?ywes-y$?;db9O^RFBxVt)+c9S;%;3-h& zkpiG^p{JZb4gSwpXYr&LSGXnfskM9^%<*;*l-OmI;Dw$7AY>JM{|;4}GOP3;`qhzsD~x3*OMF%d-Vx5fR#)@g58c zdsKIVRkH;+IoDM0|1XT&rAw|n@{07oywo4vW50g~;BjV?H2Z=CZ@FXCqRnJ`9H_poW|yop^)yM7Mr z@5ZRY!P@!XENB5gv(qR7uIN?Ry4>7g&EI?J`)zN=<>9#r1F2Kkk?VaUOzM4kht`#@^jX0z)Vz_^2B<;ls&+<<;7 z#-99rMjR@WPbudkHtoJ|kxlK8yt9q)nd8biY#bw+Fj{Y=Z8iKmXU@`Jbi;wtqkCW_ zCG+B>FBsg$#zwunhtwfJR;%?~RsF_y@74oeQ})CzY~7o8M!+)zv#g7zOwrOm#p**s zLQ24{YAlq1tqh1@tnw9kAE2+DkqmJ)K?eC&p-iUO@t@1(>i$)QS1@(&UvLsjuj4;+ z-P8QfD0sIeer@1ViL7e2JJX0l%BQvkHwlD6#$s0@AMpYy7tcPnbyN2E_lN5wB`c3F z)q(5|lJLk#c?iAflVjntu&@QO#4ax{TmAdtV3G&ivlyEN1NORz z`1j(?VM=KPKds*}*qXtRt(e_LFkwyBQfv#V#s04ty7D}KL^kUWfT+HyE)ACi#|*CS z15&RC2YO&-CFzYEYIJn;0<$M5sFOPgbochEdhX}Xmmpk@-O!ax&=%|u2c7JT`Maq`*As|(Aw8`bk4O~&+dX6$KK)u^aFr& zCmsXf+=~h$6baq{tq$P5FUu15WCH>Mnw&3gz_y|%Ke%=2^zC^%ff1X+=Wg&Ok?ys# z5YO%cF-XSnhGWYG`Pv881znnMv-S3s~sAJHc14UIo-Z8u3p1 z(2k<88YDD3+v@dN5A`Xy{zJR9Lz#B~ww8B(^!OU1+G086XRVE{T#?KxpOymA>t$~j zXGzRDBJU}D6yLUs+|T9X-oLBxEivaW>x!PG`vgz%;jN&SA30-W>VrjvKGHStK=Oa| z%rBoilmeGZ7`VGM=@d*V-@dZ9FMyP$Ln~Yw35S_6GN<#Ad^S4FKVDvpg*UsR0J!zl zeT`Dg)&z131{my5<29`;(gEV$j!z@r3@G}hqS66$;Dg#1=H}+=>fI07vO5x;e9L`r z6Su0Zv^K>&cU0E}s7M4wq*1-Ui0J(N_lez@)u_O|A=5nzprxVrivbeaxdAwZvdyi& zzP`V|AE2}O`ud(@eTTA~xO-p5qJy?RU**wp=22_soPGsZvq&8=yX zU$I8_!j8`Oe&beo!8P;m8| z*3|iE#f<7Fef6&-Xa1)-fk_N=6HsGI18*ZA@O1X|#bi}7IM+Oul_gH1JmTZyEApwk z%AJt`hk}h7Lh1k>ZhI5G1y^T1mt_oANT#)7N_2q}8>C zQ{PDtOT@vI-8gFSyCLW@Ui$8AGM^*iD*h#6)vUe2V!@ZyLD2_`EOxYT_R119K5irF zrSu1aF}#qyW`XNnuFjZC0<-_f%BORQfw_L8iahbHHtSncL;p)c7@f4;s2@{#Z8R&^yX4MsKO!%WJ*Mqr%K+%h%b)cuEq)rLU)x$p30-~uImkY13j%iD z<&^!Db+g3eNy8+fJDvBIN>8yGeF8qF^$a8zbBlwJbl^JRj9ah zff^Y#ezyh5QpeEsuhbVAlaTEVBG{c=fo}$^;s4?J&2q{CT2SKAy<3a!C*M-0T${+w zWW6`Wg;h?&bU6t-F|az%Rc&~rA>5QVJy08tVUWto(#}68%n$xz^5ZDE%sL=~G zfLy(gd9GdpEI8*|B0;e$0B{r-szbPgM*P&)CP3Q+hOvli#oXN7*pN7BujRDMZ9JKk zaTT}C=a@rA5epBE$v+qhfV+SH&Q0I|(U0pEcXyZSBP`MR}+%bLALFk_x{9Pn_FD)>>IecFnef_wkrAC}fB$C4`ZWcZD} 
zyWK7}Nq@c%-a}#czWm34+Ov!}NVqg>{Og%c36j5eG43aTA!U*cIy*brJA?6>RCBYj z*)GSY1Rcl)_c|Y~WX7CN%@7!gTL2dv;Fqr`$)3Klv02`N5d&gu{~QDXcIEGnH(_T0 zO!MNiT-a%D8k4z*A^;T@?31FZbi3B^+=>Ew;GFoN@lEk_2+4NYEk1g>tAogueLw7NUH)K#A4(SqhD5hJa&1)atdpQ>V44Qy<4p3-?Wbh`FxI;bH~1tr;% zp4_IuXzl1x7g;;b$D|?>TF2KqYuk6bDBY3EtUpz$<-r9*^G>%P`kC2s9wrr^Yt&`h zenG%zB}34Iwb7n5}B`* zMVy(kP^#YFqqwEPF_)Z%{KQ6MR(4G~Lu#W3Lsk0cN2%(nG1FVn!y`dE(=rMECxTDL z@)$Orh#{Mt?TYvf5V!Z_w>PL#zQo7HTMBH;-b7{=qe^NACMj88at^ZE+T^g&dC_MR zm0awK@=F?Cr~5gNylZlwe>)by!wZ7Xkv&!G?$NLecPNxFqQ7?x(5BZ)ar!s&O08@`*=0$h&vB6M;(fTD(c?5 zn6z|iJWZBxA7X^&jxY@|iWfx->&3s9t`eI&Z}W(@EbTG%pH+7)rn+dy4xM5!pRX2f(BT z=m5@G<+DKr_lMY+XfiUg_=JQT8w0?`((&tx|~@?TyJAU%Jgno+4Xa=-C)RDd1!apV>)X~oOZVZ|LMYDTF! z+aW_TcIS+)40C)p)pex8_4LHDvutx_q~Zg$NQm+NK9vLMLd-pCE5if}!)g#=bTjd; za7+vbkLf{s^;e;d8b%iW@uzgx)%1(S(Zu1I5y&FwZOPvUOf8d1kli=0{!%DYQrrIX zWBVSIJlel!4nkSJ{hFRyafgKua&P`KFe;66#GO^wYke3OA}U&fF2c*Hj?H4PZIlVAgI~ntU9+g5{|btFARh z!C$R~TZig+{D;%bl!`AuL6;v-Dn$r-a?d=2S_^R^?7SK{tgV-(rEPG!PdZ>e6Q1VI z_r8RNDSiV|S|vr{ykR$1?}WsadY*jo$G<^sQQbc7r^0%Q)v?&8c}rFTIgpL19EP8YkEinxNk!gCjpjj%*?NPF??Fz6hke)izouwjma zRllbT=qK^~!liGZK3OkUh3G-00+mP18~f0i@L_v|jpt=?bDDN>Lw@3&%Pp)AzTMBU zhw^cZUfgE(E4r@&q$(X9)B%T>>z~A}cTh3k-mE|`3*|O#P*>c^R<$)o*s5wsmmT#S z8Rtgmg%7VjHQ7DhO!tsv1UiD6GrKkEtCi5xv{5v+-VKo_;ncO#6g;UJnsUE3OSnaG zA$-6%{za}uN7s#n66Ko4thZjj7Ctogk*dI-{db^KTnuy^K<|J4tho7uT~eGO{Q0AM zku5)PsXg62fC~ZObl*%XS7%+YZAy8NtlnDAZ7zLI>hc-a*;x5JAT?-Tl-Vj`iYjBrP+}$lu+&w_hZ`$)b@A}qy|AjX{ zuojcd+%xywy03lh-MTPsr}y>87kveFQJjMjH_Fhjx{9ttQENEH33BR-!d-hciaw-r zL!@`7K@!<*>NN#4d*r%)zIzmVxHmY8&exlgcZ${U+Q#`aHF@8KFjdBCMG&ruMxBT9 z#59nmXV#k-Rx6y$L%ee4>@#<3LO@`|#ylp01);X>+O!cDUbsv@%20EPEFL?Ju}PU& zm53gl`D$Uimq~cAy(6_6k0)|x{&2BWll^t_8+uP?$!s`nP%*Of89%NRyBPS~C9Okeh?Gp@geemR`0}CB~MkfOULuV+FyL$;(UpJQ8k+;LniQ zn;-LXrNP-7W_fzWW`JnFxjC?+3I%12hHCU|JH46##tO4D$el^HrrPRk=i+Mnl)!Xi zO2%}v*?z9C!$-X~%h#XQtKZG(3p6%Rid2=eG157sWzC)k=upXYx}>u#Xgm0e(p3oX zw-&r-<*dGh(BK~{a!Ia1Dg*EP3!qNu=xTe;#pviIWzC#Ct;sya29Z$|j&)P9hoZX! zy+14_c<{G2VNpNr&Ul*7MqbC3?`*{2#tT^Kc_GaudwG)2y{Q(|P^RK?R&}g`%&j?W zsvYLzl`%s`aiMaIOv5YcMnxZr30rzO4*GA~Z*e0j@(X@l^!vw4J&Rd$S%l47IQJrZ zaJ`6wV8S5k+hfeI)ekYuWLHCX9e8B}qWdIzm;WyNtg(b_E0hfscX*Q(`b%8&8AWUa z`~VkS(laUMz^jhpob^Yoex~qWlxSFsYIe-RZihAi+G^ML8 z9eJ+cE4nmz>$lXmq9jI$vn zaP^$*9-MVy>(AwSb%A@yir#)nhDGM~6N)n8_u6+-B#@M@aZ@#YVC6Kc@7#fD*)>O5 z)ySFoYCC)sdB}E~Bxgb{FBl^G1G0b`W1tZ|`gnF9{L?E%uD9PL4TY8GlIF_8Kk*<0~=ot#>BjPQY_m?uL9)J?~D^fpQdF}Rv5@Q z6a|mvXUl$U?7812XK zNmG1+JWE2y1rZUA-QttkAg&F9joI!jxB0fn-4qkmT#Ov02K094Ea?#TXLbDdo1ot@J=EAfy&=Y+t*6!5n0QVpltKQ}) zP%s|jx;~i~k^ocWcGd_}q`9TU!WxUYqX>^zLgR$K%YwkQ`JuR{7GCXjd6?j8?X#n? zuw7mPN`bptJT=PV%`>}M$jd)GNV6m3bG{wDUm7_O71Ua*$Fq9;YP^h9kCRw@&x&CQ z?y^+|pu?9k+I9DNrDo0{5wDYY<;xl#v&rz{-DI2l$e9R|U+BU}qYvI$btmGH@GxKI zAEW7T_r*kTp%Qw0k+Xr(!WXk0n6|8f1hc0^*~}wMGWxrM2cjP>Y)ff~@v?D@kwU(An4;%HX)m5OaT`Ioeet;$-r?)y|G z)^bh%5(|1O?}Mv!PkT5_E;_ty33z0&dwOeO+=p{3b*Jpxm*65Zz zTR)ydO<7t|Nx6c!m>-E%=V=TD%YEhvI+u!wsZl)?T?x;^f(&SBt0^8)Q%=Xm?`K%B z^Dxt?egZ{B4_v@`FE}q;>zeB>4O*aJc4Jc-y-4*JcorT+#FTDMCN5T;zwz!F7`dGr zOG<33F-gjk8A;Lr{$n;mPxHF8mD5?5&(L-g4CW@J(#VS#D`=U@J({d->~74anqqzz zc@(u7%*0Qp-1C;7gz+M3DzVoDwxoYi5eVk8(Co{`d>%AbKtWlW+t^MnPL&0``wf%0 zp|o0_=k$3RYn(jvibHP|w|el7A|=May;Ii+v8qV$XJ~8*(hfG#yF$*wZL)E{;P`V* z7L^`ms_Gu~QEY-%Vm+$|^-o9CpasWXg zmnsFxCcu=($j}hH4fws?0(k#N-oy^%xG5_yuR1k0>cPp_IH$S!_77Rdc>r~>VE4y? z=>XB9{o%ZY9RM+>%k*1;LFUDUUcGan-;XY*!()GiyE8GpB4*)Kc(r>UUZTy!g!6V? 
z$ncruZk<{jnaqQnzyE`@S4w-*=0aM0rjOTQ`m~p*ss4S14Sg_!72p)F%C_ z?K+jX_+jo36lYgR z5lPi1^JbjMnbSsE zdW5_E{Qawp82gnTiH%{xvnJxl!jM&ntV#^D zTC33{jm@D_D?A zJJ@`M@q$i4oZGuCo~O}`PA>Z%)c&N5`D$$@Amz}R%zdyUK_Wr?##Oxa3#(sN#LWZQ z=9QScjOt{o4+ZtoFL(m0*~xFp=$#>KCWJs$WncpL^+srRG&5X zy5`w>+d`Kt^HpfD%nFOiXTCh)X$qHRFwB3&iG>9mLh%&{5|?WptTkmfP{?TYZFzAg zej@(0prvi5otAAB(f6{F)9hLAL#rH75aqgB`3(Hb(&nk;K2Ph(h-rmvn`dD3%#x{7 zjJ%X|To_pNK_gpH9B5mFljG|G%02=MvFwM6T-WU{fWlvU)YlX5B`@XS#&gq>biIb= zp#2?Je!09L0wQ1n749OB5mkdS;RYu;U@$1_XHLbRkRo8imZoU|3>P+*malG&yTb6$ z30P7+6(sx?G?Y@2iwzC~MaXnv!L&!9PNAVqsK0+sIDWXO!oQT!$;Dy^=}vUa{ze%( zXCC2xMdgGozN;1MiY-GmrcSC;Cu_h`qUiO9@V%{DH1{e#4#3`7Tfw`%cLy5&Lh!34GoImu@a7 z=mJOjEOL-4rE(&Hn4mut>*~uk_vK7}8uxX7v$DeEYcpvL9=RJf#CAhkgj*^t z`Kw{5fG7*5j!^fM+k6lJtv&(Q7x_6qZR;!6EcKP zoK#ig`iwhIVu01lxGp#f)iiG%b&~PH*%J>7(lSL%yT4Tf;yi5Pf@HLhWw&N~yOrydjhJ zoZcJ`*AXOH}hMXx|_NnGJI={Mhw_o4i$JESx>YQPsWs#T1O^w0V9B|qywiFyc z{9Uz2?I1ILs2SsmiRIxbz!aZX#l1PN0L$#N^}ky0{Y<5_Hb+bxh!|`&a3~ooP!VW} z*nh&|6OarR&?XL$c-#^%!I+TTxs8o6Ir@>ahi+W_$NS>ZGFb%~o?UK++yFdn zr#Q5YjcJ+so@p$X^(lIJUn)T$+k8PC_dYV*U8TY}5GY5UaWVN_qi}_Hn7`5xU2lan z%_q>hWAFe0z^DN^u{Tj|y@~7%T6~bk(|6I`q|%;f$8A>^)~W0!)Lld-zNEM<*8_3I zc!$uLKNS$@f)0MRg2zwlld-Y<d@WtRc0Kg`?v|5ls}8Lj{QXx?I{) zQwSfEs&7;`35WHD;NPV62BSqsb1L3qN$~gK!=0(tnsf^~r92D-RyA8nH`HaIw~)1y zJ7YrO@O>kb$eetq`;qj!Wh3+Y5^V|j$c=bO&qe>TPPuoG;S@*Z`XSRv9M*`?f}L)h z9@-q!64}W;E}&XW@_k>2IPtgNvMtFINOr#6OYv0A{K5tTh2;dmg4skLRfJKT-u*It zJDSYy;@JqHaO9aDeoxBmUDkH?=BXbi%xc?Je3W$PWrcJbHn3qy?Wr>jK_M<{KGNi= zv0`Z!GsqK$&unYRDaku1z1gbPWR8!I@9yqayWz@1#alqt^J(Y^oh_$~M<|H~M0HY2 z*`fiDVp355H_TXgNcaHRg=6EpTO0nP%l};ebR7PohL5-SQzL(Og!m^1DPpap! zZoWm^D1eDrX_+n0Z>wFwYSEqn=77(OWkls^d>`3Dr#GrjkC5h_%ZV_IUyg`F?Q*<5 zoOkaqp4DYv9drFkS0+$wK=&r6YlynpVJcK>0S!c@i`-{CXw&DX0r|Akl8hNPq9pG` zxfx|zP08Igh+7kz_TG%>hNcVJk|r@Ym{1r06h(bh>5vTbD=<6y{QFS)w;GKjee&lg z3l8iZZ6O8{V6aet#Z+sHE+Fa7a#tt%GILVk~_x)U@; z%>N?qFyw~1SYiy6!;}1;Yb|@-)l66K4@OXDhnd6*7=;Zd#y~#VYPU3;7Braj|HxsY zxP-K0h%_beEmwhovWepK@?YIBb^x~A;2Q#58Nh~8dCP8S=@xoRspM~5sX6qSpwVlaqJ5=1BtG$k)spj+ zKe^BGO+=X%wX$_^>KimrBW396_KfIGbe2csa8h&{pV`5|247Zgir$A8nnJ^${J)kk zj@qZgzp-g-&tXY*2c4R7U%2nA-;-`3^cxwBre00hNVK@>l(YB$P^^TtTHX9Ju-K!y zboc{eyi8}9lnB;fmjR{n4)&%eWGqS#a#X#_5r5CqQvP1G1lHlMjIhhG7cwL(X^77t11Xk- zRwN{6C!iV`P#ycZXtT2Nrfz9uNyq_JslDXqX;V<0dbdWuIVriSOkM$^a;S4ZMZfm* z=oy_`d*U&6_jV~L4+kzWW7E*dv=3|!X()}Z84k98*(RXsntIwL9I1k63msZ?@3Q6k z$h{U9!y|M=7BE_65(4sV--$LEowosgKM}xJn-T&z&+h=~>r5VN+P{OM|M9SJo=bQOXB)MCj=XYi?8A3g z&d((#q}(lGTq1AA|B?9mjSGt4`m3gi`FDlQ6!gK7J=dE&2j%ZXEh>cp9A8r3&_Tk$ z6n1Wx)L3+Wx>p8(doBx=1d+})-i9-OsD>e-qpRBp+v(gCl~Qk6tD+A5Cr}4U(6g5L z4S8kEwNn*7#?s}_J@|99W$)osYvUpXDNDFPRGfIm?ILo^k0_NX-W6&#pq$Q3)@BwN z14YK{dTvh6jMwRz?viiAGKqJ|3fHu(@^E$ApeP zNEAhI{b%3vvrie;e|E_S?Lv20+!>v#y?T)+jXJiJ@fyKS&Ta}-M#SZHJhR#)mZ3lh zUGooLEUrH%wd5+;jV?--A!Zy~gVHgbW!dVD{8J1Md=xoLzrEW}X&CW?e^pBBGHykY zy%TSxvG{?)Fv)|8hYwtnMzdvyY2^Q1oTk}IYx z8h7kXj3E$Y)d4DiJvQ~46%?S&TUfLeW7O0=AMwR|W_IUj&CYrkW-{ilkEl0{`)Z1p zjQ)-n|7EOsg4iXdfMg!`?*VAy(+uwk{@bibY$gI#9th`@79=6tC zrB=gG@7tNNfo|e1BYx2ZYjz1Z+q~cRdztAoy6B?^q*K$Lr#HEHS63h1EcdBN_s398 zar^d&0L<(9CHd2#v(FJqxv%EpFrPG}4NGBHQL-T)+gaYP0Bn@rm8{vs0(3G57iT^` zO3FAjQev9P9qzTd%j?ZrV{T&c-sx9qEZZHne^&T8Q8SV;w=@@*c-T_) z%TmU@m7gQ7iw`C1olw9hDG6JBk*i;UrD4l1>cT6bRKE)yJzS2TmZ=xr@6LXubadqU zZR$PCWU0~j7BWh8a^D9>Lea=Whxg&XKA(u3;JM6-C{8#g1 zduP>sA@VDH!W@Md{<@XYVO7Ns2CE3@9vdgBN6wpn!NQuE9(TP%tM$+m=}J-k?kY*_ zvwcR(1b|3bSlOIK`HQYG$RNMySde~$#C_u8YKCAipLRG+&RosQ>z@Qv667q> zF2_#a!IPogKcV}zbBllxLK@3phj>Z&%v&r6*S!c@nZMb6->eA#a9sudG(^DGg20=D zXh@vNMvIn2FC*%Yp2&Ev4Ann?S2Vfxe6@vX?h@eh4KQL?_qr3h9dv-f=4q3jfER)s 
z;|G&wEB~3qayfjl-t|nN!`X@hzd3Zw(u_Y;O;R%YVR>G4Z$gqv*>Np6X0UM098|Yd z^%XVy?Y#XQS-dLokX^P@Vp6mu>|~v2|GF_rC zfN$2w4h(kYd1#j#{12J=b2_(xFwmjTibBo_J!%eEzGtX)g)?e83=t-j?0{2^t887+8DtM^q`gmRWUHMyVn0Y8ErxT4a5EK;L^zPrM z5xeM-G_+{ZKf7tmCZ8|UPNOr+QyNBC6n=fCGqod*Y11#Ka2|3z&jj~Jtmmv$DB5U^ z8(YN%;rWL3_PR5_#^=SNm)q&f>}XuQ@*?*vSV2@@Nqs`o6?QtESputk1hub}&&>>) zsZ;;piLG=`{5<-F^^H^~j`sxpu^*qk=K0)K`%jxJ=weqW6$ogKbj^xhg5t;nvE`^m z_^^*U21N2|$QqhR344gaEsH5AR#YNa`re|<7q37fXdFSg3VN*{xxCHcbh`Je3VdEa z#`}5+1Ugt9VToQaUvZM^7~~w!bISMd8SrGcK0f+{p8>U46~zrnV)|qu9f^6bk}@dC zXU&YiOg~RTEEo#LuppWrGi=|5tUC__!Shp+P)+QEY2rlqA>Njk_eU@z}3eWJ`&eIoRAf zNxh9iD!>VKK-OA%MctQn4|06h_G2_OXf6$voh6e$4wuJg0ld6BF&TUO(*U*_0H&`p zMP{-K{SEom4^#YjcAVyTe~aP|&4uYU{mBQo=h(_~#3CdnE-EaUHw`p+YH!9>1-KHS@*-zRi?^`_^xjVMf0?Y3XLNzZ*VDPh@ttt()mpx8=l ztyf2XSwpcnEQ3WWrN#AAo<)0SZXYsTaAETyIuS2Le-1f_6D@QySjwxXlixBq$%Su% zPFNKCbP!|nq<4bM=OihsW-e)tL^|EW;f%xYsyMrL_g3XeQR>wA?~%n(Q;`$dvEUk# zuyl8inQRLOA5xq9&JvnSV-{{=yI35D3^I|6jcpbOB8BEt=dt<0!Ts0K&9{cXZQ!0B za3ODZXOYewUUj>J%nB;=8KJv=PH~b-PR<41N%O?NBdvnb37DI02X?y0@J#qt0dbG-gEf5o00&+)=-7H% zJ_@X%tmXJ`tRz;3rb-M=E!WvglcI|6m51G&_));c8&IJ}Phx;@c>ApikNeL_t3oU1 zw^&BKh(t0r*;4Qb^WZdES-2zuzp5dU$`FIU>yqy28@I5jJ~VNDsl3<-t`>La0;iuR zvf+TPvhr|uT*du(S4AjVkVR3i+3I@pJH>k31`9%Su8hM!p(b_bqx$axNjxz7HDV|| zx6#IWMayO_zvh|c2R-4B)IvmrnJ3_jJ27H0wWHXpxW;br9W@ioZq7-hB7i`Puq+}Y zJ=%OQ?+s9`(gz^WAbNC<>Cu;a?Jt{2l9QtVBB$09^_{gf?XEAd;_}XFGDJ+_n=2$Y z`ZI_3S;<5qWwEGuK8A5&sP(vdROsUl#wYrHwL+y@_rC%#QNyzBCN!^cUKU5ov=S{y z+gGN-i2OcDxa%}dIVB{6p|NVAk6Fzv8gjx-%{|?cMa>3d)A?MD!i{j8WWtmeBk}35 zAiy+ykgU>&LWTZA@VR7UP+`q1R7m$|FnR2y(>UKD5 zp(!3i6Dey{Q0L_f{hXxrC64>A0qo41#QwV0kRot=buZ9U~>|n&WJnB9lv@)r&pYJx}|(PzY#H!Qi$Od ztF0D;`&sN(vd~A52p6P4Vk+GyDf!RSBQ#nZjQ~;@_+9=GwDk9XxZVs+3FMhQ?98)8 z)_CvB@m2lIe2rD8`QLnqCFJ;YsqS9J@X6K58ieTw&mDILt@H6pTXVCJuNW=jpl%s% zHB%ZB6Vn?_iTg_5{dW)^quNtnK7iT}aQG?4JjI zg^-P|=x#Z<*ZWe`#3r0ezcHTLwl6t=2BaU~VrLuG9Px-?D8zEwdVmo2Z>($ID`qT6 z{=w`*NYao@{PA8+q)ZsmO@4mEE*n(ympbK-{DZsk4lG%3_riC^^4&tc8M*G7U_h&qua7?2!kzY6Zn>aju$9R91^e2>9tJjLsOy|?(7;EY6X&S| zR@lMos;aUVQB+wLcd3%(6K}$U`aHag@7E@nU3s&4N_EnTjCz-JOvrTU^0FJVx~pkE z)itiuPwh0KtI_oo;lwB_V=(cjM60BpMDOxjzoDc_iPd)Y3r~9&8CL^?P|F-orSJ1@ z3)t8WGRZ4e=0z&l$kfcHb}~#o%MNzYj%wC49q}H{F(fil8>59y&3{o{5E%7otfaLW zASTV0V)o@}Gp*rY%o!!ZV3=0)%aMmBWM+R8Rc?k&#YIX5Nl8y9UQUKv*x0xdR4}CJ z<>%D)TS}VBzmoBnvnpdQW)rDom&(=FJ++P*vOI8h>@0GA78p!J>85R>iJW~G`{Z?; zsztXDRk7@R{oR~w@z5b{(7eHD`QawjRBVFLscuA{;QP{OV(-ItazRlO)!GbL8 z!1Zv(!8(!Lk@7}iF=Mbfpvg>`x_NcIN^ASMN{p$-R~gQ!0{W+oTC#YkK_Z)4cX(}t z)=eOk3D2(CEZ0At>PBI^wq%T64y7QRFgHEDX^+!kc@9V4DG=2dTd|^^Ny?5Z3>L~} zFj;d|t{pPnb9Mv)?w=ETDi~N>e7cIvLT*Ia*YDdZFHB6cG4V{e(*wC9wL)avqFV2~N<6pbN2 zr&aIqGLd{8y+xvFgis#I)nrl&NzsTy6wH)kJXrJ3hk0}TSx|jDBKwn{>cW}G?Z8t} z{qw`TcV6~Ma&q#%>rEst;xmYunVBO=HhC5F`eXcnUG^l>$mmM^t+)=-w1W!neB!;GLcJ2=r0Pj$Z z4EW?*96n0K%#`!RT}Mb!GTj?{@q$*==5{<}%9xk(USW?DR%*V+C-mf9^af%jZzb@XASv ztE4-zlNEQ(`ldeX);w@YCQg$0HA;#hm%^k*e(r0j4e9$bWDD=*8|%Rl+3|AeG{r(H z%3%xC)k<47zOobUgwg^i3oC zenaW$2`LckFFO(UFNyi{VH?g+#Parm?Nna6B^TK6woKBA8P-lifDU)OM7TB1MM|G`B zm~n_2$(d@%mD+)IkKu8 zoCcTW=ITSax>UU}u@w79JRh88CK)%zlh@EKz5Av*cxf8lL`EN=+CE;0)SX)#9He){ z`Q-D{WK3^z>Fj1AD^}45&S;1BQ^`uQBVl{xNyZ*(ypW`@G0zV!iwfx1wM%PhP@)IQ zLQ$;3gv&eBMkc}iFEzh*`0-2faAL(>p{~~Lg?O@KoTjOkBaiue{F?;y{bB+zIJOSs z@HSz9K0k|51nbMc9QxmTTKFeie%iHNI9hZ_o(4yQ&G|&ST8=8j2CT-7s0mXy865Oo zz}op9A#O8qag{+NRk93fX3}F=SPpMwR1cpG`yf;%McsNavK#IwxTd_Jlovh4llj^y@lpS{H`tk3MON19I z*1>#Dabu$=a(8N>)&?q_D=aL`lXCD!jddju6XCH%TIXeLFU=4a9~*nyA$r>Zt3wn` zS*kLqZpX+2QDj9|SJxFi&lX?WDHAo}yBo=b6G&|7Wro-Ouh_k?lcqkq$vq>&)$684 z0T$%2NJw<(^j^)F^aOHi zi?VE5I{aqAOGg_%waX

?F9)b5s8cZ1M`Nqd}+bd0f}73o*|k*>A?OslqtmG(j62 z-cMg5xfp1G{N()X2@w?kjnxt0W@^eq#t=Sb8N2S<_ztJWue0-2jTTIaqq89`N{Yo3 zZ03tNtMZF5CtfJ@cpC4T3H=~1?0go`KtHa3%IiuY{<^0t>SQ*gGVKa9O88t~zbJ2Osl8Vyl{d9^Go5%s)h?dp3PB4AR=#>kaU0Hxw@kSX?SC zcWczho8D1j>Ht!(ihWMW10N(u8gwDfjBIH7flAcE*?+3DbKN<}>uIE(&+6hEuYBR# z+zT6qa8)Z6>3E9a184sIiI#{;4VZkcaqmI(f z|Hgw}QBx0JbEH>n&0_{;t)%|TvF0vxa^~$Q-~DhVO|~$OLiy-FdNgbkz_tB2Fffp= z=VSg_L}hinaD*YnHE1nJzHqgKCVxDOyqunlC8y4|9U z2wyIK!W8DpXFd2GLhcL9{R%GOh86C6usYji_+4kXK5mY9LnZ|BI}^x)&Rhu`BhMHu z!GkTF4jN?~bTT}g+Z}rC_P|&XhM_Mz${Uh4fc#TH-eh7UHQtz`0pMi(EVcwRv8TvI z+&)a)*M9_aYQHH(c(%=gP6zZ~>AtTi;U$qBibN_UJnR6*EJx_H%l8X(_oxQ}WgrM% z{+U{P&-ovGK)?l%-X-evygL3J8X7vl(!}&4AdXQTP#ig$tD?JzJCJo|`lGf&H2nR# z`D3QNA%iQhT?7;<0Br?!XrGSPK}{L_dKb~+)xoCI#~79dZvVJH`>5C+@$&M+kc`OwB3noRgjxds966I0)u z&2`)99LjWSp13{Cc?F}78$hIWeQ$7Gka9{6!P zx}fX6JD}LT?0q%_2(tOkl(D~s~~?X)(FNB z(({~0ucemvMZo(xual`68?CyFaYmjc`!iC%b0oT`m#4PYP^ik4|!i$x-x%?T(HYmjec~2vlao#kmyb)F`bT|9}}T=hMvP zyb;$L6FK}a<+3#-PnO1{nl%`P?t4&Y*K*eF;?wuj@i_A6e!;f&pMM@16yoHF5fseq z0U9M63Jr5 zPo4vA?9k`eAkUP9)6o^PYXx3hh;pp&jQp4r2Zkwz;dclCdBI*i$>jjT`7F? z+Nuy8bsmDMY_H{uEnD36J%Q4tr~BYiDO?OG`^*W8<^Up^B}2pt%E6WY5E#ITs+1 z;4;8m2OPRt2iz%s2kDEjaqjBZ?W8QvR@)x*+WWD*3izocbb4}+>3a3aJ?0X-!=;=jeIhUGz1Pvi!{qV~g8@7@ zzO3QYN?i|r-HbP9%}@|ei%EWKKjYVWcjzlB`p=_p@89N#)d9&>jB0?+(tL7`vtj)7 zV0^NTrC2e&R`wX5>;7!$-jJ5-`S#e(&JMIJIwHb)sS(-soE+H243Xf4cPSt6EL53| zBy(E@yaz56C%w@{pznRXldo_njvnwnV+2q(s?q1EE~%^Ih4yjf)20{oRVpWwJndjh zO-iz8-H3hjs$-=|;T>woMU9^W6XDM5bgA4CULW~KopJb;7_oEYMap=#$)xMg~v-DpctQ)MG>Tmzvy$lDXrgO@JYmwe|M2 zYD7d(kmT%&5Ar!8BEs84kT_K?6hxUZ!V2WLya*^PES$Zxlr-i@GF21iOfk~0vRSBwKp?>17UZ-xK=-^3 z$exd;{;Ky5vlo!(%KWmupV$Mu;E4$waYeQ=c79TVT;k}vxvK--PUpM!ovVbIGe6|` zzq#{j+JCToXL_KXpw0qIGcz;y`~_0t$t0I5ylF%By_91xQev zmxKJzo!0UWovn+FN9Ku-DAg6i`uW@3H7J=13XhSy%&l^!fSu>dp@%_~N}_ z5MAvpM^bE%{WHs&ZX8zF6~f$`iVrB82@RO`I8fdqfXGr~*iRLPu2L>5ec<6e&$s6i zX%jwE19~p!Srsp6utTkRyl#gyi>H7|q(R4)3vi^nvtG9Z3dHCDQ9|VNvp-1FLWlN% zzNNxIwB=sf=$_*^b3|~zLNWmvtMyK@WWJi=b!5GhJc37O%g_8-x292l4m<>uwG!m> z1vUO{&`D2C74q6rFP=I%IYGVNJw@IA`t_~FoXPYn;ih8)ju8w^xp=C7!x2bbn%P75 zw!%lC1=cj#CEu=D<#VRp%C)bmroMz5l)hyj->}X){9-^_W${Q&Gc)%=jx&hxkdK4c}K7sZKRxo9`wzf89 z=|W*QpIZMx{BLr*LFk`e4lK8Ud){#L_-d=pl0ZjGn`8d>OeRv`x`x2T933kG8H@uG zV1DoG>qDCj{|_Awmn;IVe2|=;4veut$N0+kFy`K($BO$3efsi$e*)ap&k62F2S6c1 z@an(fbekK464_(cAkJZ|KjnPOm69Y*6$FM&ppTkX0dHR~ieun^zLNd%Y%3P{_24f+ z<8TAxd^{V*miK@EcUDpX|Ld^0-L8Gi9Bq^Sd(;3j5%Z_T0U!3K|04tW_S66BKmNZ% z|EDID(pbKg);VtU%P0IB?&;|%p!>JlQk8)Wxyc>uZ>Suq|Itx1eDVK#0smF4;VeX= qY4G+x-Q8g+f&cpl|L2_UhbK*p_;1s-TVns5D@c#iEtbo`6 diff --git a/images/pr/pr-likely-ok-to-merge.png b/images/pr/pr-likely-ok-to-merge.png deleted file mode 100644 index 9c04dee42211b29357360f4c2900bdffbd83b32d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 98362 zcmeFZWmlVBw>BIKMT- z^y;sDGd)M9Ntv%z`fN3^>O=?_yQW_~OktO(xsd(<{o| zp!Y_@#FKB$xJj+ilhANzrvde7u8T1)gZIPuL>Lm`u}{C`W@c#+Px;*}jBaGw z84WKi5WLc(E7Gt%-{|S-A)~^1{@-Evx^4uW_SL`Rn$z@`vh=^B8sphS__u#YQedml ze3|C7w6H~D{yKI-cK zsrNrN@&63zKYsK7m&=%xp8_`9e^;8nO>qUyI@wLSQ+2u9{qv$S8UlZ6BL2aTj=-yy7*>$P^%D%pW_ACuG@TPXIt$9T?iMZ#2F1I zk)GhFOu1((U);C%wIe`lx+o%|Hl2wd#0N@WU$0qnYp~pD*b9tn`0W$^3m>k*X1|ob zlSml89iLKN)XDnbd9=+|kXi%=nuP=g5(%BY#S8EGLSIePmLv(5o?!&yn}t9nT;>M7 z;FRAr^LaIj8+pZe)tIXy{ttu1@`0tJue5mi{dXtED?Fb8)}@znuQ_mXOaVoC8eXr~ zROE4NbOU1pzYGWR$pTw|Ky-AQ52m^)KW1Z<-4{pRy_9yiJ#M?gBZ-{+7ZW$7pL5DAgitS#TLI zX-Eat>wL=Z9mF_W0wYeew4shvC4Wn3GHC8QLXQ%C+a@z+Hw$Le0ga7V5}oFyT$nA* zf5FD+jD(43N(4%n$zRAoO#dP(TXd_?xcNvAqNZltYGgc`V|>LG9UWa_jN2V(W_wjn 
zdBvtquHHL2E{>NI#WfS6AfpF1gUXucC#mTqkzDMCSWP3VS#hS2doXTzGO=WnsFuii zLI5?hQt96WW6P05qmtx$(Zohkrg<<*zKGy-jHBRf0GBQ``%WNe>R)x(lRlfk`7oqE z>JN4?Lk}Us1UzH!s_m4}DA&Mku?EaY0Oyy;wXtOMC0^!LM^~enRO29`q0!Y^Gt#ow zbor~D@gJt()%mLeNu*pHDYgdmyFb)&ilL#nnCy{v;^I5a8mW3g8_ntYEISQ5RcTet z>;AY5xT}SoFU$WuUFNY%udlBYsnN^wHQ7E=);Gk5hmaCW;?e=I>0nSv>~GuRoU_It zqCiJCsOG4RqM{-NorXC89rDg3a#+&3MZ#7B%*NX1Ohw0L8fo#ND~f?zw@)$Dlt?Kl zS7ADu!KF^qqf}p5Owh~e_AQfPGTiQRAWSpMD6y99Xy;&{&&RF#6GJ{@v*G>Oh3vl; z-ArKAc=~$J3{tbO;Eo3{#jt!R4%-0$(9ERER1I5zrp08t%A|G7e@5id^AKnuL<%LU z;HT0Qy<@4JKnWXAsA<1b2y~<}{Re}ca4D2!t*EMR2R; z3Py72fngd%7)=LhDNU48UZ zc$;!w!?2);emp%5uJJi~PiSxxrZ{w7zpJUU-_m~T1M0b2v8sD-F`I&UA2jA&jb)u6dm6_c5Q>yd8=uCAJy|`)nbLg91 zFgcn=xt1R4efJCvpe1|8%8=PCTNQWoGB+ZbIIbCBSq}a)+Z@qI zF4@^6G7mE%P6e6HNm*>Vd<(@sWOiiumuxjA74(8T; zpi;DM4y*V8?lCgh{UF)~|1S}8J#nI9Na)c>IHkpILqF zWh93*N7|MWFM$%2Ks3`2@_k32?M3K9LWuO_>0q&vdeLd}@;1}a zBq76>zc0s5p%cY9Ii`E5yv^6+vCX&TL4*+*mHHkv-en7Cg6aAPZ|xP89-LFB|4Z}c zl|+UV3r8ifVHy=WtjT?!NSR<_jeGdkS3t93n)=Ho8jPS-0kvKo)&hjl{)ijq4h5zt=1|i6nv969JE&M*`hprdPq9MY8oh;?D^DMJ*U& z67r%=!WJ9bw0Nu00*{Vu^69e>qP8zk;x;GHY!pr(rfNDX8Ez=;v!_e-_RPy`+KAiR zLXpPja3MqC zGQ`2nIEDM9TcTQDDhs#HgY~qdm4~09Asa_IvLMwahdm^OblhKag?VioApN&1BANJ8 zhnwA`6O4^61gfThr$&K|UPSBa#*jqM#>6)3PSsb**7YKZ9tKszolPQAQ=o(W+EbM= zBOyspSAP#wzy{{E{i+bFvN!B1%E~aT!)bUsl33QDU+z+DM9;dshU&ThkawBF_P?&_ zSF4+(sjZEbtyZL@tPDSO+>~O%enJ6+f(kx$mFEUE$DsJS}`b@M1ngp*Vf18B0giQ|qI{=;ClDb|Qw9agQfeZwZp zoo5r`%=Qk=e)IJRPRyU@D&?A`Lyw631Ai3sj{19}TgyED2Vg4O{~G~$ zFNP3}zvbKoNHvSWG+19$v6iS6v5wOH<=#uSXDrbwTd3Yja48wHg6j(1RHYA0JJsPt za?UWq+Gtr-RFWe|R#(kdVN7Nr6ZKTmt83X*GMI<$KY`M#{yL0bLlp$}zmA&bKda{Y zMEdl-4%1B%^bySBl%|o7X2m5tIg=ccsF44P5vKWQFNq_#oS%SzSk9aCg_DZeV;14B z-xZJyI}HxsVhXk!b`th_5~-j57t3!*zjuTXnF84_cp0|xvg>0*h~PWyqjbR1#ap$l z)A@&W57xfSgyK9cDEk{r2Mi@dUOg8l<|FJOxb&FF!h#Nx3kw+(6UY_Sn_0E|ohk$z zZC13&-{QiU$Yc@{mqw}sa(2mk{jDR-J`5#sp~=a^xHNIzzAcO(k;!vqrS_QrK@SUj z9tg;{kolCI*Bw>&q_Z-=x|H)UG~zSTH6%7^ZRR-ODobo*YC>UXjVzxft%kq+l##t5 z2UF^Qji>reh0Zg8y4!WO#-y4EwQ<4SY+to;z0>V^^R>pSl%vAJV$JQA-81=;fL0Bg zvM4Tvmt|nKJA|6)&o<6e7Un}YqD{_cQfJDWU*Erbk@G$@YK_xITcIiS*_fx`59S+W z2us|rd=^+W>m#UtSj7sJ91b@wcCd|4ddgJIg@g#h(M?(Vh}9$?&oVidE~HsfHN6IK z>cdcco0Az*!sRWm!4I9DqDD184^&1|kkNbhnL+#B7e*YVrxO3A!Abp^qOk0YG=EdS3l>r)OBKlk%s?J zr-@;KmEn;`O4K(s&FnwLM6N!GwKtGUF6g^=t&1nc8A{09ktdVyQe+U-$L&MIQ7!4`W!uuZ4HIRHAm4!hZZWD$fNoz%YW{u9regYfEpee)%UftPbK6 z<$7nR(miT04HDt>C_v}5T>*z}w!Ri18rx4mDp$DI(=_2W7D{T{@k}b%`m6C>gAy=|2>)(TNq0JoX(1G)__|nk^qAM6GvIUfOh~B7>9l z?pS-@weH>wl%nMPxsr;Rtg7q>TnXoB17WUbhRX3S?a^h6zQPD6w$HVBeE^>rqz6Ulq6x2OxA2q zLx>W)p#=rOw3V6~r+s(r?e*_o@(9UWHLAhluga<%2&`dbu@MdT=3W*I6W}u~cfi3Q z_^bf4K+Bu-U$V>3hPlIEw!Xd|7FHS*6cnqb_n2UdoRF1MU?L0s1Ppsmg!z8+<~>mM zlQnci@8-Q~5k@BmmOoHCj;I&gw7Q;mby}7M-5TRL6dFs+DT!MoWANkp$ElRrhex`O z%LH?T^u?>q@a2;}F%ro8BO*rU*CrY17AON5z zlJe-%kGu^SYD)U_6OpXxZq3a&8@=}8d(WTbB(_jai-48>X6V~OvPjZL=1uM)*Ph6#r?QaQ zXy0|+Yt?+bb(ZD7-!*i%H{6Ws{QAa0Wu=*ICmWt1Jqt4YsC>5Z{jQu)d5)+dm+;H{ zuL7O}G+66sFzf>HH4J%n^-h&SNLb+fd41`}Mp#u|hpfh?xUjF3f&GeQ7ta5XsN53M zU;Zu%Ftrp{J$baesEI%#$u?k_CTXZCP!`dgmy-1w2GwHz2g!UFvzWD+{j(EC6e!DH zm9f%MBp9`Q*G+<38_S!rc1bIIWmeN;S(_d~hP+1b$x^<~&VQ~Z1vJ%7dTl=EgpwX{ zGMIIvdt=*)6QBd~STc{~S2MXLq$=xg%zkA4mE>?Ifo1(EIvM@rp5wED}osf$nrQ+wyJ0z

-9hpS6ZT)7Kb-D^7*4PW8rPD&)fzNw9h8liu4sWj4Q!ZshcKb@;d;Xn>#%@6Ku=(`c*pPgIw3p=bzuR@>i zU<>7;xPZ=ic-QQCdO4*K^FJqGQr4M1{uMP=g8J(im8*kFLw)D7yAN2V%gvFd9(^c}>JvB8ggh)*TuOuo~at49iknG+z zl*HC}2;&UolFg&21OuLMj*}WuqNAI*&3qSDF3K%@#Yxo|Eu`xH5o^O(q#>i{ z4orlAP3pPVu%hV^!y}a>J?84%2({MwNaGmiv$~>dLgk00nZ9xH$W-^+)~s{58`tf#-wht|o+ViYV55=?MH0>=Ncmeo+^{!kNScT$}_;Op5Z6N@Sn&n*d%HKi#z)gcl<=#3>|1qFVA#a!44D|tU{d~a*en1|5P!^38RS?H=Md}1?59#3^cYXNw_{lqW9X4|zk zX9!S}M>h=#48?XdED=nQhl%V>t(xiteei;B$mhJ)SLj08F|qX#j}RxZf%J9M>N#vvf#-lxR{c(kxD@3MeQbI3RLWIA6#p+_or%~`F6$}b4qanvMjP9Kk=|px&L8I{Ao=5B zmq7X1kIBZ>3(+u7eX)jTT{>{VD~a=l-+rtIqek!!bf4)7i;wjsg@yBYatof<4FPV4 z)(==0GJ}*R83Vg_ej?^Cv8v~+p#Lb)I()2SDv*B=&5yZW0X>FfX(^geY*Lw~}pZYNR5PRIf!|x{Z4T z69Yp72vsC$b+xG1s8lP8i!gpMf|GcuqklVb`_OpB|F9(|F6Oj?KO04_zNN$Z_Nl!- zsy@JE=~|^c!KgFo35`ZEyIJ$OmE}j_=G(QQ>Usd@SpvhuZPER>{=vcB?F4+{dK*IS zf4&;vxm}bO{QXqgL(8H5#>E42CoKndsia83T5S)$6BoXZd2qb#UrS;SFS~izJpmgQ z!V&IL$Af+WXM0503%BKqB8S`WX8bo)E-u~~Q@5pTFYt?A9$fghJqmSB1cA<`*$+{9az;X;LK}V7;QgyS(F~)pfWHlkl-swG z2bpYpFBwE!Hg{Cg)2A61>b*7wA@iWCZKFp)>+A=_D#v+72Fl%R){16kGy~apHza|OtalI&Aj})ph|@Hi*n+i{$igk17QZ0 z!&Er~UELqGh8kN}P)=OT%~yF@T0T$0le#OsHu63CefTA66f*0+KQ}j>)Mwea?T?oy zb;l3`5_Rtg{~1vOY4X`X?nmGD=Q2h=%iI$d@_;X<%~$$Q!ZRqreHf-*=7;^^d2-Qg z$QUAtM#wCx!Nav?tvHSR>tLpc_uuZG(7ZG+<|;)x*63&F9CHZNJzhX#Lpytx(CsGP z#wgf*p~^>J+vn^}G9J3swULnX>F(|R-=m2EkAwX!ID6PH5pK`z%edEm!D}<{L9vI; z6RQnTllH+3VfOOAUjp1y0ozlG9Bk=Y4LL5egCNTKD+-Xn$;?sSq)_=%SWaaDjs>T&D)M%4qKt&;N zHZzX;B!q@+C%HnNS36@mBRmk z#-@O$OCWqxYgVSRgaM*R4E$oPlL9J0Tw zNsp%C9oL~1iFWy2T=-3@{goZ_xUJ%*W39+}TSv~u?bAGdoxL?`TO)+netr(eRYLg- zOd5`>*2IQuJE@(0*W!7EVK!+ZR-tTGo#(Qgi+hs7--k-3)nv>EyMANi$@3fhvpsl7 zG`nVJ7A44+y*76w05Lf?J5|Z_!X@W%HnZVy0`L8Cj=sHN%*rAm{$000odwb&fgB9w z{Gn7TWAx+DVInDs0&$X%cH!H2_at?;)z$&87&-@tIQ1(?)P*R#iPV@qw z=7n-SksTyb!!>;`J#Gh*%udat{9#)azwqt9y#T2CAc?c}ZI~R4OBVY#NdNt%EF&9i z7|8{P)FDAkAX~_X=QSZ*1;imeKUhM-!m9JDA#B-EkXfX~?0ByiKvqvd-WtD;SQgse zI~6Lav}32PUY}Fpx*YGbrFN34{~-9{elS(v|0G&?#vXhUob6cgM^!s{a7A@>kQ8I-3I{?fV&-qc9dXir4UYT`icOFnFKe-5@8L@7L9> z*25;1XXlOYkMBJAjCF(eCu-n#<~uVg`2zRQK6=0zq=(3^Vd+wI_4JuuT<-`7mPat+ait59)& zM<4hL>=(Gt?KskU)*F1NH~05NPN&ovHr^QEPy4dt6(amjf-gk=WW3j5fAKvo`KZzE zrgQ#ActYfEoe`jhy`C0_I$+slX&Bgrea_HLU z9M$uYmqUGSc6Apy$lcXNfB$`xKfnAB?ibhuH;&WutLp50JXCxx!tlS+h);P4KCRaG z*nJp5fv5o?kJ6;mMnVY#9(Fw*c2l=AIbB(Teu6R8kB}PR=pk2d&MMazmkjAy7eda> zR+#p&U=jKr?6&PX=bT=n2*+i={xV=~@AIsSj%pCD=cf|BoI0^87ZH5kc;?eQ(m=lx z(jl2+S6KCuA>k1a_pB?Ld95_j-20;M2V{@>@bjRcb5?Lg*4ZI`;s1) zJ8!$XuzjkvEquAjt8|B`J;#x&Kf$}9nG>@b*UmEc@vv`|tSaH~hH~S@Z0741UwFBn zoxE=8SwF60{S!YsH|wLV&07^vUXIaff3qufb6{J}wl0~2Eu1iDerz~I(lP zL#m&A*;)=b))%8GnKdN$L2^D27eL%fCKPFFFXuFPiIEfZ6nu4wHD!IX<|KOo9{S)0 zX(q;uw9GGiWr~Pfqv9>~)r}?r15e8Jcqe|?$WHF*>S)Kbs*j)HevLYgM4rH}lOVf+dQ-2K zi(s9{BH>tOfAzyQLx4s)lXW%4!|EQ*1B6^P(Iv5ncz67$zAVa3f2T6$cEva|jm@W8 zU0)r)scDKt|01f?r$~*Oz#J@6$DVL$%bjw4B~Ft%UCDNG)Z758czzgMrxBOj_+5bCS*9w#9!C6pY+j^8n!7xD_Hps0z{xII7U5j&V~}X4Rs`LtAGVug1)%*?kI~aA14X9S zd2aR3S9NUXPS~T)vFP#v)$jhyBpo7NrZLB;KtWNS{c-lJma`N;aC31ZU$Zhjg;pT= zB!M^p%wGmrrf(h*Hy@%e`|VGQ4zc7DNlN)V_Qo@0z;gU!+5UXY71or~ib9o)rQk05 z`h33qER<5ex_Xv%({FV@Tk)_kHWIIoP+;Qa5{pxY;jXT-a1TxHH>oUKbYvL&K4xZ6 zH&5jqY6&<^J+Jgt%d)P5Ro*cah6T7=notdZ4|MeV2IkL)Zd(uTFPG7+EcqZaS8L)s zD3`YZgq>J}r-UjwO{g7`@!xZh`Ex8|{H|vArNU7O0WJ~FbfpOd-;jvOcH5yeHU@b? 
zc^Ch$^l^`Xt3HqM+$%hBvBU1xtR^|XhWR?HgY$HB(#Hs*r=15;=-#)EBHW+;(PuyB za(_|>a!~P@056_N3>7)hl!cm@l0KnMf8TiyN--EfE z=MfTgbG|sc>}BY2QJvxGkIZj|6?%BcL71p~x9p0}t0#St9*c$`qZ>mXj||_xp?#wi zXn_ekTPS|f>*JtD3(TY`HB~kIPCiT_mCmZ%7!PRy+Fx;Dt^6~lll{!OrsZ99d^e-snb-Ad^ z;SGbF)!_S`bsw6kD*CKu@X&lAP2KIg7IkB>E{HE&VM(1a@0s)Ec!Vi_V!UF^XL&)O#z30cCYPp0*0C%Max$+MP~;^7Wm!0 zc2FJ&VF`iL-W;hRZI;1Ni17L8ep4a8p{T0BMmlnTuExV`;=RD>49{1Bp|Sm0ww;4V z6V4p6pPLmHINdy|YZd(`^)l?nf48f%by702L*cxG*DT`KVYsj#Ru6Q9AD(M+Kx}y* z*2>EExP{IJx5`Ni@Bo3(+1unjaLt5=ui5rra)!3nqHxoEebluZB=d53Yz2QY=(ki&>9p-mP}_8QBaaw1;4@^t{()z)7J*o8DhkL{ zcQwB7DY-RsK;3t100Ds~)B5|jCy@=jvtwGm4c9G?mDal&!^_cNV=5{kL&dwsh1vRM zsGaXm*gcli>SGMr(t`E52gxC~8q?@?ANmInSo*NOmv!k{y5D5CG99-C2<-Rwax;5< zTKB#1p=P+}2|z%!ZqNbVgy73f*?@k8U-cOn>Z#mK8fV_`Y|VU|9RbBG%v8Fa)h}{n zd5ZvOz4h!D&CO~Lt2ml?#6}}pwtD$r&m?`3r$(3I$V2i0SrfTErnS`HU(8+Hz@FN`CKjD;lO z?U?D$b_K@I_SgKg+s%iR*Jd~`NyP8A9{3L@vPmy5yX>1!28EsL#y>`VoVGK2tWhpj zX*s-c z_qWUCpzGdo2 zj8K_ z0Oc+u8~#bnka@4)SAuahH$dWY)1$*O0UVEKk%)m=p~Z0{0q3P~a^-Xo6_tp%!IQw# zq=p)EG^mQG-%_X`8xM0kL#B7714iH^!o>;!7Z+OeC|0GszBPJzv_AkM2#izr8=v~K z-{_;Aj1_V>G~^{Y4x;h3VZ`93^l)%UNr4Y$vIzo~TfTqIY@Tz><(~7{R5czxd6BV7 z-)EU;hLtJc?Z0j#YgR}|BdCwLRPC{DH6-iXzMt`k)0slPOFo4hLmzAoFI%^F*O7ZO zvGAxyRMh|)r{n2WlVFB0b6#PVxaVb5PEJq`qv&X6`Pu$FCC?)S94BCHyvqugRq;RQ z*|?Ev*{S#0xUJ&Lny&*}HK+@lRppgD%ko@TJ@a)tNUu@l7A$|*LLW*`2N_u$UM_3P z0%&K4oxEnl_RrWJZ&I6}+41~XUwi8oPlX2bB+%d@OmLB+2(?}1br2=xGO4_E8~2*w zJK#@cfth8-Z~on>STt3!le^%mSy?kHV*$rQ9lB(F)sXpLkc%X$z&4hz1dWbf@1_6w z?C_LQrp0El4(zuFg$WBe1KO%MK$#!Mt=9i`r^Mw1JsH{Z$Qmac<5jMG%n=U~5VriO zV8}|X^31KM;-C`oEqF#-W1)Rh7YdAndolRUC?6pUp=p|G)jksn#!>|?cn)^@*< zBN(Q`= zx{E&+&0Di>JF*7yw*ha1@nRw)uYU3KB1@k>X+D$%{NCmpoT%&TzZOtfs|nC~KT~2X z?jv39pYgQWz*CdMUx0tQUYPy4p|7vGZ-H-DqafE|e>G;jXptb@52%B&5Pw23I(C5P z)(Sn~=(K)@&0Ur0)?{U+DzClJ%FI{Y|NL_fMm}oS+n{HpoM7m23#XThF)C=*Q4H#U zKs)A|;U`~)yKIYu0uQIU>mFA6SbW|JG^C|c1i6q8-fe94WuZSy8Wb0!mMHMBQfCMJ zp6sxr6)>dA8eQTS;q%_-2)ke7VE^bZk2~?QbkTL?XuEt@5=@3^3dH!Q?w6g*8TnY} zdaS&*(PBSYyts=nCc7NhZ)bd*jeu(PTAM*XDS*-Cnfl_iI?YA-7k^w$6&K7_xjBV* zmn)Gm6fzXDj3yq1_w+c+%(!D%OQ4VQ?LXFX{t5X7`DoufQ;j+d#ehz9Zbs?XyhF^d zucb;iv0qBJPl1$~D6jRNb~f?~dEZVA^ABZZrKZEn`zFT6I~r^EgeY|4Ohj4P%2Kak z>KvK@Q{yP`u$`Zv?KmPJk$>yLruZPG&|hejp5FJSqcD1CqP8BC)mY_Ck?KAtN?D?E z_qV87*uO60+V!DI1(e|@e=?EbQQDG{*1lEy&=*i!Heq48MALkdL5@|vTqt)t&+2XVRbn7}5-Z=b?99#%YuIInL+5Q@8=uuI{YeQ<&G$10_~bm?c~M zl^_sL#hDVOo|sGZLGPD8t`}i^SQ1BL6<3h7pyei#aS?=z6htN1uTT~skdd}Qzq7TE z{|H4nB!1)SRvvcyg8;@XA)yBxw7l{&{4l^VWj&b5tf^`yzq$?dt@9hV$Kb#-46 zktYslQy@uvakd`-z`cQfx{U#007;UIE9a=RRb|BMWcHo@>Ou^4(+bY@l! 
z=GSXEw3Z%oZl1h-I}ZT#=4O4s=H2DWFb|%QD*im>Ooq$_zJ@YQxqs6rA}3Gi0>QJ9 zPu{ta*Ole@cQ!`s==OG_xe*R&19Q1gEqeI2dNXEh3#(%;x%G?mT(D4yc%SNl7qGG( z2m_82%l%dnda6kIZ6B9?2Aip~s4_=U)!E7&=%~CxTbh$z z-eWoNY;ZkF$6Pr}V+E*}E6YhZt5+tK7RKZ6?qWSe*tR*T_Bak*R3rofytH0DNyAuFrQ-{03M*WIkqi-Sx6F1&jN;-13y2 z0Rqjz`Yc8u>6JlYD@Ehrd+h-3^3BGFF7bxL6uqv~YlH`KrEs!5s$FD2|G{x_V`4(D z<@9dA*m#=$UQ{}9Sgf2b&M*5>Bqx%OOz&_=#~3 zv=I|>LxgPU^UetTaEP$PQ@}CE3roT)JD@@$z}4jEH{Z@4kH1bUB1HJrK2ua9h?gJ85^i+fZ5{!+Kw!={M5QI@SY&10^T&p!EArH9@*iuqYUH``RY>evsB`*3;SI%UdpjNGb!Q@Qp;92(( zd2#5s%H=8>@}E>Jr0~q;POUD$rt9^Dc$A@TGvkU4;9HqXP^Fifb6dHCL6$;&$7evP ziscAnRwjXpTmr0f_c|mL=cxcC369D2Sx*UB_xJN3cuQ(JUI*R~P^0KUz;DDkn!HyB zVpF=5A>*!0x^fsfq9!S?{aXv2=tp@{pYaRe9(sk|N2IkY1dz;p-&kQ-3}w%;=Oc6m zIUq1h%D}aIDjU3w6=r~?H`?fhIsT5OmrcLfcDTWW1@@x+y1stJhueRDQ|4U%8Y|Z3 zdJI1{d;eZ`AG&sMd`t|M7p$0qwRxJUClEPT3UG;vZfP^;9DFtXnJW)2%VWg;A}^!2 zV`~g7b33zAl(%UGj3@1ytu1Mrse^f(`WWR}l47}p*LAq`-`H70Yy_Uf%1%d@kXDai z5+&tD#U=eo(vSqaQd7Y0i+7}Ac;TY%E;2c{sm**=gOpC`IV=m>YBSgQ@n;6w**B`5 z82)*>qEM|pZ{QQ_K7C~DNM#5#ce2kia(SI4Q^)*E{`4)M5ZEkBP z`_sG)`}$bPHv%>RG=kmUY;Oay5ogC_iVR_2$<4>x+gBxcb>!4t5`w`@#KxHWpbPQM z!RUACKXmkFUp|KFibLyXkShbeg!9IW`w?j!W>#Y6L_8Rg;}UG69M-n^3i3PmbHy`( z!^YD@ChIz#AB#ZIj59#C0W*6lCV7MfEqa$?VGtJpDC+ci6F%=%Xy)Q^`!{@M-`4HG z1+2|8ZEI@pzmZJ6fb7mhv2v=LiC}UhI03S&t?@QLS77@RwR-Cq)sB&~vI^^GSeAn? zXvBRl+A^jP4Wqi08?Q}D4-Vt@IUe)h77hE`d-W`=G0)B~vP9jk`={@hS-8*b6x|f` zkY{3zrg6iXg%~MGtd3{a6?_$@MV2|NQ%w*lE1xF%v~#-+)TEtP z+y_J2QB~1^j*bZrn|xJ_b@$*8g`)(3{Dup$xu$M`y;AhC_0PbTrSD&j_oqCx7y8@| z7A8@`tOWS~Y`|xGtmq_5g~HB) zXS<)@7T%#;rcaG=z;5+pD8f^=r#;=02;dad6)2=^07E8FWgo~}JG7-R zYi5$0e=y1AIDV6SV1h+RTEjF*Lltb;o>I+* z5hb<|1_L^Q=#b*5>8N4E!cMO>1WFW(xv{`Bm)U(~`tucJ$M(iu5!+0cn^={Tle3o= z6DIRiQc}LJk`;%JmOfv#mk}LjDrv3L%)WovKRZ`mp-({|59UF{2>Kab@rP0IMvFD> zc>Gd_=?4Lns|^V4OJr25@a#V!-h)<(f#ZYQYQ2kC6J-8sX(J}!ZW?^!*=avL(eRMDZ~Fw8 zgcmw2DfldiQBr6zQ(uH3y3IOHnmxjHJU3dj+(_UV`AzwC|Jk>Uhb&YBVf1nRiS&c; zhSz~sjUHPLCa)}HbSfSzG!VTm43uD)URfwv$%&&G#+<XY%|2Zkl9RZG~^D_LadC-nF~`ZZ$?LpOpKF-ai{ zfN=il%bP1R+3HOH%+U=&mA)&njVlmjechi^SZ1PnX40F}fk^(ZaDq>&kg*g)^qE*S zPoBl3$B!;>k9YfZR2fCwqRZ}Y^EU;>BtFRat<%N^c*W-zpMpxmQwExN7V)7ibp_*; z<-;0KYZ!dYbzr1a4+D5gbGJ})%_m;H7lgmZq*$dk9j$>h>vONfQfcPdr{fk~Py4oe_|4z)%J0+PJXK@Dym_SU>Ud`P^^YGvRtHlzEk6eKz-8Ui##j zcv(_O!V4AH2GlsKNgMn*MA*vcb}+$;PrMNVmgBRrIdXNE5^6Braj(2lrV=}CTi(Ag z(GvCw?+Pj0ivZeA{5P zAjTb#6&}=+vTMQdu!nc$zypzbQ^A|mSPoyd&s!d4*%8KEoZ{E>GgdN``{myd*Hjiq z8P~s>({9d-T})s8PV6nclPs=PBqPzAa1Bi|Os>H%w9jrI+yB+VKIgvkhD@u`Sup0u ze&H%$>2>UJ^81kUOzLHbDI<(loc3)vMxKP4JS(=$CSFgiO5~4Ru7VvQCmGJcHzy&n zkvuZP&kV=UhJABd-%-ulTia8VYh5pj6@KG%F z4RWjMjp)PM0WD)4lY_J7>s}rA9Td*15gx(=pUZBVX4g7gtI=waY6oq(_-+~z_rp4= zu0m}yRet~d9?`vIuSH+us$@d-jiUbd$;5Ld9_cDPJd4xSJuN;nzxUgER_pdsc46jz zxzxz{d9R}M$8|Te#4g8L>*%WsG2DYT)m+6q4v7pS4(+2%a%UqL2z(Dz56Du*P@iSD zlr~}pUGL=~))27q>~PY6CC`UG1%!C*14~Nq;+4?`k)Z;u522{n(r>Z?!6|LrAP3bs zp{%KEQG^C+%4M_8D(0N7y!XGo0In*=8VbN8nzc>p-(7s)}QWur3k51v0Qve zbyRoA=`UXnLN)Ni-W{J9e$5n}^0K@}Cx^t`Pn%H|EE1$QT5ddS|FJZ#BGDdS>)aco z3D@@`#Q(<^mMSqjv6SCUE;rI}2g#5;xj66I)pg>4Vl2O(p#N%e=ic5HTVl@KsJ!9+6Z^9mUNGk~+oxmb zpQXQiHN#uv+uWiY{VQ)HbC2E7-MA@VkY+iTd(3uKzX~wBh#C4aH zH#^;*%9jpCj_Km}gaM7&Ne#CZxHCCs;6Ib{^~YDv)IPytrTZ&HrP8VnR|httYuiW+ znv+5U3axf{`Cn_2XFAHUH{_ooTdrq(z6BOTdCZn9iitQU^1~Mz znPjB>4{{2#gvCmfsx1tWrQu{X{ox9Wqd%P#U%#qUtXkD!u^*_q`i*AS;ELb|DmPh^ z=$dz|o^%>k=+$8@W*X(f#@tG+@c4)PPA!K8gq4-tNUwc>^wsaywPzWEg0Aa-KfOZa?B$3y(DZk%f@W#-{Ep?U;H&6!7#)(3 zKV1~@C*$4~D+rsLju8=<*1DbV>xp&yN9E)_A2!*z$Vx$_7YkWoC>d5&ctzCw?9rXH zQ)N#47M~2NsR%~|Y_PGS|Kk>pJK=3Aov53HxEqqVtgjzm<_?c6SrllfI4Msp*h@7F 
zYZ+^&YepE_jN7y{Xb|1*jZ9gq_sz#m`H5Jnu95k?(h$>ROQuKw=Y8U^jc2^AcZOBh zmZ&+Ac=s#VWmgy*d#6AQoAyiG#?k_I7T3u2-dBGb$A5Lqg5oi<)FeK3)&kG6M&rd+hV)X7Tx?YTbHeZcMN@OyTmMNDar|X`W!QNwSdsjMJgeL9x$F zHa?6u?)ZlfkAS8O<0{_Okox~)@2#KWTAl~c+#4eVg1d#q-CZ`o;uhQ?xVy_jNN^4A zaB&N6!5u=-puwHR7he{AC-?Kc_w)JTRlR@UP1RO$>KtZzdfKL^r~5^&frZkLs@e2J zbDqpl(3k8cZnbqr%POeobpo3x5~KJb_v&1+?1`($Xok_oiaXaVWdYv&p}O4hCF~|V zY(N>I@SN}Gq4a`fp*WN1r08Yx4Eopx^faOL5~LU zm9wNC8L$d-=)D)&*!)~sc zix*k?LXSW2^6&htParY{e2-$Ec{EVo&81V6#&YlPm(+zlFP3^bP6HD0K-9a-&GpR* z8){_`$jvzX)%#J90P})rl|>)gBcR_HxMkX=bKzN3b%a`tH=O2 zau*{WA8T)bETDa-_YqMh1Qx9$Y9Sw`hxF`0bA)dr0BxgoQM&g16IDdz$ukZUR9Gfc zrt5$#=8R1V7icW)M{}OjnD!A~Fo$95O3^ad6arC|!wM0bcQ)!wrNDqla>mwuELYWd zzFGM;^qcYsRezH3n$7p5kEvg_f_g4{YmYQY!I-M-v_t}xBsY}tYSHq_*EZu$2s z4MXPErE{0D$zvCp;76t7yEIpVaU>vXw}mPe4W-QyZG9@^~jVj-#SQ+AT8knPF=RxXo1 z_n}uu?EzT*fiK>G&nL|m60g7K$F2}GZ|rnKwx5oslRajXH#uf}i~Vct9QS=2^nQQ7 zs`2YMZFtyCi?79_L49q8phc()I+<-oc-UEcyM&e5tvUskO0DT;=BBDE3sVTl*t;$vq z^7WvVg+=Ohg z(Q)}^?W)3lw&6~?U)?x<9^L9!o9on;VB*}bDltb-U?##smVSJW^Rv8k4CI3IJTj2P z`L+gghX8K7rTgoSaXYtT1lbBO*8!R<_HuVhZ%V195F6E4J*@}g^tC&OTs z*Y~Z%QlqiQM1b{@LuR{x%2!i0O)5|;Y^%cX1L<#WU2_rkJfYJNliIw0q2hi*_u6ZE zwQl?oY1O|Sr`%L|`*3!#ax<{%C=+119h#%_6BB1fqL(U#{5WN^O8qG(34E&5EVf_vl!;fAw2C;K&z6HiJIgJipMg2b8J*`#2khTXzMhX;U!(t*HJBI)dh=5#g z%iLF$Egwu>ca*iD*Dv?37bOTR@1-Awg1Nw7pB3=k!)C|=erEbmuUyNx%or^-Jj5O- zo=H7ejL?F{$*wb;V=s0dGmow#)qNa$Yo&=*V~dX;y0a>Xu-lOxRPazi_Mi{-W){Du8&WTLPruI#mBQiNOtHs?&jR>;<~o;N|TP(#cY{%gNH`aObCSma}+6r#CM zq4?LtvWn|1m4o{Skw{_0QCv;U>(3GrriFE%spwX`5tql=OTmG0HLAHb7*TkuyL%!x z`?il42U`B;H{qVXt9umuo*nSRc(+=S>IrE_Rv2B?!`AO#2)9M$f#Zr@c5%hx<0#ns zFfO!7Xqd-jrQ#&s+APM1kQ7VS*qrYeEZSU>wg?ia z-k9uer?f;s5;NWPBPv#EZgaV@kRqY6BTB8Gj>o2WP0hG)NAT;#J~PVK0_Jaxzb-W~b!@+v^d8;pSfq>O~u%93=NgHeT3 zbLuZEi<}NE=^`PxtM$ICJIO?fj?0Y!&Vex9{0!4MWF>gyX7SyV6ud7A_3 z%s2C6MG_fDp`ta9yX#ZDScSas)pq}Z_7LsGn0p~UbD>5%hse2>A4O(*yb1Fywuk{8 zPnwb0ip2#M&<~i4R>mv)fueBbt!Z6m(W4m>{R1$+`}MYHj?Az_j)2efPPDIwLl}{$yg%h#WG)hpO*I?u5l&GcMCEeAL!Y$ughqIBrYV%Bo_kF$Nr(gT_x8%MuM*>-N zpxXG-p+F(`QIEsSE3yLKs{0Ys*4nGJ!xxIti|DNue&%uWXI(2F!&f7D|`->3eLRCvR?I48JY_>(fZ4iS`VDl0V`+HZ5kYPao=riu5?_SnleHH z2|X5(W`UQ)w@EhkQU;d|XESwo`ll7(5d*K$LIpqMvHHQWnQ4eXRdgFN6q^59o5aDV zSNiepF1Yd?w^T|1Z~Klslcs|P6L=)+#)Bu|gI5~_xB-$5R-)0vuN%xkc-{|Lmo_3EDB zw&h_wPMj?g`nBfroN3n2x_^J1m$w2%{*m2jA0zY!pWnc);wR#Rri?Gnd??{E#v^M{ zu1cz9szI4^*ltSkTw{1+{B%-L)SFRQRAXV{-qwwR`9oF)azv=P^m1*i#5|Yp)@wpq zW=gl3AJ%gNv0+iAEo{LAA^w=yvPrHCX&-Q}#nA=kWY^8oS_;dfWU-#%UMt8*s%eaY z;=-`Pvk&~W?5&ciSAB}Onu@NvrvH?~y>+}h39YKk_%&1e5&?F( z2=6><*56oM^}e`BHaOCB>3wi4P|6{%=V3}4GmF=(Ihco&78uyif3t8u^t1ESGg5}V z9;BlSB4`zAozg>qf6gvWajBzDYYS#54)TCH;orN*1gL(EyL;$nApDRAdcHn4a)HFQ zV-{S_(P3w7ohOq^wWH_GN+P#Q!$wO@G)W8fkKuNX%JBCL?@dS3>}X4|R37?V$Hzq2 zpNnJ}^rL5N935GM_KyxTg#<*ulL?;q-JA>6Y3_8h`MXLuDjTg_0kCHOay`B1ekxsj zb(kr9F}ZU%_7xBQ>%4ESY5OL`E`rCBlVMo&{%+g~cI5#|_bi4wKK9ut z&v^N?HkRxfE?p(+9N>}T27Vl2n~Vjg#x%iie(*J3UR~|jPkvQD=o|W;<8`$*v=DIc ztoRMt;g92^uqOE355A_$?VA^oyg#?*Y4c3xUWXP<_T=Wjd}9IbET&3ZIr}vybmP+8 zou{nHnKe|v4660TAiJK|P}XzW34Yr7UHIX{g3T&2$3SN_fS zx%$dRKj~Pq9qbhq^)bgq!9S}CH`z!R66ezt9e$ldZhN6n=x50snaJYF2Bp*U!8aXl z4Zi61t=Xx~{B&4Yg%o!id=!~CTC2H^qVCsH4`h1*r(Lhp*p7eb&Sx1R2X?Zv^;c+_ z=F{yJb#(3d+njbLKT-&KY+W>p93>|$Be$y@e<<25rN^YiKp(%fSdPCqNY9~v3(A|U z=`~oqatzZu^6faAQ51nn)@}fF@}t%_`|&t1*M8MXSy%Te7aH5R9e8cZfFW?>C!K*2q^Ha#tdA1O`fz@U3gM(^~uDWEg({z*T`8F%^ zbgENGbo`*R!^>o8dbJC3BOz-k~Hq^zIPmBWfX!RJe_g9T@~#V zZ_=dSdVak40Fk^_L#{p;idmbPsLRBL$vKWL(yc7b72a;F?Gd~zjbUm*JC6^O;j0$} zVXsHg>o2gq$@X{tQZZ41hgv)Vzg)gMl-)fZ4^?{f3!~xT%4ej8FjTwdw%u)1!X;5= 
[base85-encoded GIT binary patch data; no human-readable content]
zh)ZGTAn*QxWb6qO;X+{YDX0P&BeVQxV$1im@(}Dojm`V)%x!}lO{x0N*qO`a>v(7| zapK<6)7ET0YM0@vs^Dp2BqxuIvg8#LAsTAWG2Mw^wY_Yl66l^e$Gj0 z$R&!+AX!!GVS^$e=IvmlG@px zP1}~2Tihs_Nh_Hj-r!ZP^dj_WMa(xhHoG`kZmOTypISX`cDH`(8wop49sW26n`?Bd zvdQ9cL^+Ae5jH#5x%KZHWX{uxUs~szAUbu&57e!^Hyy6^kcE#j%beeli`^F;pUj+T z;Jyd=BE!`1SZMxuk(6V$*Vo-Ya|>?`cHiuR-=r1g<20lbKKZy;Ip-z%j1`m+4?*sk z`nB}9#eH4f+YrnDZdyK?-pzYqRs$awHDhxty-MPKQmGS;P6AfYTV(!}aA$k!XIhG| zvnT=~ZVo*xFmUggFGuBIO+({MMI6hSzh-Yqx>Qv#h0DfU{Kmxp*oyR}SvcrJ59q)(-4wfd>d z)H+#l5Gz+tW>sX0d#raSH{7L+Y;tURhx{3sXg!k@Iis&dvx-J7ba++S9j#Aj_qw;l z{V5riD8lVZWsOrmwQE=(Z#HM*@<-*!$>eUVe65ne1*TWs+S!P~j+nrbM>LJs&Bm;B zBklb3rzRn~YLm;?v>q^2R~wl%xr17iv-qj=E?=WaO*ON*c(3*87AjeKqpN-Gk=N^5CKO{N4Zf*BEv1VIGnK?_%CF&ZW}I^6VF zo^vFmq~*VdlYVmAePU3tV>w9W;@)~)2w(=Yp%^liU{kp0p$Cui^x#EZTkz16Ou8GMHw2FwtK4|imDtM8e zo{mm+MuLIyF{nNO3G0~}ot-6}yy}o*;j8Jo)Dsn$O|g#DO*5n!zPFtD3kk>jnE6;3 z0ER?cuP1jLzQsu1_C7uyZpzLaFq2(v7B#G0+Cpx~qaV>+_4tW*18v1t3M`Cic5AW~ zk6=YM8s2KALhTsax)0lzg;CjBTSvm;WGg&Ley9ztsUBq~zW*%(rgsBh zGyXo22q1@}@M98q_?#YSsY1eZ`jh)?st_9y97eW_5W^;suUYXUg z-%syTR+lkx_f08ZoOB`{EIoDXoOc0<9u_#*s-%oEC1TM~xI81XKO-}Y6!4cSF;>^(?&fvusgPaI>JZ)V{; z`B-ZvcyWQ zDX8{{D2CeVC%>{{(<>GyF-Hfa1iKirNR!jVM5US&a{>agw;CP^uOU zgOuW~Qr%B(?7j@+sNGL754SnF>c2D^_8oCgL@idpO;WScZwDwjqf8a>YCTM0S1{2N z{Z(H)$3qPBL&WTt;eJBX(rYffx3IW?AAQ7^z6O_S4u=>KwsAG;TK3EtSMn^a%Mq^n zESDF0r_9;o?E1QIzXo^0^&L~x(%uAWBwI%Uz_KjQ6OUKf9Il;7`%$jUWoOh5lgX~W z&+P7eHOuTiWL`}$Tb(s~`Tz^)BUmFJ+_0S)_{%2~NTfp*cUhEkg=E?SlgwQ0xxmJw z3E|cAxIfj{G=^{1bOrjQv2#`&i%jpXYqI!{7HXzd%cesVh&~W2taoR{27bud>Q5}v zdN%QD_dc?buO2!^Fz{IRLy{3CSue$)8e91T2OnFVE`E&CUa{;#I1Ad>+%z;WBO%g3 zkLSCF?s%1bpsb4$8rw_Z;MNsz9a?W|^Y?rUnsY-)(_dhlgl#BY;)P z9V-1@2Kqo>H-3rz#Iikw_p#S2<0FI9R7y7UV_E1x-^sCYWjSL0pICtG?wObF1Ibyl zGnUVHkN5me<;=~3U881d3Iu?@<6~pvkiXI3-dDvO^Va0E&WoO*fZwZZcZPd14~9E_ zBUd9LcHT!5L0X-72c;vHq}YiRcvy_8RR}}r12HiIT-=b{&o|o;Wd^^R%%X7hgvc07 zwOL;neXS{B56|aFaMU8gp$}Vp`t2b_=-wqQ?DKiwZChwaXag9@S z@=5Hk3G$%p2|}h2_?e1`F))@2)&-u4PO1-k_^ozer3NG&yXlnI#a5KJEVL{8Y|hp@ znU7>RSnFc9uN;vu=wUB-wh$&L-u2!FB^7e36-C8$iYb8Lc{IdFymP&Ydl3`(EQQ6cqd6=E( zzL4`xaS>~}iDllCxf}c5lf1icPag_bs36q;-g%>Yq0ha}3h_Dz#KDZV4FqpfLdaP` zT&mEIg9ZWQcO<2-XRF~iPPd!$`Chx!LfQ9CQs2_A+bi3%1jRxU9xj}fa$p_@j%31Z zv>wvssk1u>A=uc$bDUR9o$vn5t&sQGF{1H% zBF7R9z$DN9oVc(k?Oj^*lA01T#f&{qdO4!E>KnNtw|TMlMpm$dD-a5$Vp9Wv=!(P=^vEUOTWm~No`%Ap}G|B~zlcA=%`=5V#) z>}hu}8RyN-o*xs86bkrb77`JED{j!Pvc}7$1m+KwP%k=Lsb2f?yq};RI9%VY6TK>; zJ}CUUx!%@hDp_5wdV5!_%}!KQP)uz5qkdJgvk>P!Lc*dp$5)~>Il5AXr*b1=lhyC7 zE`yU~#E5Iu&5|7-{hC*ki#c7TnAIVeTg(`YTWgtVHnO_RPgb-G%1{dN6+56F@+j0uv60e049iZf0RAq)Q;ny&ta9OIeS^2lxuh7 zy?@`lR$(_Ms2X$$J$GpUt?b18u5;mkTkqG^1R;({-uRQS`#*ntcg0+A2>W=I_1Sc{OZtk) zbR)=DcrM4pNy=|MjZnCXGy-5Evb-bBvqaIO;6ow%G!+#fWzp%Az3#JSv)~n+V+F?z zv}dnhhG4VTC7?^8uaSl_sT7Kk1a-)iYqM!RA4@|N-+RFrr(snruiy0KepyUosi?#= z1NLO`-Y58|oDkZEMp!q>M}Hg!CiBIn*Oe^y*!mgMdOl*;A0fKQ&9@NdX!uJ($MW~8 z#%RORZksX|FQ}6KL+4RDv~hkMhUleP5xcX`GgPpN3ND5~8zjdu7`cMHKV?S3O~4=( zM*5VBS0YKLo@MMayn#O4RtIX&9++B98ezo0C(d z-|Y}c_oZV$^aWdMD|QVZhAv0tBQD?mksJQ>GelT=rJn(;_32fS(Zak=Jc!}{Gk0f~ zK%xZb3su|dQ}0n51yxd@Z3Wy0=eDj`lFpZ()X~tDj?3Nop>QGe(9f3ilxFBsxC%^T z_`~&qh06p|ISDxI%tR0X8(=U!Oi$Cn`-G!fm4kEn#YsAAk274MaIxW@`Sz*gwH+*1 zfj*EU98RQQ$(mGsy<4}eSJz@E>e^ky8NKA*LWa%XWs?6+Cr;!M(KmDkI2%L(61L_E zgUmb4Syn5s-GN}o=4)`??yO0ic0cw#KwhQqa1y)Ro?V2!OP_s3QcCG<^KDeWZj4ZU zod>_=S?EjJf2J7dYh^!-CZ$oHg{Dte%^UM%cH$v}bL9fk)S@3d>TZcke~&${HiuR? 
ztb!~WjJ>l(Cqk#s!vyEW9-zVqR;F2WYOkYDAcpu8(W^sBgksRa`z6m4w%0L9^i(wf zmAtBAe-kalyw3=H;$l%sl0k}{3=&!x0wzL32V}6@y)|7@m!8~abC*je!=RtnR%Mz;``YA=y+^X%H_o+ zu7LNoVP$cbY@;lvIVa+J@SolL=ZqdD*2xQ^1jH*FZcxyJ^xL<4S_P`eN#38&1it30 zp19}eeFFX42r`I9;)H7@296ZdgpTotT(N&cG`W!F6J~a&`Ws`mTsa|`1E(d*Lvcb(c_q5qTSn>6zUl8zVUzvMeku&!>7jql#F+yTWu<^!yXf%b$$6>{`5jIXsem-RD3v zjw@^I0?*g)GwqR+-F9Rr4Z9EV^zw?Pz3g4dnqw}<+V1eW!=5*3s=rj|Xtx&ZE37WA z)y#wOuUsN(n3QMN_|p(By$(rULhC2wQLyLW8Q^^rlb7`_sybD5bJmI@Ab8gUv7nT#I+Kyl6Yig{@vIOvXM{5`PpHC zM99-*aQ3cb_;(39W1-{`-MUS!6p-~=;+kJFTC1}bH-gs~C0n}{=mF4r_0ohb0uXAD zD&X{!aiq5AI8NfEd8&yun7xy38%6s{|HF;O&9u*jhJm3bmtp!FBW01Oq?mfCd(u0q zhEk3MCs~{^0R@5~muAA(t?6}-1fC=eXJ4xDGCYWf;-Uuv^09&5xl4GEL-_Ao=gnr3%$Md`teAzecnT#;vD8P)CMg zvyge8Js%!4>?=IbT%|T%&>(QJqS~V7CeX{r7$ZdoNSEQOqD4|pYn5jDDIf9x+DE`A z{|QoF{_0q2J`7D`C3pt4i!7Ef1r+8@SgNnAe*h>C~I>#3@1Naa-AGuHrRB6PNddWY1ji^h{Xd?a+mgu?Vj#6nYgeK zSF;5{128OPEIV4wHHyo5E|gn0F88necRc6=h++ZyfG6{!nxkaII4QWNslQ9Nf~kog zo#&Qv1ed;t%YKF6#3d-OKmTB!%p!|bY%jxiJoP3%`)2!MZK)cJ8lw3UhFnjZYG<9o{*lU{ zmsDs3b$IU0M*71=Ulq*lY8gdxzX2=>@O|ZJa0_Vf!@5B12p$t&^A3*?L|Hd^Wu{PK zus>JLV;V&HjA#``-c<}DY6p+<9ohqdSn#R`6jsGwgo)mzH-utz-0Pga~gM;0+% zPutG$;M4hAdp0cqkXHMWBgIb|iCGf+?k+dqTdMH2nFO8u2jQ|s-Nwa6-E--+U796k z>c>8kM7XKJFWD9%H3GHAs;2Q@fH7zAZew056UDkxX>6R}jd0QV<_?4C4uIJ*ZZQCQDZT8N*9o$R zFCcCFNA{pCto?idJJpQK;tjlEcj*)i+`VKB%Xfrv+!0iKC5YcyrLKgc0WablMmO@0 z=Y^*sdWic4i2E+*Sm!K%@!~6ls&RizNXx&yST7sS8sIv;`qwu5o4L5rJH?!p=C6h> z71X%$t-_7c8f0O4=>D%pj%kL`lS&ot1H`xwg5N33eZ5c=idOK4z7XI>y7<{$>o??k zRF6ncKAXZ|gjGEparm_^6M+7NK-_{V&fjGuxPh1?vHkfRFGz|z9r-HQv$@kU7A4m760dvePsi?qrtEVO-k(_)Q;qSbiF z;@xI9L|m(Nz+BD%Pu-eGixo+1{=54S@7~g>labpEmu;u0|84^CQ_c)}u*bU2Iiy8= z!Pio0R$U{AD2ENaed15NSZAPu1LQPdb*Z&;dIxc&;wsn24zpWP!g%63V&uFAh zFn!>w{h;4a-Vq9b7#olW7?@RXLGi$^&j(uR+~SlqJ`PvQ?EX3_a+bGP^znuy;1L&F zrZ+D-SS<`>W10dLj(hm{8AC^>nc_1usQI>Yr;JJ|`rpDDomC1!VT)6v-$i#L1^91P z1q%$X=`w4UxRzw*Xc|hD{dPR&pGIT;?=d2m>3wREBex^>0KN}c((DHJL3F?@l z_ui|Mg);{DleX;L4}vQ2j~t6*`F~BAn0z6iaZDK>*k-Chi}sb->l0`}f5_lL#M8gq zk&gwSxUsVok>xEh5+bNExn86%=i56}TKrX2RhBZ^!4C}M&U*bsJeRd@oqm~N{qM1? z!~bl!g7{7nkC~aKir4lz#G~Bfr(`itID#OBOr(LNk>@qe*zR%PeoHJcl76Gw+`RKA;Z8$0lILu%PpE~tIB0X%L=L)e4 z{$#j!^AbMEqsw$Se~`N>eA4crszFL9_+NYdZ=SLV z%sd$-eqr3ljppQ zH94q8Nf+^Wu3d z$f-?8eNFxy0i`TuT?DW`LFr# zXDXw1SRgS7lAX@W?nWpj{iNbk)WAjmbRS_(*2_I(+vgy(p7G>`DmD>A5GfS;k|~aU zLp?>#EYCCkjwkP%0<7wk7-D?{sYa#;)Bjl+8zr!a%z*gaBhj2L8NXlDE~%_iT7~(R zfgI{;WOLs>Fcfa4;$bAh2~=iK(L_+ruA)TbkP@nI;>05X3feT%r5XT#3 z;v>2}asPWCty6)B$d)5yz#-&a_-;_HzQcnYe$&lhGVNwFC3@2Sd-#c*WWu6N>5<28 zHG}2YR5f0V3n+nO%}Fqh6eQ>r`(L|qz4q=)81%34MiCPe+*vHQXyVyD+n#M(U*G?> zH4WY_k^!U06Go}f^X~FCG)Ahh@n5^t>lysN4r|no8XxsPB@}3gQaRUS{r6<$KQGsz z4EFdh4E+1kTrOy<|DT8Jf<cWi8a-Qy1lu4_POx|CRpGsnK7Y}^}FVn$D z%{hl1_{_9~+3D8;!GhzAX;h-kwAySAZDuveYZzySnx5(_GR}+#f)&roXZdF_9iNxZ zUftBm<1@@+qIaDfUvaV^TzuvciT<9=hc{rNuL z+FWLovVWc*1_rwKSH56WH8#!#4ey9<92^|RI5w7$#D3E!!hhC=iUZo zh#l9M$jDi?T5+`B<4UHyl;jO}>)PF$=WlL&3&!h5gSz~|laiM&!J?6P+8^baJ`Joq zP36)=qy48VU@&WBjhdErRp?$EEx^SU=t1-_d0=vU9QFQU<`ok$te^lK)}05LMwDPa z<}P=zu)&;d?aa>UWso`19@;ND! 
zTEnP7TU7hrDcD45qJwytPV{&v?lZHn3_H2^R1`yUh{9xOW58rq=I-t(p)w4sm6ZQ< z?LSg6dg&;U9kV2qJdl`@!bToJLPFvrk=?uWW}OP{Hy^GMts-J{RK?D252E!%OIB8v zi8xH1BeV8C_`Ue}cy@cEZsUWF;S-#Hx&)qs>ng)9Bt&UFf%0rND09o%LiZnG4D{gT zrs+LCI^uFBbBb81>*|sTHd-~}D%Z~V^a&#K@vlZ4hUw$}Z%+;{n24E~nD7Y+@jMKS zjTig+`U>9PVFc<_nYf(M>`m-NGV$~~yi;n&Nt4!>Jbd521z!HU_)^})VKVrg`1kJN zI$7h^9s|JYgATEjG{n}5kZn#;3L#DJQ0E|`gTmv=*AJZD6+il)lJbAGwn$JVhIB5R zUV?OKNhSnP$`zw`+6dZK9EUazn2BG?$_BT;t=q^?fBN((6bk(mAKz_UF3)u6+1w$M zATBOW=W`SXmQ?&@ZCmTL8@?d)I!9%2?jVfq$rDv3Vgf?K%WGewO`CsS2ZZV}#`UeC z>gRUBRVh>#dHS?slnkxS91k4u^yug)%|l^9!P7?M^=^a9)`X=C#9y8X_u4Z;vTY6J z(L5gz5D*+3+!;dHdi+z@Y3zj{n7)n>tac9WCMJ)6Um_R?wz#-B@D-_w!Jsr=P3?Qj z%jHZ!w0CoXXzg)*qZMrQ%yzcU>vR{a-8Xbbe=lT-qV=qM;Bl)MNY2h_8OE_S$=h>E zT6?iXg~Zs{(eZJQv9XTOuRz13duVTizj-!I0(U9!4p zO-3Z~1o&Nf2V^Ik9-}OE4uSL`LW#5A!UVN9w!Hz2l|)Gxe=s=GoV0OEGKa|IQlh&A;BCmvA*uhVgvr`3vMnC+IA1os_mgr>Dx}(X$5R}maN!;Tb&O19h z_d-m5zR=-SXvkcrIU3mQhM+vVjlT)BHg zoB6kJMV5i#VS_4*v2)Z~fh3aUe;^e~!oNlnDEJy}ut%%qFtMVdA}=rRK-`0moP0Ve zyVmJY3kKalwB8)MZ2jzp|0`|4q4;||`lE-`7#SEC1O?sJhBH~>*|(Wnkzl*sZQOf` z|7CmwT#Gyvwmj&@wpJhN@)}e!8_p*uC(}D8AkiOtFW?Cxy(_i3?#(HF8QY#J|58-6 zHBqSc=+UD}8d4ztB^Nm>3(He{X;81pGlm@?g=k}X_fAR1`uea(PzGg!HNF2`lu#^e z6#X;XhK+pvM%Ouq-iJ$uiyk+idvUhveLQkc`PHj|k&)XT!4_3*?Kkab`a{09tkSbyYHH~f^ABycc)_Y4JGotbSr4i_LUww}Mjq5VX&ihMMmHPT8oF&Kl zdEZN$1=#%jyx_e15;#-C%W1@J>+KDA$WS=B3Fa0ma|4yuQe9l!{hT#faMmzrsv=AB zfb@*#T`cgPkuD*PkcyfbpO_ep_syIea%b<)#HbBrZEbf~htePGH%!dUJsgO~@(1GN zyDEx#-Nf$v5co^S`bZcKIyr;tCpMOTFrU*j1T4YI5?`;!6TQTEBl52<0O}UN%}III zLDBNfq&K>&t1Gn3k;ZLxFm=@2Tf$IZf9`Ux)lgHj#`pRnAt6Co;(R$?S;9ex22=yr z*ViD^fGT|r1*&5fW@cDk9+yrP8~877ZY3D>+`9$Ls&rY~dcE_9!M>IA7`DvL%vkYK z_9Qb9hwZjp?jmn*PFpYM_D1V4h@3ixEJDHb_|&r*RPQvs z%&e?@_+M1m!1LsRm9$%h|7?!cYy}tx56`poq5Fo!Q1OMy)z#I829c1?CrD=S_Ov3y z;KyGIuU^g6IV>^pO-DsWDmqGDFLp|+5AEA<=5goCKy3ZlFN%ulY^09$VhjvM78clQ zwtthpt$=c%qPx30C@5%rcHSBs-0Jl7bWe}0_sT=ycQ7bE&9>&f_VkyGGpuI4|j_YGptyia?oJ*<1VW|kr>sGMvykNpxvk4_?)_b=a3boKRxj;w(y0`i@* zSJHtjFFplgeK_ z`VFS#+JS`v?4zmX{8Y)04QXdV(Z(by>UDc_-3MB4i*z8`NF4FmbpjRBTL=X5?Eu7I zK?3beEG(A%Y2fDY_$mj2Rv7F>h6&&Lu;||4$cUY#WmcZ2Pk7o}@JwNWB9k$eFXh?K z4d))+Pptc%&94jMLzf*rKpYE6Mm=CuWh2G?1!C%8CY$qMEINFJjsPPC;}0E-hVDk$Ja?e=If6NlZL*xkC0<-W-FKI|7ukP+1U)T=v^Az?#7>t;wh z^POp=XzEzXfOsq@dH4F=62UZupdU+vUg?1O>jAl2eVgj}Z^^3K95JE!e2r$)pEOtq z_(l)rnJ>M|$oo-E;H@n-(w8q^+SXD>tPMpB9?I2}&{()%6fT_v@qlF;O{cG-$fz|f zQ-XNFaf6NC6(TJZy_CCCYGrHdDnyf*mrOL_4Slitpv36`a?ZBF9JL z$|KU^GU~dzm)|+dwrA_rI1*o1yaBPrPA&3g+Nx|w`lTTBlblQwti|%fWBYy-t|rrv zoq%8k&SS0HZbT8!u$w@6Z-q4kffU%=<^x z7R)*xZ%(3Al)affBK0I98mA7MCiDA=e}q1IigK6{ll{W0N^5?&WA0eb7{yzumVTo< z#C!O_n+v4+2NG-9MmuX$u2v`M=lRAB>3EXC4rJ!7fo5r#zgP zr#qfZ?ACbz;P8-EI#~0#dj=1PkaQ*KxyqW9qpqY)8od zqLXZ)I{d>2^aTfS4@PP=fvnSh{wyLQLIb{W>qXgL_C*75iflBl*P5X-1)uYXLyU6N!`X=|4^?0n| zNITeKSm^S_hepA~;%5(_y%}15{D|N;-n#u4)kT$&Ixe4h7gcKK9CL2E#x80?u|7RE zXujFaZQ}a!OrwrUdhTHoUR1&B3B&FC!*<^X5^2HOZ!OU4$0T@s;Pvz{%|ovhR;rJ|7b3+J!xXno=Z{(KnTdH(7NH-=^WQzzNnEVi zUOS(R&x502W?zEeg}RrHUl`~Q9VGD(#wi+pbCAw?Nts0Vty(avxR6lzW&fdw7?4*AB1{Ti1nCP(; z!{>1*H1c!1tX5W(?f!lCBvNZO0=;{GdRB5_rOwyBVxU1B8w?dqDxFYvxr@cgG+Ii( zSFKTH%cfhes`{BN=Hco+u<`$}_ttMwcHRHCaVeKlg3_fUA>CaH14v7EcO%`Xw3Kv% zF!a#fNJ|XeDb3K`@!fje*XO?fh37b)^A`_wX3l-?bMLj+UhDl@+d&XNvd`Vgl1YNE z>e44*;MAXl{Lg*58e*%=d&eOp+=<$rwL#RmZy3ABOGddv38eqD_}UUC@Bwpx+tO-} zeSEAET5}j3tEQ-4(L>%al-yQ)6j!Bid_>)>GuM~+x)J{wI+x2ACq!aK+O&LW*VSw3szO^^ao z*|XWMT)Z2Ti4NLRp5g_YPgZej;KfRdt0}CkMy6(d3>h z2?jwm-cT-^11(HV*~7~swq$OT^)WR#s_z?mcKFig`WaFIJN~~U)f%lP#(#AXm+4q< zZM;4i#*rM8heVpAl1)zCt;mL}X4GmLWZ~_26hNp-`cvzRVF-P)3`OBz$C=aDS6+Q* 
z{C24zwi;(WjcvC&BDsr7=_-7RipMbHu1PLCM0jL} zCJvQ%)Lwn)tmS%qBEG~2qI0;xAaXmgm2&J0`SvDuB@QEOy(hI5fJybGgdCUD&k^+862|;X5TM99kQAk zUc#%LmJ~y8XS#*Q&{a}MA@UojJ+!@262fL3oneh*D*RArBn45G6&rO>P1r~P#@M6F zLc?h#qH1f+yy1epH`TkI7nZ7Pxjl@G1_kT`jnjrCl#qlb)0ryyhj2|b^>jl=&7+pC zaGlMQXA%0Y7pH~B=rCqH!&E*GYTF1tC^RHHclK>z@opja%xe0U+1DP;D(ReI#}t8* z;-WcZEomCFf9nI!WsA=P<%ZqG|<0_cM*fLS%W(iq1I9U`5O*8hwDw;(ZLgMK1KHY-aUO(;HrO<0ps8W zludkD%gSm8&HAjX{b0H4?ttv>V4lL&yJx1FE4L>e#*SS}M5fDI_#|s|CD7UVoO7waS)plc$yVXc+9*eM=2nkxd+x}8=SH|9 zRXsI!ji0v>hVaxGqVxZqrP5(UXaIBuP8>7 zA`n^tQ(#|KoXvlXn+>%(GbTF^5OId34ao4GT!8fL7v0UnW z=v|>KOao(f*$;wpnkLr1jH1Z+yq5SjlFHJfVNMtq+=Oc`2@`{FrK|jZB_iaZ`6b_H8 zn&p&W?wPIa=6xMgm5kN8Rnoub)Thbr{(f9i%{m9_?Te)){aB-LGC={kmz+Z!D#hA3 ztqt4T+p2!9biLM-C9imSlZ>W7By71WTx|Oyc*pqOy)@+~s9N`CbJ++(K@QF%sl;2p zFXwl(=8!l-FCL^UsMnS|Wx-bXa!jqIsG-SzOls-z^{B$Rh#-poUdTtV$>;Znp=a5@>*&g&gbj$3$o$GIK^Qv2q z2)Jzqg8j7sssSOt1e!VPGeaVgNIiz}nK1sw^&DCLyXzy_8;5~lbdFS2VPvOA#|Ez1 zBOIb~rS?h^cejD(1AE>Vt8r=Gn?a2M^Y+gy=-_Y0v4?DX7pYRYMq9BWV0oKCRH-gY z7iQkEeu;ieb`L(!!pbX=o0G~Ji>H@SIkIVdPS=)r!qCj0{bQ_PKup`3X-etY@L0Tt z3>&T)`!*Z%&ahQU=JnDT+BIY$8<|}vOA33%q3tv;UiepYu!ox^A5HR*K!1OAlqWk? zTZ2jrn%VauOdu*Y#X>hQ7nXi&?$SKEpVg_1f>oNWY2fmH)6@)2aEt*S`Wm7?63te- z@i>Nn)e#K~y`s7zz-q_R%v|I9T_JkKIA;Jwtc(VTCB09V5790n00Vh+Oi?g}JUq~U zpy@jNZXYIgy3LF0wVJ|_l&Mw7`;k~18;OS|#vsFySZMP!zvsBY_qIl%QEtFf*=T&^;PHVgxAH5Rh zLcQzJ8scvK&SMXSF*QMNJ)Mt=>RWZhjNO{hq#oTm0}8BmG;4wr9m{F8o6v4G;a7sg z%+&*eWT!a;P@8K;vBJ$yAmtULsmr`nd&^n6e9`)`g&vC`x7wU3=3md+RAfmloSrdX z&RV`I7{HL^)#3m|VuVBvDm-Cp8;zH(>&e8d-WD3F37@Ql@Lh88BTQG!L4p)2E^;qwHlBM-fAvAsik{ytUzhLzTwCf*#ET|?$$Eg!FTPCrI`pPLtBOGISDM+nON!?_!2_jsH) zHFd@}eAC6JlUq-?{kDQYj7hI{)+zhIVY$OXAw?R560s1vkG1Bfhk4fgo4cm1m&bEe z=delGD;4~QzC$1y78-Sx&sP@ zTwnT>h-wEkTd0J-s^6D4Y)wm4*OeOt7~V!p)%w?R50s>CEL74zgq-5W3R(t!Xn=M+ z!=hG_$QXnpK5Awvq|B?E%3guS z`8O3sA**X&W-WafgtH7O=vaqVqe>-W15JMrAkVe5Ffs-y``$L^l+!L@f07BY-yZMVlA{TnmDzT{>+${-hl2b} ze=*Ix*T^>ZEym_&k4`nRCl4=`+oM!)ssF(uirC+s8Lf^yt-J%oVfP`iN;HPhHk!h( zfm~t}HGyqN{9l2pSrQblu8vz_?|tvdLlVvz1-ds(x|S2nqu-V%Q|bQ@X%9E%le@+Z zp?r~gA@Wk+S2YyBqONf1UoAkEC};OdJhp#KpY`_yOu`M3<9)1!zf;fXpz)ma_WLb? zmS2{|AVYXoa60y<6Def2T*Me*>p^^^@<_CN=7m~xr_YBG;K*(ojAQphk9M@FWvqFc zC$EDR1OAy>l*7~0JgaujBU6jwzn%ln+)DNGjaT!`PO6~~L0~ywadSN*SA_9bb^4sD!ET~T zH^=tIwG`XNB7;$Cg{oePtlU*-`kX-s4}1LmI<1?V-wxKtjlr6z?WVrx%-I=ZEh;== zwIG6ydrI;|ls_>B7es5#S=L zN=H-%_YHIz*p|`UpiVB>vYzbz2&F5PSbY%+Z4)a0<*Pvx{n-`rT1K$uTXNX_R0lNX z_OsJ|{fw?!;^<`;NGv{wytA%uPk+se5lnw5%hjoqxUpB&*T9-81EnrTKlGlS*1N

Ke6=I8zp{l>!e7ki|BQ0_lSKU?g%F9y)1+ba4_>n+Rso38 zz@L7n_$}XpZSL)@yUq*d_4W1n(SRe4R--1?PTa08FZBhAdROggHG!Iro11$LCACVKwP46;a z4+^YKEhgE!W>0-zq=W)%e&xP#>5k;i{u}kl;W&pyiLTZ7Ftb5UG~emn?ShTh(I&jO z>2gcQayWd!ohJ_7jKFTdN;T|b_1h`s8hGJR?_Z=@CF9!A%}kA?{%rqCi$C@#w3y7r zPIk=i$8jpP`3X4%D#1LOah?)QC!+Wb`C}OED0lKhXnX#PvVDYMtGrYH=c$vgPvAGw zm-K%nRYnB{4k5c3vA$I`n6{-j6LeAiA8o9+dsp~bEvCu zA*p~%o)6?o6cd~!LQRP846g$(eHO9Yj#WF(i_=Vx^FI!VNM>QJ4Q*T6kl~SK6Le`w zgu_PO0oQYox|4*~pq9DdyLJ-rFy)aB%U+g2_OgeL@wicWpb8$o zSE_?K7+X~DF7i@MLQPJs)Qxa!Qrv#r=IN6L+_-`P)NK2?xB+P#CqFNTsL|;kj(WIw zR*mk+6LG4nywCEN11Y6{4fC1xU)G1VloBjoR!Tnf-l+b|KkU*@O_tN)FVXi}jYwO%ja=|i~AYxZp2XhjrMer=};(vbA$t59snN>)$pzSIv zSHvO2^&H1o>MP0IcbVX4L3tsyi^w_%=K_A78J#xts%78yw?7z>{-l1XZ1vGpIp)!W z=AF&~>3mgf!t)O7;jVedF^;#;%-((s$s@zwV8rqR2}uhgF=)t8gPrC%Rj;OzA(##o z>PrkGkAH7PYtkR#HEJ|@T!4ePgM)*9T5{MdGz3pbLNgO5|ZJ~3)1Ao5*o)p zWXx-Z<3_y?-Y{5$7>n9|+t$3Qmb5l2TJ&>+z$#OxDfB#YXX!;#zmI znAn|$TU&COtd3=W$?y=?bw7(4$)QYs$>PF-3r{H%75F3(aAi6i+Izddc{V15Jy|jg86S zIBrXQXYsOC>vys;l5kwGN@Zcvw!go(5=3zi(LU#v6(a|Sst5+yyETd(dM6kuAo6$u z`>OH=%iNOA3(uIv^CWF(X`cR-;Tbm|#nB051Z_<6d2E|O=Im>cvzPg?Z@s@6ipw>H z1zOSqdbP@s55j2C7E|Ai>`^g*HPFBOPBJ& z*iLLiktI(>nB14R*KN_tvrPr@IVACHlvzSbbKfsq`a4!&CscjchXp-$k?$|Us+$;R zfBTpMsf@Ep6o!JC2F`j{v#zLgDw%rhA5B05Su8wMO;jhLoyf&EFcXQ`(QR~$4}2dP ztEO4XJu`xQiHQF+WS@6SqH(HDVnA2;W`k?e6tmzbRkvzotz}kZATi>E{v+oU-S3(3 zBNU-e(0Bt5WegED*bn~#BtEZ-KvE7%dd8XzKY8Z4%YV6B=NiwG!99Lae1Sae&hInA6-F8bqYsWDE zdJ0n%mP!2_Xu5IOl0TB&^*~R11O4h-faiKx>d<^6gNc=E|7076mVmsHduX;HzvGfn zizhb>{>#jMya*3tWtZ!ul{MDUo$Q$%z2mKC{n1Visq1-3Of~UE4$V&ZUa!J)EDE!u zaix_|g4*R#Rz|kRaR*5)t4m-%r4&o4=##fF@u<0LXYJk)C> zO?2xE^AlcPnh8_3wZd?GUOJNBd9VmQCPGCtEbe)DQ`O34vZN?sxpfqxc7t(Yr#{0CZvN~_v(%nOPUpCA+)qSZFKf6fI ziz&Gfz3^dYjD%>l)77peyfFCa{RdSAY)N>(;(s3(e{-PSzd0|5@tQV{* zRC&1l$2LNEaJUmOl_o5zjEEdES>n9YN++j@7EFw(9y}sHVTFNdJX%PEDUby#^ydIY`smOQb52aVNb1DMd0*RUhO{%@>c$wg zo&R3724foc$BI+^yG^YT*~7pDGgs(3x(nv0NYD}y@>iu}8#ciLNZ=SfQFT0G| z9IZ0%?EDjn=Ez1;n4e!+le@QX?rAh{I*cUUB-#q`#`)6g@nB~gbsgPdO1~z4kU}$&A?6=8{V2V@LOJ4n`y(~2%4)j8 z%W?7lI;re|0vae}!XyB5JN zR2`+SPF`BqG+x%?)pNw+7@6S3hW3vSU^+CYxF5PKk8L6N8xfA^YhQ&@Ix9pCmguz{ zNB%zg@<>*#yc~S#X1ol&qxNx@1fD8+$d+ZHptgdb@m%TR=|rk~zq-1zK1V(pWsKgj z*}R@vo}g<%H&2T1$Io0d@HF>+d3EKo;oM>Tav$29k119N$1Yj_V}yv!`S!tbZ6Z1) zPMf*IV~EDfvrzNF#nVkZOxq;RH6o>g9$k4Xtb?$s5%w_~u0G7?SmK@1!~~2PSGs6A z`2rt=lO$C(RkJ#CN1tJ#J2cpD3|i3lrHDYw`4!x6KGTSZxHie1_svjk4Lc= zkL!?7_LlmuYM`PQbQ+GEp2u>k*99iWVF)_^XezCEsgR>+jwZ2$V!4xJ5k976x5f~} z^j2l$HiyJHHm0b1Jk(Xgs&H?V6kb0X{N@jAri(E;KK@iMb;y)038*j->7Wd*Q$c!q zdS)hYXeel6z6OF!=bW{TsVUf=>jkhRAzB_c0hy;OH5lC)lq~pYs9F!(dExAK1Fa*n zf;T@NVltt<#t!;oWt$!z^mFgb8|XCnJ@#qu{R`oAGn-QGUr3RVcZknR=)MwdJ%~+V`dN7j!A>$y!kj6s+7uku~>oV9wXfF#O}o z`d=keb^6?7YaW4XKFoFW(l;NIB8E`Kv1lUR48K?j*%uHud>Z>C!Cz5s|JSn7V~8Xo z^+tL*ryuRNt|m;?EB5z7eeY!k(B0{Z0afdjWmQ4}s|_1rwao$pUlANG9M4IDL?WT1 zBdm=YX7}zfV;AdK1Q5NGlne$!>DY9_g(p;e;Kg|z!^j#h){=la?SLmz}(!AmgUS+ia0gH!26BZZtue5vn@&`4knC1!xSQ0p z&pt?k4+psKj6X3U2-P2aGw;Xs0x_1dj5$P(H@v)!?hYqsc_b;bzF#szfPE$I;D&=W z^r;Q&Ky0tmtN2gPS#zPIqxZ%!;*HnMfZhWtQW)Y0xI0EhS^Ja5<)BzxdlOJ&2Vwzx zZcpa1|GDe=V$zvj%Y0$={N?r0zygqVx3$^N6hS~2p0|;Ct}{F$349X>vF+8R zav${`$$xd)h2v{^_Ivu33SL~ACR1@*(db~s-+kO@O{khY^K8jU0FvffO(3@B=B{n_ z=766gqE=Q6Az-r)bZ-D^3ABjpJDiI#rf91qb!*6{IJVhgv;QnE4o$8cV;P$N^Ds=_ zTwycK-glq|yjrZ0+Ua5d8Ng*Ql5Fg;jmv8r8u*Ss$a&u30N^XZrDy8z`;>KRcimKJ z9$Oj<>Oif%QXkx*4;ylsv8r}-aBy&T7F+0IWeh-?_V-ngVvL1K-bV`I-+JNJc?O$E-mD!jWzus|RO8QWiGY&5F+qU$fW7Ut~+ zdPHEK*9TW$-yP0W)g<13Vf`0rxEN)WIC&(pK&ZL|`}uRV*pmwnnRN%=i+cj)9-f+Y zQ`fGsLxu?~qL1AkbES$a(%-f&c&+bAN=YyuRQaof-7$Ja|9SSMFI~%KHZ}tYgb}`f 
zU|D&2sHg#e*UtS8gg|2c@bIv;RcJ9>D3bGn;hDsF(=GH2=l;F5zkhRNP`%W_WxMqh zXMgrazoIlMzGtmffSvL>?4|;o$Bt7#5uB3<0Fm6>u82f-S-yjIz$I8M+Kf<>aNA|p z)wyM;tZxM9zdGc*RqxG8Z|-LGl%b@Al+vBpPbb-o=i>|yOJ7e5~;mdFB@dTMIwsoxhMsjl7cP)?DFi-^$U zNb2qB`TF(i7ahOqDNCUK7LLTj0kBxfOXy=Cy>yFDE+&=gRBmQK2u)8<-+DazAZ!^x zSUwgLMF33zu*h+ClJ(QMTNo(S;C8%`WYgq&uq0AHC(!$z@%3wisWKyA;*&V6fVP^r z*;)rm-1VEQiIU>|KCxw@Ft!5PqRBu;N}Aer!I5x`^Jwd-auV9+=nHP)_M;om)37Rg z6Pby$X8)I%#_!Jgt~{DfO)BfnUh2g18rw&3=9;E%4== zeVenhnN^OUS15A+Cp3)u*=VY@wRLG}DfcuV4!1Bh{jr@mBq1wHl>y>0zeUyp8Xz4v zO8Mj{a08%F)Ai0awh47$lIUD7;T1O=eicg3Ehu2aM_V}W4&a}#8uxBgtayQvL2(sa1+F>?U@B_`%8!$HF4=7+kChqqN{JVgo$3LxxC zwCgQ5833A+j8`_swkDYOaR2%}Tvzn8-P|r`Y?@PIVkoYMcAV09op)EdqsGR@>}s#a zHEh5eoGUX5R-{3n`DkjI3y1ewR|BiwWdjJh#kK&dJDP7$NbONSZ}Ul^&755?ecILR zI>?+e8N5~G;pLT-lpIJC6zt{zbU|JHA*qO#MjVzo+^v(vRVu zwF^Lf*xA|LPq(pde=_v}La)56?8VcL0q@%*z%GLNs#)@`1N>*cJ8$B8(I5M?=$`Ml zVS1myb)v07g7w#qC16(dkT!iq{|uRXY3#&Zl~unl1_z+4dnPF-CG$5^L==^* zlcS@bKvZO;yqelYOtHu6&Dp}43m-6oyE?arae-HLbGxi2{S7y#Gb7?_08qNVTDSwK zt*)^n_#YrnkQGRvy4Uv!xHmj0fF#z72Zc?*qx~AKKN1Z*7#<$R_s7bViIQk|sA zMn`7>sOZ5oWlNC&(5|!d8E5kaVo>BfyL;>8H^VC6urU}7H$j|W`e`yyLE6#wsh3aO z{BG6#^~kZG^p*t$1z`vI1_Jdw6e8ERUiTun13<#~wA9q%VCuC!>CWg{4%)H(x1BZR zDECkwj93bx)o2!hInfb}CkS@`ElgmzBuHg@U&MJ|fV=LVxY;z{oKEO?19}?|2j@wY z;Zs0!2{r;6cZ6&UymO~|Pvme`I;+)FAAbQ{UxB0FDDTC3+TQkd^3BDAw@|O(|SGQnj?U2JzWy*d=A~!rtE27HZYGuc&i+ zcw1_%-D(f``0V&N#}VsapO6O^9zu8U-?AY}X6A@MfEx~S;0c3e>|BY7g{AfA`T@h} zf8QsQGeG}aZ;nUI%*<$2iqLi%uL8W!1Hh?iRKNWRO$Hr+@ApdBWcA0duPrL4%>6}XPDmS!iunaKj;A8{A(6)A^%=Xtg5V- z*&M5huJ3+R)&uJrwRT2!c6KGS*1$f4$pD0ENC#~Y*<|#-Kpi-emX-zpo^g5O-R;%T zxILI_fN1qw^ZPw@WO9yFnlHy%d{DT7*L6^8{9m3kB!Jf)tQ)`0Xg1jzXasnuXG6dp z%Kz}jm&%A`>NgK05{wJjGjt4$u#gbnT|g&hX#Ls*1Gc}XhfpwY%>}eZER5LobjqZ@ zt}ff#11v@VUQDFB+xLHb`N5X-%m6TgjE%>UBgpF(OfOsDhCIEo2MfX<^zU-4xCx9S zl_E_7V&WB7ZAe539oSC7^Yhm-BEh-^*5;KCJe_M`HP)BT|NZsff7+w#k+y5==;$aZ z#k4J+gR|wT9FX#h;{JNd1*#D#DTt5BV7S%Kp|zFJxdRNvw2e>h#|r!QO!Z!jiZ*bj z6D|_|_xI@D0Oyd3D-52Ohuqnj1J%Rj%c*^iD1r?REL1D}ku8;bWi0txXEycv7BrsB1HaE@x2U@>( z&lVO}im|onkn}`4~ z%dbRNA-2i6xf`3iQyUZv;*HHs&{M1oF@f#+2bNC%9s0m$eg!COg(!)vaz20;0c3tP zPUm*Ky6FVKN}%|_W1Ylzs9L5r3qOCp7Zz??xS3nH*)|cp0zpLMgcU5THYV`B|7qsi zg708>Q$}@jg#~w~t7~Y`($G|>gX3~6z=Fk2hycOHW-<3~0QS$x&UR~@2X^u|Itz@9 zo`Q4#U*BDduOtD>$CK8*L}|$W#e)7 ziGe6>rv-vsf4i~GI$q0S8@~r0L^6opUi&j##2j@7Prh`W~VmWp1X;)>ihMzrglEDqMF>)cLa_&Um1E8`;_72i*gIh!~Tt~mw80#>)`~? 
z&|ZJn(^Vh7Itj-*P8RU2lPwn@>*wCh$S6dwJ=>k5g#eNE%y9XC8vm1C^z`1!qh3pW zQ0E+Qn=AFWGwBr-4)Cb|t1R>Bvl!^g9 z4FExp2?`TSjv253?KrtN(LP9KjsZ0#UfvUjzEk}7Ti=2bT$#G8tA7vuiVC&`obFJt z{sD6rvGOOZ_WN@d`o^SE;H3gr5o|VV?brK3@OXV~4eVyla{I>0r!0Yw3F-_0%@uG- zz;%(|4Q_hmX#VeA_H$qMwLMciP;OPVUNv>R(jl!_#VPX>cy6d z_c99B(6f#`1beQM0iqT+K^Ol-hwmMNY>3M?R}W@1@3{B5r2`9}K_IT_uK8K8=8lc0 zr^}W9Hkr0sTfB53l7R?^=bDSkH3}QMsDYCQI#hwo2tH(zQ?tRv~xD&vSP$80gjKlx?;m6{0>;s@u^g zZ;eu@#njyYOTit}eE^ zE{COIYjnb;yu^v*#!@1=F4%k}fIg zcLDeGdRHkdX0)}xtbF-hj-|w2wytm&2%T^^u!WG4^axkUjoTI!XX!s5L6tsaKRYEJ z@06)H)qaI=htSWShUnD(GsKC2wusN#nDL}ar{^5J!HG+CTB!c(IgOG|Rg8AVG;={u zilcf7H6u?+4TV8iZT<{TxUY)wy-P!Hv9l~XgVnEXc$RLpl}$n;H0nGvtt2^A6U1_y z#tFjZALo1Q*cTi>Wg+W|^7E^aWB#Q(vK}~T1NABE_U8=@5i3rq*s=PVcYGr2)ldI4 z=cnHn50!hL{HVVk4tgTXi0kGJCVJx&hV228x>XF^^&gF88bk&p^*vVX zG@*s2o4K?zw$wxoN!rcM{Cf8Nq%k`^7zkyT7?{2DO!fAh;nG@Cb0V>Ggx`hBW3zv3 z=+8&SYxh+<2Sr{uE7@Bplnievia=#MThkX?ZY|vhsCV=Iv;7k#(z@~_HYb0!uc8Y3 z5V+4;wQisPbm3FodyzA-9x2Ycbwm*LdU@o8G~84>O^l&XfMQ7d#N4@DmO1-zy*nd+ zjl8?7w1R>3_u(x&^StrEE=W}R(Bg@veqEno?%v5~sI-`geau82Wn5u{38UcT zq?T#Rxr3pdhl}mLyu<3V5_zZZO-S3&g>@&hJVOXUxUVp5z1#-P^lYq>gH!Q}FE6p{ zb(F#S6-un^Ood9NBc}>YB);Gr|2He z`2|QYLQ~0zn^?+ITTXh`iYt}OL|uMOBELD6)Hdc-4a4#vZgxC;9HSG0gzV7m81x+k z`sb%{ln^&QY(oACPFthZLe)L3B999TtIwn2uWzgRs}i5)DNXNqLNX@cR-UET@<4bX-QL>oVQ*j{J*JQBRwW;kqOAu<&iN!kk)pasNwgL0%-6ZDwVjz#IuVK|K#M z$D|;5a&oCCD)y5|2SrjdD{bwd^xKv};o=`FL^b(&m3frk4&cm@g}QC3fk&#w0MG%~ z$~`^OPiVWZyXw@h6H`OF^u-a2qik`4lbf`RTLNxQQ|d8BC_4P(a$Dl)*IkgH%q3e_ zO`Nd++I*iD);WH>Y&+K1uW%&;xkTjKmb1yR^T>tdCQ?5x+Pd`?P2BoTSp~tm9zb_ z7L%*bD(-c|&Qt`!9g$16k1)t~4qOMM>A5YPAj!7lC~V>5AGqU4+?CuT_la;{IG}q7iQ|-E~#oHZqb&Xu5-;u%3CN~oY-?LPfAA% zVx{6!hw-}MJUKpN{I6@aZZ!G)IAblkccr_P?ml^EpEhmX`57YYoTc|o4KAlZB6z5; zQn0{L%Q{>P9G$Y8*!i5~e$MY9=eBt!D&n~OF%1qfLLO$!Ycl4$6lzZUm8r0?{^OP* z-*oN<&>>zrce+(#2+hT^%!icABc;BkKp0Uuv(C#NnPtn*|9XHa{y(m)9SDH@U@7ts zqonuPZBIC!CCS>>ohc1d58uqQ$tU3P();+n;`+UcSfN;qq2Y?6&7F(ey(u}@OjtR^ zFSH704q19kc8)&e`*l3=a&TJGp4t$kOw5H}?Rl=Xn-#Js4~b z4i)DqV}(2H>FmaM$c*6JeEvzrt;8k|Yg=;Q`J|fDW|gtChY_c7dFMOtOXPcGZNF7m zAL@LnQUYUTIk{Y7`24jv3pNA~Y8Sqtl0^@Aj_gsabaEVbXZJpWpTxykLbHiYrB@3M zg6yf>{fFC@!+JnUU79{MODUOz=m|T@7a5c9O+8+#@}0jLVLYmNUw#tY38YW_eDkr1WoS{y1xCX_W@BLIuKX~sEwIPF5W>GYFc{dy9FJUB$ z{k!aTs^W30`4$#(IhXZ3@*R_&h^{bpWz+u6Y#HezN-=jH`GF>R$r$lZ-8j*sOay|LL?@df9tH{c0&7zyTe}W9o zsk>r8WUi}?S2T`~Psi^ZNZKTHwg2PAetQn~bh-p_wA{0EtQ9p+)0pk^Y?z{7V2y2^ z(f8YwPgJnEDMg&Gvd(!fV`hILrwnj&6Vrz*<=GT{T*(k!MR~}872#sostL)7=45-6 z0s-N@bHX8Mj;2qt&m)jSx9NVL-LaUBi_lJvL!l6r*L7>^3r*rU_nm9@&b!kkRVdq{ zq70it{&}x$Klrw!{gmm_~=q#8@ zK}Wy(jA16ukn#~be0|TtHpqLdn<1%hmz!gwIT33`YV0f8X#mN1wCR(;*(ax@koWby zp@X4$C@n%X&(i!53>AW1?%HQ9-M1g)p?sCnEhoDsx!gwEp}Xb5;YnDH4t35< z8Nq?HlO;7UeOCqhLFx>#{Zh6Hy-a>do52Qnz98*V-a$Rm@FIb64iED`Q zunx{WhgwoQOcvq^_Q;+hAsMc;WZSXEhYUI1yHC#bAI{C0h!souw;|U%RN@lzIB$$? 
z_Vg9{WwbW<=kbR5iu&A=EdE-3D?IobayfR?XhJ46nS`#hu|pzlo^8@zWNuuucdew= zPGD)JWlz*cEGU$aMfN_XNEXfEEKJ~yr_-j8&44Ww@8jd7%{kbhbGk@atjS{zL?i?2JatO4kw%~@ox zgcL?HQE*PN+d+m@>Zs-!H7bXqIRu-GcjQV}y@r|9tE%2xKDPNp3XfqG-7yb?%kJ_x ztD~1K1C}F;(HTvr)&8C;&+=b;`{lR2yy94vm11G~`#|K zVMS$Xeb{yPYvW+|bKg2|RNmQ-hi)KE6Jo-xp~f*m~`3ik#vmQ+&oo9jR=+)*Ir=bg5GoW zu800ysZdl2+&rya9UkTBk(7}xLNhYShJDPl{o1x#CF!C}ERZHie&s`_@2f!N7;{A& z2@Z+qCqOqLH|jp#@UY}zNxF#245X{YBC6{JhBGK=bs%x+vEp5`O@H>Sa~7q!!rMVJWV zShe@Fh0jxwlk^N-b``VoiCVfgOwwB4tQqxtfQ}>*u%O zqZSeR#484;C#N8vezW?p|0p==RCWxlcz^#3dq&{!#v2l~IBhu*k{*)3=p&%=iaQAm zTr5RpBd=$F^z&cUxHG797?Whg9_}bYQIpauV&Y~or#`97)mWNd%L?s+tsZ6wDCsbh z4_pNJzj9z^DC6nnp1dV5Wrp8u2%SER@Nbi_qg)LjTlm-zzPalbi%2ij-(LTza@=Bf zd5bOcJJ6}951F+dF%f(?Dx24{axcE?2OvNU%{!OYm5*h*nrnrPS1}lb+qgg-Trce2 z+ZkVaTFjz8-Xt1yktAm*_GHuG=jpS!50-i3o)4tV1l2_$14_|8j7H&I4b>BWsX53& z!dqplfR*ExIT!yQnA?@*tDvoWs3P$iKgf)x*g3jod#YKbfp-5OE-+l*2R zJ<+=U7FYe)U96D3-gSs5uc~A*rDIF`?BjJAet(&7ds(_$JI=} z;VIG;PGsTijMHDM!{w((Mt;?mmBgf*S4t^AuzZQfonU}vN36T8ifNgh=c$UlhC*4c zN=1g0>@`kWN0mbRdv6;Y%)|$MT&iTZR@Q1c-D=TLYwknSMb$f478(x?eH_cyyA`lM zEEVRPkU;SXPRtIscfH1L{m|t*JWP+8QfOK{$r8F^mpyk4?8unv z%Xs;KaCG?x*~EBD@;lOlU7$=E0oi1w@%SiuB;43nfpDyD zaB=j4joL9?uEP}!J$-gNhir)6JxFbF(dFn^niQIkXZ$X0V@dj(C*iLO&$9y{$r`nv zcF$|>z+u@^V714+^eI|jhD$W;^>7jk-B%x8CGI~31UiJq;*DEr8Z_&hAJXNX^K_Ni z4d0S_PfkY?23AkZ5lQi!<6-=lFP;)9t4Sh?f(Zg9W^ZZfmFAR4l4PrILu2?j#n zor{)CZSdRfP#Pb6Tdh&Z1MfAPjM3InhueSl5num6z$UYS*E7XVnn+fkBR~|Xq2~XO z8uQcKhx5c&_HJ=yGeB$nbLP57Rb_e-*Y|Q4{Mj?kw1hUh0g=e!^aW}oJkCEyx6%4@ zWK8fYdIsp?(b<@ycw6$65GxIVgeq&Snubn_U@9|3{GbF(Tlb{<+|`pMRN?mw4F|eu2ltN7yY0WvvShNlpC^{Sw@`%{ zGDJ+#^zNXI1UHVkoMYA4m?&hTmyZi9t~$ewB;!oVAVj6=+OXCIr(%(r6^;S4ud9rL zXD)6ArUb*|K~j&X7T_gSya~NfCY}7^Ks0+`4VJ(tP%c?()3h%K5tgzsA z(R&H^v??Xg*|D>*OV%RHYlvPE$jh(K^QWgvKYGx-r~ZT;YG$NU=x#*5f?u4x?opl2 zEYn?my?1tH6!uct7HCDljqgw7Czz>zBW0j`BqA7nXDvUU`@CQ=KVLIGWO=C9NBOgw zBgDkf+d%rzk z)#9U`o82;n3|9ik2kj%3Yk^&n30q~>_jDCIv}ma-3fua!W!NpE&6K^F^l1g1(@gKbo3Ha2rkZklafbL}~Rwca8;(hCj% z+f=5-lTBY8hHTER63;zTHnEp{k{d;BamP&pM(D@)-6+Lh79Zay0<4x%+c_E(Y)nRQ z4(G}nm!oL>zohb9=qOUgBw%E%TaI!f4>P&yd| z71M4_XjPgr>|N$s$C$W%+jK=;QrYK1xxeIKi?LEKHnjA$$tfE0f^vxak}Z2~%n_gb z=*KKrFuiV1b7Qe|eDb6dT`&mg6A z<|1Jr)e_(4_yRvMn}6mTBm#K{9SuA7F+sE?Puk!xmFJ>sYo1c@VjG*zj@is-}(;=;1^-SP7p%7aYv}~nL8@=CSx{HMs&3&F%6q5 zdZv^=BgFHs*zEH}-K`^epN@Wfh5r8FyLZQv0DU)Qs)+mJD;H+tCj>}jb5>jXxztRd zUpYsZ8R(0=`{5SL4Bh|Pd0Mk3Ouk?P%VUA%?~0(;2IJD{eZx|z#(~;Z9i0T{Iy}VTc#RXp4bc z(oX^ZraMLbdrs3ZDhHVSDSC8FmwJtR!3{K+T8gdcI zQ3R^47Rriiq+^F$awl_Fb#Dy@bFwM26aDUwR)(OLHnfPHOKQR)xi=uTre{AG0 zIQ&JR=6aq*PQFX2>!_hM3=;euZrhz|gWnV5z{nXk=m#hlN8P{I7N48ZCp>)+JH6)E zb=3Jx;HT!6rjYWG;X?cJvp-WZV%2(Qr?{3PlmTWRixH8pcJq3wIImRLRkp~BEY4_V z$_TZd7s~z~D@-mJdMj{p+-MHQV`dyMnW$TvY|% z^-K0J@e+YYj|Hh~1YE3s4UK$WH0e-qObzdkNs~G}%~z)MX)D$hv9CcXjePxKAM05- z%5EZ2GPmV_mq4LE5JI!QD_>Q~tLn~!7)Kpti*edzBgG#Gj4?x{t8CiisK^tGtdIO# z+oi_B!&wu^#oXEVG=t`Ig+z4`>-RUBLvj~(`_+}rl|3t+(^oglsmRx9sltkusLagv z{Jq1@6Tj0mf! 
z;`zId-MR^#7GD%fNO(jjC+1StTX!#*W$nD9Tlf}VRbXgDnz}I#rhngL}i7Pi8K@;(Q!2;l;$TYedVxn zKWXJBukNkDmK{e^H?Ek;b?`4IVX8o=Z}=-%#voiiCIrj~6tddxyI@8L*e!o^aGrXf z+f&e!yuE`9LT#5>qq7KuLlZwzOd=0+n!hLq$D zj?!hoDEocv3A~G!ds|!D%fKoj=(0UC_#pwSX1A#Ad~%PwuHY4u_4=k6!S@kj@j(Pf z%BGm7DFb2jA6VP3u4d4+IrtRsdSwsBO%!hPt@n{6l!S(shAPGIqg)VIwr{O4J8e!b z;SCR~*q6$!ulifF8QLYKIDD$j|M~7FgP?fVCMSdcaISk#02u8<-IGq||21T}Vkq^0 z*snjs_5T|LXMsFBbA>QHNjv!$Q5P0G@5ZFEd{zWf%^q-^Px1ugUCxWMdQ^;8n z-Umj-^p=Ir->GbHW=7lA))qJdJb(V&EaN}Kp-kzdg**ZP9+Sc6H`*?>*Snt_=+IqJ zfHsRf6klCi1CJKxfg>TC-1mp~KNz4ZSH={zwd=qoMMMxmUNb~)$J~6V7kHra*RL)| z@KI-HX8^hRfP^F_IvOxM0W5h9ASr8#?r;A)_S3~BB_-d#tHVbDc`zdb3VGev#eJ7< z9)4|axhy(1cCD76>3(5a+FRf|1VGjQGpsR#j+bMFTHyN5if(;iq~Hcak(ina&f7QR zNnpkUVq#(v5|RfGCM>Ev#{adsaao*c^!E1h!h&3GZ*T9b|MU^z2V#KDjQQGv#u^Vfo|(2_|-SRrsiJ zxsfOX@kZB+P7~zx@1(YM4lv&MEw5HQ{`=OZZjf4~g8PhF*w`wkcPKFyAYt0tmiqVh z_s=zWCcgT&?6`7;g|7D+4S6mZ0F)o;EAPvePVazR40z8e$;q=Gfa?6u@sLTevLw5@)SAn3$>^w_=L61X;=dd+9Qa{qtW}v|z&(0NK96*QKTM=1qzD zIJ?|%RmT5tpGwGkd#|zf8J7c5e`WY+p_n2d_UP*`0D*@|iT^pdEzo!=VBs|>%6Xr7 zo|~5krk)0n*e@;#X)u0N+7s#L&p-ZgxSSq))3o|Kf0B~21VW*} ze1|1vuDZHfh0ptBSEuDENa3-E%1!4~*Ebn=j2cz99ax0L@+QRd-V5J}@=&|OFq6P9 z_A~-T*>w`KtgzdTTRH9xac2#}|9y>3_d8T|byHA6S2{D&(wa6LrYx!~#ZZ@+lcoJr z)iqa;nFCfK_W|*ot?ce zeuW~$i+1~+b-o&?p;~b#J9+V+YLsr+1$=Sd|LD&d{NVto zRsc$nCMUVkeMVlCVB`t|EjJ1~h( zBdx7*8L@zP0745)>QF9m@l;s`V2xDU=hzejZFj$#1+BLauvw&;{S!tXI>_~D-U=pU zNB^!wNbCUBc82{k)0WJ^@%H4BQ?2)kn%4@TB^{l40EPu|IHCB(-fuyEz&-|^S@rLE z+(0!yipRvlV&Fx?3avzBf;-C=D7Ib{*=c~E0i5NCu4gVa%b_~?@>l3}oZ2Lwl#@5> zJn?gyyE?eA;(?3Kd#XSGRIR^C0a-yk2n3(gPPa4L5OAW{r4yG+Y z%Bq;hi3zIMpr<$)bBS>doC(Hm*Pm(^Z7D5ZoR;q@g(srq>YVLgJ{CB^%xbCD^F9dJ zq1+ufe|?bRB3^UY6S{$1GxVIi$gUmf_Z--rSEffAps?<&C$-0xreOonRGmLAvmelJ zr|!Ic1men^!4+7?<>i@yXwRoT2DH`-S|AhOvan~osSJW1Ic;BdZ+>QGrX`{d+^e`1 zJuI-WWCebq-XCc%*QEf})4JiHn*}&50pnQ~LQt0y_|bJl9&V?e4h^b#v7yl;C!yE{ zJ?|1?z|XHwH5WYhIRGf)voYLRhqY)%KBJYgy`>*07r#Z+ZH*UO;|N^Mx$Z8Vqqo3a z%g)3Eexi9Snos;-QBC)3hNk-RbA!0ZNjYyAyK5CTx>$sfi`QXs%euo}CpI}%x~^3w>Es}y=ISJTz3@QIWkX$I~S%5y2iij9Yg)F>$88|n}ZP?tMDFzI~`4> zU9rKwB6j3n>`|uTwNL0hbvS#%uhcZAQruaMiw}zm++T=_59TcXZ}@&^*kv9ppVE*J{}%3>KkEltL%Y=@;%b87 zea2en${?bjbeo7B#-qGWY60DF2yp+)QXGWGS!e5UK9{u;#`Cz1+VKM3+dUC9s8uDn zqphPHu!>*;>yh7L%HYo(Ck!ar3(D{0v=Q&jXaJGJ#Ol=Giof(`RxTH7V7Q@@D965o=6tKkQXtX$KCqAiOQB*B2!8M;bgZX#mxm9Z#X{otm% zUZ-vP;rM(twnuNOVn9GF;?Oa$6kq6AOg!e|*OaKtm%eF4C-LcGte%j1LwhRhPhG#) z3F-^hAI9aAJIB0)?&li_Wxb2Z)XPcc;rBpXX=`&6@DQ(eNu+>UfnQwE+KSzNr$*HM zI7!qUR30y2yYr2>R3Izkt1klBwYd)r9l?4F5G9|3$+#H1SqzpS+u$X@Yirf|a?JV? zC|KyZO@JBLcr(doe^=`gWqcWMFk>RN(78WJV?bzGchN~xvq1Y&E4PtfM#-OdtK3SAAPjR)>}9$31-nUoqHFU;A1DAH5; z;Lo5;u$yo9Uw`ZVB=Ftl=_qMyEtn;M+QL~TZC1yD7>ETP9v+K2yVZQ;PW=xVh~?t_8pGxkm#KrJKL_ z^_j3?Kg60f+X09(1D{tYD0wF4=CJiBPgJuHUbfjg@715NUMGi**cmZDW~ke{O(|>K z4(VtB0K(5K+_3W~*mct{rA_PZd7$;qX|! zk2wLYPnFC2$|&sK4hpxUCEl(1wd}-a9=zUrbv$KKEjueO&ILD1i)}e+JaNXIKR-XM zOkk10Gf~+n*E_v1@!pBQ#0o7N6u^6uWFhF7V48# zr;^oJ^{;tj8oi2CZr5Lyqvg=QJ2YUXmX?;@c5-K)(1M2_PLdLOR&7?>ji(H$jJRSX z9X30btV8N9Q5y2oRl;%gwqP!Jtq^aKR$wk4oOQfjF?PMf&H#xOUEp&sl`YOJj$OD)5dKP%B zUL9r$Ddjt_F)vbU(s=6ay+1jm4jzMO2)_r1BN-vrMxY$w&Q-g}D? 
z;g#-f)Zu-G-Z@S5>yUf>e6Yto1v4D)hb|ixuj8;;{~0tOUXG)yRkF06!9G17rmYj@ zA_^2`W$0N~$D8HDV47flI>pr!VSeY0$(x2-_|vpX5pf&Mk1=ANf8M;L`#yh~RuVBs z?Jaz0c8o*f_JEIDW>}5eKZbgmqm~(J)&vm0eGtH};r+ z&P{yj^rfFz{qoo>_Nnn=$e*%wf)z}2L@Myvz<2i1Yb#Z5PPO$2gcN+VElU``MZNcT zo9d*{+LL98ZoF6z$fx2?P~JFI(N5ZYf~ZZ~M!adk>jjPk-Z8DUl5I@AXIiq=_^0ZVP4K5^S6A z9QeFNg32uzp7tA_nnt*)d!B~W7+Xzqj|!eA4%&~F7a-@%)GC$xRaX7&Rm_NQzEKRY z{3LSy)hj+j#szHY%UiEqZoU1{kEt~7Yc8Q}a7S%ZpL?zyUanfWptCx97eu?Ge6Is- zRqVI;U0$Ga00cnnlh_V)+UY|3q@_~%|AV@E88 zAdkmtyKR%6PON^RWU#)3f!EMeI7#>wRhzCOEr~$B(RYTV{1-Bm~!)2%X0$! z1x(y;u9;K{70xM@w3mJVHIkA{8zRk}mEDo*nLX0^CC16F)yzJoxLp0N9|ePL;I{&K z#JHdQy%sYxE2OkW(OYW&8%y3)MSJr>QqRV+dUS#jI_4AaKwJf_h48zeb@|uVj0 zGH0A9l44T5I`>;%Ua!c7#jXt|D=(aJj8xd!i-K|M9}AQZ@nDzf?eXj1k~9v9d}4I2 zDX-`wq_av7=j8f+3r2vMRFbAlo1IjWLDpZJ-9$t6eaaq=P-O4JWaz*F`ZL= zL1p4$AJ@T=lUsb0%`9nGuYyXE$XWLwXWlkZ^!`M4<&CV4!HH$?J~k5j9MkUW=K15g z9t&xfpU?qMvfkS}3UnP-jE(A6><1QH7@ggC3}Nb1u$cRK+T0(sKxb&jtXITNML_PE ztd>~hbUfAw>OzajZqqF&MVY|UW?fBG+ zR&1<^(%qOD2@MMJaO3A~_LLSJ+olxxg+P9XmE`8tk4*3~>6PqHWyCUUlJyA!vBPuP z&KIW|9qTkorkl-+(LQ)`ZiNX(F8wPN$fV-@+p!-z%V~?Bl&!Pf4ezcwH>H-^J(MCy z+VMx$Dqj8g?BTtyujq*a=$+ooK(fmMmAGY=&CXp0<`$HQ!N-jzGRIJA^lUo%$kC3V zzi1`aXM0qqJhH3ws53d)Tg-EPOiawueZ)zrz{ad+sUL89%Q$tS`se*LTF*-|# zm_USi=vUvnD?lkWS1Ue4HoG`VywVfWzCyfGg>+8FKS5BcM#WN)LyOSMyP{Grvgqwz zE}YBdkF%z>miKvkjfe}2xaU;!va&3|^r^m$vJEA-u;VZsbTg+oC8+}I6E62!_wjep*c(T#@ve#jm z6#6!Mi7h((4a?dDjbX5ZiftT#o3QD{O{-B_n!5}vYmHxvNJ(gp1Vf$la5K|e zdl(OTnMl)^Z-$m5n=cbg92{QJEJNp5TT}v_x7} zyp|g<+0?X7o}C;YPOf+xD=6c_TvI>pJ#Hz?zrMl9pzL)r6}&AK;dbmjtgcWK(r#f6 z`Wqw12GZ?o3f5eFZR%w2Z$3*Yd6mDjDSsG(?PTYtg%Xl%QQz`JS*&zgDK56mmQ$ni1S|x2VU&{Xm*S?O23JlyS5N0 z6P=q=XD1grrL9@LJ9Vn?N4WaRmAJn^*U@%1S0+bi~1+ybHZce@qo5o;6r zfygdHZsJwq2M-Jl4PgyjPr>okhX2jBres=-e#bf0rGT+32#6ld(SPJzMw8<;VV{a52?RU~Pe*_`E$j(%FfsZbItY(?XBk z(ZK8J3~{k5CN#ZGzr2+s&lY<#Z%+IB$IWC&q^k)xPL%RO%VjiW(#rd+hyEfS$+K=yLw{9Li!^B?Jy$r8}(m9SCf7 zSk-YP;Vr7{@CTDmht-(qmuQ6eQ>ErBhcZ#Jb&EfA7KwQC>tEAy^aPSs>QtgNo``?KwE&2V^6|Gm9gZBns5{f>c zTpbMVPv!T_S1XQrC7DM9`gWj)V*^_JFr+;Y1qbaa{@?AmpqI(tflN5q4gmj)Yn|E% zBJrs;1Ad>Ve8RB4Nq8;BW-vnz&#I*p(a`=GMP2t~yUSd-0TY?AvtqoU=F58RrKR%i zkMA7EcFD>A9-g*4{k4%18|34lRQ_=O1!*F~#0TWc>F(?u$sg~dq&uo@>{jY+en_6Z zxP#y^%u=rZDz+NMikB)Q)z^@)r++j`dr?qXG9$Zu=*ojf@{QIq@hLJ$s&u>5re?F& z?X<-|oZWSLpOBDaTJ=$HIiicB4?dC_nmo}z?gDHLzrc?rQ(Rm2;?=%A@#SzEaf$Rq zI5$qaPK-}aj;>InC~imU$(vS*jA>d_rh4Ge3dQ*MAg}Qy;96tq!EIZCTUEEyL(NRj z&aaBLRC<-Vo4jX&!jB^dRM`1R)Cx?oXCIAmO=q59b4CZN3jb@XVEXXz%XyopSgJO= zg0wURqXv@t>A7dV1olCXkV7z8_{7_7$hfU!3TvJ zR6YHkdU zAypZVLeJL?+1EcZ2;hAe^f{?8lrbV+mFY2OEnT$Rq`o6YE7qLgaFY>rX+63S=NP!d z2qhxfJ>jWKHJs$dJqOlFS7X71FfpeyzmFh=b%&hbTtr#uGM?{lA&U8R4|GL2;Lj+ek-%^jz2FjDev6Q}PTrSqgG ztbZl$Ag!wKIc1K*EC%lN6FcvY<*reO@WAMy6#WlpchA?XD;`v#Uz;OJDdnyo?i z3!`K$%>kUq!onih(>{2a*^^t~*(wLd^8_<_DPIjsHn!vz z@tz0W@q#U|dQy2Lj6vRGxGiQcEsP?P$xXdh-BYfTm$xH~R@80p*N-WG%Y9w3x<|g` zMYBB>>dk9q$o=LV<#ETO5Zu1_Sg{@e@!+sws4-clF1mMiBpS`L6IH`&8GOTz=}a?c z%-n2ct&^XMb5|XfABTFc)+Z;Y{0vH1T&t?DF>2|~kce@(4pB@kY#ZbAiXi37qEPkS z$eE<Ts_Do?WHMM7*;fL%{^di)}nQky$r`UV>WGOr)BgLOhqC1>g>lFMmA zGeXVxv&{~siz@F!OywjWd4z8{0JlapKhdh%>K3cibS47fqBfd5aUSms6$-c1ja70D zC=^pUCSd)***qWEhMI7N9sWp*W(c|LIb|2-7{AFRI9UUyU-hQ$H&$z39dqLSTM%iVC) zbY@DY=D@R;14v7H-8i@Ti|Ki+;?)DLmd`CqCeo{jIGTHqGfd?0!5CqfL42#GYG)hI zat#-m*=+m8HTQ(IMI8YFhA1cl85RqB8V+xO%XEOC4JX#0r;T~jmo4bC=R{pSlInS zo2u**`DPs(dT6#@Pl1WOVr?DZNk=&0BOEij`Ng022c^@P>?R3AW>@Duh_!X-3F3v{ z>@L{xpa>a0UkXL?cO+^!SVY!4xkakJZ(wu6h~mpBF1bpFNK!)}Cbc9S@l;+>Te92- zK7a}E`yiuaKD^QZcA@;lZ?#xpHx#@nOiHWYq=H@WlU$X&&WSXj#oO5EZKg#@MOBcS 
z3o-t6zlteZnv_-jjh2@1kAyoRP?7d*L^*d}RL}I}Mmnp}k@WEBicRpGV)6J(9;z>W zDlt;Yp*hW08_LT$>S&F8eRIMz%2vh`?K7QCh3n9y9WQm-Wr0j+N8kb5>Rq|OPq(wG z_kF(@ammtEzei+pZ;mTd+S^4vcq!T1W_KikYv>tUqa)JH?&^GVPa<|zZnyW4G;Cbx z4(oMrZ5pYp{1)ATeOBA;gF;h~J?Hj+r)eAUvS zQhP7Qyf-Q88bi(frn0B3iJ{_~w@aC8FUk*OR9IT<`_NqDO%3EJv zJXb9=_V=GDa>=4&j}3ayaARt@dP3#t>SgiR9Xb9O3)l3g{gPj@xq;Wi`+}i5S<{R# z1lCPKJlv&0h&g`6!L+ik?O5iV@H z;&vV|^dO!vSX0>5N3wBtn2)Q{IBltd|5Zi>abC_aR3jooW~+W-uWCN+2-7aw@WeDC zcNt?h+x#h`O|@JR0WVi_S}fkTu`)?=>7QL<7`k7o8I!}f@K1EdAF>emV@$M!gN4oQ#Oo?r0WUsqtXm6%p%mgO7J)8I(t|v!6B@k8C#IXi2O;nR92< zW#jtz%iw@vPWbX~W`g-wI$HjWyLQ9NXh}x-6#tgn)YY=NZOtcr&`akVaDP?qp!bjsGv^xS3aH&8%uL~?ckvGJ z@eX9ht5Rml4c0bFSq#6pyGoxuMp=aIGC4Ya_pb=nEM%bgNdZ=4f;*g*5HG~=>u+l2 zaJ!5zh!ma6@dIAJ}AVA5><0O|Gw;o>0GTELGqA zB_>pFoUA-#!HG+A$p!sTNmV^K+HZ>M9%|}zNrhsLZ)oALIdtAZwJntTZ+cya71Nc^ zFgtrC+4oF}6!b7Jg?wul5osv&z%?-ysnhVnR7zwK})0JYVO z;L0;)cFl{Qk`$}*6b*d@UEE66bC&X)xFG^trgW%;Jbtihnib&ds-*lHZWRCYA=^GX zVt8ZOC8ZO^SU6Y^lIo0E%0ew59^TJ0mf7=7o^B@0|L!N>>PPcrm}>B0x8i~WsAZxo zF3oi3aDjec+%U79lj)iR7PlEkf;!JGDpBxQ_y03 zL$nw$TnEb*jm`u$NFw!Be&zCzs?~4r%+wSct(NPC2Hh@wQT%E}3HmOJ6|#>NSb7c+ zCiqM}vMjxcUq&(l53Y*7tSeubz`k@Ghi*xh@z=ql`SUtl1d4*@JA|oB@$otwZBuz< z9cXbjtK)9lvF?o&CSQAhE_=1uQr|~jRb|Ez;3y6-J`Hva8$YXF${939Ud#wQ7HvpR zla2a`e0FvBA>_58ZA0`UAUyHp7bTI|puhiFI!s@=>@(hxpvHym0Y3xC!nRyM$M#pg-XUJd} zaB$fO{z@LdCVKSqC0}BnaV=Y9Yg=F;MIyiToC6c)+h;ILA`$9IfWgt_h}+*TVR9H?qhfpbzRs);CCJnf(pnm$p!*Sgvt$}=z({)M~&E|*AT|QJFS=Xduyq=vz{kzC#VC{g!*2HiryzQ?C`lUV4%DNC(X@7dTCY9umf9H zUu{uQWm&ys>_-$tw* zF&^zk7?WyN!diJf>()B6a%4lOg;Am29eFF@QQ-Tr(i7{G>Z4-qvE;bVxCksLLJ|}U z($YQ{Ap~@yRhZdWgHs$ge>+pS$jv!98rtzId+p|!267$7#v{hk;+qcXOuzNM<%D7T zRie!Ob?5?rl)XRNmd~CEKf}3ESBXJ0ih_7M43er`1LE$sbWYk#SkUE0LHWTZ84xW!OZ<1^0lai(fI#bk)!;YU`Z{r* z{wMk49b*=`!RI_Xi{H1v1&1eQy@Tp`zlrHD zvBmLQ1*F3sheNK{o~>{3l2NGH$EhSUK{J{jw#~~3(aAU%j{ZGQuUY?;!~pO!WF58h z+A?U(MaSL;dOmsW_Uz`pj;uoR_qnLwa+&dC@GHlefRYkzrwQ-l|+7edV7t_Vo&w z=gJl0^31r3w5y>HMhAf{&TaDIAaMx4)eMlArwIHS6H}$%;+t!^v)@7M{p;h+FIBg- zmRzwHXNQsMm0L4yHV+?Zl{neIHqMEa)7PjxyKN7wv~PdqxzWPYXo9ji^7ZCp)zacg z3T0wdfJe(Ftw4-Kk4GZHMcC`#@00~Ec)L&xRLWM=3~<}=kZ11^D96CXqQ#Y-u4QY-nP{F3Re7K7puBL!xRa{QjHtoJC0CS9qUT)0Z70qwPE+#C$IMgVlp&f> zyQ!UPun~iD!GKq>mVx~QcPAxd{E>R)avV0P0-x_%rUnQ;%T52@OTlbKS)GI(GZuzG z%u6*h)H>NqByV1!9)D4PiB~wN)1Dg?n~*WjP$kpH4t>Y!bKTZ0&QT&-!CGCX;wcqf zRn>$m^j&aJ;h@H+^q)d=th%;sv_~>=P{J)vXKwF^UBFYSpB6lB^>PG0Y?~StrKWhVi@H z-QVxO2mj~4C%<_z%(>2)Gv{3A+CHE6`$dGoN7e>>DOE@uDNca_f~i!o&>z*}8$%ix zkKD%mWnp1!WTWcdE4xssH{eTk)vrp0&aS@tMEtoU&`j^rV4^j8V@T%uku5Cc!G<=v zEq~C6mwj<(n3*}|&t0xP=Yv}@FTZB-Dfijxr@r@aw2HObmcmyX_}H@VH}Ih>#lXm8 zbAG1VU#L3n!A|vM01PFq>jYhk-dpt1i7MHsqZm_+;EYc1m zdRHpO#%zpr8Mo(*-Z62G4XafgR8WeI=|2INnn8^6jUIooZ_=3~srJi4zKsj;Bigv; z0p3gI1+m;3{{g7{2*_Q8}q3!%`3Rr58G>I zlsXr0)h35V(-EyZ4~FJ^!a{JR7%%uL)~l5^!5mfb@ndqn#M?fDi&C@#OwrXc+C*t; zsd-viXvrcL;WcNGAOcC<%QOnIC~9G9GBb=eq&{6*|T+`$SFR4+}`y2{x~ zZPKa!$PsqxAtRutq~ukJo_ffdY>qc4`TS`aD?`VN`n)C*cE+p|rX@CZo{llu24~c) z-mdgp1n_<|sdBP&W>0ObDZ6R(0hRq%V8Ll-xyq9CdlKD(DLoz0=Md9lgUpOsruEm} z>RHPn^Q%0b))eSmEgMNZKSq(w_L_;k)e=N$e0rra^-f)llkHKC8%)Km1r_Z&8u9S& zyKiHJMX=4-3nB?pDMmY4Fb``u??*`OLeKK2+xjIo5IZZEfgTZYS)J%(gAq~H1DuSN ziqNH3g;90l2DX-V#_w|W-o0zS?Zj*&oc<^xDpO`f=` z9&^)5(fE7eJbF$Bav8CWwh7TEucW*|=jGJOztzX?>wRnWGR zx|^^WzxQgpux2;2IBAGD^(iJMD%7}TmyAk#`u1xRzMwXj3xvuFXJ6-vMdFHt!@$?i0{g+}Jz|`nY@UmF{z!DN7e$ zshT=gD%7lNXn5Ez3(6tI>{^5EKI~t@*tG;#X2#T~H6o)3bbsOB$3E{hyJBMYhCwUJ zAR!1I)pu0MkjuxHF}c=Zy76i9(Hsx_;`#+G5Q+0Jj1uWp@(f6f`2=Z}4$0>?nI2w$ z_D4V_KUO2wUa30+2|A*Ut_zG7hEU;>GEo;?&gUd`TuW#x=KD*RcBUe&S>oIEzZb)Jse+RR@$Ks>+82UOWIaqX7&iDHMRVer$o^h%@f9 
z1$8D`5oyK}Js+JSbjFH~USrNbbZy45t8g)X%_8Z80#*TjS{(|jia$~Coi3(gb-%38 zeH|AwWbXgdd7-bX^UQ7bi*p!V-KUj4i z9+7;xVj*Y%HRPAb9BnwSZ0ZTIqrBbJpXSesbA2!oN4MTD+K{QDa$lbF^|cj4UOAYp z?T)kmNkxuCtX)>WZ-`oTchkvfR}Xew`R~OO*%S#hWXLIyv2-j$EI19ev%TNK#g8Pq zS$(W}>lfMXq7>e|Ie8P4aaWKIAtWcg``7>p#d!q?Y76I0ltFXsA3}=kO+E8tR5kX- z4LM=&%`LHI(1AEr5l^obU*=M|%yh$YMQaX^8@I*l}IY&9jMju7eVO20Vz@a!|!LaZ+KK z4nCFS6CP2OWw>2G$0kHQNSDEW9K0f@(z@~mpOgTiTbBL{)vA3kuSzAwTt}2voK@es zc+JA&D(2rAU0^P)e&y2Yb+1tBoD>tan!t5_vW zZ|_x&RXv76)gkH!l)Q&6`@91m=|_%3qjapWLSopKieu#nTb+gws~7nLOWRcl`$CK^ z%%?2La_F_K$qH>Y0FyvOhM2n>=&@!aS>AOs%XG+hDf4q$&$gim}R^*hS zB*bN;cfYyXRRe6AKYgi!b-cA-?nKCIEqkaOhmOr{n(T;^PsRR5sP>OR6hd*PCd~^z zFgZCj*}h{{-s8l$+Lm@VdQe+An5oH8Py!Xyw*-}%QGk^PO|#Qjwy>)fo3@!9AG@V} z-47y6G3MgqOLX#HsG8cEfckxt)IrY1r#bdqU+fk9=%`sRObzkE68KU|WATF^$Ld^b zReXs1J+q9A{E=OwDh*^#-u#dd4C{B}t|)%p`tJ~JCG9fvB8!y$xmBit z24|n*_Z|sdHgoMR$=J07| zv`PJO|N6mFD-E_Twoppg+2t^Rji=4o&s(kcE~Gx^9Gu9vNK2dxQJ)Nw`bNac9r zq%%-3>`|&i-&-p#8oT(NUCZ!~9p!QsSR(he&_=O!XZ-~*#2`&HQaUErQJQi6nV&K* z8oACkV9xR=<6ruI*Om1dX;-M797{t!!=Fe-)}aW&ZTp#fun>7y^{glZ=Y2u-zcpE zkztMIQp)I?W7>m(+Y4X1&&)qCFQ3e=n|c>8I%>ypsdZ&!EAVWYy@eCD+9OZXi;owN z^#hu(D`JXfu$WuD8pqvM$sww*vENJS`aR5j20zmky_uOkMuioZJ&)lpR<25EATHMa|rg`32cAi{x7Jl-!|MIOB z)L&X<)oZI+DqAg8PkN6*cRp$d@?(9LsvhFQHvMPhD*bd8hZmcw%3iFE-6=keH(Y`@ zHie8U*xL}rdMv~zynnX%ExA0zSWs?gd`M5JuH`r=e7+D^6iRimCrMEeizB*;3HnjJ zDrE!utF03f@?|DQE>V&JPsOS4%GZY5296!$(=&es(~(QqG)npqY_TA|GfsK{QJvn$ zE)2!#7Y?s?*oId>%kk~5Qd94dU$AHj=AyK`TGGk>h`05&tIFM}ZOn;rHZ9M~SvDcKB;b5iREy^Fu&|ce{p@nEI7B=4VD%3X z;|{{n*t9RGrPmQKp8dth#0RBuwzf5Ql7kJ5 z{rv2=V&Lil09{Md@P^UlUdtnr-$%Nx4xbTFKdsKZ z>a=)XDC1A{D7C*oknw24?tCK$KR=L#OoXG6KDsk+?-Y!yW%k>ZVqNF8vQ`n-I6~B^* zFIL7nk5;dZ#Snrbjf-5^@K|pis;ma;7SgPm9@CJ$!Zpj|Ur!M)nwCOs-^}$GDvkAT z$hn(ypp#N^|9X!3z&TK87P7&5@DlxlB%3C*BJ*{VqO|w-3(2Vbxnc$u_6@~9Z%p$U zrtov{3YVWemTmN*M2jJK0xT|~Hf#9?I<-mi`n6BQfB668jGd6xk`GR0^9cVo6^iJ@ zFiW{tz1;XP+u8mIBlzO!VEHKvpjHKqMLA`?@GSkg}$GV z?F=zn$d38b)XP*BA0Mi0?GH=g@rC$~V=xm2|6hwb>53rza{dYExWBl0NHg`43$y0{ zXZt~5pwSz+qwy6u4M?)Ch3y6D>YnbtXH)Ch5VpTtibPwRm@wHL`i)4Sx1H`h#2 z0nj!9MTZ=x9tuGaVY}{gakEe1@x0RxpKf6QJFqA%b28?woIsHu4)#WIIFv~z z=eSOVNivQr=W3>T>!xRA+IC?Ja`}4FyR-jX3J39I{c`S8M_khG2&d{6NiTbagoN1J z6TsS4?2n43Rb3q&fEd-C;xCeEEW`kwX(+RVLM@F(`{X?6T^AS_h=epq3kfY8;uaaW zuHUCDnpIk61Mu&~xM}Y#qp)@6I;G&4_sCY-?u08ML5bDu(z&30%6OhY|6NwC0`_W8 z^|i=FFXZ0Va0kyE(VAtz0z$)(ZPg-J?gj&yF_mHXNux61yHU7>|1sOQl2+>K>VUEe zg2G+j-0^Y*Hk7qk@~vh&QqHi$CEmd27BLx`o|B#m&b;k3=EQ@&1lQXsRdWNf2~OWG zpzR;m(TE9?gpFF1?iv0v6nyKJ$-nkK-Q;PPm^zvW~ao(^TP1S&|I2l63Rd zy>PSr`i*dzCQQ!0fb?LBM5xpBdOzPcpHTQ*E4rQdYs%J^{kT{8k>cM1DaF`DA*71q zL%b}p2k_EImcO??STY|y?*vR30QJ=P-aRSChd@dSVjdS`HOxyXKvnPM?X3b|cb-}S z^>HfvV80TH05Z~URoYs)O&zbjzOHWLQbp58U@cuevog+2H?AARnP5=fk}qW`IfS^wrWS$$J+U}Qm@>ZBNYi5llS4>X&gA#{fC}hL|2IfHuR&l+(>l^eS z4Uy^-e*cWm<$vniqlOcy@FV_elB{>G7TQ%xhJh=UqRhKUA|d>6$J-qwg|T0+RTLHq z0CP1@c>)?X#WJx1s!Cfcm#33WSzW-C~npQ^FIvvf9;m zAPi30q{kphK;&3^`&mSF#FGEEribSIT{hKi`23B!+1~xPEwn1+mb@n`T6@YOtMUE> zoiw&*_vCum_Ry7rf~4~Yk`jV4R%#&ZNC>1+`M0L_r+yioY{Q;{+SH)-=C-~dCQoXS zT!@&1w*dNSV{96MREJxsL}m@lN1kK=&P#D!-&R*wfird;&P$S&8(_5SaSfq6e&*)p z+X(Qi0MPetQ!BLzU|qY#J!RPixtm|=^jK|Ley++ERo8m<_%bgZx9}bXxn^tXKMDBe zg`$dzR^qQtMz*8T=$n@$aRagHpjpb#kLmix*3j)(yT{{B7ye?m&9m;0`6uB^4eME} zdezgX>($@s>(QlI6WLH6=E4+zZ&z(tXGSA=JosnVucqcv!znOpfLvyZt+6h@#g{p# z#Yw(a$IQ)5(sFz z845n!jKBsYml(J*N$InvN!JP_#k|yo0_gZg))`5taH=q1@Uqea$aUp21J^joynIVh zZdTUxum!PxUa45TjLL?BD0g7^ieZTNzCUi^lH+^GuYTbX=`vXD_y4pJM|QiP%Oa2B zOLBQ9YV>uB%5_$I3i8}B9Z1bB%*?DrFM?WgFWaOzz9|_b$_{2JBoTLj((h2W2P`#= zP1pVc=;d~Y^m9 
zi~-FAX7za%P~0dgW;n8DetyU8!lnK#yw|cu;5%)?`&9qqyo#|EE_?D5_rYHm=?J>+ zFo#4;IzzwQ+$da0AVnu#m;_dvff*}4;9r{+~Xi`HwB-X8wa4JGc z)8T4;IeEzlQeHj%%RBbSk@}Xy6HwGGcmbW5@IG;mlO^z3(*ilT#n#rg?yj?H@~xM- zxd5M$9;w00*jF{9;CkuYS2&(XK78gQ+i}S&I()!)T9hf)S(>d=65O7k=_QIEXY?Dn zR^%%5#dwj*dy@wek+(=81FGkhR~vW0RI{Py?x5a(s)_1hHA%KEPwR26{&EC?A^J6V z6E^q5!hL*5{ZUWB-Ol^$kgJ^D2aw=uZ(`!d&@vG>+lXXvs`O5xXp(Bq%l+8-2x*j} z&Yo@S$k%|`$Fu_^>0|2_>i%}ag!jL;WJf}^4}U2b7hL7%ht~QDwGs&9<6B(kSGV`W z9LFjgBtMb&&eyg{#y?NIdj9l>Am5W<_DA2z$Is6Q{%GVP*|icRrn&&mS{Z{`g*=qB zx!8*e&kEgooPQ_viE%8n$LY^Cd=9Yd_`$Hg|2O`^PdWPg2bBc;wJ$QbgnxaP9+B7& ztDi2&_LgN&zO}(c3UqUOS69cfPy4q<9KQJC!0iA7!BI6&xo!JJacP$r^#f!~J@-O+5kau2Ssy()VkLP^+hEQaLN9q`159v0D7Q z?r~VoK^%DX`9`yW{dsRhIfC3+Fh!Z8qkug5P!+=0ua_|7n6-b8c=$H20_sX2-{mxB zfkptEmd=R($BPQwo>bk~PoyonSS}1%V~)JD7k1z&&;Hwh8h2C z3zdjyhGV(9&)tSc)d@{$6tRcDfA1SE|;V+fTw*L66&&_S;Tv zvTuC Date: Tue, 28 Jun 2022 20:20:13 +0200 Subject: [PATCH 037/118] Add ARM image building for regular PRs (#24664) The image building for ARM is currently only done in the main build only to refresh cache, however there are sometimes cases when new dependency (for example #24635) broke ARM image build and it was only discovered after merge. This PR adds extra ARM-based build that should be run after the AMD64 build. It should not influence the depending steps, it should just signal failure of the PR if the ARM image cannot be build. (cherry picked from commit 2fbd750d35d12271ed4c5b9eb7564eb12e91bb03) --- .github/workflows/build-images.yml | 105 ++++++- .github/workflows/ci.yml | 77 +++++ images/breeze/output-build-image.svg | 284 ++++++++--------- images/breeze/output-build-prod-image.svg | 352 +++++++++++----------- images/breeze/output-commands-hash.txt | 6 + 5 files changed, 507 insertions(+), 317 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 5c6b085b97c7c..39c758a23d24f 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -225,21 +225,35 @@ jobs: - name: "Free space" run: breeze free-space - name: > - Build & Push CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + Build & Push AMD64 CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} ${{ needs.build-info.outputs.allPythonVersionsListAsString }} run: breeze build-image --push-image --tag-as-latest --run-in-parallel + if: matrix.platform == 'linux/amd64' + env: + UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} + DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} + IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + - name: "Start ARM instance" + run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh + if: matrix.platform == 'linux/arm64' + - name: > + Build ARM CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + run: breeze build-image --run-in-parallel + if: matrix.platform == 'linux/arm64' env: UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} - name: Push empty CI image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} - if: failure() || cancelled() + if: (failure() || cancelled()) && matrix.platform == 'linux/amd64' run: breeze build-image --push-image 
--empty-image --run-in-parallel env: IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Candidates for pip resolver backtrack triggers" - if: failure() || cancelled() + if: (failure() || cancelled()) && matrix.platform == 'linux/amd64' run: > breeze find-newer-dependencies --max-age 1 --python "${{ needs.build-info.outputs.defaultPythonVersion }}" @@ -360,3 +374,88 @@ jobs: - name: "Fix ownership" run: breeze fix-ownership if: always() + + + build-ci-images-arm: + timeout-minutes: 80 + name: "Build ARM CI images ${{ needs.build-info.outputs.allPythonVersionsListAsString }}" + runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + needs: [build-info, build-prod-images] + if: | + needs.build-info.outputs.image-build == 'true' && + needs.build-info.outputs.upgradeToNewerDependencies != 'false' && + github.event.pull_request.head.repo.full_name != 'apache/airflow' + env: + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} + BACKEND: sqlite + outputs: ${{toJSON(needs.build-info.outputs) }} + steps: + - name: Cleanup repo + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - uses: actions/checkout@v2 + with: + ref: ${{ needs.build-info.outputs.targetCommitSha }} + persist-credentials: false + submodules: recursive + - name: "Retrieve DEFAULTS from the _initialization.sh" + # We cannot "source" the script here because that would be a security problem (we cannot run + # any code that comes from the sources coming from the PR. Therefore, we extract the + # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands + id: defaults + run: | + DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ + awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') + echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV + DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ + scripts/ci/libraries/_initialization.sh | \ + awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') + echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV + DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ + awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') + echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV + - name: > + Checkout "${{ needs.build-info.outputs.targetBranch }}" branch to 'main-airflow' folder + to use ci/scripts from there. + uses: actions/checkout@v2 + with: + path: "main-airflow" + ref: "${{ needs.build-info.outputs.targetBranch }}" + persist-credentials: false + submodules: recursive + - name: > + Override "scripts/ci" with the "${{ needs.build-info.outputs.targetBranch }}" branch + so that the PR does not override it + # We should not override those scripts which become part of the image as they will not be + # changed in the image built - we should only override those that are executed to build + # the image. + run: | + rm -rfv "scripts/ci" + rm -rfv "dev" + mv -v "main-airflow/scripts/ci" "scripts" + mv -v "main-airflow/dev" "." 
+ - name: "Setup python" + uses: actions/setup-python@v2 + with: + python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + - run: ./scripts/ci/install_breeze.sh + - name: "Free space" + run: breeze free-space + - name: "Start ARM instance" + run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh + - name: > + Build ARM CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + run: > + breeze build-image --run-in-parallel --parallelism 1 + --builder airflow_cache --platform "linux/arm64" + env: + UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} + DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} + IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + - name: "Stop ARM instance" + run: ./scripts/ci/images/ci_stop_arm_instance.sh + if: always() + - name: "Fix ownership" + run: breeze fix-ownership + if: always() diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3606036f5a37e..383b3112830a8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1749,3 +1749,80 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: "Fix ownership" run: breeze fix-ownership if: always() + + build-ci-arm-images: + timeout-minutes: 80 + name: > + ${{needs.build-info.outputs.buildJobDescription}} CI ARM images + ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + needs: + - build-info + - wait-for-ci-images + - wait-for-prod-images + - static-checks + - tests-sqlite + - tests-mysql + - tests-mssql + - tests-postgres + env: + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} + if: needs.build-info.outputs.upgradeToNewerDependencies != 'false' + steps: + - name: Cleanup repo + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + if: needs.build-info.outputs.inWorkflowBuild == 'true' + - uses: actions/checkout@v2 + with: + ref: ${{ needs.build-info.outputs.targetCommitSha }} + persist-credentials: false + submodules: recursive + if: needs.build-info.outputs.inWorkflowBuild == 'true' + - name: "Setup python" + uses: actions/setup-python@v2 + with: + python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + if: needs.build-info.outputs.inWorkflowBuild == 'true' + - name: "Retrieve DEFAULTS from the _initialization.sh" + # We cannot "source" the script here because that would be a security problem (we cannot run + # any code that comes from the sources coming from the PR. 
Therefore, we extract the + # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands + id: defaults + run: | + DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ + awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') + echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV + DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ + scripts/ci/libraries/_initialization.sh | \ + awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') + echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV + DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ + awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') + echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV + if: needs.build-info.outputs.inWorkflowBuild == 'true' + - run: ./scripts/ci/install_breeze.sh + if: needs.build-info.outputs.inWorkflowBuild == 'true' + - name: "Free space" + run: breeze free-space + if: needs.build-info.outputs.inWorkflowBuild == 'true' + - name: "Start ARM instance" + run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh + if: needs.build-info.outputs.inWorkflowBuild == 'true' + - name: > + Build CI ARM images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + run: > + breeze build-image --run-in-parallel --parallelism 1 + --builder airflow_cache --platform "linux/arm64" + env: + UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} + DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} + IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + if: needs.build-info.outputs.inWorkflowBuild == 'true' + - name: "Stop ARM instance" + run: ./scripts/ci/images/ci_stop_arm_instance.sh + if: always() && needs.build-info.outputs.inWorkflowBuild == 'true' + - name: "Fix ownership" + run: breeze fix-ownership + if: always() && needs.build-info.outputs.inWorkflowBuild == 'true' diff --git a/images/breeze/output-build-image.svg b/images/breeze/output-build-image.svg index e84a5e189afc6..c0cc659eedde9 100644 --- a/images/breeze/output-build-image.svg +++ b/images/breeze/output-build-image.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + - Command: build-image + Command: build-image - + - - -Usage: breeze build-image [OPTIONS] - -Build CI image. Include building multiple images for all python versions (sequentially). - -╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.7< | 3.8 | 3.9 | 3.10)                                   -[default: 3.7]                                               ---upgrade-to-newer-dependencies-uWhen set, upgrade all PIP packages to latest. ---debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] ---image-tag-tTag the image after building it(TEXT) ---tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful -when you build or pull image with --image-tag.                                  
diff --git a/images/breeze/output-build-image.svg b/images/breeze/output-build-image.svg
index e84a5e189afc6..c0cc659eedde9 100644
[regenerated SVG screenshot of the `breeze build-image` help output; the visible change is the new
 `--builder` option ("Buildx builder used to perform `docker buildx build` commands")]
diff --git a/images/breeze/output-build-prod-image.svg b/images/breeze/output-build-prod-image.svg
index 9efd67fc9ce0c..3d6122bc3f66f 100644
[regenerated SVG screenshot of the `breeze build-prod-image` help output; it likewise gains the
 `--builder` option]
diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt
index 044b799ce0385..0af29de136ceb 100644
--- a/images/breeze/output-commands-hash.txt
+++ b/images/breeze/output-commands-hash.txt
@@ -2,4 +2,10 @@
 # This file is automatically generated by pre-commit. If you have a conflict with this file
 # Please do not solve it but run `breeze regenerate-command-images`.
 # This command should fix the conflict and regenerate help images that you have conflict with.
+<<<<<<< HEAD
 e1bc752aeb5e9c4095bb9d3cbb614252
+||||||| parent of 2fbd750d35 (Add ARM image building for regular PRs (#24664))
+81bb48610ea6c4b13c1fa5d7d7cbd061
+=======
+d382b111fddf3b8c0fd3434fce8aa948
+>>>>>>> 2fbd750d35 (Add ARM image building for regular PRs (#24664))

From 44994a79b7cd65fceadeffe58691bab09d57b37d Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Tue, 28 Jun 2022 23:22:23 +0200
Subject: [PATCH 038/118] Fix unnecessary check for ARM images (#24718)

The ARM image build introduced in #24664 had a problem with the build image
workflow, which was additionally checking for ARM images that were moved out
to a separate step.

(cherry picked from commit 5321577aeef49a33d10efa42222df5524cc45c26)
---
 .github/workflows/build-images.yml | 18 ++----------------
 1 file changed, 2 insertions(+), 16 deletions(-)

diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
index 39c758a23d24f..e6bb57c40df41 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -228,32 +228,18 @@ jobs:
           Build & Push AMD64 CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
           ${{ needs.build-info.outputs.allPythonVersionsListAsString }}
         run: breeze build-image --push-image --tag-as-latest --run-in-parallel
-        if: matrix.platform == 'linux/amd64'
-        env:
-          UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
-          DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
-          IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
-          PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }}
-      - name: "Start ARM instance"
-        run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh
-        if: matrix.platform == 'linux/arm64'
-      - name: >
-          Build ARM CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
-          ${{ needs.build-info.outputs.allPythonVersionsListAsString }}
-        run: breeze build-image --run-in-parallel
-        if: matrix.platform == 'linux/arm64'
         env:
           UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
           DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
           IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
           PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }}
       - name: Push empty CI image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}
-        if: (failure() || cancelled()) && matrix.platform == 'linux/amd64'
+        if: failure() || cancelled()
         run: breeze build-image --push-image --empty-image --run-in-parallel
         env:
           IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
       - name: "Candidates for pip resolver backtrack triggers"
-        if: (failure() || cancelled()) && matrix.platform == 'linux/amd64'
+        if: failure() || cancelled()
         run: >
           breeze find-newer-dependencies --max-age 1
           --python "${{ needs.build-info.outputs.defaultPythonVersion }}"

From 2905ede084229c4b8675cfcd5d11a82cad1d2739 Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Tue, 28 Jun 2022 23:30:59 +0200
Subject: [PATCH 039/118] Script to filter candidates for PR of the month based on heuristics (#24654)

This script proposes top candidates for the PR of the month based on simple
heuristics, as discussed in the document
https://docs.google.com/document/d/1qO5FztgzJLccfvbagX8DLh1EwhFVD2nUqbw96fRJmQQ/edit?disco=AAAAZ-Ct0Bs&usp_dm=true

(cherry picked from commit 0e1a6b98079814747205e3320e43e11f8e2ef3d4)
---
 dev/requirements.txt                     |   2 +
 dev/stats/get_important_pr_candidates.py | 180 +++++++++++++++++++++++
 2 files changed, 182 insertions(+)
 create mode 100755 dev/stats/get_important_pr_candidates.py
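The heuristic the new script below implements scores each merged PR by its comment, review and reaction interactions, weights the result by a label factor (provider-only PRs count half) and by the number of distinct interacting users, and damps it by log10 of the number of changed files when a PR touches more than 10 files. A toy recomputation of that formula, with made-up numbers and a hypothetical helper name, might look like this (illustration only, not part of the patch):

```python
import math

# Constants copied from the script below; the helper and the numbers are illustrative only.
PROVIDER_SCORE = 0.5
REGULAR_SCORE = 1.0
REVIEW_INTERACTION_VALUE = 1.0
COMMENT_INTERACTION_VALUE = 1.0
REACTION_INTERACTION_VALUE = 0.1


def toy_score(comments: int, reviews: int, reactions: int,
              interacting_users: int, changed_files: int,
              is_provider_pr: bool) -> float:
    # Interactions are weighted: comments and reviews count fully, reactions count 0.1 each.
    interactions = (
        comments * COMMENT_INTERACTION_VALUE
        + reviews * REVIEW_INTERACTION_VALUE
        + reactions * REACTION_INTERACTION_VALUE
    )
    label_score = PROVIDER_SCORE if is_provider_pr else REGULAR_SCORE
    # Large PRs are damped by log10 of the number of changed files (only above 10 files).
    damping = math.log10(changed_files) if changed_files > 10 else 1.0
    return interactions * label_score * interacting_users / damping


if __name__ == "__main__":
    # e.g. 6 comments, 3 reviews, 8 reactions from 5 people on a 40-file non-provider PR:
    print(f"{toy_score(6, 3, 8, 5, 40, is_provider_pr=False):.2f}")  # prints roughly 30.59
```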
diff --git a/dev/requirements.txt b/dev/requirements.txt
index e83bdbe2162e0..31a4ad84056af 100644
--- a/dev/requirements.txt
+++ b/dev/requirements.txt
@@ -1,9 +1,11 @@
 click>=8.0
+cached_property;python_version<"3.8"
 jinja2>=2.10
 keyring==10.1
 PyGithub
 jsonpath_ng
 jsonschema
+pendulum
 pyyaml
 packaging
 rich
diff --git a/dev/stats/get_important_pr_candidates.py b/dev/stats/get_important_pr_candidates.py
new file mode 100755
index 0000000000000..059742fc19d31
--- /dev/null
+++ b/dev/stats/get_important_pr_candidates.py
@@ -0,0 +1,180 @@
+#!/usr/bin/env python3
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+import math
+import sys
+import textwrap
+from datetime import datetime
+from typing import List, Set
+
+import pendulum
+import rich_click as click
+from github import Github
+from github.PullRequest import PullRequest
+from rich.console import Console
+
+if sys.version_info >= (3, 8):
+    from functools import cached_property
+else:
+    from cached_property import cached_property
+
+PROVIDER_LABEL = "area:providers"
+
+logger = logging.getLogger(__name__)
+
+console = Console(width=400, color_system="standard")
+
+option_github_token = click.option(
+    "--github-token",
+    type=str,
+    required=True,
+    help=textwrap.dedent(
+        """
+        GitHub token used to authenticate.
+ You can set omit it if you have GITHUB_TOKEN env variable set + Can be generated with: + https://github.com/settings/tokens/new?description=Read%20issues&scopes=repo:status""" + ), + envvar='GITHUB_TOKEN', +) + +PROVIDER_SCORE = 0.5 +REGULAR_SCORE = 1.0 + +REVIEW_INTERACTION_VALUE = 1.0 +COMMENT_INTERACTION_VALUE = 1.0 +REACTION_INTERACTION_VALUE = 0.1 + + +class PrStat: + def __init__(self, pull_request: PullRequest): + self.pull_request = pull_request + self._users: Set[str] = set() + + @cached_property + def label_score(self) -> float: + for label in self.pull_request.labels: + if "provider" in label.name: + return PROVIDER_SCORE + return REGULAR_SCORE + + @cached_property + def num_interactions(self) -> float: + interactions = 0.0 + for comment in self.pull_request.get_comments(): + interactions += COMMENT_INTERACTION_VALUE + self._users.add(comment.user.login) + for _ in comment.get_reactions(): + interactions += REACTION_INTERACTION_VALUE + for review in self.pull_request.get_reviews(): + interactions += REVIEW_INTERACTION_VALUE + self._users.add(review.user.login) + return interactions + + @cached_property + def num_interacting_users(self) -> int: + _ = self.num_interactions # make sure the _users set is populated + return len(self._users) + + @cached_property + def num_changed_files(self) -> float: + return self.pull_request.changed_files + + @cached_property + def score(self): + return ( + 1.0 + * self.num_interactions + * self.label_score + * self.num_interacting_users + / (math.log10(self.num_changed_files) if self.num_changed_files > 10 else 1.0) + ) + + def __str__(self) -> str: + return ( + f"Score: {self.score:.2f}: PR{self.pull_request.number} by @{self.pull_request.user.login}: " + f"`{self.pull_request.title}. " + f"Merged at {self.pull_request.merged_at}: {self.pull_request.html_url}" + ) + + +DAYS_BACK = 5 +# Current (or previous during first few days of the next month) +DEFAULT_BEGINNING_OF_MONTH = pendulum.now().subtract(days=DAYS_BACK).start_of('month') +DEFAULT_END_OF_MONTH = DEFAULT_BEGINNING_OF_MONTH.end_of('month').add(days=1) + +MAX_PR_CANDIDATES = 500 +DEFAULT_TOP_PRS = 10 + + +@click.command() +@click.option( + '--date-start', type=click.DateTime(formats=["%Y-%m-%d"]), default=str(DEFAULT_BEGINNING_OF_MONTH.date()) +) +@click.option( + '--date-end', type=click.DateTime(formats=["%Y-%m-%d"]), default=str(DEFAULT_END_OF_MONTH.date()) +) +@click.option('--top-number', type=int, default=DEFAULT_TOP_PRS) +@click.option('--verbose', is_flag="True", help="Print scoring details") +@option_github_token +def main(github_token: str, date_start: datetime, date_end: datetime, top_number: int, verbose: bool): + console.print(f"Finding best candidate PRs between {date_start} and {date_end}") + g = Github(github_token) + repo = g.get_repo("apache/airflow") + pulls = repo.get_pulls(state="closed", sort="created", direction='desc') + issue_num = 0 + selected_prs: List[PrStat] = [] + for pr in pulls: + issue_num += 1 + if not pr.merged: + continue + if not (date_start < pr.merged_at < date_end): + console.print( + f"[bright_blue]Skipping {pr.number} {pr.title} as it was not " + f"merged between {date_start} and {date_end}]" + ) + continue + if pr.created_at < date_start: + console.print("[bright_blue]Completed selecting candidates") + break + pr_stat = PrStat(pull_request=pr) # type: ignore + console.print( + f"[green]Selecting PR: #{pr.number} `{pr.title}` as candidate." + f"Score: {pr_stat.score}[/]." 
+ f" Url: {pr.html_url}" + ) + if verbose: + console.print( + f'[bright_blue]Created at: {pr.created_at}, Merged at: {pr.merged_at}, ' + f'Overall score: {pr_stat.score:.2f}, ' + f'Label score: {pr_stat.label_score}, ' + f'Interactions: {pr_stat.num_interactions}, ' + f'Users interacting: {pr_stat.num_interacting_users}, ' + f'Changed files: {pr_stat.num_changed_files}\n' + ) + selected_prs.append(pr_stat) + if issue_num == MAX_PR_CANDIDATES: + console.print(f'[red]Reached {MAX_PR_CANDIDATES}. Stopping') + break + console.print(f"Top {top_number} PRs:") + for pr_stat in sorted(selected_prs, key=lambda s: -s.score)[:top_number]: + console.print(f" * {pr_stat}") + + +if __name__ == "__main__": + main() From 8e4bb850987762265f66cd5eadbcf04e2898edef Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Tue, 28 Jun 2022 23:58:01 +0200 Subject: [PATCH 040/118] Merge-friendly output of command hashes for breeze (#24711) We had just one hash generated from all commands in breeze and that basically meant that when there were two PRs on two different commands in Breeze, they resulted with merge conflict which should be solved with `breeze regenerate-command-images`. This change turns the hash output into a multi-command one - i.e. each command has its own hash, which will make it much more merge-friendly - i.e. if two PRs will work on two different commands the rebase should result with merge rather than conflict. (cherry picked from commit 94f4f81efb8c424bee8336bf6b8720821e48898a) --- images/breeze/output-commands-hash.txt | 40 ++- images/breeze/output-config.svg | 108 ++++----- images/breeze/output-selective-check.svg | 104 ++++---- images/breeze/output-shell.svg | 220 ++++++++--------- images/breeze/output-start-airflow.svg | 228 +++++++++--------- images/breeze/output-static-checks.svg | 228 +++++++++--------- .../pre_commit/pre_commit_breeze_cmd_line.py | 17 +- 7 files changed, 498 insertions(+), 447 deletions(-) diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index 0af29de136ceb..15b755a6e5ac4 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -2,10 +2,36 @@ # This file is automatically generated by pre-commit. If you have a conflict with this file # Please do not solve it but run `breeze regenerate-command-images`. # This command should fix the conflict and regenerate help images that you have conflict with. 
-<<<<<<< HEAD -e1bc752aeb5e9c4095bb9d3cbb614252 -||||||| parent of 2fbd750d35 (Add ARM image building for regular PRs (#24664)) -81bb48610ea6c4b13c1fa5d7d7cbd061 -======= -d382b111fddf3b8c0fd3434fce8aa948 ->>>>>>> 2fbd750d35 (Add ARM image building for regular PRs (#24664)) +main:fa4319079b275ce966502346f083f2e3 +build-docs:b4f510db97402e0932f1ed86b37b4330 +build-image:b00ae014f9c45c116e8dd0ea72b61d19 +build-prod-image:4a3950d1a61aeb61218eb45659574139 +cleanup:9a94bd1063296ea86e895f671db0b330 +command-hash-export:83bc6a4a8c60b62da3d0f00e81d2c3ea +config:92653afc11889e1b78e3a2e38f41107f +docker-compose-tests:8ae3b6211fd31db81a750d1c6b96ec3d +exec:e4329909b8b2a610fa4fad5116c4b896 +find-newer-dependencies:5c9c5e568a930960a25ece6039e03e5c +fix-ownership:596143cc74217f0a90850a554220ea45 +free-space:bb8e7ac63d12ab3ede272a898de2f527 +generate-constraints:a5120e79439f30eb7fbee929dca23156 +prepare-airflow-package:cff9d88ca313db10f3cc464c6798f6be +prepare-provider-documentation:520ffb789ef7174252c8ed3c710f6ae2 +prepare-provider-packages:ada68a213aef9890e4b3e0b9187e449d +pull-image:a9bb83372b5da5212f48e2affeedc551 +pull-prod-image:6e8467a2b8c833a392c8bdd65189363e +regenerate-command-images:4fd2e7ecbfd6eebb18b854f3eb0f29c8 +release-prod-images:8858fe5a13989c7c65a79dc97a880928 +resource-check:0fb929ac3496dbbe97acfe99e35accd7 +selective-check:eb1cf022ae43fa9c737b1647142e5a96 +self-upgrade:b5437c0a1a91533a11ee9d0a9692369c +setup-autocomplete:355b72dee171c2fcba46fc90ac7c97b0 +shell:ab7955da71048b3a695485c152d06786 +start-airflow:30caa077555bef44e5bdf76dfeca092c +static-checks:96a0bcc981be838ae32f53e804157ee5 +stop:8ebd8a42f1003495d37b884de5ac7ce6 +tests:ae8d62b505ff8f79bddc202fe9d575e3 +verify-image:a6b3c70957aea96a5d4d261f23359a2d +verify-prod-image:bf3cf39200e010e3015ef071fd387c6f +verify-provider-packages:ce22b3617436bbe7f48597b2964b9c32 +version:d11da4c17a23179830079b646160149c diff --git a/images/breeze/output-config.svg b/images/breeze/output-config.svg index b196ab3306b24..4f161b859fc3e 100644 --- a/images/breeze/output-config.svg +++ b/images/breeze/output-config.svg @@ -19,125 +19,125 @@ font-weight: 700; } - .terminal-119432441-matrix { + .terminal-1878418540-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-119432441-title { + .terminal-1878418540-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-119432441-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-119432441-r2 { fill: #c5c8c6 } -.terminal-119432441-r3 { fill: #d0b344;font-weight: bold } -.terminal-119432441-r4 { fill: #868887 } -.terminal-119432441-r5 { fill: #68a0b3;font-weight: bold } -.terminal-119432441-r6 { fill: #98a84b;font-weight: bold } -.terminal-119432441-r7 { fill: #8d7b39 } + .terminal-1878418540-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-1878418540-r2 { fill: #c5c8c6 } +.terminal-1878418540-r3 { fill: #d0b344;font-weight: bold } +.terminal-1878418540-r4 { fill: #868887 } +.terminal-1878418540-r5 { fill: #68a0b3;font-weight: bold } +.terminal-1878418540-r6 { fill: #98a84b;font-weight: bold } +.terminal-1878418540-r7 { fill: #8d7b39 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: config + Command: config - + - - -Usage: breeze config [OPTIONS] - -Show/update configuration (Python, Backend, Cheatsheet, ASCIIART). 
- -╭─ Config flags ───────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.7< | 3.8 | 3.9 | 3.10)                                   -[default: 3.7]                                               ---backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql) -[default: sqlite]        ---cheatsheet/--no-cheatsheet-C/-cEnable/disable cheatsheet. ---asciiart/--no-asciiart-A/-aEnable/disable ASCIIart. ---colour/--no-colour-B/-bEnable/disable Colour mode (useful for colour blind-friendly communication). -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] ---mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] ---mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze config [OPTIONS] + +Show/update configuration (Python, Backend, Cheatsheet, ASCIIART). + +╭─ Config flags ───────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. +(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql) +[default: sqlite]        +--cheatsheet/--no-cheatsheet-C/-cEnable/disable cheatsheet. +--asciiart/--no-asciiart-A/-aEnable/disable ASCIIart. +--colour/--no-colourEnable/disable Colour mode (useful for colour blind-friendly communication). +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] +--mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] +--mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-selective-check.svg b/images/breeze/output-selective-check.svg index 67bc6c52533d9..45a774081903f 100644 --- a/images/breeze/output-selective-check.svg +++ b/images/breeze/output-selective-check.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + - Command: selective-check + Command: selective-check - + - - -Usage: breeze selective-check [OPTIONS] - -Checks what kind of tests should be run for an incoming commit. 
- -╭─ Selective check flags ──────────────────────────────────────────────────────────────────────────────────────────────╮ ---commit-refCommit-ish reference to the commit that should be checked(TEXT) ---pr-labelsSpace-separate list of labels which are valid for the PR(TEXT) ---default-branchBranch against which the PR should be run(TEXT)[default: main] ---github-event-nameName of the GitHub event that triggered the check                                           -(pull_request | pull_request_review | pull_request_target | push | schedule | workflow_run) -[default: pull_request]                                                                     -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze selective-check [OPTIONS] + +Checks what kind of tests should be run for an incoming commit. + +╭─ Selective check flags ──────────────────────────────────────────────────────────────────────────────────────────────╮ +--commit-refCommit-ish reference to the commit that should be checked(TEXT) +--pr-labelsSpace-separate list of labels which are valid for the PR(TEXT) +--default-branchBranch against which the PR should be run(TEXT)[default: main] +--github-event-nameName of the GitHub event that triggered the check                                             +(pull_request | pull_request_review | pull_request_target | pull_request_workflow | push |    +schedule | workflow_run)                                                                      +[default: pull_request]                                                                       +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-shell.svg b/images/breeze/output-shell.svg index 415b1b09c2005..86ab4cc4085e5 100644 --- a/images/breeze/output-shell.svg +++ b/images/breeze/output-shell.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + - Command: shell + Command: shell - + - - -Usage: breeze shell [OPTIONS] [EXTRA_ARGS]... - -Enter breeze.py environment. this is the default command use when no other is selected. 
- -╭─ Basic flags ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] ---postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] ---mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] ---mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] ---integrationIntegration(s) to enable when running (can be more than one).                             -(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino |    -all)                                                                                      ---forward-credentials-fForward local credentials to container when running. ---db-reset-dReset DB when entering the container. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced flag for running ──────────────────────────────────────────────────────────────────────────────────────────╮ ---use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It can also be `none`, `wheel`, or -`sdist` if Airflow should be removed, installed from wheel packages or sdist packages  -available in dist folder respectively. Implies --mount-sources `remove`.               -(none | wheel | sdist | <airflow_version>)                                             ---airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT) ---use-packages-from-distInstall all found packages (--package-format determines type) from 'dist' folder when  -entering breeze.                                                                       ---package-formatFormat of packages that should be installed from dist.(wheel | sdist) -[default: wheel]                                       ---force-buildForce image build no matter if it is determined as needed. ---image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip)(TEXT) ---mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed (default =   -selected).                                                                             -(selected | all | skip | remove)                                                       -[default: selected]                                                                    ---debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT) -[default: apache/airflow]                        ---airflow-constraints-referenceConstraint reference to use. 
Useful with --use-airflow-version parameter to     -specify constraints for the installed version and to find newer dependencies    -(TEXT)                                                                          ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze shell [OPTIONS] [EXTRA_ARGS]... + +Enter breeze.py environment. this is the default command use when no other is selected. + +╭─ Basic flags ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) +[default: 3.7]                                               +--backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] +--postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] +--mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] +--mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] +--integrationIntegration(s) to enable when running (can be more than one).                             +(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino |    +all)                                                                                      +--forward-credentials-fForward local credentials to container when running. +--db-reset-dReset DB when entering the container. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Advanced flag for running ──────────────────────────────────────────────────────────────────────────────────────────╮ +--use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It can also be `none`, `wheel`, or +`sdist` if Airflow should be removed, installed from wheel packages or sdist packages  +available in dist folder respectively. Implies --mount-sources `remove`.               +(none | wheel | sdist | <airflow_version>)                                             +--airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT) +--use-packages-from-distInstall all found packages (--package-format determines type) from 'dist' folder when  +entering breeze.                                                                       +--package-formatFormat of packages that should be installed from dist.(wheel | sdist) +[default: wheel]                                       +--force-buildForce image build no matter if it is determined as needed. +--image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip)(TEXT) +--mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed (default =   +selected).                                                                             
+(selected | all | skip | remove)                                                       +[default: selected]                                                                    +--debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--platformPlatform for Airflow image.(linux/amd64 | linux/arm64) +--github-repository-gGitHub repository used to pull, push run images.(TEXT) +[default: apache/airflow]                        +--airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to     +specify constraints for the installed version and to find newer dependencies    +(TEXT)                                                                          +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output-start-airflow.svg b/images/breeze/output-start-airflow.svg index d7cf283cd2f48..37031e3733de5 100644 --- a/images/breeze/output-start-airflow.svg +++ b/images/breeze/output-start-airflow.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + - Command: start-airflow + Command: start-airflow - + - - -Usage: breeze start-airflow [OPTIONS] [EXTRA_ARGS]... - -Enter breeze.py environment and starts all Airflow components in the tmux session. - -╭─ Basic flags ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.7< | 3.8 | 3.9 | 3.10)                                   -[default: 3.7]                                               ---load-example-dags-eEnable configuration to load example DAGs when starting Airflow. ---load-default-connections-cEnable configuration to load default connections when starting Airflow. ---backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] ---postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] ---mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] ---mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] ---integrationIntegration(s) to enable when running (can be more than one).                        -(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino -| all)                                                                               ---forward-credentials-fForward local credentials to container when running. ---db-reset-dReset DB when entering the container. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Advanced flag for running ──────────────────────────────────────────────────────────────────────────────────────────╮ ---use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. 
It can also be `none`, `wheel`, or -`sdist` if Airflow should be removed, installed from wheel packages or sdist packages  -available in dist folder respectively. Implies --mount-sources `remove`.               -(none | wheel | sdist | <airflow_version>)                                             ---airflow-extrasAirflow extras to install when --use-airflow-version is used(TEXT) ---use-packages-from-distInstall all found packages (--package-format determines type) from 'dist' folder when  -entering breeze.                                                                       ---package-formatFormat of packages that should be installed from dist.(wheel | sdist) -[default: wheel]                                       ---force-buildForce image build no matter if it is determined as needed. ---image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip)(TEXT) ---mount-sourcesChoose scope of local sources that should be mounted, skipped, or removed (default =   -selected).                                                                             -(selected | all | skip | remove)                                                       -[default: selected]                                                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT) -[default: apache/airflow]                        ---airflow-constraints-referenceConstraint reference to use. Useful with --use-airflow-version parameter to     -specify constraints for the installed version and to find newer dependencies    -(TEXT)                                                                          ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze start-airflow [OPTIONS] [EXTRA_ARGS]... + +Enter breeze.py environment and starts all Airflow components in the tmux session. + +╭─ Basic flags ────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images. +(>3.7< | 3.8 | 3.9 | 3.10)                                   +[default: 3.7]                                               +--load-example-dags-eEnable configuration to load example DAGs when starting Airflow. +--load-default-connections-cEnable configuration to load default connections when starting Airflow. +--backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] +--postgres-version-PVersion of Postgres used.(>10< | 11 | 12 | 13 | 14)[default: 10] +--mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] +--mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] +--integrationIntegration(s) to enable when running (can be more than one).                        
+(cassandra | kerberos | mongo | openldap | pinot | rabbitmq | redis | statsd | trino | all)
+--forward-credentials -f    Forward local credentials to container when running.
+--db-reset -d               Reset DB when entering the container.
[rest of the regenerated images/breeze/output-start-airflow.svg help text omitted: it matches the
old output above except that the Options panel gains one new entry,
--platform    Platform for Airflow image. (linux/amd64 | linux/arm64)]
diff --git a/images/breeze/output-static-checks.svg b/images/breeze/output-static-checks.svg
index 5bd4b35dab40c..ae64d0be23df3 100644
--- a/images/breeze/output-static-checks.svg
+++ b/images/breeze/output-static-checks.svg
[regenerated terminal-help SVG for `breeze static-checks`; only the embedded help text is
recoverable here. The --type list of static checks changes as follows:
check-airflow-2-1-compatibility is replaced by check-airflow-2-2-compatibility, and
check-base-operator-partial-arguments, check-example-dags-urls, check-system-tests-tocs and
create-missing-init-py-files-tests are added. The remaining flags (--type, --file, --all-files,
--show-diff-on-failure, --last-commit, --commit-ref, --verbose, --dry-run, --github-repository,
--help) are unchanged.]
diff --git a/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py b/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py
index 81b9fedf52f0e..360672dc8f2b3 100755
--- a/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py
+++ b/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.py
@@ -55,7 +55,7 @@ def print_help_for_all_commands():
     env['RECORD_BREEZE_OUTPUT_FILE'] = str(BREEZE_IMAGES_DIR / "output-commands.svg")
     env['TERM'] = "xterm-256color"
     env['PYTHONPATH'] = str(BREEZE_SOURCES_DIR)
-    new_hash = check_output(
+    new_hash_dump = PREAMBLE + check_output(
         [
             sys.executable,
             str(BREEZE_SOURCES_DIR / "airflow_breeze" / "breeze.py"),
@@ -67,12 +67,17 @@ def print_help_for_all_commands():
     )
     hash_file_path = BREEZE_IMAGES_DIR / "output-commands-hash.txt"
     try:
-        old_hash = hash_file_path.read_text()[len(PREAMBLE) :]
+        old_hash_dump = hash_file_path.read_text()
     except FileNotFoundError:
-        old_hash = ""
-    if old_hash == new_hash:
-        console.print(f"[bright_blue]Skip generation of SVG images as command hash is unchanged {old_hash}")
+        old_hash_dump = ""
+    if old_hash_dump == new_hash_dump:
+        console.print("[bright_blue]Skip generation of SVG images as command hash files are unchanged:")
+        console.print(old_hash_dump, markup=False)
         return
+    else:
+        console.print("[yellow]The hash files differ")
+        console.print(new_hash_dump, markup=False)
+        console.print(old_hash_dump, markup=False)
     run([sys.executable, "-m", "pip", "install", "--upgrade", "-e", BREEZE_INSTALL_DIR])
     env = os.environ.copy()
     env['AIRFLOW_SOURCES_ROOT'] = str(AIRFLOW_SOURCES_DIR)
@@ -89,7 +94,7 @@ def print_help_for_all_commands():
         env['RECORD_BREEZE_OUTPUT_FILE'] = str(BREEZE_IMAGES_DIR / f"output-{command}.svg")
         env['TERM'] = "xterm-256color"
         check_call(["breeze", command, "--help"], env=env)
-    hash_file_path.write_text(PREAMBLE + new_hash)
+    hash_file_path.write_text(new_hash_dump)


 def verify_all_commands_described_in_docs():

From 6ceb7e245edd24ef4ffd70c233e15c2384a0e9d6 Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Wed, 29 Jun 2022 12:39:47 +0200
Subject: [PATCH 041/118] Fix selective checks to work for non-main-branch

---
 .github/workflows/build-images.yml           |  88 ++++------
 .github/workflows/ci.yml                     |  82 ++++-----
 .../airflow_breeze/commands/ci_commands.py   |  13 ++
 .../airflow_breeze/utils/selective_checks.py |  25 ++-
 dev/breeze/tests/test_selective_checks.py    |  23 ++-
 images/breeze/output-commands-hash.txt       |   2 +-
 images/breeze/output-selective-check.svg     | 116 +++++++------
 tests/always/test_project_structure.py       | 138 ---------------
 .../google/test_project_structure.py         | 161 ++++++++++++++++++
 9 files changed, 349
insertions(+), 299 deletions(-) create mode 100644 tests/providers/google/test_project_structure.py diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index e6bb57c40df41..2d895e3c092fd 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -52,6 +52,9 @@ jobs: runs-on: ${{ github.repository == 'apache/airflow' && 'self-hosted' || 'ubuntu-20.04' }} env: targetBranch: ${{ github.event.pull_request.base.ref }} + DEFAULT_BRANCH: ${{ steps.selective-checks.outputs.default-constraints-branch }} + DEFAULT_CONSTRAINTS_BRANCH: ${{ steps.selective-checks.outputs.default-constraints-branch }} + DEBIAN_VERSION: ${{ steps.selective-checks.outputs.debian-version }} outputs: runsOn: ${{ github.repository == 'apache/airflow' && '["self-hosted"]' || '["ubuntu-20.04"]' }} pythonVersions: "${{ steps.selective-checks.python-versions }}" @@ -65,6 +68,8 @@ jobs: cacheDirective: ${{ steps.dynamic-outputs.outputs.cacheDirective }} targetBranch: ${{ steps.dynamic-outputs.outputs.targetBranch }} defaultBranch: ${{ steps.selective-checks.outputs.default-branch }} + defaultConstraintsBranch: ${{ steps.selective-checks.outputs.default-constraints-branch }} + debianVersion: ${{ steps.selective-checks.outputs.debian-version }} targetCommitSha: "${{steps.discover-pr-merge-commit.outputs.targetCommitSha || github.event.pull_request.head.sha || github.sha @@ -104,9 +109,27 @@ jobs: ref: ${{ env.TARGET_COMMIT_SHA }} persist-credentials: false fetch-depth: 2 - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + - name: "Retrieve DEFAULTS from the _initialization.sh" + # We cannot "source" the script here because that would be a security problem (we cannot run + # any code that comes from the sources coming from the PR. Therefore we extract the + # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands + id: defaults + run: | + DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ + awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') + echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV + DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ + scripts/ci/libraries/_initialization.sh | \ + awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') + echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV + DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ + cut -d "=" -f 3 | sed s'/["}]//g') + echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV + - name: Checkout main branch to 'main-airflow' folder to use breeze from there. 
uses: actions/checkout@v2 with: + path: "main-airflow" + ref: "main" persist-credentials: false submodules: recursive - name: "Setup python" @@ -165,6 +188,9 @@ jobs: needs.build-info.outputs.image-build == 'true' && github.event.pull_request.head.repo.full_name != 'apache/airflow' env: + DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} + DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} + DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} BACKEND: sqlite outputs: ${{toJSON(needs.build-info.outputs) }} @@ -176,29 +202,11 @@ jobs: ref: ${{ needs.build-info.outputs.targetCommitSha }} persist-credentials: false submodules: recursive - - name: "Retrieve DEFAULTS from the _initialization.sh" - # We cannot "source" the script here because that would be a security problem (we cannot run - # any code that comes from the sources coming from the PR. Therefore, we extract the - # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands - id: defaults - run: | - DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV - DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ - scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV - DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV - - name: > - Checkout "${{ needs.build-info.outputs.targetBranch }}" branch to 'main-airflow' folder - to use ci/scripts from there. + - name: Checkout main branch to 'main-airflow' folder to use ci/scripts from there. uses: actions/checkout@v2 with: path: "main-airflow" - ref: "${{ needs.build-info.outputs.targetBranch }}" + ref: "main" persist-credentials: false submodules: recursive - name: "Setup python" @@ -258,6 +266,9 @@ jobs: needs.build-info.outputs.image-build == 'true' && github.event.pull_request.head.repo.full_name != 'apache/airflow' env: + DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} + DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} + DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} BACKEND: sqlite steps: @@ -268,22 +279,6 @@ jobs: ref: ${{ needs.build-info.outputs.targetCommitSha }} persist-credentials: false submodules: recursive - - name: "Retrieve DEFAULTS from the _initialization.sh" - # We cannot "source" the script here because that would be a security problem (we cannot run - # any code that comes from the sources coming from the PR. 
Therefore we extract the - # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands - id: defaults - run: | - DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV - DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ - scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV - DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ - cut -d "=" -f 3 | sed s'/["}]//g') - echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV - name: > Checkout "${{ needs.build-info.outputs.targetBranch }}" branch to 'main-airflow' folder to use ci/scripts from there. @@ -372,6 +367,9 @@ jobs: needs.build-info.outputs.upgradeToNewerDependencies != 'false' && github.event.pull_request.head.repo.full_name != 'apache/airflow' env: + DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} + DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} + DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} BACKEND: sqlite outputs: ${{toJSON(needs.build-info.outputs) }} @@ -383,22 +381,6 @@ jobs: ref: ${{ needs.build-info.outputs.targetCommitSha }} persist-credentials: false submodules: recursive - - name: "Retrieve DEFAULTS from the _initialization.sh" - # We cannot "source" the script here because that would be a security problem (we cannot run - # any code that comes from the sources coming from the PR. Therefore, we extract the - # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands - id: defaults - run: | - DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV - DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ - scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV - DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV - name: > Checkout "${{ needs.build-info.outputs.targetBranch }}" branch to 'main-airflow' folder to use ci/scripts from there. 
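The DEFAULT_BRANCH, DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION values consumed above now come
from the selective-checks step rather than being grepped out of _initialization.sh in every job.
A minimal sketch (not part of this patch) of how those outputs are produced on the breeze side,
mirroring dev/breeze/tests/test_selective_checks.py further down; it assumes the dev/breeze
package is installed so airflow_breeze.utils.selective_checks is importable:

.. code-block:: python

    from airflow_breeze.utils.selective_checks import SelectiveChecks

    sc = SelectiveChecks(
        files=("INTHEWILD.md",),
        pr_labels=("full tests needed",),
        default_branch="v2-3-stable",                  # a non-main branch
        default_constraints_branch="constraints-2-3",  # new in this patch
        debian_version="bullseye",                     # new in this patch
    )
    # str(sc) renders the computed outputs (default-branch,
    # default-constraints-branch, debian-version, ...) that the workflows
    # read back through steps.selective-checks.outputs.*.
    print(str(sc))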
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 383b3112830a8..6fe6966c455ff 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -126,6 +126,8 @@ jobs: GITHUB_CONTEXT: ${{ toJson(github) }} outputs: defaultBranch: ${{ steps.selective-checks.outputs.default-branch }} + defaultConstraintsBranch: ${{ steps.selective-checks.outputs.default-constraints-branch }} + debianVersion: ${{ steps.selective-checks.outputs.debian-version }} cacheDirective: ${{ steps.dynamic-outputs.outputs.cacheDirective }} waitForImage: ${{ steps.wait-for-image.outputs.wait-for-image }} allPythonVersions: ${{ steps.selective-checks.outputs.all-python-versions }} @@ -164,6 +166,7 @@ jobs: needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }} needs-api-codegen: ${{ steps.selective-checks.outputs.needs-api-codegen }} default-branch: ${{ steps.selective-checks.outputs.default-branch }} + docs-filter: ${{ steps.selective-checks.outputs.docs-filter }} sourceHeadRepo: ${{ steps.source-run-info.outputs.sourceHeadRepo }} pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }} pullRequestLabels: ${{ steps.source-run-info.outputs.pullRequestLabels }} @@ -191,7 +194,6 @@ jobs: ref: ${{ github.sha }} fetch-depth: 2 persist-credentials: false - if: github.event_name == 'pull_request' - name: "Setup python" uses: actions/setup-python@v2 with: @@ -200,6 +202,22 @@ jobs: cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh + - name: "Retrieve DEFAULTS from the _initialization.sh" + # We cannot "source" the script here because that would be a security problem (we cannot run + # any code that comes from the sources coming from the PR. Therefore, we extract the + # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands + id: defaults + run: | + DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ + awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') + echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV + DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ + scripts/ci/libraries/_initialization.sh | \ + awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') + echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV + DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ + awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') + echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV - name: Selective checks id: selective-checks env: @@ -294,6 +312,9 @@ jobs: runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} needs: [build-info] env: + DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} + DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} + DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} steps: - name: Cleanup repo @@ -310,23 +331,6 @@ jobs: with: python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} if: needs.build-info.outputs.inWorkflowBuild == 'true' - - name: "Retrieve DEFAULTS from the _initialization.sh" - # We cannot "source" the script here because that would be a security problem (we cannot run - # any code that comes from the sources coming from the PR. 
Therefore, we extract the - # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands - id: defaults - run: | - DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV - DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ - scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV - DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV - if: needs.build-info.outputs.inWorkflowBuild == 'true' - run: ./scripts/ci/install_breeze.sh if: needs.build-info.outputs.inWorkflowBuild == 'true' - name: "Free space" @@ -361,6 +365,9 @@ jobs: runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} needs: [build-info, build-ci-images] env: + DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} + DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} + DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} BACKEND: sqlite DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} @@ -380,23 +387,6 @@ jobs: with: python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} if: needs.build-info.outputs.inWorkflowBuild == 'true' - - name: "Retrieve DEFAULTS from the _initialization.sh" - # We cannot "source" the script here because that would be a security problem (we cannot run - # any code that comes from the sources coming from the PR. 
Therefore we extract the - # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands - id: defaults - run: | - DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV - DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ - scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV - DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV - if: needs.build-info.outputs.inWorkflowBuild == 'true' - run: ./scripts/ci/install_breeze.sh if: needs.build-info.outputs.inWorkflowBuild == 'true' - name: "Free space" @@ -745,7 +735,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" docs-inventory-${{ hashFiles('setup.py','setup.cfg','pyproject.toml;') }} docs-inventory- - name: "Build docs" - run: breeze build-docs + run: breeze build-docs ${{ needs.build-info.outputs.docs-filter }} - name: Configure AWS credentials uses: ./.github/actions/configure-aws-credentials if: > @@ -1766,6 +1756,9 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - tests-mssql - tests-postgres env: + DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} + DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} + DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} if: needs.build-info.outputs.upgradeToNewerDependencies != 'false' steps: @@ -1783,23 +1776,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" with: python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} if: needs.build-info.outputs.inWorkflowBuild == 'true' - - name: "Retrieve DEFAULTS from the _initialization.sh" - # We cannot "source" the script here because that would be a security problem (we cannot run - # any code that comes from the sources coming from the PR. 
Therefore, we extract the - # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands - id: defaults - run: | - DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV - DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ - scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV - DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV - if: needs.build-info.outputs.inWorkflowBuild == 'true' - run: ./scripts/ci/install_breeze.sh if: needs.build-info.outputs.inWorkflowBuild == 'true' - name: "Free space" diff --git a/dev/breeze/src/airflow_breeze/commands/ci_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_commands.py index c65753e1a65e5..826a77bf1b89d 100644 --- a/dev/breeze/src/airflow_breeze/commands/ci_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/ci_commands.py @@ -30,6 +30,7 @@ from airflow_breeze.utils.common_options import ( option_airflow_constraints_reference, option_answer, + option_debian_version, option_dry_run, option_github_repository, option_max_age, @@ -184,6 +185,13 @@ def get_changed_files(commit_ref: Optional[str], dry_run: bool, verbose: bool) - envvar="DEFAULT_BRANCH", show_default=True, ) +@click.option( + '--default-constraints-branch', + help="Branch against which the constraints should be downloaded from", + default="constraints-main", + envvar="DEFAULT_CONSTRAINTS_BRANCH", + show_default=True, +) @click.option( '--github-event-name', type=BetterChoice(github_events()), @@ -192,12 +200,15 @@ def get_changed_files(commit_ref: Optional[str], dry_run: bool, verbose: bool) - envvar="GITHUB_EVENT_NAME", show_default=True, ) +@option_debian_version @option_verbose @option_dry_run def selective_check( commit_ref: Optional[str], pr_labels: str, default_branch: str, + default_constraints_branch: str, + debian_version: str, github_event_name: str, verbose: bool, dry_run: bool, @@ -213,6 +224,8 @@ def selective_check( commit_ref=commit_ref, files=changed_files, default_branch=default_branch, + default_constraints_branch=default_constraints_branch, + debian_version=debian_version, pr_labels=tuple(" ".split(pr_labels)) if pr_labels else (), github_event=github_event, ) diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 22e74f455301f..2a57cac5e01ed 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -189,12 +189,16 @@ def __init__( self, files: tuple[str, ...] = (), default_branch="main", + default_constraints_branch="constraints-main", + debian_version="bullseye", commit_ref: str | None = None, pr_labels: tuple[str, ...] 
= (), github_event: GithubEvents = GithubEvents.PULL_REQUEST, ): self._files = files self._default_branch = default_branch + self._default_constraints_branch = default_constraints_branch + self._debian_version = debian_version self._commit_ref = commit_ref self._pr_labels = pr_labels self._github_event = github_event @@ -230,6 +234,14 @@ def __str__(self) -> str: def default_branch(self) -> str: return self._default_branch + @cached_property + def default_constraints_branch(self) -> str: + return self._default_constraints_branch + + @cached_property + def debian_version(self) -> str: + return self._debian_version + @cached_property def _full_tests_needed(self) -> bool: if self._github_event in [GithubEvents.PUSH, GithubEvents.SCHEDULE]: @@ -395,7 +407,10 @@ def docs_build(self) -> bool: @cached_property def needs_helm_tests(self) -> bool: - return self._should_be_run(FileGroupForCi.HELM_FILES) and self._default_branch == "main" + if self._default_branch != 'main': + get_console().print(f"[warning]Not running helm tests in {self._default_branch} branch") + return False + return self._should_be_run(FileGroupForCi.HELM_FILES) @cached_property def run_tests(self) -> bool: @@ -478,3 +493,11 @@ def upgrade_to_newer_dependencies(self) -> bool: return len( self._matching_files(FileGroupForCi.SETUP_FILES, CI_FILE_GROUP_MATCHES) ) > 0 or self._github_event in [GithubEvents.PUSH, GithubEvents.SCHEDULE] + + @cached_property + def docs_filter(self) -> str: + return ( + "" + if self._default_branch == 'main' + else "--package-filter apache-airflow --package-filter docker-stack" + ) diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index 2d7e8fe83d88b..196aacf56678a 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -191,16 +191,21 @@ def test_expected_output_pull_request_main( @pytest.mark.parametrize( - "files, pr_labels, default_branch, expected_outputs,", + "files, pr_labels, default_branch, default_constraints_branch, debian_version, expected_outputs,", [ ( pytest.param( ("INTHEWILD.md",), ("full tests needed",), "main", + "constraints-main", + "bullseye", { "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "default-branch": "main", + "default-constraints-branch": "constraints-main", + "debian-version": "bullseye", "image-build": "true", "run-tests": "true", "docs-build": "true", @@ -218,6 +223,8 @@ def test_expected_output_pull_request_main( "full tests needed", ), "main", + "constraints-main", + "bullseye", { "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", @@ -235,9 +242,14 @@ def test_expected_output_pull_request_main( (), ("full tests needed",), "main", + "constraints-main", + "bullseye", { "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "default-branch": "main", + "default-constraints-branch": "constraints-main", + "debian-version": "bullseye", "image-build": "true", "run-tests": "true", "docs-build": "true", @@ -252,9 +264,14 @@ def test_expected_output_pull_request_main( ("INTHEWILD.md",), ("full tests needed",), "v2-3-stable", + "constraints-2-3", + "bullseye", { "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "default-branch": "v2-3-stable", + "default-constraints-branch": "constraints-2-3", + "debian-version": "bullseye", 
"image-build": "true", "run-tests": "true", "docs-build": "true", @@ -270,6 +287,8 @@ def test_expected_output_full_tests_needed( files: Tuple[str, ...], pr_labels: Tuple[str, ...], default_branch: str, + default_constraints_branch: str, + debian_version: str, expected_outputs: Dict[str, str], ): sc = SelectiveChecks( @@ -278,6 +297,8 @@ def test_expected_output_full_tests_needed( github_event=GithubEvents.PULL_REQUEST, pr_labels=pr_labels, default_branch=default_branch, + default_constraints_branch=default_constraints_branch, + debian_version=debian_version, ) output = str(sc) assert_outputs_are_printed(expected_outputs, output) diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index 15b755a6e5ac4..217a9debb9495 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -23,7 +23,7 @@ pull-prod-image:6e8467a2b8c833a392c8bdd65189363e regenerate-command-images:4fd2e7ecbfd6eebb18b854f3eb0f29c8 release-prod-images:8858fe5a13989c7c65a79dc97a880928 resource-check:0fb929ac3496dbbe97acfe99e35accd7 -selective-check:eb1cf022ae43fa9c737b1647142e5a96 +selective-check:9b1cc2827b36be29141083dc9e9f6290 self-upgrade:b5437c0a1a91533a11ee9d0a9692369c setup-autocomplete:355b72dee171c2fcba46fc90ac7c97b0 shell:ab7955da71048b3a695485c152d06786 diff --git a/images/breeze/output-selective-check.svg b/images/breeze/output-selective-check.svg index 45a774081903f..02ab2644d751a 100644 --- a/images/breeze/output-selective-check.svg +++ b/images/breeze/output-selective-check.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + + + + - Command: selective-check + Command: selective-check - + - - -Usage: breeze selective-check [OPTIONS] - -Checks what kind of tests should be run for an incoming commit. - -╭─ Selective check flags ──────────────────────────────────────────────────────────────────────────────────────────────╮ ---commit-refCommit-ish reference to the commit that should be checked(TEXT) ---pr-labelsSpace-separate list of labels which are valid for the PR(TEXT) ---default-branchBranch against which the PR should be run(TEXT)[default: main] ---github-event-nameName of the GitHub event that triggered the check                                             -(pull_request | pull_request_review | pull_request_target | pull_request_workflow | push |    -schedule | workflow_run)                                                                      -[default: pull_request]                                                                       -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze selective-check [OPTIONS] + +Checks what kind of tests should be run for an incoming commit. 
+ +╭─ Selective check flags ──────────────────────────────────────────────────────────────────────────────────────────────╮ +--commit-refCommit-ish reference to the commit that should be checked(TEXT) +--pr-labelsSpace-separate list of labels which are valid for the PR(TEXT) +--default-branchBranch against which the PR should be run(TEXT)[default: main] +--github-event-nameName of the GitHub event that triggered the check                                             +(pull_request | pull_request_review | pull_request_target | pull_request_workflow | push |    +schedule | workflow_run)                                                                      +[default: pull_request]                                                                       +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--default-constraints-branchBranch against which the constraints should be downloaded from(TEXT) +[default: constraints-main]                                    +--debian-versionDebian version used for the image.(bullseye | buster)[default: bullseye] +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/tests/always/test_project_structure.py b/tests/always/test_project_structure.py index bd26944233297..8db759fac76db 100644 --- a/tests/always/test_project_structure.py +++ b/tests/always/test_project_structure.py @@ -22,8 +22,6 @@ import unittest from typing import List -from parameterized import parameterized - ROOT_FOLDER = os.path.realpath( os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) ) @@ -151,142 +149,6 @@ def get_classes_from_file(filepath: str): return results -class TestGoogleProviderProjectStructure(unittest.TestCase): - MISSING_EXAMPLE_DAGS = { - 'adls_to_gcs', - 'sql_to_gcs', - 'bigquery_to_mysql', - 'cassandra_to_gcs', - 'drive', - 'ads_to_gcs', - } - - # Those operators are deprecated and we do not need examples for them - DEPRECATED_OPERATORS = { - 'airflow.providers.google.cloud.operators.cloud_storage_transfer_service' - '.CloudDataTransferServiceS3ToGCSOperator', - 'airflow.providers.google.cloud.operators.cloud_storage_transfer_service' - '.CloudDataTransferServiceGCSToGCSOperator', - 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHadoopJobOperator', - 'airflow.providers.google.cloud.operators.dataproc.DataprocScaleClusterOperator', - 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkJobOperator', - 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkSqlJobOperator', - 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHiveJobOperator', - 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPigJobOperator', - 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPySparkJobOperator', - 'airflow.providers.google.cloud.operators.mlengine.MLEngineManageModelOperator', - 'airflow.providers.google.cloud.operators.mlengine.MLEngineManageVersionOperator', - 'airflow.providers.google.cloud.operators.dataflow.DataflowCreateJavaJobOperator', - 'airflow.providers.google.cloud.operators.bigquery.BigQueryPatchDatasetOperator', - 
'airflow.providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator', - 'airflow.providers.google.cloud.operators.bigquery.BigQueryExecuteQueryOperator', - } - - # Those operators should not have examples as they are never used standalone (they are abstract) - BASE_OPERATORS = { - 'airflow.providers.google.cloud.operators.compute.ComputeEngineBaseOperator', - 'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLBaseOperator', - 'airflow.providers.google.cloud.operators.dataproc.DataprocJobBaseOperator', - } - - # Please at the examples to those operators at the earliest convenience :) - MISSING_EXAMPLES_FOR_OPERATORS = { - 'airflow.providers.google.cloud.operators.dataproc.DataprocInstantiateInlineWorkflowTemplateOperator', - 'airflow.providers.google.cloud.operators.mlengine.MLEngineTrainingCancelJobOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetStoredInfoTypeOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPReidentifyContentOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDeidentifyTemplateOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDLPJobOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateDeidentifyTemplateOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobTriggerOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPListDeidentifyTemplatesOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDeidentifyTemplateOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPListInspectTemplatesOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPListStoredInfoTypesOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateInspectTemplateOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteDLPJobOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPListJobTriggersOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPCancelDLPJobOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetInspectTemplateOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPListInfoTypesOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteDeidentifyTemplateOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPListDLPJobsOperator', - 'airflow.providers.google.cloud.operators.dlp.CloudDLPRedactImageOperator', - 'airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor', - 'airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor', - } - - def test_missing_example_for_operator(self): - """ - Assert that all operators defined under operators, sensors and transfers directories - are used in any of the example dags - """ - all_operators = set() - services = set() - for resource_type in ["operators", "sensors", "transfers"]: - operator_files = set( - self.find_resource_files(top_level_directory="airflow", resource_type=resource_type) - ) - for filepath in operator_files: - service_name = os.path.basename(filepath)[: -(len(".py"))] - if service_name in self.MISSING_EXAMPLE_DAGS: - continue - services.add(service_name) - operators_paths = set(get_classes_from_file(f"{ROOT_FOLDER}/{filepath}")) - all_operators.update(operators_paths) - - for service in services: - example_dags = self.examples_for_service(service) - example_paths = { - path for example_dag in example_dags for path in 
get_imports_from_file(example_dag) - } - all_operators -= example_paths - - all_operators -= self.MISSING_EXAMPLES_FOR_OPERATORS - all_operators -= self.DEPRECATED_OPERATORS - all_operators -= self.BASE_OPERATORS - assert set() == all_operators - - @parameterized.expand( - itertools.product(["_system.py", "_system_helper.py"], ["operators", "sensors", "transfers"]) - ) - def test_detect_invalid_system_tests(self, resource_type, filename_suffix): - operators_tests = self.find_resource_files(top_level_directory="tests", resource_type=resource_type) - operators_files = self.find_resource_files(top_level_directory="airflow", resource_type=resource_type) - - files = {f for f in operators_tests if f.endswith(filename_suffix)} - - expected_files = (f"tests/{f[8:]}" for f in operators_files) - expected_files = (f.replace(".py", filename_suffix).replace("/test_", "/") for f in expected_files) - expected_files = {f'{f.rpartition("/")[0]}/test_{f.rpartition("/")[2]}' for f in expected_files} - - assert set() == files - expected_files - - @staticmethod - def find_resource_files( - top_level_directory: str = "airflow", - department: str = "*", - resource_type: str = "*", - service: str = "*", - ): - python_files = glob.glob( - f"{ROOT_FOLDER}/{top_level_directory}/providers/google/{department}/{resource_type}/{service}.py" - ) - # Make path relative - resource_files = (os.path.relpath(f, ROOT_FOLDER) for f in python_files) - # Exclude __init__.py and pycache - resource_files = (f for f in resource_files if not f.endswith("__init__.py")) - return resource_files - - @staticmethod - def examples_for_service(service_name): - yield from glob.glob( - f"{ROOT_FOLDER}/airflow/providers/google/*/example_dags/example_{service_name}*.py" - ) - yield from glob.glob(f"{ROOT_FOLDER}/tests/system/providers/google/{service_name}/example_*.py") - - class TestOperatorsHooks(unittest.TestCase): def test_no_illegal_suffixes(self): illegal_suffixes = ["_operator.py", "_hook.py", "_sensor.py"] diff --git a/tests/providers/google/test_project_structure.py b/tests/providers/google/test_project_structure.py new file mode 100644 index 0000000000000..bd2fecade36c4 --- /dev/null +++ b/tests/providers/google/test_project_structure.py @@ -0,0 +1,161 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import glob +import itertools +import os +import unittest + +from parameterized import parameterized + +from tests.always.test_project_structure import ROOT_FOLDER, get_classes_from_file, get_imports_from_file + + +class TestGoogleProviderProjectStructure(unittest.TestCase): + MISSING_EXAMPLE_DAGS = { + 'adls_to_gcs', + 'sql_to_gcs', + 'bigquery_to_mysql', + 'cassandra_to_gcs', + 'drive', + 'ads_to_gcs', + } + + # Those operators are deprecated and we do not need examples for them + DEPRECATED_OPERATORS = { + 'airflow.providers.google.cloud.operators.cloud_storage_transfer_service' + '.CloudDataTransferServiceS3ToGCSOperator', + 'airflow.providers.google.cloud.operators.cloud_storage_transfer_service' + '.CloudDataTransferServiceGCSToGCSOperator', + 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHadoopJobOperator', + 'airflow.providers.google.cloud.operators.dataproc.DataprocScaleClusterOperator', + 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkJobOperator', + 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkSqlJobOperator', + 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHiveJobOperator', + 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPigJobOperator', + 'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPySparkJobOperator', + 'airflow.providers.google.cloud.operators.mlengine.MLEngineManageModelOperator', + 'airflow.providers.google.cloud.operators.mlengine.MLEngineManageVersionOperator', + 'airflow.providers.google.cloud.operators.dataflow.DataflowCreateJavaJobOperator', + 'airflow.providers.google.cloud.operators.bigquery.BigQueryPatchDatasetOperator', + 'airflow.providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator', + 'airflow.providers.google.cloud.operators.bigquery.BigQueryExecuteQueryOperator', + } + + # Those operators should not have examples as they are never used standalone (they are abstract) + BASE_OPERATORS = { + 'airflow.providers.google.cloud.operators.compute.ComputeEngineBaseOperator', + 'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLBaseOperator', + 'airflow.providers.google.cloud.operators.dataproc.DataprocJobBaseOperator', + } + + # Please at the examples to those operators at the earliest convenience :) + MISSING_EXAMPLES_FOR_OPERATORS = { + 'airflow.providers.google.cloud.operators.dataproc.DataprocInstantiateInlineWorkflowTemplateOperator', + 'airflow.providers.google.cloud.operators.mlengine.MLEngineTrainingCancelJobOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetStoredInfoTypeOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPReidentifyContentOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDeidentifyTemplateOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDLPJobOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateDeidentifyTemplateOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobTriggerOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPListDeidentifyTemplatesOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDeidentifyTemplateOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPListInspectTemplatesOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPListStoredInfoTypesOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateInspectTemplateOperator', + 
'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteDLPJobOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPListJobTriggersOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPCancelDLPJobOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetInspectTemplateOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPListInfoTypesOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteDeidentifyTemplateOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPListDLPJobsOperator', + 'airflow.providers.google.cloud.operators.dlp.CloudDLPRedactImageOperator', + 'airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor', + 'airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor', + } + + def test_missing_example_for_operator(self): + """ + Assert that all operators defined under operators, sensors and transfers directories + are used in any of the example dags + """ + all_operators = set() + services = set() + for resource_type in ["operators", "sensors", "transfers"]: + operator_files = set( + self.find_resource_files(top_level_directory="airflow", resource_type=resource_type) + ) + for filepath in operator_files: + service_name = os.path.basename(filepath)[: -(len(".py"))] + if service_name in self.MISSING_EXAMPLE_DAGS: + continue + services.add(service_name) + operators_paths = set(get_classes_from_file(f"{ROOT_FOLDER}/{filepath}")) + all_operators.update(operators_paths) + + for service in services: + example_dags = self.examples_for_service(service) + example_paths = { + path for example_dag in example_dags for path in get_imports_from_file(example_dag) + } + all_operators -= example_paths + + all_operators -= self.MISSING_EXAMPLES_FOR_OPERATORS + all_operators -= self.DEPRECATED_OPERATORS + all_operators -= self.BASE_OPERATORS + assert set() == all_operators + + @parameterized.expand( + itertools.product(["_system.py", "_system_helper.py"], ["operators", "sensors", "transfers"]) + ) + def test_detect_invalid_system_tests(self, resource_type, filename_suffix): + operators_tests = self.find_resource_files(top_level_directory="tests", resource_type=resource_type) + operators_files = self.find_resource_files(top_level_directory="airflow", resource_type=resource_type) + + files = {f for f in operators_tests if f.endswith(filename_suffix)} + + expected_files = (f"tests/{f[8:]}" for f in operators_files) + expected_files = (f.replace(".py", filename_suffix).replace("/test_", "/") for f in expected_files) + expected_files = {f'{f.rpartition("/")[0]}/test_{f.rpartition("/")[2]}' for f in expected_files} + + assert set() == files - expected_files + + @staticmethod + def find_resource_files( + top_level_directory: str = "airflow", + department: str = "*", + resource_type: str = "*", + service: str = "*", + ): + python_files = glob.glob( + f"{ROOT_FOLDER}/{top_level_directory}/providers/google/{department}/{resource_type}/{service}.py" + ) + # Make path relative + resource_files = (os.path.relpath(f, ROOT_FOLDER) for f in python_files) + # Exclude __init__.py and pycache + resource_files = (f for f in resource_files if not f.endswith("__init__.py")) + return resource_files + + @staticmethod + def examples_for_service(service_name): + yield from glob.glob( + f"{ROOT_FOLDER}/airflow/providers/google/*/example_dags/example_{service_name}*.py" + ) + yield from 
glob.glob(f"{ROOT_FOLDER}/tests/system/providers/google/{service_name}/example_*.py") From b8b6f31676126e9d7622a2ab7ea87a38843102ae Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sun, 5 Jun 2022 21:52:02 +0800 Subject: [PATCH 042/118] Parse error for task added to multiple groups (#23071) This raises an exception if a task already belonging to a task group (including added to a DAG, since such task is automatically added to the DAG's root task group). Also, according to the issue response, manually calling TaskGroup.add() is not considered a supported way to add a task to group. So a meta-marker is added to the function docstring to prevent it from showing up in documentation and users from trying to use it. (cherry picked from commit 337863fa35bba8463d62e5cf0859f2bb73cf053a) --- airflow/exceptions.py | 17 +++++++++++++++++ airflow/utils/task_group.py | 18 ++++++++++++++++-- tests/utils/test_task_group.py | 22 ++++++++++++++++++++++ 3 files changed, 55 insertions(+), 2 deletions(-) diff --git a/airflow/exceptions.py b/airflow/exceptions.py index 95fa9e3276545..fa7acf61da1ce 100644 --- a/airflow/exceptions.py +++ b/airflow/exceptions.py @@ -185,6 +185,23 @@ class DuplicateTaskIdFound(AirflowException): """Raise when a Task with duplicate task_id is defined in the same DAG.""" +class TaskAlreadyInTaskGroup(AirflowException): + """Raise when a Task cannot be added to a TaskGroup since it already belongs to another TaskGroup.""" + + def __init__(self, task_id: str, existing_group_id: Optional[str], new_group_id: str) -> None: + super().__init__(task_id, new_group_id) + self.task_id = task_id + self.existing_group_id = existing_group_id + self.new_group_id = new_group_id + + def __str__(self) -> str: + if self.existing_group_id is None: + existing_group = "the DAG's root group" + else: + existing_group = f"group {self.existing_group_id!r}" + return f"cannot add {self.task_id!r} to {self.new_group_id!r} (already in {existing_group})" + + class SerializationError(AirflowException): """A problem occurred when trying to serialize a DAG.""" diff --git a/airflow/utils/task_group.py b/airflow/utils/task_group.py index 7b53a521db21b..ed8d380ff0bc8 100644 --- a/airflow/utils/task_group.py +++ b/airflow/utils/task_group.py @@ -24,7 +24,12 @@ import weakref from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Set, Tuple, Union -from airflow.exceptions import AirflowDagCycleException, AirflowException, DuplicateTaskIdFound +from airflow.exceptions import ( + AirflowDagCycleException, + AirflowException, + DuplicateTaskIdFound, + TaskAlreadyInTaskGroup, +) from airflow.models.taskmixin import DAGNode, DependencyMixin from airflow.serialization.enums import DagAttributeTypes from airflow.utils.helpers import validate_group_key @@ -186,7 +191,16 @@ def __iter__(self): yield child def add(self, task: DAGNode) -> None: - """Add a task to this TaskGroup.""" + """Add a task to this TaskGroup. + + :meta private: + """ + from airflow.models.abstractoperator import AbstractOperator + + existing_tg = task.task_group + if isinstance(task, AbstractOperator) and existing_tg is not None and existing_tg != self: + raise TaskAlreadyInTaskGroup(task.node_id, existing_tg.node_id, self.node_id) + # Set the TG first, as setting it might change the return value of node_id! 
task.task_group = weakref.proxy(self) key = task.node_id diff --git a/tests/utils/test_task_group.py b/tests/utils/test_task_group.py index 4b47a0c9bb4bd..9aacc96b826e8 100644 --- a/tests/utils/test_task_group.py +++ b/tests/utils/test_task_group.py @@ -20,6 +20,7 @@ import pytest from airflow.decorators import dag, task_group as task_group_decorator +from airflow.exceptions import TaskAlreadyInTaskGroup from airflow.models import DAG from airflow.models.xcom_arg import XComArg from airflow.operators.bash import BashOperator @@ -1200,3 +1201,24 @@ def nested_topo(group): ], task6, ] + + +def test_add_to_sub_group(): + with DAG("test_dag", start_date=pendulum.parse("20200101")): + tg = TaskGroup("section") + task = EmptyOperator(task_id="task") + with pytest.raises(TaskAlreadyInTaskGroup) as ctx: + tg.add(task) + + assert str(ctx.value) == "cannot add 'task' to 'section' (already in the DAG's root group)" + + +def test_add_to_another_group(): + with DAG("test_dag", start_date=pendulum.parse("20200101")): + tg = TaskGroup("section_1") + with TaskGroup("section_2"): + task = EmptyOperator(task_id="task") + with pytest.raises(TaskAlreadyInTaskGroup) as ctx: + tg.add(task) + + assert str(ctx.value) == "cannot add 'section_2.task' to 'section_1' (already in group 'section_2')" From 90d338fb186c1c543f579c14e770564da86a22dd Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 8 Jun 2022 17:58:20 +0800 Subject: [PATCH 043/118] Check bag DAG schedule_interval match tiemtable (#23113) This guards against the DAG's timetable or schedule_interval from being changed after it's created. Validation is done by creating a timetable and check its summary matches schedule_interval. The logic is not bullet-proof, especially if a custom timetable does not provide a useful summary. But this is the best we can do. 
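A minimal sketch (not part of this patch) of the new validation path, mirroring the tests below;
the error text follows the format added to dag.py, and the OnceTimetable import path is assumed
to be airflow.timetables.simple:

.. code-block:: python

    from datetime import timedelta

    from airflow.exceptions import AirflowDagInconsistent
    from airflow.models import DAG
    from airflow.timetables.simple import OnceTimetable

    # The DAG was created with an "@once" timetable, but schedule_interval is
    # mutated afterwards, so the two no longer describe the same schedule.
    dag = DAG("my-dag", timetable=OnceTimetable())
    dag.schedule_interval = timedelta(days=1)

    try:
        dag.validate()  # the DagBag calls this before bagging the DAG
    except AirflowDagInconsistent as err:
        # "inconsistent schedule: timetable '@once' does not match
        #  schedule_interval datetime.timedelta(days=1)"
        print(err)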
(cherry picked from commit a1a9a8f9a3adc63e783cf3fd699066f35e488d4f) --- airflow/exceptions.py | 4 ++++ airflow/models/dag.py | 43 +++++++++++++++++++++++++++++++++++++++- airflow/models/dagbag.py | 13 ++++++------ tests/models/test_dag.py | 26 ++++++++++++++++++++++++ 4 files changed, 79 insertions(+), 7 deletions(-) diff --git a/airflow/exceptions.py b/airflow/exceptions.py index fa7acf61da1ce..bfb5835fdaf31 100644 --- a/airflow/exceptions.py +++ b/airflow/exceptions.py @@ -149,6 +149,10 @@ def __str__(self) -> str: return f"Ignoring DAG {self.dag_id} from {self.incoming} - also found in {self.existing}" +class AirflowDagInconsistent(AirflowException): + """Raise when a DAG has inconsistent attributes.""" + + class AirflowClusterPolicyViolation(AirflowException): """Raise when there is a violation of a Cluster Policy in DAG definition.""" diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 54f5b0667520f..823287dcb1856 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -63,7 +63,7 @@ from airflow import settings, utils from airflow.compat.functools import cached_property from airflow.configuration import conf -from airflow.exceptions import AirflowException, DuplicateTaskIdFound, TaskNotFound +from airflow.exceptions import AirflowDagInconsistent, AirflowException, DuplicateTaskIdFound, TaskNotFound from airflow.models.abstractoperator import AbstractOperator from airflow.models.base import ID_LEN, Base from airflow.models.dagbag import DagBag @@ -484,6 +484,47 @@ def __init__( self._task_group = TaskGroup.create_root(self) self.validate_schedule_and_params() + def _check_schedule_interval_matches_timetable(self) -> bool: + """Check ``schedule_interval`` and ``timetable`` match. + + This is done as a part of the DAG validation done before it's bagged, to + guard against the DAG's ``timetable`` (or ``schedule_interval``) from + being changed after it's created, e.g. + + .. code-block:: python + + dag1 = DAG("d1", timetable=MyTimetable()) + dag1.schedule_interval = "@once" + + dag2 = DAG("d2", schedule_interval="@once") + dag2.timetable = MyTimetable() + + Validation is done by creating a timetable and check its summary matches + ``schedule_interval``. The logic is not bullet-proof, especially if a + custom timetable does not provide a useful ``summary``. But this is the + best we can do. + """ + if self.schedule_interval == self.timetable.summary: + return True + try: + timetable = create_timetable(self.schedule_interval, self.timezone) + except ValueError: + return False + return timetable.summary == self.timetable.summary + + def validate(self): + """Validate the DAG has a coherent setup. + + This is called by the DAG bag before bagging the DAG. 
+ """ + if not self._check_schedule_interval_matches_timetable(): + raise AirflowDagInconsistent( + f"inconsistent schedule: timetable {self.timetable.summary!r} " + f"does not match schedule_interval {self.schedule_interval!r}", + ) + self.params.validate() + self.timetable.validate() + def __repr__(self): return f"" diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py index 3673ce095ea16..c0ef0941b6dbf 100644 --- a/airflow/models/dagbag.py +++ b/airflow/models/dagbag.py @@ -39,6 +39,7 @@ AirflowClusterPolicyViolation, AirflowDagCycleException, AirflowDagDuplicatedIdException, + AirflowDagInconsistent, AirflowTimetableInvalid, ParamValidationError, ) @@ -402,25 +403,25 @@ def _process_modules(self, filepath, mods, file_last_changed_on_disk): for (dag, mod) in top_level_dags: dag.fileloc = mod.__file__ try: - dag.timetable.validate() - # validate dag params - dag.params.validate() + dag.validate() self.bag_dag(dag=dag, root_dag=dag) - found_dags.append(dag) - found_dags += dag.subdags except AirflowTimetableInvalid as exception: self.log.exception("Failed to bag_dag: %s", dag.fileloc) self.import_errors[dag.fileloc] = f"Invalid timetable expression: {exception}" self.file_last_changed[dag.fileloc] = file_last_changed_on_disk except ( + AirflowClusterPolicyViolation, AirflowDagCycleException, AirflowDagDuplicatedIdException, - AirflowClusterPolicyViolation, + AirflowDagInconsistent, ParamValidationError, ) as exception: self.log.exception("Failed to bag_dag: %s", dag.fileloc) self.import_errors[dag.fileloc] = str(exception) self.file_last_changed[dag.fileloc] = file_last_changed_on_disk + else: + found_dags.append(dag) + found_dags += dag.subdags return found_dags def bag_dag(self, dag, root_dag): diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index 9e3c46a602938..0164ce0f87c42 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -2243,6 +2243,32 @@ def return_num(num): assert dag.params['value'] == value +@pytest.mark.parametrize("timetable", [NullTimetable(), OnceTimetable()]) +def test_dag_timetable_match_schedule_interval(timetable): + dag = DAG("my-dag", timetable=timetable) + assert dag._check_schedule_interval_matches_timetable() + + +@pytest.mark.parametrize("schedule_interval", [None, "@once", "@daily", timedelta(days=1)]) +def test_dag_schedule_interval_match_timetable(schedule_interval): + dag = DAG("my-dag", schedule_interval=schedule_interval) + assert dag._check_schedule_interval_matches_timetable() + + +@pytest.mark.parametrize("schedule_interval", [None, "@daily", timedelta(days=1)]) +def test_dag_schedule_interval_change_after_init(schedule_interval): + dag = DAG("my-dag", timetable=OnceTimetable()) + dag.schedule_interval = schedule_interval + assert not dag._check_schedule_interval_matches_timetable() + + +@pytest.mark.parametrize("timetable", [NullTimetable(), OnceTimetable()]) +def test_dag_timetable_change_after_init(timetable): + dag = DAG("my-dag") # Default is timedelta(days=1). 
+ dag.timetable = timetable + assert not dag._check_schedule_interval_matches_timetable() + + @pytest.mark.parametrize("run_id, execution_date", [(None, datetime_tz(2020, 1, 1)), ('test-run-id', None)]) def test_set_task_instance_state(run_id, execution_date, session, dag_maker): """Test that set_task_instance_state updates the TaskInstance state and clear downstream failed""" From bab5e1cc6fbc1d9333c3c4aacb18d4083c92d310 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hubert=20Pietro=C5=84?= <94397721+hubert-pietron@users.noreply.github.com> Date: Tue, 31 May 2022 12:39:16 +0200 Subject: [PATCH 044/118] Fix closing connection dbapi.get_pandas_df (#23452) (cherry picked from commit ab1f637e463011a34d950c306583400b7a2fceb3) --- airflow/hooks/dbapi.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/airflow/hooks/dbapi.py b/airflow/hooks/dbapi.py index da33bacca8447..0b9ce4377be23 100644 --- a/airflow/hooks/dbapi.py +++ b/airflow/hooks/dbapi.py @@ -128,6 +128,24 @@ def get_pandas_df(self, sql, parameters=None, **kwargs): with closing(self.get_conn()) as conn: return psql.read_sql(sql, con=conn, params=parameters, **kwargs) + def get_pandas_df_by_chunks(self, sql, parameters=None, *, chunksize, **kwargs): + """ + Executes the sql and returns a generator + + :param sql: the sql statement to be executed (str) or a list of + sql statements to execute + :param parameters: The parameters to render the SQL query with + :param chunksize: number of rows to include in each chunk + :param kwargs: (optional) passed into pandas.io.sql.read_sql method + """ + try: + from pandas.io import sql as psql + except ImportError: + raise Exception("pandas library not installed, run: pip install 'apache-airflow[pandas]'.") + + with closing(self.get_conn()) as conn: + yield from psql.read_sql(sql, con=conn, params=parameters, chunksize=chunksize, **kwargs) + def get_records(self, sql, parameters=None): """ Executes the sql and returns a set of records. From d7b58db1588474b287669dc58ae2ca72bd35e139 Mon Sep 17 00:00:00 2001 From: Ephraim Anierobi Date: Wed, 8 Jun 2022 08:47:26 +0100 Subject: [PATCH 045/118] Fix permission issue for dag that has dot in name (#23510) How we determine if a DAG is a subdag in airflow.security.permissions.resource_name_for_dag is not right. If a dag_id contains a dot, the permission is not recorded correctly. The current solution makes a query every time we check for permission for dags that has a dot in the name. Not that I like it but I think it's better than other options I considered such as changing how we name dags for subdag. That's not good in UX. 
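To make the behaviour change concrete, a small sketch of how the permission resource names come out after this patch (the ids are taken from the new tests and are illustrative)::

    from airflow.security.permissions import resource_name_for_dag

    # A regular DAG whose dag_id merely contains a dot: callers now pass the
    # DagModel.dag_id itself (its root_dag_id is empty), so the full name is
    # kept instead of being truncated at the first dot.
    resource_name_for_dag("dag_id_1.with_dot")  # -> "DAG:dag_id_1.with_dot"

    # A real subdag such as "parent_dag.subdag": callers look up
    # DagModel.root_dag_id first and pass that, so the subdag keeps following
    # its parent's permissions.
    resource_name_for_dag("parent_dag")  # -> "DAG:parent_dag"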
Another option I considered was making a query when parsing, that's not good and it's avoided by passing root_dag to resource_name_for_dag Co-authored-by: Ash Berlin-Taylor Co-authored-by: Tzu-ping Chung (cherry picked from commit cc35fcaf89eeff3d89e18088c2e68f01f8baad56) --- airflow/models/dagbag.py | 8 ++-- airflow/security/permissions.py | 19 +++++----- airflow/www/security.py | 29 ++++++++++++--- tests/www/test_security.py | 66 ++++++++++++++++++++++++++++----- 4 files changed, 95 insertions(+), 27 deletions(-) diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py index c0ef0941b6dbf..929842fd0da4c 100644 --- a/airflow/models/dagbag.py +++ b/airflow/models/dagbag.py @@ -641,6 +641,8 @@ def _sync_perm_for_dag(self, dag, session: Session = None): from airflow.security.permissions import DAG_ACTIONS, resource_name_for_dag from airflow.www.fab_security.sqla.models import Action, Permission, Resource + root_dag_id = dag.parent_dag.dag_id if dag.parent_dag else dag.dag_id + def needs_perms(dag_id: str) -> bool: dag_resource_name = resource_name_for_dag(dag_id) for permission_name in DAG_ACTIONS: @@ -655,9 +657,9 @@ def needs_perms(dag_id: str) -> bool: return True return False - if dag.access_control or needs_perms(dag.dag_id): - self.log.debug("Syncing DAG permissions: %s to the DB", dag.dag_id) + if dag.access_control or needs_perms(root_dag_id): + self.log.debug("Syncing DAG permissions: %s to the DB", root_dag_id) from airflow.www.security import ApplessAirflowSecurityManager security_manager = ApplessAirflowSecurityManager(session=session) - security_manager.sync_perm_for_dag(dag.dag_id, dag.access_control) + security_manager.sync_perm_for_dag(root_dag_id, dag.access_control) diff --git a/airflow/security/permissions.py b/airflow/security/permissions.py index 2d5c0b939976e..2d02c773b43ff 100644 --- a/airflow/security/permissions.py +++ b/airflow/security/permissions.py @@ -66,14 +66,15 @@ DAG_ACTIONS = {ACTION_CAN_READ, ACTION_CAN_EDIT, ACTION_CAN_DELETE} -def resource_name_for_dag(dag_id): - """Returns the resource name for a DAG id.""" - if dag_id == RESOURCE_DAG: - return dag_id +def resource_name_for_dag(root_dag_id: str) -> str: + """Returns the resource name for a DAG id. - if dag_id.startswith(RESOURCE_DAG_PREFIX): - return dag_id - - # To account for SubDags - root_dag_id = dag_id.split(".")[0] + Note that since a sub-DAG should follow the permission of its + parent DAG, you should pass ``DagModel.root_dag_id`` to this function, + for a subdag. A normal dag should pass the ``DagModel.dag_id``. + """ + if root_dag_id == RESOURCE_DAG: + return root_dag_id + if root_dag_id.startswith(RESOURCE_DAG_PREFIX): + return root_dag_id return f"{RESOURCE_DAG_PREFIX}{root_dag_id}" diff --git a/airflow/www/security.py b/airflow/www/security.py index 42188f06184b4..de6b0d646e8c8 100644 --- a/airflow/www/security.py +++ b/airflow/www/security.py @@ -200,6 +200,16 @@ def __init__(self, appbuilder): view.datamodel = CustomSQLAInterface(view.datamodel.obj) self.perms = None + def _get_root_dag_id(self, dag_id): + if '.' in dag_id: + dm = ( + self.get_session.query(DagModel.dag_id, DagModel.root_dag_id) + .filter(DagModel.dag_id == dag_id) + .first() + ) + return dm.root_dag_id or dm.dag_id + return dag_id + def init_role(self, role_name, perms): """ Initialize the role with actions and related resources. 
@@ -340,7 +350,8 @@ def get_accessible_dag_ids(self, user, user_actions=None, session=None) -> Set[s def can_access_some_dags(self, action: str, dag_id: Optional[str] = None) -> bool: """Checks if user has read or write access to some dags.""" if dag_id and dag_id != '~': - return self.has_access(action, permissions.resource_name_for_dag(dag_id)) + root_dag_id = self._get_root_dag_id(dag_id) + return self.has_access(action, permissions.resource_name_for_dag(root_dag_id)) user = g.user if action == permissions.ACTION_CAN_READ: @@ -349,17 +360,20 @@ def can_access_some_dags(self, action: str, dag_id: Optional[str] = None) -> boo def can_read_dag(self, dag_id, user=None) -> bool: """Determines whether a user has DAG read access.""" - dag_resource_name = permissions.resource_name_for_dag(dag_id) + root_dag_id = self._get_root_dag_id(dag_id) + dag_resource_name = permissions.resource_name_for_dag(root_dag_id) return self.has_access(permissions.ACTION_CAN_READ, dag_resource_name, user=user) def can_edit_dag(self, dag_id, user=None) -> bool: """Determines whether a user has DAG edit access.""" - dag_resource_name = permissions.resource_name_for_dag(dag_id) + root_dag_id = self._get_root_dag_id(dag_id) + dag_resource_name = permissions.resource_name_for_dag(root_dag_id) return self.has_access(permissions.ACTION_CAN_EDIT, dag_resource_name, user=user) def can_delete_dag(self, dag_id, user=None) -> bool: """Determines whether a user has DAG delete access.""" - dag_resource_name = permissions.resource_name_for_dag(dag_id) + root_dag_id = self._get_root_dag_id(dag_id) + dag_resource_name = permissions.resource_name_for_dag(root_dag_id) return self.has_access(permissions.ACTION_CAN_DELETE, dag_resource_name, user=user) def prefixed_dag_id(self, dag_id): @@ -370,7 +384,8 @@ def prefixed_dag_id(self, dag_id): DeprecationWarning, stacklevel=2, ) - return permissions.resource_name_for_dag(dag_id) + root_dag_id = self._get_root_dag_id(dag_id) + return permissions.resource_name_for_dag(root_dag_id) def is_dag_resource(self, resource_name): """Determines if a resource belongs to a DAG or all DAGs.""" @@ -530,7 +545,8 @@ def create_dag_specific_permissions(self) -> None: dags = dagbag.dags.values() for dag in dags: - dag_resource_name = permissions.resource_name_for_dag(dag.dag_id) + root_dag_id = dag.parent_dag.dag_id if dag.parent_dag else dag.dag_id + dag_resource_name = permissions.resource_name_for_dag(root_dag_id) for action_name in self.DAG_ACTIONS: if (action_name, dag_resource_name) not in perms: self._merge_perm(action_name, dag_resource_name) @@ -615,6 +631,7 @@ def _sync_dag_view_permissions(self, dag_id, access_control): :param access_control: a dict where each key is a rolename and each value is a set() of action names (e.g. 
{'can_read'}) """ + dag_resource_name = permissions.resource_name_for_dag(dag_id) def _get_or_create_dag_permission(action_name: str) -> Optional[Permission]: diff --git a/tests/www/test_security.py b/tests/www/test_security.py index 8c90062600818..7b8541ca81756 100644 --- a/tests/www/test_security.py +++ b/tests/www/test_security.py @@ -192,7 +192,8 @@ def sample_dags(security_manager): @pytest.fixture(scope="module") def has_dag_perm(security_manager): def _has_dag_perm(perm, dag_id, user): - return security_manager.has_access(perm, permissions.resource_name_for_dag(dag_id), user) + root_dag_id = security_manager._get_root_dag_id(dag_id) + return security_manager.has_access(perm, permissions.resource_name_for_dag(root_dag_id), user) return _has_dag_perm @@ -351,7 +352,7 @@ def test_verify_anon_user_with_admin_role_has_access_to_each_dag( user.roles = security_manager.get_user_roles(user) assert user.roles == {security_manager.get_public_role()} - test_dag_ids = ["test_dag_id_1", "test_dag_id_2", "test_dag_id_3"] + test_dag_ids = ["test_dag_id_1", "test_dag_id_2", "test_dag_id_3", "test_dag_id_4.with_dot"] for dag_id in test_dag_ids: with _create_dag_model_context(dag_id, session, security_manager): @@ -588,7 +589,8 @@ def test_access_control_with_invalid_permission(app, security_manager): for action in invalid_actions: with pytest.raises(AirflowException) as ctx: security_manager._sync_dag_view_permissions( - 'access_control_test', access_control={rolename: {action}} + 'access_control_test', + access_control={rolename: {action}}, ) assert "invalid permissions" in str(ctx.value) @@ -728,11 +730,13 @@ def test_create_dag_specific_permissions(session, security_manager, monkeypatch, assert ('can_edit', dag_resource_name) in all_perms security_manager._sync_dag_view_permissions.assert_called_once_with( - permissions.resource_name_for_dag('has_access_control'), access_control + permissions.resource_name_for_dag('has_access_control'), + access_control, ) del dagbag_mock.dags["has_access_control"] - with assert_queries_count(1): # one query to get all perms; dagbag is mocked + with assert_queries_count(2): # two query to get all perms; dagbag is mocked + # The extra query happens at permission check security_manager.create_dag_specific_permissions() @@ -782,10 +786,12 @@ def test_prefixed_dag_id_is_deprecated(security_manager): security_manager.prefixed_dag_id("hello") -def test_parent_dag_access_applies_to_subdag(app, security_manager, assert_user_has_dag_perms): +def test_parent_dag_access_applies_to_subdag(app, security_manager, assert_user_has_dag_perms, session): username = 'dag_permission_user' role_name = 'dag_permission_role' parent_dag_name = "parent_dag" + subdag_name = parent_dag_name + ".subdag" + subsubdag_name = parent_dag_name + ".subdag.subsubdag" with app.app_context(): mock_roles = [ { @@ -801,15 +807,57 @@ def test_parent_dag_access_applies_to_subdag(app, security_manager, assert_user_ username=username, role_name=role_name, ) as user: + dag1 = DagModel(dag_id=parent_dag_name) + dag2 = DagModel(dag_id=subdag_name, is_subdag=True, root_dag_id=parent_dag_name) + dag3 = DagModel(dag_id=subsubdag_name, is_subdag=True, root_dag_id=parent_dag_name) + session.add_all([dag1, dag2, dag3]) + session.commit() security_manager.bulk_sync_roles(mock_roles) - security_manager._sync_dag_view_permissions( - parent_dag_name, access_control={role_name: READ_WRITE} - ) + for dag in [dag1, dag2, dag3]: + security_manager._sync_dag_view_permissions( + parent_dag_name, access_control={role_name: 
READ_WRITE} + ) + assert_user_has_dag_perms(perms=READ_WRITE, dag_id=parent_dag_name, user=user) assert_user_has_dag_perms(perms=READ_WRITE, dag_id=parent_dag_name + ".subdag", user=user) assert_user_has_dag_perms( perms=READ_WRITE, dag_id=parent_dag_name + ".subdag.subsubdag", user=user ) + session.query(DagModel).delete() + + +def test_permissions_work_for_dags_with_dot_in_dagname( + app, security_manager, assert_user_has_dag_perms, assert_user_does_not_have_dag_perms, session +): + username = 'dag_permission_user' + role_name = 'dag_permission_role' + dag_id = "dag_id_1" + dag_id_2 = "dag_id_1.with_dot" + with app.app_context(): + mock_roles = [ + { + 'role': role_name, + 'perms': [ + (permissions.ACTION_CAN_READ, f"DAG:{dag_id}"), + (permissions.ACTION_CAN_EDIT, f"DAG:{dag_id}"), + ], + } + ] + with create_user_scope( + app, + username=username, + role_name=role_name, + ) as user: + dag1 = DagModel(dag_id=dag_id) + dag2 = DagModel(dag_id=dag_id_2) + session.add_all([dag1, dag2]) + session.commit() + security_manager.bulk_sync_roles(mock_roles) + security_manager.sync_perm_for_dag(dag1.dag_id, access_control={role_name: READ_WRITE}) + security_manager.sync_perm_for_dag(dag2.dag_id, access_control={role_name: READ_WRITE}) + assert_user_has_dag_perms(perms=READ_WRITE, dag_id=dag_id, user=user) + assert_user_does_not_have_dag_perms(perms=READ_WRITE, dag_id=dag_id_2, user=user) + session.query(DagModel).delete() def test_fab_models_use_airflow_base_meta(): From 5e174a12b73b9737bf48d3097c1fd5ca45a9b0e2 Mon Sep 17 00:00:00 2001 From: Ephraim Anierobi Date: Fri, 10 Jun 2022 14:44:19 +0100 Subject: [PATCH 046/118] Refactor `DagRun.verify_integrity` (#24114) This refactoring became necessary as there's a necessity to add additional code to the already exisiting code to handle mapped task immutability during run. The additional code would make this method difficult to read. Refactoring the code will aid understanding and help in debugging. 
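One piece worth calling out is the "task creator" pattern the refactor keeps: a generator that lazily yields rows for bulk insertion while tallying how many task instances were created per operator type. A self-contained sketch of that pattern follows; the operator names and plain-dict rows are stand-ins, not Airflow's actual TaskInstance mappings::

    import itertools
    from collections import defaultdict

    def make_counting_creator(created_counts):
        # Mirrors the idea behind _get_task_creator: yield the rows to insert
        # and keep a per-type tally that can be logged or emitted afterwards.
        def create(kind, indexes):
            for index in indexes:
                created_counts[kind] += 1
                yield {"kind": kind, "map_index": index}

        return create

    created_counts = defaultdict(int)
    creator = make_counting_creator(created_counts)

    tasks_and_map_idxs = [("EmptyOperator", range(3)), ("BashOperator", (-1,))]
    rows = list(itertools.chain.from_iterable(itertools.starmap(creator, tasks_and_map_idxs)))

    assert len(rows) == 4
    assert created_counts == {"EmptyOperator": 3, "BashOperator": 1}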
(cherry picked from commit 12638d2310d962986b43af8f1584a405e280badf) --- airflow/models/dagrun.py | 102 +++++++++++++++++++++++++++++++++------ 1 file changed, 88 insertions(+), 14 deletions(-) diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index fdb566e467150..b71cd03eec2e3 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -23,6 +23,7 @@ from typing import ( TYPE_CHECKING, Any, + Callable, Dict, Generator, Iterable, @@ -30,6 +31,7 @@ NamedTuple, Optional, Sequence, + Set, Tuple, Union, cast, @@ -818,13 +820,50 @@ def verify_integrity(self, session: Session = NEW_SESSION): """ from airflow.settings import task_instance_mutation_hook + # Set for the empty default in airflow.settings -- if it's not set this means it has been changed + hook_is_noop = getattr(task_instance_mutation_hook, 'is_noop', False) + dag = self.get_dag() + task_ids = self._check_for_removed_or_restored_tasks( + dag, task_instance_mutation_hook, session=session + ) + + def task_filter(task: "Operator") -> bool: + return task.task_id not in task_ids and ( + self.is_backfill + or task.start_date <= self.execution_date + and (task.end_date is None or self.execution_date <= task.end_date) + ) + + created_counts: Dict[str, int] = defaultdict(int) + + # Get task creator function + task_creator = self._get_task_creator(created_counts, task_instance_mutation_hook, hook_is_noop) + + # Create the missing tasks, including mapped tasks + tasks = self._create_missing_tasks(dag, task_creator, task_filter, session=session) + + self._create_task_instances(dag.dag_id, tasks, created_counts, hook_is_noop, session=session) + + def _check_for_removed_or_restored_tasks( + self, dag: "DAG", ti_mutation_hook, *, session: Session + ) -> Set[str]: + """ + Check for removed tasks/restored tasks. + + :param dag: DAG object corresponding to the dagrun + :param ti_mutation_hook: task_instance_mutation_hook function + :param session: Sqlalchemy ORM Session + + :return: List of task_ids in the dagrun + + """ tis = self.get_task_instances(session=session) # check for removed or restored tasks task_ids = set() for ti in tis: - task_instance_mutation_hook(ti) + ti_mutation_hook(ti) task_ids.add(ti.task_id) task = None try: @@ -885,19 +924,21 @@ def verify_integrity(self, session: Session = NEW_SESSION): ) ti.state = State.REMOVED ... + return task_ids - def task_filter(task: "Operator") -> bool: - return task.task_id not in task_ids and ( - self.is_backfill - or task.start_date <= self.execution_date - and (task.end_date is None or self.execution_date <= task.end_date) - ) + def _get_task_creator( + self, created_counts: Dict[str, int], ti_mutation_hook: Callable, hook_is_noop: bool + ) -> Callable: + """ + Get the task creator function. - created_counts: Dict[str, int] = defaultdict(int) + This function also updates the created_counts dictionary with the number of tasks created. 
- # Set for the empty default in airflow.settings -- if it's not set this means it has been changed - hook_is_noop = getattr(task_instance_mutation_hook, 'is_noop', False) + :param created_counts: Dictionary of task_type -> count of created TIs + :param ti_mutation_hook: task_instance_mutation_hook function + :param hook_is_noop: Whether the task_instance_mutation_hook is a noop + """ if hook_is_noop: def create_ti_mapping(task: "Operator", indexes: Tuple[int, ...]) -> Generator: @@ -912,13 +953,25 @@ def create_ti_mapping(task: "Operator", indexes: Tuple[int, ...]) -> Generator: def create_ti(task: "Operator", indexes: Tuple[int, ...]) -> Generator: for map_index in indexes: ti = TI(task, run_id=self.run_id, map_index=map_index) - task_instance_mutation_hook(ti) + ti_mutation_hook(ti) created_counts[ti.operator] += 1 yield ti creator = create_ti + return creator + + def _create_missing_tasks( + self, dag: "DAG", task_creator: Callable, task_filter: Callable, *, session: Session + ) -> Iterable["Operator"]: + """ + Create missing tasks -- and expand any MappedOperator that _only_ have literals as input + + :param dag: DAG object corresponding to the dagrun + :param task_creator: a function that creates tasks + :param task_filter: a function that filters tasks to create + :param session: the session to use + """ - # Create missing tasks -- and expand any MappedOperator that _only_ have literals as input def expand_mapped_literals(task: "Operator") -> Tuple["Operator", Sequence[int]]: if not task.is_mapped: return (task, (-1,)) @@ -931,8 +984,29 @@ def expand_mapped_literals(task: "Operator") -> Tuple["Operator", Sequence[int]] return (task, range(count)) tasks_and_map_idxs = map(expand_mapped_literals, filter(task_filter, dag.task_dict.values())) - tasks = itertools.chain.from_iterable(itertools.starmap(creator, tasks_and_map_idxs)) + tasks = itertools.chain.from_iterable(itertools.starmap(task_creator, tasks_and_map_idxs)) + return tasks + + def _create_task_instances( + self, + dag_id: str, + tasks: Iterable["Operator"], + created_counts: Dict[str, int], + hook_is_noop: bool, + *, + session: Session, + ) -> None: + """ + Create the necessary task instances from the given tasks. + + :param dag_id: DAG ID associated with the dagrun + :param tasks: the tasks to create the task instances from + :param created_counts: a dictionary of number of tasks -> total ti created by the task creator + :param hook_is_noop: whether the task_instance_mutation_hook is noop + :param session: the session to use + + """ try: if hook_is_noop: session.bulk_insert_mappings(TI, tasks) @@ -945,7 +1019,7 @@ def expand_mapped_literals(task: "Operator") -> Tuple["Operator", Sequence[int]] except IntegrityError: self.log.info( 'Hit IntegrityError while creating the TIs for %s- %s', - dag.dag_id, + dag_id, self.run_id, exc_info=True, ) From 8892587cce270aa504fc1a9e25d8d2279f0c71b8 Mon Sep 17 00:00:00 2001 From: Ephraim Anierobi Date: Sat, 18 Jun 2022 08:32:38 +0100 Subject: [PATCH 047/118] Fix mapped task immutability after clear (#23667) We should be able to detect if the structure of mapped task has changed and verify the integrity. 
This PR ensures this Co-authored-by: Tzu-ping Chung (cherry picked from commit b692517ce3aafb276e9d23570e9734c30a5f3d1f) --- airflow/models/dagrun.py | 114 ++++++++++++++++++++----- tests/models/test_dagrun.py | 161 +++++++++++++++++++++++++++++++++++- 2 files changed, 251 insertions(+), 24 deletions(-) diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index b71cd03eec2e3..3be82b9b6db57 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -642,15 +642,9 @@ def task_instance_scheduling_decisions(self, session: Session = NEW_SESSION) -> tis = list(self.get_task_instances(session=session, state=State.task_states)) self.log.debug("number of tis tasks for %s: %s task(s)", self, len(tis)) dag = self.get_dag() - for ti in tis: - try: - ti.task = dag.get_task(ti.task_id) - except TaskNotFound: - self.log.warning( - "Failed to get task '%s' for dag '%s'. Marking it as removed.", ti, ti.dag_id - ) - ti.state = State.REMOVED - session.flush() + missing_indexes = self._find_missing_task_indexes(dag, tis, session=session) + if missing_indexes: + self.verify_integrity(missing_indexes=missing_indexes, session=session) unfinished_tis = [t for t in tis if t.state in State.unfinished] finished_tis = [t for t in tis if t.state in State.finished] @@ -811,11 +805,17 @@ def _emit_duration_stats_for_finished_state(self): Stats.timing(f'dagrun.duration.failed.{self.dag_id}', duration) @provide_session - def verify_integrity(self, session: Session = NEW_SESSION): + def verify_integrity( + self, + *, + missing_indexes: Optional[Dict["MappedOperator", Sequence[int]]] = None, + session: Session = NEW_SESSION, + ): """ Verifies the DagRun by checking for removed tasks or tasks that are not in the database yet. It will set state to removed or add the task if required. + :missing_indexes: A dictionary of task vs indexes that are missing. :param session: Sqlalchemy ORM Session """ from airflow.settings import task_instance_mutation_hook @@ -824,9 +824,16 @@ def verify_integrity(self, session: Session = NEW_SESSION): hook_is_noop = getattr(task_instance_mutation_hook, 'is_noop', False) dag = self.get_dag() - task_ids = self._check_for_removed_or_restored_tasks( - dag, task_instance_mutation_hook, session=session - ) + task_ids: Set[str] = set() + if missing_indexes: + tis = self.get_task_instances(session=session) + for ti in tis: + task_instance_mutation_hook(ti) + task_ids.add(ti.task_id) + else: + task_ids, missing_indexes = self._check_for_removed_or_restored_tasks( + dag, task_instance_mutation_hook, session=session + ) def task_filter(task: "Operator") -> bool: return task.task_id not in task_ids and ( @@ -841,27 +848,29 @@ def task_filter(task: "Operator") -> bool: task_creator = self._get_task_creator(created_counts, task_instance_mutation_hook, hook_is_noop) # Create the missing tasks, including mapped tasks - tasks = self._create_missing_tasks(dag, task_creator, task_filter, session=session) + tasks = self._create_missing_tasks(dag, task_creator, task_filter, missing_indexes, session=session) self._create_task_instances(dag.dag_id, tasks, created_counts, hook_is_noop, session=session) def _check_for_removed_or_restored_tasks( self, dag: "DAG", ti_mutation_hook, *, session: Session - ) -> Set[str]: + ) -> Tuple[Set[str], Dict["MappedOperator", Sequence[int]]]: """ - Check for removed tasks/restored tasks. + Check for removed tasks/restored/missing tasks. 
:param dag: DAG object corresponding to the dagrun :param ti_mutation_hook: task_instance_mutation_hook function :param session: Sqlalchemy ORM Session - :return: List of task_ids in the dagrun + :return: List of task_ids in the dagrun and missing task indexes """ tis = self.get_task_instances(session=session) # check for removed or restored tasks task_ids = set() + existing_indexes: Dict["MappedOperator", List[int]] = defaultdict(list) + expected_indexes: Dict["MappedOperator", Sequence[int]] = defaultdict(list) for ti in tis: ti_mutation_hook(ti) task_ids.add(ti.task_id) @@ -902,7 +911,8 @@ def _check_for_removed_or_restored_tasks( else: self.log.info("Restoring mapped task '%s'", ti) Stats.incr(f"task_restored_to_dag.{dag.dag_id}", 1, 1) - ti.state = State.NONE + existing_indexes[task].append(ti.map_index) + expected_indexes[task] = range(num_mapped_tis) else: # What if it is _now_ dynamically mapped, but wasn't before? total_length = task.run_time_mapped_ti_count(self.run_id, session=session) @@ -923,8 +933,16 @@ def _check_for_removed_or_restored_tasks( total_length, ) ti.state = State.REMOVED - ... - return task_ids + else: + self.log.info("Restoring mapped task '%s'", ti) + Stats.incr(f"task_restored_to_dag.{dag.dag_id}", 1, 1) + existing_indexes[task].append(ti.map_index) + expected_indexes[task] = range(total_length) + # Check if we have some missing indexes to create ti for + missing_indexes: Dict["MappedOperator", Sequence[int]] = defaultdict(list) + for k, v in existing_indexes.items(): + missing_indexes.update({k: list(set(expected_indexes[k]).difference(v))}) + return task_ids, missing_indexes def _get_task_creator( self, created_counts: Dict[str, int], ti_mutation_hook: Callable, hook_is_noop: bool @@ -961,7 +979,13 @@ def create_ti(task: "Operator", indexes: Tuple[int, ...]) -> Generator: return creator def _create_missing_tasks( - self, dag: "DAG", task_creator: Callable, task_filter: Callable, *, session: Session + self, + dag: "DAG", + task_creator: Callable, + task_filter: Callable, + missing_indexes: Optional[Dict["MappedOperator", Sequence[int]]], + *, + session: Session, ) -> Iterable["Operator"]: """ Create missing tasks -- and expand any MappedOperator that _only_ have literals as input @@ -972,7 +996,9 @@ def _create_missing_tasks( :param session: the session to use """ - def expand_mapped_literals(task: "Operator") -> Tuple["Operator", Sequence[int]]: + def expand_mapped_literals( + task: "Operator", sequence: Union[Sequence[int], None] = None + ) -> Tuple["Operator", Sequence[int]]: if not task.is_mapped: return (task, (-1,)) task = cast("MappedOperator", task) @@ -981,11 +1007,19 @@ def expand_mapped_literals(task: "Operator") -> Tuple["Operator", Sequence[int]] ) if not count: return (task, (-1,)) + if sequence: + return (task, sequence) return (task, range(count)) tasks_and_map_idxs = map(expand_mapped_literals, filter(task_filter, dag.task_dict.values())) tasks = itertools.chain.from_iterable(itertools.starmap(task_creator, tasks_and_map_idxs)) + if missing_indexes: + # If there are missing indexes, override the tasks to create + new_tasks_and_map_idxs = itertools.starmap( + expand_mapped_literals, [(k, v) for k, v in missing_indexes.items() if len(v) > 0] + ) + tasks = itertools.chain.from_iterable(itertools.starmap(task_creator, new_tasks_and_map_idxs)) return tasks def _create_task_instances( @@ -1027,6 +1061,42 @@ def _create_task_instances( # TODO[HA]: We probably need to savepoint this so we can keep the transaction alive. 
session.rollback() + def _find_missing_task_indexes(self, dag, tis, *, session) -> Dict["MappedOperator", Sequence[int]]: + """ + Here we check if the length of the mapped task instances changed + at runtime. If so, we find the missing indexes. + + This function also marks task instances with missing tasks as REMOVED. + + :param dag: DAG object corresponding to the dagrun + :param tis: task instances to check + :param session: the session to use + """ + existing_indexes: Dict["MappedOperator", list] = defaultdict(list) + new_indexes: Dict["MappedOperator", Sequence[int]] = defaultdict(list) + for ti in tis: + try: + task = ti.task = dag.get_task(ti.task_id) + except TaskNotFound: + self.log.error("Failed to get task '%s' for dag '%s'. Marking it as removed.", ti, ti.dag_id) + + ti.state = State.REMOVED + session.flush() + continue + if not task.is_mapped: + continue + # skip unexpanded tasks and also tasks that expands with literal arguments + if ti.map_index < 0 or task.parse_time_mapped_ti_count: + continue + existing_indexes[task].append(ti.map_index) + task.run_time_mapped_ti_count.cache_clear() + new_length = task.run_time_mapped_ti_count(self.run_id, session=session) or 0 + new_indexes[task] = range(new_length) + missing_indexes: Dict["MappedOperator", Sequence[int]] = defaultdict(list) + for k, v in existing_indexes.items(): + missing_indexes.update({k: list(set(new_indexes[k]).difference(v))}) + return missing_indexes + @staticmethod def get_run(session: Session, dag_id: str, execution_date: datetime) -> Optional['DagRun']: """ diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py index f73f5d1c45147..d45fd4137069a 100644 --- a/tests/models/test_dagrun.py +++ b/tests/models/test_dagrun.py @@ -41,7 +41,7 @@ from airflow.utils.trigger_rule import TriggerRule from airflow.utils.types import DagRunType from tests.models import DEFAULT_DATE as _DEFAULT_DATE -from tests.test_utils.db import clear_db_dags, clear_db_pools, clear_db_runs +from tests.test_utils.db import clear_db_dags, clear_db_pools, clear_db_runs, clear_db_variables from tests.test_utils.mock_operators import MockOperator DEFAULT_DATE = pendulum.instance(_DEFAULT_DATE) @@ -54,11 +54,13 @@ def setup_class(self) -> None: clear_db_runs() clear_db_pools() clear_db_dags() + clear_db_variables() def teardown_method(self) -> None: clear_db_runs() clear_db_pools() clear_db_dags() + clear_db_variables() def create_dag_run( self, @@ -899,7 +901,7 @@ def test_verify_integrity_task_start_and_end_date(Stats_incr, session, run_type, session.add(dag_run) session.flush() - dag_run.verify_integrity(session) + dag_run.verify_integrity(session=session) tis = dag_run.task_instances assert len(tis) == expected_tis @@ -1027,6 +1029,161 @@ def task_2(arg2): ] +def test_mapped_literal_length_increase_adds_additional_ti(dag_maker, session): + """Test that when the length of mapped literal increases, additional ti is added""" + + with dag_maker(session=session) as dag: + + @task + def task_2(arg2): + ... 
+ + task_2.expand(arg2=[1, 2, 3, 4]) + + dr = dag_maker.create_dagrun() + tis = dr.get_task_instances() + indices = [(ti.map_index, ti.state) for ti in tis] + assert sorted(indices) == [ + (0, State.NONE), + (1, State.NONE), + (2, State.NONE), + (3, State.NONE), + ] + + # Now "increase" the length of literal + dag._remove_task('task_2') + + with dag: + task_2.expand(arg2=[1, 2, 3, 4, 5]).operator + + # At this point, we need to test that the change works on the serialized + # DAG (which is what the scheduler operates on) + serialized_dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) + + dr.dag = serialized_dag + # Since we change the literal on the dag file itself, the dag_hash will + # change which will have the scheduler verify the dr integrity + dr.verify_integrity() + + tis = dr.get_task_instances() + indices = [(ti.map_index, ti.state) for ti in tis] + assert sorted(indices) == [ + (0, State.NONE), + (1, State.NONE), + (2, State.NONE), + (3, State.NONE), + (4, State.NONE), + ] + + +def test_mapped_literal_length_reduction_adds_removed_state(dag_maker, session): + """Test that when the length of mapped literal reduces, removed state is added""" + + with dag_maker(session=session) as dag: + + @task + def task_2(arg2): + ... + + task_2.expand(arg2=[1, 2, 3, 4]) + + dr = dag_maker.create_dagrun() + tis = dr.get_task_instances() + indices = [(ti.map_index, ti.state) for ti in tis] + assert sorted(indices) == [ + (0, State.NONE), + (1, State.NONE), + (2, State.NONE), + (3, State.NONE), + ] + + # Now "reduce" the length of literal + dag._remove_task('task_2') + + with dag: + task_2.expand(arg2=[1, 2]).operator + + # At this point, we need to test that the change works on the serialized + # DAG (which is what the scheduler operates on) + serialized_dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) + + dr.dag = serialized_dag + # Since we change the literal on the dag file itself, the dag_hash will + # change which will have the scheduler verify the dr integrity + dr.verify_integrity() + + tis = dr.get_task_instances() + indices = [(ti.map_index, ti.state) for ti in tis] + assert sorted(indices) == [ + (0, State.NONE), + (1, State.NONE), + (2, State.REMOVED), + (3, State.REMOVED), + ] + + +def test_mapped_literal_length_increase_at_runtime_adds_additional_tis(dag_maker, session): + """Test that when the length of mapped literal increases at runtime, additional ti is added""" + from airflow.models import Variable + + Variable.set(key='arg1', value=[1, 2, 3]) + + @task + def task_1(): + return Variable.get('arg1', deserialize_json=True) + + with dag_maker(session=session) as dag: + + @task + def task_2(arg2): + ... 
+ + task_2.expand(arg2=task_1()) + + dr = dag_maker.create_dagrun() + ti = dr.get_task_instance(task_id='task_1') + ti.run() + dr.task_instance_scheduling_decisions() + tis = dr.get_task_instances() + indices = [(ti.map_index, ti.state) for ti in tis if ti.map_index >= 0] + assert sorted(indices) == [ + (0, State.NONE), + (1, State.NONE), + (2, State.NONE), + ] + + # Now "clear" and "increase" the length of literal + dag.clear() + Variable.set(key='arg1', value=[1, 2, 3, 4]) + + with dag: + task_2.expand(arg2=task_1()).operator + + # At this point, we need to test that the change works on the serialized + # DAG (which is what the scheduler operates on) + serialized_dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) + + dr.dag = serialized_dag + + # Run the first task again to get the new lengths + ti = dr.get_task_instance(task_id='task_1') + task1 = dag.get_task('task_1') + ti.refresh_from_task(task1) + ti.run() + + # this would be called by the localtask job + dr.task_instance_scheduling_decisions() + tis = dr.get_task_instances() + + indices = [(ti.map_index, ti.state) for ti in tis if ti.map_index >= 0] + assert sorted(indices) == [ + (0, State.NONE), + (1, State.NONE), + (2, State.NONE), + (3, State.NONE), + ] + + @pytest.mark.need_serialized_dag def test_mapped_mixed__literal_not_expanded_at_create(dag_maker, session): literal = [1, 2, 3, 4] From e74803d406e24d49b49cd052af994f701bf50c96 Mon Sep 17 00:00:00 2001 From: Bowrna Date: Sun, 5 Jun 2022 20:01:03 +0530 Subject: [PATCH 048/118] Fix xfail test in test_scheduler.py (#23731) (cherry picked from commit dbe80c89b2a99d6ab737f2c4146bf8f918034f0f) --- tests/jobs/test_scheduler_job.py | 46 ++++++++++++++++---------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index 6f50b1c759a1e..1b11ae3b96d65 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -4100,42 +4100,42 @@ def test_catchup_works_correctly(self, dag_maker): ) > (timezone.utcnow() - timedelta(days=2)) -@pytest.mark.xfail(reason="Work out where this goes") -def test_task_with_upstream_skip_process_task_instances(): +@pytest.mark.need_serialized_dag +def test_schedule_dag_run_with_upstream_skip(dag_maker, session): """ - Test if _process_task_instances puts a task instance into SKIPPED state if any of its + Test if _schedule_dag_run puts a task instance into SKIPPED state if any of its upstream tasks are skipped according to TriggerRuleDep. """ - clear_db_runs() - with DAG( - dag_id='test_task_with_upstream_skip_dag', start_date=DEFAULT_DATE, schedule_interval=None - ) as dag: + with dag_maker( + dag_id='test_task_with_upstream_skip_process_task_instances', + start_date=DEFAULT_DATE, + session=session, + ): dummy1 = EmptyOperator(task_id='dummy1') dummy2 = EmptyOperator(task_id="dummy2") dummy3 = EmptyOperator(task_id="dummy3") [dummy1, dummy2] >> dummy3 - # dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock()) - dag.clear() - dr = dag.create_dagrun(run_type=DagRunType.MANUAL, state=State.RUNNING, execution_date=DEFAULT_DATE) + dr = dag_maker.create_dagrun(state=State.RUNNING) assert dr is not None - with create_session() as session: - tis = {ti.task_id: ti for ti in dr.get_task_instances(session=session)} - # Set dummy1 to skipped and dummy2 to success. dummy3 remains as none. 
- tis[dummy1.task_id].state = State.SKIPPED - tis[dummy2.task_id].state = State.SUCCESS - assert tis[dummy3.task_id].state == State.NONE + tis = {ti.task_id: ti for ti in dr.get_task_instances(session=session)} + # Set dummy1 to skipped and dummy2 to success. dummy3 remains as none. + tis[dummy1.task_id].state = State.SKIPPED + tis[dummy2.task_id].state = State.SUCCESS + assert tis[dummy3.task_id].state == State.NONE + session.flush() # dag_runs = DagRun.find(dag_id='test_task_with_upstream_skip_dag') # dag_file_processor._process_task_instances(dag, dag_runs=dag_runs) - - with create_session() as session: - tis = {ti.task_id: ti for ti in dr.get_task_instances(session=session)} - assert tis[dummy1.task_id].state == State.SKIPPED - assert tis[dummy2.task_id].state == State.SUCCESS - # dummy3 should be skipped because dummy1 is skipped. - assert tis[dummy3.task_id].state == State.SKIPPED + scheduler_job = SchedulerJob(subdir=os.devnull) + scheduler_job._schedule_dag_run(dr, session) + session.flush() + tis = {ti.task_id: ti for ti in dr.get_task_instances(session=session)} + assert tis[dummy1.task_id].state == State.SKIPPED + assert tis[dummy2.task_id].state == State.SUCCESS + # dummy3 should be skipped because dummy1 is skipped. + assert tis[dummy3.task_id].state == State.SKIPPED class TestSchedulerJobQueriesCount: From 5b7dbaa8058120b76bb39ab69e03748c04808d1d Mon Sep 17 00:00:00 2001 From: Ephraim Anierobi Date: Sat, 28 May 2022 09:06:07 +0100 Subject: [PATCH 049/118] Add TaskInstance State 'REMOVED' to finished states and success states (#23797) Now that we support dynamic task mapping, we should have the 'REMOVED' state of task instances as a finished state because for dynamic tasks with a removed task instance, the dagrun would be stuck in running state if 'REMOVED' state is not in finished states. 
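The effect can be checked directly against the state sets this patch touches (the leaf states below are illustrative)::

    from airflow.utils.state import State, TaskInstanceState

    # REMOVED now counts as a finished state, so a run whose only remaining
    # task instances are REMOVED is no longer left in RUNNING forever.
    assert TaskInstanceState.REMOVED in State.finished

    # DagRun.update_state also skips REMOVED leaves when deciding the run's
    # outcome; with the removed leaf ignored, the remaining leaves decide.
    leaf_states = [TaskInstanceState.SUCCESS, TaskInstanceState.REMOVED]
    considered = [s for s in leaf_states if s != TaskInstanceState.REMOVED]
    assert all(s in State.success_states for s in considered)  # -> run succeeds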
(cherry picked from commit 73446f28e9eb1e4c6f2f32c700147b61ab3da600) --- airflow/models/dagrun.py | 2 +- airflow/utils/state.py | 1 + tests/models/test_dagrun.py | 23 +++++++++++++++++++++++ 3 files changed, 25 insertions(+), 1 deletion(-) diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index 3be82b9b6db57..d9c4eeb72637b 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -542,7 +542,7 @@ def update_state( ) leaf_task_ids = {t.task_id for t in dag.leaves} - leaf_tis = [ti for ti in tis if ti.task_id in leaf_task_ids] + leaf_tis = [ti for ti in tis if ti.task_id in leaf_task_ids if ti.state != TaskInstanceState.REMOVED] # if all roots finished and at least one failed, the run failed if not unfinished_tis and any(leaf_ti.state in State.failed_states for leaf_ti in leaf_tis): diff --git a/airflow/utils/state.py b/airflow/utils/state.py index 8415dd16667a8..a79169f86169f 100644 --- a/airflow/utils/state.py +++ b/airflow/utils/state.py @@ -154,6 +154,7 @@ def color_fg(cls, state): TaskInstanceState.FAILED, TaskInstanceState.SKIPPED, TaskInstanceState.UPSTREAM_FAILED, + TaskInstanceState.REMOVED, ] ) """ diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py index d45fd4137069a..6c3cc1c91ca70 100644 --- a/tests/models/test_dagrun.py +++ b/tests/models/test_dagrun.py @@ -198,6 +198,29 @@ def test_dagrun_success_when_all_skipped(self, session): dag_run.update_state() assert DagRunState.SUCCESS == dag_run.state + def test_dagrun_not_stuck_in_running_when_all_tasks_instances_are_removed(self, session): + """ + Tests that a DAG run succeeds when all tasks are removed + """ + dag = DAG(dag_id='test_dagrun_success_when_all_skipped', start_date=timezone.datetime(2017, 1, 1)) + dag_task1 = ShortCircuitOperator( + task_id='test_short_circuit_false', dag=dag, python_callable=lambda: False + ) + dag_task2 = EmptyOperator(task_id='test_state_skipped1', dag=dag) + dag_task3 = EmptyOperator(task_id='test_state_skipped2', dag=dag) + dag_task1.set_downstream(dag_task2) + dag_task2.set_downstream(dag_task3) + + initial_task_states = { + 'test_short_circuit_false': TaskInstanceState.REMOVED, + 'test_state_skipped1': TaskInstanceState.REMOVED, + 'test_state_skipped2': TaskInstanceState.REMOVED, + } + + dag_run = self.create_dag_run(dag=dag, task_states=initial_task_states, session=session) + dag_run.update_state() + assert DagRunState.SUCCESS == dag_run.state + def test_dagrun_success_conditions(self, session): dag = DAG('test_dagrun_success_conditions', start_date=DEFAULT_DATE, default_args={'owner': 'owner1'}) From 5a125b6f1628cc3c0e66c378a5fa9d873f3287c3 Mon Sep 17 00:00:00 2001 From: pierrejeambrun Date: Fri, 27 May 2022 01:47:39 +0800 Subject: [PATCH 050/118] Grid fix details button truncated and small UI tweaks (#23934) * Show details button and wrap on LegendRow. 
* Update following brent review * Fix display on small width * Rotate icon for a 'ReadLess' effect (cherry picked from commit 5ab58d057abb6b1f28eb4e3fb5cec7dc9850f0b0) --- airflow/www/static/js/grid/AutoRefresh.jsx | 8 ++++- airflow/www/static/js/grid/Grid.jsx | 39 +++++++++++++++++----- airflow/www/static/js/grid/LegendRow.jsx | 2 +- airflow/www/static/js/grid/Main.jsx | 22 ++++-------- 4 files changed, 45 insertions(+), 26 deletions(-) diff --git a/airflow/www/static/js/grid/AutoRefresh.jsx b/airflow/www/static/js/grid/AutoRefresh.jsx index d92cf07838045..b7c1c29206095 100644 --- a/airflow/www/static/js/grid/AutoRefresh.jsx +++ b/airflow/www/static/js/grid/AutoRefresh.jsx @@ -33,7 +33,13 @@ const AutoRefresh = () => { return ( - + Auto-refresh { +const Grid = ({ isPanelOpen = false, onPanelToggle, hoveredTaskState }) => { const scrollRef = useRef(); const tableRef = useRef(); @@ -82,16 +84,35 @@ const Grid = ({ isPanelOpen = false, hoveredTaskState }) => { overflow="auto" ref={scrollRef} flexGrow={1} - minWidth={isPanelOpen && '300px'} + minWidth={isPanelOpen && '350px'} > - - - + + + + + + } + transform={!isPanelOpen && 'rotateZ(180deg)'} + transitionProperty="none" /> - diff --git a/airflow/www/static/js/grid/LegendRow.jsx b/airflow/www/static/js/grid/LegendRow.jsx index 3193435701a96..eff503403c768 100644 --- a/airflow/www/static/js/grid/LegendRow.jsx +++ b/airflow/www/static/js/grid/LegendRow.jsx @@ -44,7 +44,7 @@ const StatusBadge = ({ const LegendRow = ({ setHoveredTaskState }) => ( - + { Object.entries(stateColors).map(([state, stateColor]) => ( { const { clearSelection } = useSelection(); const [hoveredTaskState, setHoveredTaskState] = useState(); - const toggleDetailsPanel = () => { + const onPanelToggle = () => { if (!isOpen) { localStorage.setItem(detailsPanelKey, false); } else { @@ -57,20 +56,13 @@ const Main = () => { - - + + - {isOpen && (
)} From aa3442824608d5936fc7cc12065b8dc3f71e5237 Mon Sep 17 00:00:00 2001 From: eladkal <45845474+eladkal@users.noreply.github.com> Date: Sat, 28 May 2022 23:10:39 +0300 Subject: [PATCH 051/118] Replace `use_task_execution_date` with `use_task_logical_date` (#23983) * Replace `use_task_execution_date` with `use_task_logical_date` We have some operators/sensors that use `*_execution_date` as the class parameters. This PR deprecate the usage of these parameters and replace it with `logical_date`. There is no change in functionality, under the hood the functionality already uses `logical_date` this is just about the parameters name as exposed to the users. (cherry picked from commit 614b2329c1603ef1e2199044e2cc9e4b7332c2e0) --- airflow/operators/datetime.py | 15 ++++++++++++--- airflow/operators/weekday.py | 16 ++++++++++++---- airflow/sensors/weekday.py | 22 +++++++++++++++------- tests/operators/test_datetime.py | 21 +++++++++++++++++++-- tests/operators/test_weekday.py | 19 +++++++++++++++++-- tests/sensors/test_weekday_sensor.py | 25 ++++++++++++++++++++----- 6 files changed, 95 insertions(+), 23 deletions(-) diff --git a/airflow/operators/datetime.py b/airflow/operators/datetime.py index c37a4f9d50c11..c5a423d563868 100644 --- a/airflow/operators/datetime.py +++ b/airflow/operators/datetime.py @@ -16,6 +16,7 @@ # under the License. import datetime +import warnings from typing import Iterable, Union from airflow.exceptions import AirflowException @@ -39,7 +40,7 @@ class BranchDateTimeOperator(BaseBranchOperator): ``datetime.datetime.now()`` falls below target_lower or above ``target_upper``. :param target_lower: target lower bound. :param target_upper: target upper bound. - :param use_task_execution_date: If ``True``, uses task's execution day to compare with targets. + :param use_task_logical_date: If ``True``, uses task's logical date to compare with targets. Execution date is useful for backfilling. If ``False``, uses system's date. """ @@ -50,6 +51,7 @@ def __init__( follow_task_ids_if_false: Union[str, Iterable[str]], target_lower: Union[datetime.datetime, datetime.time, None], target_upper: Union[datetime.datetime, datetime.time, None], + use_task_logical_date: bool = False, use_task_execution_date: bool = False, **kwargs, ) -> None: @@ -64,10 +66,17 @@ def __init__( self.target_upper = target_upper self.follow_task_ids_if_true = follow_task_ids_if_true self.follow_task_ids_if_false = follow_task_ids_if_false - self.use_task_execution_date = use_task_execution_date + self.use_task_logical_date = use_task_logical_date + if use_task_execution_date: + self.use_task_logical_date = use_task_execution_date + warnings.warn( + "Parameter ``use_task_execution_date`` is deprecated. Use ``use_task_logical_date``.", + DeprecationWarning, + stacklevel=2, + ) def choose_branch(self, context: Context) -> Union[str, Iterable[str]]: - if self.use_task_execution_date is True: + if self.use_task_logical_date: now = timezone.make_naive(context["logical_date"], self.dag.timezone) else: now = timezone.make_naive(timezone.utcnow(), self.dag.timezone) diff --git a/airflow/operators/weekday.py b/airflow/operators/weekday.py index fb35079fe0d3e..b23d57e9fb1d4 100644 --- a/airflow/operators/weekday.py +++ b/airflow/operators/weekday.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
- +import warnings from typing import Iterable, Union from airflow.operators.branch import BaseBranchOperator @@ -41,7 +41,7 @@ class BranchDayOfWeekOperator(BaseBranchOperator): * ``{WeekDay.TUESDAY}`` * ``{WeekDay.SATURDAY, WeekDay.SUNDAY}`` - :param use_task_execution_day: If ``True``, uses task's execution day to compare + :param use_task_logical_date: If ``True``, uses task's logical date to compare with is_today. Execution Date is Useful for backfilling. If ``False``, uses system's day of the week. """ @@ -52,6 +52,7 @@ def __init__( follow_task_ids_if_true: Union[str, Iterable[str]], follow_task_ids_if_false: Union[str, Iterable[str]], week_day: Union[str, Iterable[str]], + use_task_logical_date: bool = False, use_task_execution_day: bool = False, **kwargs, ) -> None: @@ -59,11 +60,18 @@ def __init__( self.follow_task_ids_if_true = follow_task_ids_if_true self.follow_task_ids_if_false = follow_task_ids_if_false self.week_day = week_day - self.use_task_execution_day = use_task_execution_day + self.use_task_logical_date = use_task_logical_date + if use_task_execution_day: + self.use_task_logical_date = use_task_execution_day + warnings.warn( + "Parameter ``use_task_execution_day`` is deprecated. Use ``use_task_logical_date``.", + DeprecationWarning, + stacklevel=2, + ) self._week_day_num = WeekDay.validate_week_day(week_day) def choose_branch(self, context: Context) -> Union[str, Iterable[str]]: - if self.use_task_execution_day: + if self.use_task_logical_date: now = context["logical_date"] else: now = timezone.make_naive(timezone.utcnow(), self.dag.timezone) diff --git a/airflow/sensors/weekday.py b/airflow/sensors/weekday.py index bdf9275e107b5..5bb4db646f7c4 100644 --- a/airflow/sensors/weekday.py +++ b/airflow/sensors/weekday.py @@ -15,6 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +import warnings from airflow.sensors.base import BaseSensorOperator from airflow.utils import timezone @@ -33,7 +34,7 @@ class DayOfWeekSensor(BaseSensorOperator): weekend_check = DayOfWeekSensor( task_id='weekend_check', week_day='Saturday', - use_task_execution_day=True, + use_task_logical_date=True, dag=dag) **Example** (with multiple day using set): :: @@ -41,7 +42,7 @@ class DayOfWeekSensor(BaseSensorOperator): weekend_check = DayOfWeekSensor( task_id='weekend_check', week_day={'Saturday', 'Sunday'}, - use_task_execution_day=True, + use_task_logical_date=True, dag=dag) **Example** (with :class:`~airflow.utils.weekday.WeekDay` enum): :: @@ -52,7 +53,7 @@ class DayOfWeekSensor(BaseSensorOperator): weekend_check = DayOfWeekSensor( task_id='weekend_check', week_day={WeekDay.SATURDAY, WeekDay.SUNDAY}, - use_task_execution_day=True, + use_task_logical_date=True, dag=dag) :param week_day: Day of the week to check (full name). Optionally, a set @@ -64,16 +65,23 @@ class DayOfWeekSensor(BaseSensorOperator): * ``{WeekDay.TUESDAY}`` * ``{WeekDay.SATURDAY, WeekDay.SUNDAY}`` - :param use_task_execution_day: If ``True``, uses task's execution day to compare + :param use_task_logical_date: If ``True``, uses task's logical date to compare with week_day. Execution Date is Useful for backfilling. If ``False``, uses system's day of the week. Useful when you don't want to run anything on weekdays on the system. 
""" - def __init__(self, *, week_day, use_task_execution_day=False, **kwargs): + def __init__(self, *, week_day, use_task_logical_date=False, use_task_execution_day=False, **kwargs): super().__init__(**kwargs) self.week_day = week_day - self.use_task_execution_day = use_task_execution_day + self.use_task_logical_date = use_task_logical_date + if use_task_execution_day: + self.use_task_logical_date = use_task_execution_day + warnings.warn( + "Parameter ``use_task_execution_day`` is deprecated. Use ``use_task_logical_date``.", + DeprecationWarning, + stacklevel=2, + ) self._week_day_num = WeekDay.validate_week_day(week_day) def poke(self, context: Context): @@ -82,7 +90,7 @@ def poke(self, context: Context): self.week_day, WeekDay(timezone.utcnow().isoweekday()).name, ) - if self.use_task_execution_day: + if self.use_task_logical_date: return context['logical_date'].isoweekday() in self._week_day_num else: return timezone.utcnow().isoweekday() in self._week_day_num diff --git a/tests/operators/test_datetime.py b/tests/operators/test_datetime.py index 2bf4ff5d4a8ad..bb1f9282a465d 100644 --- a/tests/operators/test_datetime.py +++ b/tests/operators/test_datetime.py @@ -20,6 +20,7 @@ import unittest import freezegun +import pytest from airflow.exceptions import AirflowException from airflow.models import DAG, DagRun, TaskInstance as TI @@ -225,10 +226,10 @@ def test_branch_datetime_operator_lower_comparison_outside_range(self): ) @freezegun.freeze_time("2020-12-01 09:00:00") - def test_branch_datetime_operator_use_task_execution_date(self): + def test_branch_datetime_operator_use_task_logical_date(self): """Check if BranchDateTimeOperator uses task execution date""" in_between_date = timezone.datetime(2020, 7, 7, 10, 30, 0) - self.branch_op.use_task_execution_date = True + self.branch_op.use_task_logical_date = True self.dr = self.dag.create_dagrun( run_id='manual_exec_date__', start_date=in_between_date, @@ -249,3 +250,19 @@ def test_branch_datetime_operator_use_task_execution_date(self): 'branch_2': State.SKIPPED, } ) + + def test_deprecation_warning(self): + warning_message = ( + """Parameter ``use_task_execution_date`` is deprecated. 
Use ``use_task_logical_date``.""" + ) + with pytest.warns(DeprecationWarning) as warnings: + BranchDateTimeOperator( + task_id='warning', + follow_task_ids_if_true='branch_1', + follow_task_ids_if_false='branch_2', + target_upper=timezone.datetime(2020, 7, 7, 10, 30, 0), + target_lower=timezone.datetime(2020, 7, 7, 10, 30, 0), + use_task_execution_date=True, + dag=self.dag, + ) + assert warning_message == str(warnings[0].message) diff --git a/tests/operators/test_weekday.py b/tests/operators/test_weekday.py index 69ab21a6df84e..5185e1728cf5b 100644 --- a/tests/operators/test_weekday.py +++ b/tests/operators/test_weekday.py @@ -134,14 +134,14 @@ def test_branch_follow_true(self, _, weekday): @freeze_time("2021-01-25") # Monday def test_branch_follow_true_with_execution_date(self): - """Checks if BranchDayOfWeekOperator follows true branch when set use_task_execution_day""" + """Checks if BranchDayOfWeekOperator follows true branch when set use_task_logical_date""" branch_op = BranchDayOfWeekOperator( task_id="make_choice", follow_task_ids_if_true="branch_1", follow_task_ids_if_false="branch_2", week_day="Wednesday", - use_task_execution_day=True, # We compare to DEFAULT_DATE which is Wednesday + use_task_logical_date=True, # We compare to DEFAULT_DATE which is Wednesday dag=self.dag, ) @@ -274,3 +274,18 @@ def test_branch_xcom_push_true_branch(self): for ti in tis: if ti.task_id == 'make_choice': assert ti.xcom_pull(task_ids='make_choice') == 'branch_1' + + def test_deprecation_warning(self): + warning_message = ( + """Parameter ``use_task_execution_day`` is deprecated. Use ``use_task_logical_date``.""" + ) + with pytest.warns(DeprecationWarning) as warnings: + BranchDayOfWeekOperator( + task_id="week_day_warn", + follow_task_ids_if_true="branch_1", + follow_task_ids_if_false="branch_2", + week_day="Monday", + use_task_execution_day=True, + dag=self.dag, + ) + assert warning_message == str(warnings[0].message) diff --git a/tests/sensors/test_weekday_sensor.py b/tests/sensors/test_weekday_sensor.py index 04e133fa1410c..5aa8bdbf823b8 100644 --- a/tests/sensors/test_weekday_sensor.py +++ b/tests/sensors/test_weekday_sensor.py @@ -72,7 +72,7 @@ def tearDown(self): ) def test_weekday_sensor_true(self, _, week_day): op = DayOfWeekSensor( - task_id='weekday_sensor_check_true', week_day=week_day, use_task_execution_day=True, dag=self.dag + task_id='weekday_sensor_check_true', week_day=week_day, use_task_logical_date=True, dag=self.dag ) op.run(start_date=WEEKDAY_DATE, end_date=WEEKDAY_DATE, ignore_ti_state=True) assert op.week_day == week_day @@ -83,7 +83,7 @@ def test_weekday_sensor_false(self): poke_interval=1, timeout=2, week_day='Tuesday', - use_task_execution_day=True, + use_task_logical_date=True, dag=self.dag, ) with pytest.raises(AirflowSensorTimeout): @@ -95,7 +95,7 @@ def test_invalid_weekday_number(self): DayOfWeekSensor( task_id='weekday_sensor_invalid_weekday_num', week_day=invalid_week_day, - use_task_execution_day=True, + use_task_logical_date=True, dag=self.dag, ) @@ -110,7 +110,7 @@ def test_weekday_sensor_with_invalid_type(self): DayOfWeekSensor( task_id='weekday_sensor_check_true', week_day=invalid_week_day, - use_task_execution_day=True, + use_task_logical_date=True, dag=self.dag, ) @@ -120,8 +120,23 @@ def test_weekday_sensor_timeout_with_set(self): poke_interval=1, timeout=2, week_day={WeekDay.MONDAY, WeekDay.TUESDAY}, - use_task_execution_day=True, + use_task_logical_date=True, dag=self.dag, ) with pytest.raises(AirflowSensorTimeout): op.run(start_date=WEEKDAY_DATE, 
end_date=WEEKDAY_DATE, ignore_ti_state=True) + + def test_deprecation_warning(self): + warning_message = ( + """Parameter ``use_task_execution_day`` is deprecated. Use ``use_task_logical_date``.""" + ) + with pytest.warns(DeprecationWarning) as warnings: + DayOfWeekSensor( + task_id='week_day_warn', + poke_interval=1, + timeout=2, + week_day='Tuesday', + use_task_execution_day=True, + dag=self.dag, + ) + assert warning_message == str(warnings[0].message) From 412c1524ad804eed1a7aef6f445c34bf97457e28 Mon Sep 17 00:00:00 2001 From: Bernardo Couto <35502483+bernardocouto@users.noreply.github.com> Date: Tue, 31 May 2022 08:59:52 -0300 Subject: [PATCH 052/118] Removing magic numbers from exceptions (#23997) * Removing magic numbers from exceptions * Running pre-commit (cherry picked from commit 375673aa3cebf6889f403ac1577cffe0c33c1ae7) --- airflow/exceptions.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/airflow/exceptions.py b/airflow/exceptions.py index bfb5835fdaf31..6a8eed35a321e 100644 --- a/airflow/exceptions.py +++ b/airflow/exceptions.py @@ -21,6 +21,7 @@ """Exceptions used by Airflow""" import datetime import warnings +from http import HTTPStatus from typing import Any, Dict, List, NamedTuple, Optional, Sized @@ -31,19 +32,19 @@ class AirflowException(Exception): Each custom exception should be derived from this class. """ - status_code = 500 + status_code = HTTPStatus.INTERNAL_SERVER_ERROR class AirflowBadRequest(AirflowException): """Raise when the application or server cannot handle the request.""" - status_code = 400 + status_code = HTTPStatus.BAD_REQUEST class AirflowNotFoundException(AirflowException): """Raise when the requested object/resource is not available in the system.""" - status_code = 404 + status_code = HTTPStatus.NOT_FOUND class AirflowConfigException(AirflowException): From 324be5081c7d5d599b8b7f9c35844e033e5a12a7 Mon Sep 17 00:00:00 2001 From: eladkal <45845474+eladkal@users.noreply.github.com> Date: Mon, 30 May 2022 11:07:49 +0300 Subject: [PATCH 053/118] Cleanup `BranchDayOfWeekOperator` example dag (#24007) * Cleanup BranchDayOfWeekOperator example dag There is no need for `dag=dag` when using context manager. 
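A minimal sketch, assuming a placeholder dag id and start date, of why ``dag=dag``
is redundant here: operators instantiated inside a ``with DAG(...)`` block are
attached to that DAG automatically by the context manager.

    from datetime import datetime

    from airflow import DAG
    from airflow.operators.empty import EmptyOperator

    # placeholder dag id / dates, for illustration only
    with DAG(dag_id="example_ctx", start_date=datetime(2022, 1, 1), schedule_interval=None) as dag:
        task = EmptyOperator(task_id="branch_true")  # no dag=dag needed

    assert task.dag is dag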
(cherry picked from commit 56fd04016f1a8561f1c02e7f756bab8805c05876) --- airflow/example_dags/example_branch_day_of_week_operator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airflow/example_dags/example_branch_day_of_week_operator.py b/airflow/example_dags/example_branch_day_of_week_operator.py index e4aaeb147593f..879824ab1c876 100644 --- a/airflow/example_dags/example_branch_day_of_week_operator.py +++ b/airflow/example_dags/example_branch_day_of_week_operator.py @@ -33,8 +33,8 @@ schedule_interval="@daily", ) as dag: # [START howto_operator_day_of_week_branch] - empty_task_1 = EmptyOperator(task_id='branch_true', dag=dag) - empty_task_2 = EmptyOperator(task_id='branch_false', dag=dag) + empty_task_1 = EmptyOperator(task_id='branch_true') + empty_task_2 = EmptyOperator(task_id='branch_false') branch = BranchDayOfWeekOperator( task_id="make_choice", From 93d929daad9c9639484c357ba29be734a0e876dc Mon Sep 17 00:00:00 2001 From: Bernardo Couto <35502483+bernardocouto@users.noreply.github.com> Date: Thu, 2 Jun 2022 14:52:19 -0300 Subject: [PATCH 054/118] Removing magic status code numbers from api_connecxion (#24050) (cherry picked from commit f2e6452efdf74828de9f73a8bf4f42f6dd10eb58) --- airflow/api_connexion/endpoints/config_endpoint.py | 4 +++- .../api_connexion/endpoints/connection_endpoint.py | 3 ++- airflow/api_connexion/endpoints/dag_endpoint.py | 3 ++- airflow/api_connexion/endpoints/dag_run_endpoint.py | 3 ++- .../api_connexion/endpoints/dag_source_endpoint.py | 4 +++- airflow/api_connexion/endpoints/pool_endpoint.py | 4 +++- .../endpoints/role_and_permission_endpoint.py | 3 ++- airflow/api_connexion/endpoints/user_endpoint.py | 3 ++- .../api_connexion/endpoints/variable_endpoint.py | 3 ++- airflow/api_connexion/exceptions.py | 13 +++++++------ 10 files changed, 28 insertions(+), 15 deletions(-) diff --git a/airflow/api_connexion/endpoints/config_endpoint.py b/airflow/api_connexion/endpoints/config_endpoint.py index 9514621447609..bdd2b3a959547 100644 --- a/airflow/api_connexion/endpoints/config_endpoint.py +++ b/airflow/api_connexion/endpoints/config_endpoint.py @@ -15,6 +15,8 @@ # specific language governing permissions and limitations # under the License. +from http import HTTPStatus + from flask import Response, request from airflow.api_connexion import security @@ -72,7 +74,7 @@ def get_config() -> Response: } return_type = request.accept_mimetypes.best_match(serializer.keys()) if return_type not in serializer: - return Response(status=406) + return Response(status=HTTPStatus.NOT_ACCEPTABLE) elif conf.getboolean("webserver", "expose_config"): conf_dict = conf.as_dict(display_source=False, display_sensitive=True) config = _conf_dict_to_config(conf_dict) diff --git a/airflow/api_connexion/endpoints/connection_endpoint.py b/airflow/api_connexion/endpoints/connection_endpoint.py index f9be9c227e3f3..b196b3236b911 100644 --- a/airflow/api_connexion/endpoints/connection_endpoint.py +++ b/airflow/api_connexion/endpoints/connection_endpoint.py @@ -16,6 +16,7 @@ # under the License. 
import os +from http import HTTPStatus from connexion import NoContent from flask import request @@ -51,7 +52,7 @@ def delete_connection(*, connection_id: str, session: Session = NEW_SESSION) -> detail=f"The Connection with connection_id: `{connection_id}` was not found", ) session.delete(connection) - return NoContent, 204 + return NoContent, HTTPStatus.NO_CONTENT @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONNECTION)]) diff --git a/airflow/api_connexion/endpoints/dag_endpoint.py b/airflow/api_connexion/endpoints/dag_endpoint.py index 40113021cfad6..7940a25c8f9fb 100644 --- a/airflow/api_connexion/endpoints/dag_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_endpoint.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. +from http import HTTPStatus from typing import Collection, Optional from connexion import NoContent @@ -177,4 +178,4 @@ def delete_dag(dag_id: str, session: Session = NEW_SESSION) -> APIResponse: except AirflowException: raise AlreadyExists(detail=f"Task instances of dag with id: '{dag_id}' are still running") - return NoContent, 204 + return NoContent, HTTPStatus.NO_CONTENT diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py index 1fad48f7b6fe7..351b723c4a283 100644 --- a/airflow/api_connexion/endpoints/dag_run_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +from http import HTTPStatus from typing import List, Optional, Tuple import pendulum @@ -55,7 +56,7 @@ def delete_dag_run(*, dag_id: str, dag_run_id: str, session: Session = NEW_SESSI """Delete a DAG Run""" if session.query(DagRun).filter(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id).delete() == 0: raise NotFound(detail=f"DAGRun with DAG ID: '{dag_id}' and DagRun ID: '{dag_run_id}' not found") - return NoContent, 204 + return NoContent, HTTPStatus.NO_CONTENT @security.requires_access( diff --git a/airflow/api_connexion/endpoints/dag_source_endpoint.py b/airflow/api_connexion/endpoints/dag_source_endpoint.py index 74c3496a2c208..ad6209221e523 100644 --- a/airflow/api_connexion/endpoints/dag_source_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_source_endpoint.py @@ -15,6 +15,8 @@ # specific language governing permissions and limitations # under the License. +from http import HTTPStatus + from flask import Response, current_app, request from itsdangerous import BadSignature, URLSafeSerializer @@ -42,4 +44,4 @@ def get_dag_source(*, file_token: str) -> Response: if return_type == 'application/json': content = dag_source_schema.dumps(dict(content=dag_source)) return Response(content, headers={'Content-Type': return_type}) - return Response("Not Allowed Accept Header", status=406) + return Response("Not Allowed Accept Header", status=HTTPStatus.NOT_ACCEPTABLE) diff --git a/airflow/api_connexion/endpoints/pool_endpoint.py b/airflow/api_connexion/endpoints/pool_endpoint.py index 8c3d3f3b86d38..594afeb49bc1a 100644 --- a/airflow/api_connexion/endpoints/pool_endpoint.py +++ b/airflow/api_connexion/endpoints/pool_endpoint.py @@ -14,6 +14,8 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+ +from http import HTTPStatus from typing import Optional from flask import Response @@ -42,7 +44,7 @@ def delete_pool(*, pool_name: str, session: Session = NEW_SESSION) -> APIRespons affected_count = session.query(Pool).filter(Pool.pool == pool_name).delete() if affected_count == 0: raise NotFound(detail=f"Pool with name:'{pool_name}' not found") - return Response(status=204) + return Response(status=HTTPStatus.NO_CONTENT) @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_POOL)]) diff --git a/airflow/api_connexion/endpoints/role_and_permission_endpoint.py b/airflow/api_connexion/endpoints/role_and_permission_endpoint.py index 25419066d20fa..1b25769af7737 100644 --- a/airflow/api_connexion/endpoints/role_and_permission_endpoint.py +++ b/airflow/api_connexion/endpoints/role_and_permission_endpoint.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. +from http import HTTPStatus from typing import List, Optional, Tuple from connexion import NoContent @@ -105,7 +106,7 @@ def delete_role(*, role_name: str) -> APIResponse: if not role: raise NotFound(title="Role not found", detail=f"Role with name {role_name!r} was not found") ab_security_manager.delete_role(role_name=role_name) - return NoContent, 204 + return NoContent, HTTPStatus.NO_CONTENT @security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_ROLE)]) diff --git a/airflow/api_connexion/endpoints/user_endpoint.py b/airflow/api_connexion/endpoints/user_endpoint.py index 2ed0db2aae864..3ab476e219cb9 100644 --- a/airflow/api_connexion/endpoints/user_endpoint.py +++ b/airflow/api_connexion/endpoints/user_endpoint.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +from http import HTTPStatus from typing import List, Optional from connexion import NoContent @@ -205,4 +206,4 @@ def delete_user(*, username: str) -> APIResponse: security_manager.get_session.delete(user) security_manager.get_session.commit() - return NoContent, 204 + return NoContent, HTTPStatus.NO_CONTENT diff --git a/airflow/api_connexion/endpoints/variable_endpoint.py b/airflow/api_connexion/endpoints/variable_endpoint.py index 4dfc0803c5c62..8f039b6fdbe69 100644 --- a/airflow/api_connexion/endpoints/variable_endpoint.py +++ b/airflow/api_connexion/endpoints/variable_endpoint.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +from http import HTTPStatus from typing import Optional from flask import Response @@ -37,7 +38,7 @@ def delete_variable(*, variable_key: str) -> Response: """Delete variable""" if Variable.delete(variable_key) == 0: raise NotFound("Variable not found") - return Response(status=204) + return Response(status=HTTPStatus.NO_CONTENT) @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_VARIABLE)]) diff --git a/airflow/api_connexion/exceptions.py b/airflow/api_connexion/exceptions.py index 0c6c4fa0d3a8f..8fb7f2e78883b 100644 --- a/airflow/api_connexion/exceptions.py +++ b/airflow/api_connexion/exceptions.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+from http import HTTPStatus from typing import Any, Dict, Optional import flask @@ -80,7 +81,7 @@ def __init__( **kwargs: Any, ) -> None: super().__init__( - status=404, + status=HTTPStatus.NOT_FOUND, type=EXCEPTIONS_LINK_MAP[404], title=title, detail=detail, @@ -100,7 +101,7 @@ def __init__( **kwargs: Any, ) -> None: super().__init__( - status=400, + status=HTTPStatus.BAD_REQUEST, type=EXCEPTIONS_LINK_MAP[400], title=title, detail=detail, @@ -120,7 +121,7 @@ def __init__( **kwargs: Any, ): super().__init__( - status=401, + status=HTTPStatus.UNAUTHORIZED, type=EXCEPTIONS_LINK_MAP[401], title=title, detail=detail, @@ -140,7 +141,7 @@ def __init__( **kwargs: Any, ) -> None: super().__init__( - status=403, + status=HTTPStatus.FORBIDDEN, type=EXCEPTIONS_LINK_MAP[403], title=title, detail=detail, @@ -160,7 +161,7 @@ def __init__( **kwargs: Any, ): super().__init__( - status=409, + status=HTTPStatus.CONFLICT, type=EXCEPTIONS_LINK_MAP[409], title=title, detail=detail, @@ -180,7 +181,7 @@ def __init__( **kwargs: Any, ) -> None: super().__init__( - status=500, + status=HTTPStatus.INTERNAL_SERVER_ERROR, type=EXCEPTIONS_LINK_MAP[500], title=title, detail=detail, From 8e73a77f3928cd9d9d559a3fec7f1c1c3e555255 Mon Sep 17 00:00:00 2001 From: James Timmins Date: Thu, 2 Jun 2022 11:04:52 -0700 Subject: [PATCH 055/118] Rename Permissions to Permission Pairs. (#24065) (cherry picked from commit be21e08e1bb6202626c12b2375f24167cf22838a) --- airflow/www/fab_security/manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airflow/www/fab_security/manager.py b/airflow/www/fab_security/manager.py index 2010e58c348bc..5cdbe60d39613 100644 --- a/airflow/www/fab_security/manager.py +++ b/airflow/www/fab_security/manager.py @@ -797,7 +797,7 @@ def register_views(self): if self.appbuilder.app.config.get("FAB_ADD_SECURITY_PERMISSION_VIEWS_VIEW", True): self.appbuilder.add_view( self.permissionmodelview, - "Permissions", + "Permission Pairs", icon="fa-link", label=_("Permissions"), category="Security", From 0aae6bbb29d7362a288706583cfbe788fbf8673d Mon Sep 17 00:00:00 2001 From: Brent Bovenzi Date: Thu, 2 Jun 2022 19:48:53 +0200 Subject: [PATCH 056/118] Reduce grid view API calls (#24083) * Reduce API calls from /grid - Separate /grid_data from /grid - Remove need for formatData - Increase default query stale time to prevent extra fetches - Fix useTask query keys * consolidate grid data functions * fix www tests test grid_data instead of /grid (cherry picked from commit 035553c86988f403b43ef5825715b45f055d62dd) --- airflow/www/static/js/grid/Main.jsx | 26 +++++-- airflow/www/static/js/grid/api/useGridData.js | 7 +- .../useGridData.test.js} | 2 +- .../static/js/grid/api/useGridData.test.jsx | 78 ------------------- airflow/www/static/js/grid/api/useTasks.js | 4 +- .../static/js/grid/context/autorefresh.jsx | 15 +--- .../www/static/js/grid/dagRuns/index.test.jsx | 10 +-- .../static/js/grid/details/content/Dag.jsx | 3 +- airflow/www/static/js/grid/index.jsx | 1 + airflow/www/static/js/grid/utils/gridData.js | 34 -------- airflow/www/templates/airflow/grid.html | 1 - airflow/www/views.py | 11 --- tests/www/views/test_views_acl.py | 10 +-- tests/www/views/test_views_tasks.py | 18 +---- 14 files changed, 38 insertions(+), 182 deletions(-) rename airflow/www/static/js/grid/{utils/gridData.test.js => api/useGridData.test.js} (96%) delete mode 100644 airflow/www/static/js/grid/api/useGridData.test.jsx delete mode 100644 airflow/www/static/js/grid/utils/gridData.js diff --git 
a/airflow/www/static/js/grid/Main.jsx b/airflow/www/static/js/grid/Main.jsx index 98aa360aa6789..5e9a4f2a9a12d 100644 --- a/airflow/www/static/js/grid/Main.jsx +++ b/airflow/www/static/js/grid/Main.jsx @@ -25,17 +25,21 @@ import { Flex, useDisclosure, Divider, + Spinner, } from '@chakra-ui/react'; +import { isEmpty } from 'lodash'; import Details from './details'; import useSelection from './utils/useSelection'; import Grid from './Grid'; import FilterBar from './FilterBar'; import LegendRow from './LegendRow'; +import { useGridData } from './api'; const detailsPanelKey = 'hideDetailsPanel'; const Main = () => { + const { data: { groups }, isLoading } = useGridData(); const isPanelOpen = localStorage.getItem(detailsPanelKey) !== 'true'; const { isOpen, onToggle } = useDisclosure({ defaultIsOpen: isPanelOpen }); const { clearSelection } = useSelection(); @@ -57,14 +61,20 @@ const Main = () => { - - - {isOpen && (
)} - + {isLoading || isEmpty(groups) + ? () + : ( + <> + + + {isOpen && (
)} + + + )} ); diff --git a/airflow/www/static/js/grid/api/useGridData.js b/airflow/www/static/js/grid/api/useGridData.js index d31712989bcfd..38d4e00748d32 100644 --- a/airflow/www/static/js/grid/api/useGridData.js +++ b/airflow/www/static/js/grid/api/useGridData.js @@ -17,14 +17,13 @@ * under the License. */ -/* global autoRefreshInterval, gridData */ +/* global autoRefreshInterval */ import { useQuery } from 'react-query'; import axios from 'axios'; import { getMetaValue } from '../../utils'; import { useAutoRefresh } from '../context/autorefresh'; -import { areActiveRuns, formatData } from '../utils/gridData'; import useErrorToast from '../utils/useErrorToast'; import useFilters, { BASE_DATE_PARAM, NUM_RUNS_PARAM, RUN_STATE_PARAM, RUN_TYPE_PARAM, now, @@ -42,8 +41,9 @@ const emptyData = { groups: {}, }; +export const areActiveRuns = (runs = []) => runs.filter((run) => ['queued', 'running', 'scheduled'].includes(run.state)).length > 0; + const useGridData = () => { - const initialData = formatData(gridData, emptyData); const { isRefreshOn, stopRefresh } = useAutoRefresh(); const errorToast = useErrorToast(); const { @@ -75,7 +75,6 @@ const useGridData = () => { throw (error); } }, { - initialData, placeholderData: emptyData, // only refetch if the refresh switch is on refetchInterval: isRefreshOn && autoRefreshInterval * 1000, diff --git a/airflow/www/static/js/grid/utils/gridData.test.js b/airflow/www/static/js/grid/api/useGridData.test.js similarity index 96% rename from airflow/www/static/js/grid/utils/gridData.test.js rename to airflow/www/static/js/grid/api/useGridData.test.js index 6bbd8bf8b6a6d..29a7f1ac8a11f 100644 --- a/airflow/www/static/js/grid/utils/gridData.test.js +++ b/airflow/www/static/js/grid/api/useGridData.test.js @@ -19,7 +19,7 @@ /* global describe, test, expect */ -import { areActiveRuns } from './gridData'; +import { areActiveRuns } from './useGridData'; describe('Test areActiveRuns()', () => { test('Correctly detects active runs', () => { diff --git a/airflow/www/static/js/grid/api/useGridData.test.jsx b/airflow/www/static/js/grid/api/useGridData.test.jsx deleted file mode 100644 index 24aece6f5919f..0000000000000 --- a/airflow/www/static/js/grid/api/useGridData.test.jsx +++ /dev/null @@ -1,78 +0,0 @@ -/*! - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -/* global describe, test, expect, beforeAll */ - -import { renderHook } from '@testing-library/react-hooks'; -import useGridData from './useGridData'; -import { Wrapper } from '../utils/testUtils'; - -const pendingGridData = { - groups: {}, - dag_runs: [ - { - dag_id: 'example_python_operator', - run_id: 'manual__2021-11-08T21:14:17.170046+00:00', - start_date: null, - end_date: null, - state: 'queued', - execution_date: '2021-11-08T21:14:17.170046+00:00', - data_interval_start: '2021-11-08T21:14:17.170046+00:00', - data_interval_end: '2021-11-08T21:14:17.170046+00:00', - run_type: 'manual', - }, - ], -}; - -describe('Test useGridData hook', () => { - beforeAll(() => { - global.autoRefreshInterval = 5; - }); - - test('data is valid camelcase json', () => { - global.gridData = JSON.stringify(pendingGridData); - - const { result } = renderHook(() => useGridData(), { wrapper: Wrapper }); - const { data } = result.current; - - expect(typeof data === 'object').toBe(true); - expect(data.dagRuns).toBeDefined(); - expect(data.dag_runs).toBeUndefined(); - }); - - test('Can handle no gridData', () => { - global.gridData = null; - - const { result } = renderHook(() => useGridData(), { wrapper: Wrapper }); - const { data } = result.current; - - expect(data.dagRuns).toStrictEqual([]); - expect(data.groups).toStrictEqual({}); - }); - - test('Can handle empty gridData object', () => { - global.gridData = {}; - - const { result } = renderHook(() => useGridData(), { wrapper: Wrapper }); - const { data } = result.current; - - expect(data.dagRuns).toStrictEqual([]); - expect(data.groups).toStrictEqual({}); - }); -}); diff --git a/airflow/www/static/js/grid/api/useTasks.js b/airflow/www/static/js/grid/api/useTasks.js index 9d0f56d7cb59c..a635622e2787b 100644 --- a/airflow/www/static/js/grid/api/useTasks.js +++ b/airflow/www/static/js/grid/api/useTasks.js @@ -23,9 +23,9 @@ import { getMetaValue } from '../../utils'; const tasksUrl = getMetaValue('tasks_api'); -export default function useTasks(dagId) { +export default function useTasks() { return useQuery( - ['tasks', dagId], + 'tasks', () => axios.get(tasksUrl), { placeholderData: { tasks: [] }, diff --git a/airflow/www/static/js/grid/context/autorefresh.jsx b/airflow/www/static/js/grid/context/autorefresh.jsx index ae242e5bf66aa..7fbdc405f93c1 100644 --- a/airflow/www/static/js/grid/context/autorefresh.jsx +++ b/airflow/www/static/js/grid/context/autorefresh.jsx @@ -17,11 +17,10 @@ * under the License. 
*/ -/* global localStorage, gridData, document */ +/* global localStorage, document */ import React, { useContext, useState, useEffect } from 'react'; import { getMetaValue } from '../../utils'; -import { formatData, areActiveRuns } from '../utils/gridData'; const autoRefreshKey = 'disabledAutoRefresh'; @@ -31,17 +30,9 @@ const isRefreshDisabled = JSON.parse(localStorage.getItem(autoRefreshKey)); const AutoRefreshContext = React.createContext(null); export const AutoRefreshProvider = ({ children }) => { - let dagRuns = []; - try { - const data = JSON.parse(gridData); - if (data.dag_runs) dagRuns = formatData(data.dag_runs); - } catch { - dagRuns = []; - } const [isPaused, setIsPaused] = useState(initialIsPaused); - const isActive = areActiveRuns(dagRuns); const isRefreshAllowed = !(isPaused || isRefreshDisabled); - const initialState = isRefreshAllowed && isActive; + const initialState = isRefreshAllowed; const [isRefreshOn, setRefresh] = useState(initialState); @@ -67,8 +58,6 @@ export const AutoRefreshProvider = ({ children }) => { setIsPaused(!e.value); if (!e.value) { stopRefresh(); - } else if (isActive) { - setRefresh(true); } }; diff --git a/airflow/www/static/js/grid/dagRuns/index.test.jsx b/airflow/www/static/js/grid/dagRuns/index.test.jsx index a507df522c565..0420d7aba8792 100644 --- a/airflow/www/static/js/grid/dagRuns/index.test.jsx +++ b/airflow/www/static/js/grid/dagRuns/index.test.jsx @@ -112,15 +112,7 @@ describe('Test DagRuns', () => { }); test('Handles empty data correctly', () => { - global.gridData = null; - const { queryByTestId } = render( - , { wrapper: TableWrapper }, - ); - expect(queryByTestId('run')).toBeNull(); - }); - - test('Handles no data correctly', () => { - global.gridData = {}; + global.autoRefreshInterval = 0; const { queryByTestId } = render( , { wrapper: TableWrapper }, ); diff --git a/airflow/www/static/js/grid/details/content/Dag.jsx b/airflow/www/static/js/grid/details/content/Dag.jsx index 0f434d88fd72f..e527b494cf283 100644 --- a/airflow/www/static/js/grid/details/content/Dag.jsx +++ b/airflow/www/static/js/grid/details/content/Dag.jsx @@ -37,11 +37,10 @@ import { useTasks, useGridData } from '../../api'; import Time from '../../components/Time'; import { SimpleStatus } from '../../components/StatusBox'; -const dagId = getMetaValue('dag_id'); const dagDetailsUrl = getMetaValue('dag_details_url'); const Dag = () => { - const { data: taskData } = useTasks(dagId); + const { data: taskData } = useTasks(); const { data: { dagRuns } } = useGridData(); if (!taskData) return null; const { tasks = [], totalEntries = '' } = taskData; diff --git a/airflow/www/static/js/grid/index.jsx b/airflow/www/static/js/grid/index.jsx index 3d8702841edc6..b8c6db5324d4b 100644 --- a/airflow/www/static/js/grid/index.jsx +++ b/airflow/www/static/js/grid/index.jsx @@ -48,6 +48,7 @@ const queryClient = new QueryClient({ refetchOnWindowFocus: false, retry: 1, retryDelay: 500, + staleTime: 5 * 60 * 1000, // 5 minutes refetchOnMount: true, // Refetches stale queries, not "always" }, mutations: { diff --git a/airflow/www/static/js/grid/utils/gridData.js b/airflow/www/static/js/grid/utils/gridData.js deleted file mode 100644 index 95171652b7a7b..0000000000000 --- a/airflow/www/static/js/grid/utils/gridData.js +++ /dev/null @@ -1,34 +0,0 @@ -/*! - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import camelcaseKeys from 'camelcase-keys'; - -export const areActiveRuns = (runs = []) => runs.filter((run) => ['queued', 'running', 'scheduled'].includes(run.state)).length > 0; - -export const formatData = (data, emptyData) => { - if (!data || !Object.keys(data).length) { - return emptyData; - } - let formattedData = data; - // Convert to json if needed - if (typeof data === 'string') formattedData = JSON.parse(data); - // change from pascal to camelcase - formattedData = camelcaseKeys(formattedData, { deep: true }); - return formattedData; -}; diff --git a/airflow/www/templates/airflow/grid.html b/airflow/www/templates/airflow/grid.html index 46d52f7d27f58..f72e0ed4f1704 100644 --- a/airflow/www/templates/airflow/grid.html +++ b/airflow/www/templates/airflow/grid.html @@ -39,7 +39,6 @@ {% block tail_js %} {{ super()}} - diff --git a/airflow/www/webpack.config.js b/airflow/www/webpack.config.js index bb26d630fef47..9fc9252f30ec0 100644 --- a/airflow/www/webpack.config.js +++ b/airflow/www/webpack.config.js @@ -64,7 +64,6 @@ const config = { flash: `${CSS_DIR}/flash.css`, gantt: [`${CSS_DIR}/gantt.css`, `${JS_DIR}/gantt.js`], graph: [`${CSS_DIR}/graph.css`, `${JS_DIR}/graph.js`], - ie: `${JS_DIR}/ie.js`, loadingDots: `${CSS_DIR}/loading-dots.css`, main: [`${CSS_DIR}/main.css`, `${JS_DIR}/main.js`], materialIcons: `${CSS_DIR}/material-icons.css`, From 894f15c26f769510fbdfec2b93f44753596b66e4 Mon Sep 17 00:00:00 2001 From: Brent Bovenzi Date: Thu, 16 Jun 2022 16:36:21 -0400 Subject: [PATCH 077/118] Fix toast messages (#24505) * update webpack-cli, eslint, stylelint, babel * revert stylelint changes * update more plugins * update to webpack 5 * use portalProps to properly style toast msgs (cherry picked from commit c74946275e3488f3e553e156e8294cddae6b5db7) --- .../static/js/grid/context/containerRef.jsx | 5 ++- airflow/www/static/js/grid/index.jsx | 36 +++++++++++-------- .../www/static/js/grid/utils/useErrorToast.js | 2 +- 3 files changed, 25 insertions(+), 18 deletions(-) diff --git a/airflow/www/static/js/grid/context/containerRef.jsx b/airflow/www/static/js/grid/context/containerRef.jsx index 4ef3352122c49..9062f907ede00 100644 --- a/airflow/www/static/js/grid/context/containerRef.jsx +++ b/airflow/www/static/js/grid/context/containerRef.jsx @@ -18,7 +18,6 @@ */ import React, { useContext, useRef } from 'react'; -import { Box } from '@chakra-ui/react'; const ContainerRefContext = React.createContext(null); @@ -29,9 +28,9 @@ export const ContainerRefProvider = ({ children }) => { return ( - +
{children} - +
); }; diff --git a/airflow/www/static/js/grid/index.jsx b/airflow/www/static/js/grid/index.jsx index 6a7e129a3d662..549c0b7b292d3 100644 --- a/airflow/www/static/js/grid/index.jsx +++ b/airflow/www/static/js/grid/index.jsx @@ -29,7 +29,7 @@ import { QueryClient, QueryClientProvider } from 'react-query'; import Main from './Main'; import theme from './theme'; -import { ContainerRefProvider } from './context/containerRef'; +import { ContainerRefProvider, useContainerRef } from './context/containerRef'; import { TimezoneProvider } from './context/timezone'; import { AutoRefreshProvider } from './context/autorefresh'; @@ -61,23 +61,31 @@ const queryClient = new QueryClient({ }, }); +// Chakra needs to access the containerRef provider so our tooltips pick up the correct styles +const ChakraApp = () => { + const containerRef = useContainerRef(); + return ( + + + + + +
+ + + + + + ); +}; + function App() { return ( - - - - - - -
- - - - - - + + + ); diff --git a/airflow/www/static/js/grid/utils/useErrorToast.js b/airflow/www/static/js/grid/utils/useErrorToast.js index 842eb53810ba4..c4107bc91f487 100644 --- a/airflow/www/static/js/grid/utils/useErrorToast.js +++ b/airflow/www/static/js/grid/utils/useErrorToast.js @@ -35,10 +35,10 @@ const useErrorToast = () => { // Add an error prop and handle it as a description return ({ error, ...rest }) => { toast({ + ...rest, status: 'error', title: getErrorTitle(error), description: getErrorDescription(error).slice(0, 500), - ...rest, }); }; }; From 540f48cd4a7bdff52caad16bfadd35b2c019ecd0 Mon Sep 17 00:00:00 2001 From: Jed Cunningham <66968678+jedcunningham@users.noreply.github.com> Date: Thu, 16 Jun 2022 19:49:40 -0600 Subject: [PATCH 078/118] Fix deprecated log_id_template value (#24506) We had the wrong old default in our deprecated value upgrading logic, so the 2.2.5 default wasn't actually being upgraded. (cherry picked from commit a955ae4e7eef70a5621979152f9be2acb2211801) --- airflow/configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airflow/configuration.py b/airflow/configuration.py index 729e780f74a6a..799116415f99e 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -256,7 +256,7 @@ class AirflowConfigParser(ConfigParser): }, 'elasticsearch': { 'log_id_template': ( - re.compile('^' + re.escape('{dag_id}-{task_id}-{run_id}-{try_number}') + '$'), + re.compile('^' + re.escape('{dag_id}-{task_id}-{execution_date}-{try_number}') + '$'), '{dag_id}-{task_id}-{run_id}-{map_index}-{try_number}', '3.0', ) From 023270c4eca9f107c488ccebfb77954789ed8857 Mon Sep 17 00:00:00 2001 From: Jed Cunningham <66968678+jedcunningham@users.noreply.github.com> Date: Thu, 16 Jun 2022 23:41:32 -0600 Subject: [PATCH 079/118] Seed log_template table (#24511) Seed the log_template table with the default values pre 2.3.0 so log retrieval still works post upgrade. This only worked previously if you have the default in your config, now it works even if you don't. (cherry picked from commit c36d94df09930abfb58856e1b8b6e059c7a44525) --- airflow/utils/db.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/airflow/utils/db.py b/airflow/utils/db.py index 46b1968970112..cd7f82fea085f 100644 --- a/airflow/utils/db.py +++ b/airflow/utils/db.py @@ -798,6 +798,21 @@ def log_template_exists(): filename = conf.get("logging", "log_filename_template") elasticsearch_id = conf.get("elasticsearch", "log_id_template") + # First check if we have an empty table. If so, and the default values exist, + # we will seed the table with the values from pre 2.3.0, so old logs will + # still be retrievable. + if not session.query(LogTemplate.id).first(): + is_default_log_id = elasticsearch_id == conf.airflow_defaults.get("elasticsearch", "log_id_template") + is_default_filename = filename == conf.airflow_defaults.get("logging", "log_filename_template") + if is_default_log_id and is_default_filename: + session.add( + LogTemplate( + filename="{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log", + elasticsearch_id="{dag_id}-{task_id}-{execution_date}-{try_number}", + ) + ) + session.flush() + # Before checking if the _current_ value exists, we need to check if the old config value we upgraded in # place exists! 
pre_upgrade_filename = conf.upgraded_values.get(("logging", "log_filename_template"), filename) From 63ca8870219da31f166a06b729a1fc3d108594bc Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Thu, 23 Jun 2022 02:27:54 +0200 Subject: [PATCH 080/118] Add note about image regeneration in June 2022 (#24524) The note is about image changes after refreshing are now better organized (around date of the change) - this should be more useful by the users who will look why their images have been refreshed. Related to: #24516 (cherry picked from commit 1a62829136660b1c9b8e8a8676f4116f082cf7a2) --- docs/docker-stack/changelog.rst | 35 ++++++++++++++++++++------------- docs/spelling_wordlist.txt | 1 + 2 files changed, 22 insertions(+), 14 deletions(-) diff --git a/docs/docker-stack/changelog.rst b/docs/docker-stack/changelog.rst index c8cd754366787..45e57cd9923a9 100644 --- a/docs/docker-stack/changelog.rst +++ b/docs/docker-stack/changelog.rst @@ -34,14 +34,27 @@ the Airflow team. any Airflow version from the ``Airflow 2`` line. There is no guarantee that it works, but if it does, then you can use latest features from that image to build the previous Airflow versions. -:warning: Some of the images below (as noted in the Changelog) have been regenerated using newer - ``Dockerfiles``. This happens when there is a breaking change that invalidates already released images - and the images need regeneration. This has happened already when MySQL changed the keys they - used to release their packages: `#20911 `_ - and 2.1 images were all regenerated using the 2.2 ``Dockerfile``. This is a rare event and - we do it only when we have no choice because of external factors. In such case, the newer version of - the image **might** contain breaking changes when it comes to running or building the image (but - we try to avoid those). +Changes after publishing the images +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Occasionally our images need to be regenerated using newer ``Dockerfiles`` or constraints. +This happens when an issue is found or a breaking change is released by our dependencies +that invalidates the already released image, and regenerating the image makes it usable again. +While we cannot assure 100% backwards compatibility when it happens, we at least document it +here so that users affected can find the reason for the changes. + ++--------------+---------------------+-----------------------------------------+------------------------+----------------------------------------------+ +| Date | Affected images | Potentially breaking change | Reason | Link to Pull Request | ++==============+=====================+=========================================+========================+==============================================+ +| 17 June 2022 | 2.2.5 | * The ``Authlib`` library downgraded | Flask App Builder | https://github.com/apache/airflow/pull/24516 | +| | | from 1.0.1 to 0.15.5 version | not compatible with | | +| | 2.3.0-2.3.2 | | Authlib >= 1.0.0 | | ++--------------+---------------------+-----------------------------------------+------------------------+----------------------------------------------+ +| 18 Jan 2022 | All 2.2.\*, 2.1.\* | * The AIRFLOW_GID 500 was removed | MySQL changed keys | https://github.com/apache/airflow/pull/20912 | +| | | * MySQL ``apt`` repository key changed. 
| to sign their packages | | +| | | | on 17 Jan 2022 | | ++--------------+---------------------+-----------------------------------------+------------------------+----------------------------------------------+ + Airflow 2.3 ~~~~~~~~~~~ @@ -67,12 +80,6 @@ Airflow 2.3 Airflow 2.2 ~~~~~~~~~~~ -* MySQL changed the keys to sign their packages on 17 Feb 2022. This caused all released images - to fail when being extended. As result, on 18 Feb 2021 we re-released all - the ``2.2`` and ``2.1`` images with latest versions of ``Dockerfile`` - containing the new signing keys. The - detailed `issue here `_ - * 2.2.4 * Add support for both ``.piprc`` and ``pip.conf`` customizations * Add ArtifactHub labels for better discovery of the images diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 69130e7d058d6..5de2041c3314f 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -25,6 +25,7 @@ Async AsyncResult Atlassian Auth +Authlib AutoMlClient Autoscale Avro From 2320145803b82a42039f90d8761b22a137ee64f3 Mon Sep 17 00:00:00 2001 From: Bas Harenslak Date: Sun, 19 Jun 2022 10:53:15 +0200 Subject: [PATCH 081/118] Add imports to deferring code samples (#24544) (cherry picked from commit cabbf616bbc3139b1ce793c9daa20f13c82eb287) --- docs/apache-airflow/concepts/deferring.rst | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/apache-airflow/concepts/deferring.rst b/docs/apache-airflow/concepts/deferring.rst index ca810d7972089..3b5bf1b72de21 100644 --- a/docs/apache-airflow/concepts/deferring.rst +++ b/docs/apache-airflow/concepts/deferring.rst @@ -82,6 +82,12 @@ You are free to set ``method_name`` to ``execute`` if you want your Operator to Here's a basic example of how a sensor might trigger deferral:: + from datetime import timedelta + + from airflow.sensors.base import BaseSensorOperator + from airflow.triggers.temporal import TimeDeltaTrigger + + class WaitOneHourSensor(BaseSensorOperator): def execute(self, context): self.defer(trigger=TimeDeltaTrigger(timedelta(hours=1)), method_name="execute_complete") @@ -122,6 +128,12 @@ There's also some design constraints to be aware of: Here's the structure of a basic Trigger:: + import asyncio + + from airflow.triggers.base import BaseTrigger, TriggerEvent + from airflow.utils import timezone + + class DateTimeTrigger(BaseTrigger): def __init__(self, moment): From bada25d2c3a46fe58cd095beeac563659937ee4e Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Mon, 20 Jun 2022 14:45:16 +0200 Subject: [PATCH 082/118] Clarify that users should not use Maria DB (#24556) (cherry picked from commit 05c542dfa8eee9b4cdca4e9370f459ce807354b2) --- docs/apache-airflow/howto/set-up-database.rst | 9 ++++ .../installation/prerequisites.rst | 45 ++++++++++++------- 2 files changed, 38 insertions(+), 16 deletions(-) diff --git a/docs/apache-airflow/howto/set-up-database.rst b/docs/apache-airflow/howto/set-up-database.rst index a83948296d12c..9a9b6ababc2a1 100644 --- a/docs/apache-airflow/howto/set-up-database.rst +++ b/docs/apache-airflow/howto/set-up-database.rst @@ -40,6 +40,15 @@ Airflow supports the following database engine versions, so make sure which vers If you plan on running more than one scheduler, you have to meet additional requirements. For details, see :ref:`Scheduler HA Database Requirements `. +.. warning:: + + Despite big similarities between MariaDB and MySQL, we DO NOT support MariaDB as a backend for Airflow. 
+ There are known problems (for example index handling) between MariaDB and MySQL and we do not test + our migration scripts nor application execution on Maria DB. We know there were people who used + MariaDB for Airflow and that cause a lot of operational headache for them so we strongly discourage + attempts of using MariaDB as a backend and users cannot expect any community support for it + because the number of users who tried to use MariaDB for Airflow is very small. + Database URI ------------ diff --git a/docs/apache-airflow/installation/prerequisites.rst b/docs/apache-airflow/installation/prerequisites.rst index c41da8b4514bd..f4bf65d208ae4 100644 --- a/docs/apache-airflow/installation/prerequisites.rst +++ b/docs/apache-airflow/installation/prerequisites.rst @@ -18,7 +18,7 @@ Prerequisites ------------- -Airflow is tested with: +Starting with Airflow 2.3.0, Airflow is tested with:. * Python: 3.7, 3.8, 3.9, 3.10 @@ -31,22 +31,35 @@ Airflow is tested with: * Kubernetes: 1.20.2, 1.21.1, 1.22.0, 1.23.0, 1.24.0 -**Note:** MySQL 5.x versions are unable to or have limitations with -running multiple schedulers -- please see: :doc:`/concepts/scheduler`. MariaDB is not tested/recommended. +The minimum memory required we recommend Airflow to run with is 4GB, but the actual requirements depends +wildly on the deployment options you have -**Note:** SQLite is used in Airflow tests. Do not use it in production. We recommend -using the latest stable version of SQLite for local development. +.. warning:: -Starting with Airflow 2.3.0, Airflow is tested with Python 3.7, 3.8, 3.9 and 3.10. + Despite big similarities between MariaDB and MySQL, we DO NOT support MariaDB as a backend for Airflow. + There are known problems (for example index handling) between MariaDB and MySQL and we do not test + our migration scripts nor application execution on Maria DB. We know there were people who used + MariaDB for Airflow and that cause a lot of operational headache for them so we strongly discourage + attempts of using MariaDB as a backend and users cannot expect any community support for it + because the number of users who tried to use MariaDB for Airflow is very small. -The minimum memory required we recommend Airflow to run with is 4GB, but the actual requirements depends -wildly on the deployment options you have +.. warning:: + + MySQL 5.x versions are unable to or have limitations with + running multiple schedulers -- please see: :doc:`/concepts/scheduler`. + +.. warning:: + SQLite is used in Airflow tests. Do not use it in production. We recommend + using the latest stable version of SQLite for local development. + + +.. warning:: -**Note**: Airflow currently can be run on POSIX-compliant Operating Systems. For development it is regularly -tested on fairly modern Linux Distros and recent versions of MacOS. -On Windows you can run it via WSL2 (Windows Subsystem for Linux 2) or via Linux Containers. -The work to add Windows support is tracked via `#10388 `__ but -it is not a high priority. You should only use Linux-based distros as "Production" execution environment -as this is the only environment that is supported. The only distro that is used in our CI tests and that -is used in the `Community managed DockerHub image `__ is -``Debian Bullseye``. + Airflow currently can be run on POSIX-compliant Operating Systems. For development it is regularly + tested on fairly modern Linux Distros and recent versions of MacOS. 
+ On Windows you can run it via WSL2 (Windows Subsystem for Linux 2) or via Linux Containers. + The work to add Windows support is tracked via `#10388 `__ + but it is not a high priority. You should only use Linux-based distros as "Production" execution environment + as this is the only environment that is supported. The only distro that is used in our CI tests and that + is used in the `Community managed DockerHub image `__ is + ``Debian Bullseye``. From 628815e54c702af2d5e04e0d7bd0257e857561be Mon Sep 17 00:00:00 2001 From: Sergey Lyapustin Date: Wed, 22 Jun 2022 13:06:57 +0200 Subject: [PATCH 083/118] Patch sql_alchemy_conn if old Postgres schemes used (#24569) (cherry picked from commit 0527a0b6ce506434a23bc2a6f5ddb11f492fc614) --- airflow/configuration.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/airflow/configuration.py b/airflow/configuration.py index 799116415f99e..4f2caa186e2f7 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -361,21 +361,24 @@ def _upgrade_auth_backends(self): ) def _upgrade_postgres_metastore_conn(self): - """As of sqlalchemy 1.4, scheme `postgres+psycopg2` must be replaced with `postgresql`""" + """ + As of SQLAlchemy 1.4, schemes `postgres+psycopg2` and `postgres` + must be replaced with `postgresql`. + """ section, key = 'database', 'sql_alchemy_conn' old_value = self.get(section, key) - bad_scheme = 'postgres+psycopg2' + bad_schemes = ['postgres+psycopg2', 'postgres'] good_scheme = 'postgresql' parsed = urlparse(old_value) - if parsed.scheme == bad_scheme: + if parsed.scheme in bad_schemes: warnings.warn( - f"Bad scheme in Airflow configuration core > sql_alchemy_conn: `{bad_scheme}`. " - "As of SqlAlchemy 1.4 (adopted in Airflow 2.3) this is no longer supported. You must " + f"Bad scheme in Airflow configuration core > sql_alchemy_conn: `{parsed.scheme}`. " + "As of SQLAlchemy 1.4 (adopted in Airflow 2.3) this is no longer supported. 
You must " f"change to `{good_scheme}` before the next Airflow release.", FutureWarning, ) self.upgraded_values[(section, key)] = old_value - new_value = re.sub('^' + re.escape(f"{bad_scheme}://"), f"{good_scheme}://", old_value) + new_value = re.sub('^' + re.escape(f"{parsed.scheme}://"), f"{good_scheme}://", old_value) self._update_env_var(section=section, name=key, new_value=new_value) # if the old value is set via env var, we need to wipe it From 79fe04558e79638acb5a5b79ce5b6011e48720ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Drtina?= Date: Wed, 29 Jun 2022 03:55:41 +0200 Subject: [PATCH 084/118] Fix migration 0080_2_0_2 - Replace null values before setting column not null (#24585) (cherry picked from commit 9f58e823329d525c0e2b3950ada7e0e047ee7cfd) --- .../versions/0080_2_0_2_change_default_pool_slots_to_1.py | 1 + 1 file changed, 1 insertion(+) diff --git a/airflow/migrations/versions/0080_2_0_2_change_default_pool_slots_to_1.py b/airflow/migrations/versions/0080_2_0_2_change_default_pool_slots_to_1.py index f5ae34c2977c9..ef819468efddc 100644 --- a/airflow/migrations/versions/0080_2_0_2_change_default_pool_slots_to_1.py +++ b/airflow/migrations/versions/0080_2_0_2_change_default_pool_slots_to_1.py @@ -37,6 +37,7 @@ def upgrade(): """Change default ``pool_slots`` to ``1`` and make pool_slots not nullable""" + op.execute("UPDATE task_instance SET pool_slots = 1 WHERE pool_slots IS NULL") with op.batch_alter_table("task_instance", schema=None) as batch_op: batch_op.alter_column("pool_slots", existing_type=sa.Integer, nullable=False, server_default='1') From 19a393103545aab4050865e3d0232c366f73018e Mon Sep 17 00:00:00 2001 From: fritz-astronomer <80706212+fritz-astronomer@users.noreply.github.com> Date: Tue, 14 Jun 2022 16:15:49 -0400 Subject: [PATCH 085/118] `TI.log_url` fix for `map_index` (#24335) (cherry picked from commit 48a6155bb1478245c1dd8b6401e4cce00e129422) --- airflow/models/taskinstance.py | 8 +++++++- tests/models/test_taskinstance.py | 13 +------------ 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 5cd582ce3e34d..95ddb2dacdc25 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -799,7 +799,13 @@ def log_url(self): """Log URL for TaskInstance""" iso = quote(self.execution_date.isoformat()) base_url = conf.get('webserver', 'BASE_URL') - return base_url + f"/log?execution_date={iso}&task_id={self.task_id}&dag_id={self.dag_id}" + return ( + f"{base_url}/log" + f"?execution_date={iso}" + f"&task_id={self.task_id}" + f"&dag_id={self.dag_id}" + f"&map_index={self.map_index}" + ) @property def mark_success_url(self): diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py index 32a5b85ae9a61..b8e753456158c 100644 --- a/tests/models/test_taskinstance.py +++ b/tests/models/test_taskinstance.py @@ -1289,18 +1289,6 @@ def test_get_num_running_task_instances(self, create_task_instance): assert 1 == ti2.get_num_running_task_instances(session=session) assert 1 == ti3.get_num_running_task_instances(session=session) - # def test_log_url(self): - # now = pendulum.now('Europe/Brussels') - # dag = DAG('dag', start_date=DEFAULT_DATE) - # task = EmptyOperator(task_id='op', dag=dag) - # ti = TI(task=task, execution_date=now) - # d = urllib.parse.parse_qs( - # urllib.parse.urlparse(ti.log_url).query, - # keep_blank_values=True, strict_parsing=True) - # self.assertEqual(d['dag_id'][0], 'dag') - # self.assertEqual(d['task_id'][0], 'op') 
- # self.assertEqual(pendulum.parse(d['execution_date'][0]), now) - def test_log_url(self, create_task_instance): ti = create_task_instance(dag_id='dag', task_id='op', execution_date=timezone.datetime(2018, 1, 1)) @@ -1309,6 +1297,7 @@ def test_log_url(self, create_task_instance): 'execution_date=2018-01-01T00%3A00%3A00%2B00%3A00' '&task_id=op' '&dag_id=dag' + '&map_index=-1' ) assert ti.log_url == expected_url From b1be02473b2ad04dde8d1268a47f18a22eb89faa Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Tue, 14 Jun 2022 21:30:53 -0700 Subject: [PATCH 086/118] Don't crash scheduler if exec config has old k8s objects (#24117) From time to time k8s library objects change their attrs. If executor config is stored with old version, and unpickled with new version, we can get attribute errors that can crash the scheduler (see https://github.com/apache/airflow/issues/23727). Here we update handling so that we fail the task but don't crash the scheduler. (cherry picked from commit 0c41f437674f135fe7232a368bf9c198b0ecd2f0) --- airflow/exceptions.py | 4 +++ airflow/executors/kubernetes_executor.py | 12 +++++-- airflow/kubernetes/pod_generator.py | 7 ++-- airflow/models/taskinstance.py | 16 ++++++++- tests/executors/test_kubernetes_executor.py | 39 +++++++++++++++++++++ tests/kubernetes/test_pod_generator.py | 30 +++++++++++++++- tests/models/test_taskinstance.py | 26 +++++++++++++- 7 files changed, 127 insertions(+), 7 deletions(-) diff --git a/airflow/exceptions.py b/airflow/exceptions.py index 6a8eed35a321e..f1a8c1cb66473 100644 --- a/airflow/exceptions.py +++ b/airflow/exceptions.py @@ -327,3 +327,7 @@ def __repr__(self) -> str: class TaskDeferralError(AirflowException): """Raised when a task failed during deferral for some reason.""" + + +class PodReconciliationError(AirflowException): + """Raised when an error is encountered while trying to merge pod configs.""" diff --git a/airflow/executors/kubernetes_executor.py b/airflow/executors/kubernetes_executor.py index c76cf58f418d4..e510da2b314d6 100644 --- a/airflow/executors/kubernetes_executor.py +++ b/airflow/executors/kubernetes_executor.py @@ -35,7 +35,7 @@ from kubernetes.client.rest import ApiException from urllib3.exceptions import ReadTimeoutError -from airflow.exceptions import AirflowException +from airflow.exceptions import AirflowException, PodReconciliationError from airflow.executors.base_executor import NOT_STARTED_MESSAGE, BaseExecutor, CommandType from airflow.kubernetes import pod_generator from airflow.kubernetes.kube_client import get_kube_client @@ -300,8 +300,9 @@ def run_next(self, next_job: KubernetesJobType) -> None: and store relevant info in the current_jobs map so we can track the job's status """ - self.log.info('Kubernetes job is %s', str(next_job).replace("\n", " ")) key, command, kube_executor_config, pod_template_file = next_job + self.log.info('Kubernetes job is %s', key) + dag_id, task_id, run_id, try_number, map_index = key if command[0:3] != ["airflow", "tasks", "run"]: @@ -617,6 +618,13 @@ def sync(self) -> None: task = self.task_queue.get_nowait() try: self.kube_scheduler.run_next(task) + except PodReconciliationError as e: + self.log.error( + "Pod reconciliation failed, likely due to kubernetes library upgrade. 
" + "Try clearing the task to re-run.", + exc_info=True, + ) + self.fail(task[0], e) except ApiException as e: # These codes indicate something is wrong with pod definition; otherwise we assume pod diff --git a/airflow/kubernetes/pod_generator.py b/airflow/kubernetes/pod_generator.py index 52b45801ccabc..8a86919a65b75 100644 --- a/airflow/kubernetes/pod_generator.py +++ b/airflow/kubernetes/pod_generator.py @@ -34,7 +34,7 @@ from kubernetes.client import models as k8s from kubernetes.client.api_client import ApiClient -from airflow.exceptions import AirflowConfigException +from airflow.exceptions import AirflowConfigException, PodReconciliationError from airflow.kubernetes.pod_generator_deprecated import PodDefaults, PodGenerator as PodGeneratorDeprecated from airflow.utils import yaml from airflow.version import version as airflow_version @@ -389,7 +389,10 @@ def construct_pod( # Pod from the pod_template_File -> Pod from executor_config arg -> Pod from the K8s executor pod_list = [base_worker_pod, pod_override_object, dynamic_pod] - return reduce(PodGenerator.reconcile_pods, pod_list) + try: + return reduce(PodGenerator.reconcile_pods, pod_list) + except Exception as e: + raise PodReconciliationError from e @staticmethod def serialize_pod(pod: k8s.V1Pod) -> dict: diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 95ddb2dacdc25..fe3387ecf0025 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -428,6 +428,20 @@ def key(self) -> "TaskInstanceKey": return self +def _executor_config_comparator(x, y): + """ + The TaskInstance.executor_config attribute is a pickled object that may contain + kubernetes objects. If the installed library version has changed since the + object was originally pickled, due to the underlying ``__eq__`` method on these + objects (which converts them to JSON), we may encounter attribute errors. In this + case we should replace the stored object. + """ + try: + return x == y + except AttributeError: + return False + + class TaskInstance(Base, LoggingMixin): """ Task instances store the state of a task instance. 
This table is the @@ -470,7 +484,7 @@ class TaskInstance(Base, LoggingMixin): queued_dttm = Column(UtcDateTime) queued_by_job_id = Column(Integer) pid = Column(Integer) - executor_config = Column(PickleType(pickler=dill)) + executor_config = Column(PickleType(pickler=dill, comparator=_executor_config_comparator)) external_executor_id = Column(String(ID_LEN, **COLLATION_ARGS)) diff --git a/tests/executors/test_kubernetes_executor.py b/tests/executors/test_kubernetes_executor.py index 954f4f06001db..8ffeb5624b8cc 100644 --- a/tests/executors/test_kubernetes_executor.py +++ b/tests/executors/test_kubernetes_executor.py @@ -29,6 +29,7 @@ from urllib3 import HTTPResponse from airflow import AirflowException +from airflow.exceptions import PodReconciliationError from airflow.models.taskinstance import TaskInstanceKey from airflow.operators.bash import BashOperator from airflow.utils import timezone @@ -272,6 +273,44 @@ def test_run_next_exception_requeue( assert kubernetes_executor.task_queue.empty() assert kubernetes_executor.event_buffer[task_instance_key][0] == State.FAILED + @pytest.mark.skipif( + AirflowKubernetesScheduler is None, reason='kubernetes python package is not installed' + ) + @mock.patch('airflow.executors.kubernetes_executor.KubernetesJobWatcher') + @mock.patch('airflow.executors.kubernetes_executor.get_kube_client') + def test_run_next_pod_reconciliation_error(self, mock_get_kube_client, mock_kubernetes_job_watcher): + """ + When construct_pod raises PodReconciliationError, we should fail the task. + """ + import sys + + path = sys.path[0] + '/tests/kubernetes/pod_generator_base_with_secrets.yaml' + + mock_kube_client = mock.patch('kubernetes.client.CoreV1Api', autospec=True) + fail_msg = 'test message' + mock_kube_client.create_namespaced_pod = mock.MagicMock(side_effect=PodReconciliationError(fail_msg)) + mock_get_kube_client.return_value = mock_kube_client + mock_api_client = mock.MagicMock() + mock_api_client.sanitize_for_serialization.return_value = {} + mock_kube_client.api_client = mock_api_client + config = {('kubernetes', 'pod_template_file'): path} + with conf_vars(config): + kubernetes_executor = self.kubernetes_executor + kubernetes_executor.start() + # Execute a task while the Api Throws errors + try_number = 1 + task_instance_key = TaskInstanceKey('dag', 'task', 'run_id', try_number) + kubernetes_executor.execute_async( + key=task_instance_key, + queue=None, + command=['airflow', 'tasks', 'run', 'true', 'some_parameter'], + ) + kubernetes_executor.sync() + + assert kubernetes_executor.task_queue.empty() + assert kubernetes_executor.event_buffer[task_instance_key][0] == State.FAILED + assert kubernetes_executor.event_buffer[task_instance_key][1].args[0] == fail_msg + @mock.patch('airflow.executors.kubernetes_executor.KubeConfig') @mock.patch('airflow.executors.kubernetes_executor.KubernetesExecutor.sync') @mock.patch('airflow.executors.base_executor.BaseExecutor.trigger_tasks') diff --git a/tests/kubernetes/test_pod_generator.py b/tests/kubernetes/test_pod_generator.py index d220872187cb1..df5efb0d06c9f 100644 --- a/tests/kubernetes/test_pod_generator.py +++ b/tests/kubernetes/test_pod_generator.py @@ -19,6 +19,7 @@ import sys import uuid from unittest import mock +from unittest.mock import MagicMock import pytest from dateutil import parser @@ -26,7 +27,7 @@ from parameterized import parameterized from airflow import __version__ -from airflow.exceptions import AirflowConfigException +from airflow.exceptions import AirflowConfigException, PodReconciliationError 
from airflow.kubernetes.pod_generator import ( PodDefaults, PodGenerator, @@ -520,6 +521,33 @@ def test_construct_pod_empty_executor_config(self, mock_uuid): worker_config_result = self.k8s_client.sanitize_for_serialization(worker_config) assert worker_config_result == sanitized_result + @mock.patch('uuid.uuid4') + def test_construct_pod_attribute_error(self, mock_uuid): + """ + After upgrading k8s library we might get attribute error. + In this case it should raise PodReconciliationError + """ + path = sys.path[0] + '/tests/kubernetes/pod_generator_base_with_secrets.yaml' + worker_config = PodGenerator.deserialize_model_file(path) + mock_uuid.return_value = self.static_uuid + executor_config = MagicMock() + executor_config.side_effect = AttributeError('error') + + with pytest.raises(PodReconciliationError): + PodGenerator.construct_pod( + dag_id='dag_id', + task_id='task_id', + pod_id='pod_id', + kube_image='test-image', + try_number=3, + date=self.execution_date, + args=['command'], + pod_override_object=executor_config, + base_worker_pod=worker_config, + namespace='namespace', + scheduler_job_id='uuid', + ) + @mock.patch('uuid.uuid4') def test_ensure_max_label_length(self, mock_uuid): mock_uuid.return_value = self.static_uuid diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py index b8e753456158c..3990c3cbf5522 100644 --- a/tests/models/test_taskinstance.py +++ b/tests/models/test_taskinstance.py @@ -57,7 +57,12 @@ XCom, ) from airflow.models.taskfail import TaskFail -from airflow.models.taskinstance import TaskInstance, load_error_file, set_error_file +from airflow.models.taskinstance import ( + TaskInstance, + _executor_config_comparator, + load_error_file, + set_error_file, +) from airflow.models.taskmap import TaskMap from airflow.models.xcom import XCOM_RETURN_KEY from airflow.operators.bash import BashOperator @@ -2868,3 +2873,22 @@ def get_extra_env(): echo_task = dag.get_task("echo") assert "get_extra_env" in echo_task.upstream_task_ids + + +def test_executor_config_comparator(): + """ + When comparison raises AttributeError, return False. + This can happen when executor config contains kubernetes objects pickled + under older kubernetes library version. 
+ """ + + class MockAttrError: + def __eq__(self, other): + raise AttributeError('hello') + + a = MockAttrError() + with pytest.raises(AttributeError): + # just verify for ourselves that this throws + assert a == a + assert _executor_config_comparator(a, a) is False + assert _executor_config_comparator('a', 'a') is True From bea9e656bb3dda8ac7f1ad31b198b7e4e00a3b79 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 7 Jun 2022 08:13:55 +0800 Subject: [PATCH 087/118] Apply per-run log templates to log handlers (#24153) (cherry picked from commit c23826915dcdca4f22b52b74633336cb2f4a1eca) --- .../airflow_local_settings.py | 1 - airflow/config_templates/default_test.cfg | 1 - airflow/models/dagrun.py | 15 +++- .../alibaba/cloud/log/oss_task_handler.py | 2 +- .../amazon/aws/log/cloudwatch_task_handler.py | 3 +- .../amazon/aws/log/s3_task_handler.py | 3 +- .../elasticsearch/log/es_task_handler.py | 33 +++++++-- .../google/cloud/log/gcs_task_handler.py | 2 +- .../microsoft/azure/log/wasb_task_handler.py | 3 +- airflow/utils/log/file_task_handler.py | 26 ++++--- airflow/utils/log/log_reader.py | 2 +- .../endpoints/test_log_endpoint.py | 8 +-- tests/conftest.py | 39 +++++++++++ .../cloud/log/test_oss_task_handler.py | 5 +- .../aws/log/test_cloudwatch_task_handler.py | 37 ++++++---- .../amazon/aws/log/test_s3_task_handler.py | 43 ++++++++---- .../elasticsearch/log/test_es_task_handler.py | 68 ++++++++----------- .../google/cloud/log/test_gcs_task_handler.py | 8 +-- .../azure/log/test_wasb_task_handler.py | 41 +++++------ tests/task/task_runner/test_task_runner.py | 1 + tests/utils/log/test_log_reader.py | 12 +++- tests/utils/test_log_handlers.py | 47 ++++++------- .../task_for_testing_log_view/1.log | 1 + .../attempt=1.log | 1 + tests/www/views/test_views_log.py | 3 - 25 files changed, 250 insertions(+), 155 deletions(-) create mode 100644 tests/www/test_logs/dag_for_testing_log_view/scheduled__2017-09-01T00:00:00+00:00/task_for_testing_log_view/1.log create mode 100644 tests/www/test_logs/dag_id=dag_for_testing_log_view/run_id=scheduled__2017-09-01T00:00:00+00:00/task_id=task_for_testing_log_view/attempt=1.log diff --git a/airflow/config_templates/airflow_local_settings.py b/airflow/config_templates/airflow_local_settings.py index b2752c2be7c25..6684fd18e51a0 100644 --- a/airflow/config_templates/airflow_local_settings.py +++ b/airflow/config_templates/airflow_local_settings.py @@ -82,7 +82,6 @@ 'class': 'airflow.utils.log.file_task_handler.FileTaskHandler', 'formatter': 'airflow', 'base_log_folder': os.path.expanduser(BASE_LOG_FOLDER), - 'filename_template': FILENAME_TEMPLATE, 'filters': ['mask_secrets'], }, 'processor': { diff --git a/airflow/config_templates/default_test.cfg b/airflow/config_templates/default_test.cfg index 2f9b6fa264b13..83260d0d5250f 100644 --- a/airflow/config_templates/default_test.cfg +++ b/airflow/config_templates/default_test.cfg @@ -54,7 +54,6 @@ base_log_folder = {AIRFLOW_HOME}/logs logging_level = INFO celery_logging_level = WARN fab_logging_level = WARN -log_filename_template = {{{{ ti.dag_id }}}}/{{{{ ti.task_id }}}}/{{{{ ts }}}}/{{{{ try_number }}}}.log log_processor_filename_template = {{{{ filename }}}}.log dag_processor_manager_log_location = {AIRFLOW_HOME}/logs/dag_processor_manager/dag_processor_manager.log worker_log_server_port = 8793 diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index d9c4eeb72637b..ad0dcdfebdbb3 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -1209,14 +1209,23 @@ def schedule_tis(self, 
schedulable_tis: Iterable[TI], session: Session = NEW_SES return count @provide_session - def get_log_filename_template(self, *, session: Session = NEW_SESSION) -> str: + def get_log_template(self, *, session: Session = NEW_SESSION) -> LogTemplate: if self.log_template_id is None: # DagRun created before LogTemplate introduction. - template = session.query(LogTemplate.filename).order_by(LogTemplate.id).limit(1).scalar() + template = session.query(LogTemplate).order_by(LogTemplate.id).first() else: - template = session.query(LogTemplate.filename).filter_by(id=self.log_template_id).scalar() + template = session.query(LogTemplate).get(self.log_template_id) if template is None: raise AirflowException( f"No log_template entry found for ID {self.log_template_id!r}. " f"Please make sure you set up the metadatabase correctly." ) return template + + @provide_session + def get_log_filename_template(self, *, session: Session = NEW_SESSION) -> str: + warnings.warn( + "This method is deprecated. Please use get_log_template instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.get_log_template(session=session).filename diff --git a/airflow/providers/alibaba/cloud/log/oss_task_handler.py b/airflow/providers/alibaba/cloud/log/oss_task_handler.py index d26bfbfd048dc..ec61972ffcd43 100644 --- a/airflow/providers/alibaba/cloud/log/oss_task_handler.py +++ b/airflow/providers/alibaba/cloud/log/oss_task_handler.py @@ -36,7 +36,7 @@ class OSSTaskHandler(FileTaskHandler, LoggingMixin): uploads to and reads from OSS remote storage. """ - def __init__(self, base_log_folder, oss_log_folder, filename_template): + def __init__(self, base_log_folder, oss_log_folder, filename_template=None): self.log.info("Using oss_task_handler for remote logging...") super().__init__(base_log_folder, filename_template) (self.bucket_name, self.base_folder) = OSSHook.parse_oss_url(oss_log_folder) diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py index c975a2cb83fc6..7d4f81006b380 100644 --- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py @@ -17,6 +17,7 @@ # under the License. import sys from datetime import datetime +from typing import Optional import watchtower @@ -42,7 +43,7 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin): :param filename_template: template for file name (local storage) or log stream name (remote) """ - def __init__(self, base_log_folder: str, log_group_arn: str, filename_template: str): + def __init__(self, base_log_folder: str, log_group_arn: str, filename_template: Optional[str] = None): super().__init__(base_log_folder, filename_template) split_arn = log_group_arn.split(':') diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py index 695c4623d97b2..0abea94c665c0 100644 --- a/airflow/providers/amazon/aws/log/s3_task_handler.py +++ b/airflow/providers/amazon/aws/log/s3_task_handler.py @@ -17,6 +17,7 @@ # under the License. import os import sys +from typing import Optional if sys.version_info >= (3, 8): from functools import cached_property @@ -35,7 +36,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin): uploads to and reads from S3 remote storage. 
""" - def __init__(self, base_log_folder: str, s3_log_folder: str, filename_template: str): + def __init__(self, base_log_folder: str, s3_log_folder: str, filename_template: Optional[str] = None): super().__init__(base_log_folder, filename_template) self.remote_base = s3_log_folder self.log_relative_path = '' diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py b/airflow/providers/elasticsearch/log/es_task_handler.py index 83c1163d80c87..64fce0df53c15 100644 --- a/airflow/providers/elasticsearch/log/es_task_handler.py +++ b/airflow/providers/elasticsearch/log/es_task_handler.py @@ -18,6 +18,7 @@ import logging import sys +import warnings from collections import defaultdict from datetime import datetime from operator import attrgetter @@ -31,15 +32,22 @@ from elasticsearch_dsl import Search from airflow.configuration import conf -from airflow.models import TaskInstance +from airflow.models.dagrun import DagRun +from airflow.models.taskinstance import TaskInstance from airflow.utils import timezone from airflow.utils.log.file_task_handler import FileTaskHandler from airflow.utils.log.json_formatter import JSONFormatter from airflow.utils.log.logging_mixin import ExternalLoggingMixin, LoggingMixin +from airflow.utils.session import create_session # Elasticsearch hosted log type EsLogMsgType = List[Tuple[str, str]] +# Compatibility: Airflow 2.3.3 and up uses this method, which accesses the +# LogTemplate model to record the log ID template used. If this function does +# not exist, the task handler should use the log_id_template attribute instead. +USE_PER_RUN_LOG_ID = hasattr(DagRun, "get_log_template") + class ElasticsearchTaskHandler(FileTaskHandler, ExternalLoggingMixin, LoggingMixin): """ @@ -65,8 +73,6 @@ class ElasticsearchTaskHandler(FileTaskHandler, ExternalLoggingMixin, LoggingMix def __init__( self, base_log_folder: str, - filename_template: str, - log_id_template: str, end_of_log_mark: str, write_stdout: bool, json_format: bool, @@ -76,6 +82,9 @@ def __init__( host: str = "localhost:9200", frontend: str = "localhost:5601", es_kwargs: Optional[dict] = conf.getsection("elasticsearch_configs"), + *, + filename_template: Optional[str] = None, + log_id_template: Optional[str] = None, ): """ :param base_log_folder: base folder to store logs locally @@ -88,7 +97,13 @@ def __init__( self.client = elasticsearch.Elasticsearch([host], **es_kwargs) # type: ignore[attr-defined] - self.log_id_template = log_id_template + if USE_PER_RUN_LOG_ID and log_id_template is not None: + warnings.warn( + "Passing log_id_template to ElasticsearchTaskHandler is deprecated and has no effect", + DeprecationWarning, + ) + + self.log_id_template = log_id_template # Only used on Airflow < 2.3.2. self.frontend = frontend self.mark_end_on_close = True self.end_of_log_mark = end_of_log_mark @@ -103,7 +118,13 @@ def __init__( self.handler: Union[logging.FileHandler, logging.StreamHandler] # type: ignore[assignment] def _render_log_id(self, ti: TaskInstance, try_number: int) -> str: - dag_run = ti.get_dagrun() + with create_session() as session: + dag_run = ti.get_dagrun(session=session) + if USE_PER_RUN_LOG_ID: + log_id_template = dag_run.get_log_template(session=session).elasticsearch_id + else: + log_id_template = self.log_id_template + dag = ti.task.dag assert dag is not None # For Mypy. 
try: @@ -126,7 +147,7 @@ def _render_log_id(self, ti: TaskInstance, try_number: int) -> str: data_interval_end = "" execution_date = dag_run.execution_date.isoformat() - return self.log_id_template.format( + return log_id_template.format( dag_id=ti.dag_id, task_id=ti.task_id, run_id=getattr(ti, "run_id", ""), diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/airflow/providers/google/cloud/log/gcs_task_handler.py index 92d133d109af5..81f1426d75154 100644 --- a/airflow/providers/google/cloud/log/gcs_task_handler.py +++ b/airflow/providers/google/cloud/log/gcs_task_handler.py @@ -67,7 +67,7 @@ def __init__( *, base_log_folder: str, gcs_log_folder: str, - filename_template: str, + filename_template: Optional[str] = None, gcp_key_path: Optional[str] = None, gcp_keyfile_dict: Optional[dict] = None, gcp_scopes: Optional[Collection[str]] = _DEFAULT_SCOPESS, diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py b/airflow/providers/microsoft/azure/log/wasb_task_handler.py index 9ec0cdf646fc4..f5e89c2c21a68 100644 --- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py +++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py @@ -44,8 +44,9 @@ def __init__( base_log_folder: str, wasb_log_folder: str, wasb_container: str, - filename_template: str, delete_local_copy: str, + *, + filename_template: Optional[str] = None, ) -> None: super().__init__(base_log_folder, filename_template) self.wasb_container = wasb_container diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py index 321e125288766..2c53529a72dc0 100644 --- a/airflow/utils/log/file_task_handler.py +++ b/airflow/utils/log/file_task_handler.py @@ -18,6 +18,7 @@ """File logging handler for tasks.""" import logging import os +import warnings from datetime import datetime from pathlib import Path from typing import TYPE_CHECKING, Optional, Tuple @@ -27,6 +28,7 @@ from airflow.utils.helpers import parse_template_string, render_template_to_string from airflow.utils.jwt_signer import JWTSigner from airflow.utils.log.non_caching_file_handler import NonCachingFileHandler +from airflow.utils.session import create_session if TYPE_CHECKING: from airflow.models import TaskInstance @@ -43,11 +45,15 @@ class FileTaskHandler(logging.Handler): :param filename_template: template filename string """ - def __init__(self, base_log_folder: str, filename_template: str): + def __init__(self, base_log_folder: str, filename_template: Optional[str] = None): super().__init__() self.handler: Optional[logging.FileHandler] = None self.local_base = base_log_folder - self.filename_template, self.filename_jinja_template = parse_template_string(filename_template) + if filename_template is not None: + warnings.warn( + "Passing filename_template to FileTaskHandler is deprecated and has no effect", + DeprecationWarning, + ) def set_context(self, ti: "TaskInstance"): """ @@ -74,15 +80,19 @@ def close(self): self.handler.close() def _render_filename(self, ti: "TaskInstance", try_number: int) -> str: - if self.filename_jinja_template: + with create_session() as session: + dag_run = ti.get_dagrun(session=session) + template = dag_run.get_log_template(session=session).filename + str_tpl, jinja_tpl = parse_template_string(template) + + if jinja_tpl: if hasattr(ti, "task"): context = ti.get_template_context() else: - context = Context(ti=ti, ts=ti.get_dagrun().logical_date.isoformat()) + context = Context(ti=ti, ts=dag_run.logical_date.isoformat()) context["try_number"] = try_number - return 
render_template_to_string(self.filename_jinja_template, context) - elif self.filename_template: - dag_run = ti.get_dagrun() + return render_template_to_string(jinja_tpl, context) + elif str_tpl: dag = ti.task.dag assert dag is not None # For Mypy. try: @@ -97,7 +107,7 @@ def _render_filename(self, ti: "TaskInstance", try_number: int) -> str: data_interval_end = data_interval[1].isoformat() else: data_interval_end = "" - return self.filename_template.format( + return str_tpl.format( dag_id=ti.dag_id, task_id=ti.task_id, run_id=ti.run_id, diff --git a/airflow/utils/log/log_reader.py b/airflow/utils/log/log_reader.py index 396ab90a324f5..f241c22df188d 100644 --- a/airflow/utils/log/log_reader.py +++ b/airflow/utils/log/log_reader.py @@ -121,6 +121,6 @@ def render_log_filename( attachment_filename = render_log_filename( ti=ti, try_number="all" if try_number is None else try_number, - filename_template=dagrun.get_log_filename_template(session=session), + filename_template=dagrun.get_log_template(session=session).filename, ) return attachment_filename diff --git a/tests/api_connexion/endpoints/test_log_endpoint.py b/tests/api_connexion/endpoints/test_log_endpoint.py index 614e1fa3a19db..1b226be96f985 100644 --- a/tests/api_connexion/endpoints/test_log_endpoint.py +++ b/tests/api_connexion/endpoints/test_log_endpoint.py @@ -99,7 +99,7 @@ def setup_attrs(self, configured_app, configure_loggers, dag_maker, session) -> self.ti.hostname = 'localhost' @pytest.fixture - def configure_loggers(self, tmp_path): + def configure_loggers(self, tmp_path, create_log_template): self.log_dir = tmp_path dir_path = tmp_path / self.DAG_ID / self.TASK_ID / self.default_time.replace(':', '.') @@ -112,9 +112,9 @@ def configure_loggers(self, tmp_path): logging_config = copy.deepcopy(DEFAULT_LOGGING_CONFIG) logging_config['handlers']['task']['base_log_folder'] = self.log_dir - logging_config['handlers']['task'][ - 'filename_template' - ] = '{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts | replace(":", ".") }}/{{ try_number }}.log' + create_log_template( + '{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts | replace(":", ".") }}/{{ try_number }}.log' + ) logging.config.dictConfig(logging_config) diff --git a/tests/conftest.py b/tests/conftest.py index 68d318e13c50a..b153c213d5f93 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -36,6 +36,9 @@ os.environ["AWS_DEFAULT_REGION"] = os.environ.get("AWS_DEFAULT_REGION") or "us-east-1" os.environ["CREDENTIALS_DIR"] = os.environ.get('CREDENTIALS_DIR') or "/files/airflow-breeze-config/keys" +from airflow import settings # noqa: E402 +from airflow.models.tasklog import LogTemplate # noqa: E402 + from tests.test_utils.perf.perf_kit.sqlalchemy import ( # noqa isort:skip count_queries, trace_queries, @@ -775,3 +778,39 @@ def session(): with create_session() as session: yield session session.rollback() + + +@pytest.fixture() +def get_test_dag(): + def _get(dag_id): + from airflow.models.dagbag import DagBag + from airflow.models.serialized_dag import SerializedDagModel + + dag_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'dags', f'{dag_id}.py') + dagbag = DagBag(dag_folder=dag_file, include_examples=False) + + dag = dagbag.get_dag(dag_id) + dag.sync_to_db() + SerializedDagModel.write_dag(dag) + + return dag + + return _get + + +@pytest.fixture() +def create_log_template(request): + session = settings.Session() + + def _create_log_template(filename_template, elasticsearch_id=""): + log_template = LogTemplate(filename=filename_template, elasticsearch_id=elasticsearch_id) + 
session.add(log_template) + session.commit() + + def _delete_log_template(): + session.delete(log_template) + session.commit() + + request.addfinalizer(_delete_log_template) + + return _create_log_template diff --git a/tests/providers/alibaba/cloud/log/test_oss_task_handler.py b/tests/providers/alibaba/cloud/log/test_oss_task_handler.py index 24eb73b92e92f..30e8cc32b9b23 100644 --- a/tests/providers/alibaba/cloud/log/test_oss_task_handler.py +++ b/tests/providers/alibaba/cloud/log/test_oss_task_handler.py @@ -35,10 +35,7 @@ class TestOSSTaskHandler(unittest.TestCase): def setUp(self): self.base_log_folder = 'local/airflow/logs/1.log' self.oss_log_folder = f'oss://{MOCK_BUCKET_NAME}/airflow/logs' - self.filename_template = '{try_number}.log' - self.oss_task_handler = OSSTaskHandler( - self.base_log_folder, self.oss_log_folder, self.filename_template - ) + self.oss_task_handler = OSSTaskHandler(self.base_log_folder, self.oss_log_folder) @mock.patch(OSS_TASK_HANDLER_STRING.format('conf.get')) @mock.patch(OSS_TASK_HANDLER_STRING.format('OSSHook')) diff --git a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py index dbd2ae28d5ad7..8b23218c8cc39 100644 --- a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py +++ b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py @@ -16,17 +16,18 @@ # specific language governing permissions and limitations # under the License. import time -import unittest from datetime import datetime as dt from unittest import mock from unittest.mock import ANY, call +import pytest from watchtower import CloudWatchLogHandler from airflow.models import DAG, DagRun, TaskInstance from airflow.operators.empty import EmptyOperator from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook from airflow.providers.amazon.aws.log.cloudwatch_task_handler import CloudwatchTaskHandler +from airflow.utils.session import create_session from airflow.utils.state import State from airflow.utils.timezone import datetime from tests.test_utils.config import conf_vars @@ -44,19 +45,24 @@ def get_time_str(time_in_milliseconds): return dt_time.strftime("%Y-%m-%d %H:%M:%S,000") -@unittest.skipIf(mock_logs is None, "Skipping test because moto.mock_logs is not available") -@mock_logs -class TestCloudwatchTaskHandler(unittest.TestCase): +@pytest.fixture(autouse=True, scope="module") +def logmock(): + with mock_logs(): + yield + + +@pytest.mark.skipif(mock_logs is None, reason="Skipping test because moto.mock_logs is not available") +class TestCloudwatchTaskHandler: @conf_vars({('logging', 'remote_log_conn_id'): 'aws_default'}) - def setUp(self): + @pytest.fixture(autouse=True) + def setup(self, create_log_template): self.remote_log_group = 'log_group_name' self.region_name = 'us-west-2' self.local_log_location = 'local/log/location' - self.filename_template = '{dag_id}/{task_id}/{execution_date}/{try_number}.log' + create_log_template('{dag_id}/{task_id}/{execution_date}/{try_number}.log') self.cloudwatch_task_handler = CloudwatchTaskHandler( self.local_log_location, f"arn:aws:logs:{self.region_name}:11111111:log-group:{self.remote_log_group}", - self.filename_template, ) self.cloudwatch_task_handler.hook @@ -65,21 +71,29 @@ def setUp(self): task_id = 'task_for_testing_cloudwatch_log_handler' self.dag = DAG(dag_id=dag_id, start_date=date) task = EmptyOperator(task_id=task_id, dag=self.dag) - dag_run = DagRun(dag_id=self.dag.dag_id, execution_date=date, run_id="test") - self.ti = 
TaskInstance(task=task) + dag_run = DagRun(dag_id=self.dag.dag_id, execution_date=date, run_id="test", run_type="scheduled") + with create_session() as session: + session.add(dag_run) + session.commit() + session.refresh(dag_run) + + self.ti = TaskInstance(task=task, run_id=dag_run.run_id) self.ti.dag_run = dag_run self.ti.try_number = 1 self.ti.state = State.RUNNING - self.remote_log_stream = f'{dag_id}/{task_id}/{date.isoformat()}/{self.ti.try_number}.log'.replace( + self.remote_log_stream = (f'{dag_id}/{task_id}/{date.isoformat()}/{self.ti.try_number}.log').replace( ':', '_' ) moto.moto_api._internal.models.moto_api_backend.reset() self.conn = boto3.client('logs', region_name=self.region_name) - def tearDown(self): + yield + self.cloudwatch_task_handler.handler = None + with create_session() as session: + session.query(DagRun).delete() def test_hook(self): assert isinstance(self.cloudwatch_task_handler.hook, AwsLogsHook) @@ -89,7 +103,6 @@ def test_hook_raises(self): handler = CloudwatchTaskHandler( self.local_log_location, f"arn:aws:logs:{self.region_name}:11111111:log-group:{self.remote_log_group}", - self.filename_template, ) with mock.patch.object(handler.log, 'error') as mock_error: diff --git a/tests/providers/amazon/aws/log/test_s3_task_handler.py b/tests/providers/amazon/aws/log/test_s3_task_handler.py index a322f167eccc2..d5a5185f758c7 100644 --- a/tests/providers/amazon/aws/log/test_s3_task_handler.py +++ b/tests/providers/amazon/aws/log/test_s3_task_handler.py @@ -17,7 +17,6 @@ # under the License. import os -import unittest from unittest import mock from unittest.mock import ANY @@ -28,6 +27,7 @@ from airflow.operators.empty import EmptyOperator from airflow.providers.amazon.aws.hooks.s3 import S3Hook from airflow.providers.amazon.aws.log.s3_task_handler import S3TaskHandler +from airflow.utils.session import create_session from airflow.utils.state import State from airflow.utils.timezone import datetime from tests.test_utils.config import conf_vars @@ -40,32 +40,39 @@ mock_s3 = None -@unittest.skipIf(mock_s3 is None, "Skipping test because moto.mock_s3 is not available") -@mock_s3 -class TestS3TaskHandler(unittest.TestCase): +@pytest.fixture(autouse=True, scope="module") +def s3mock(): + with mock_s3(): + yield + + +@pytest.mark.skipif(mock_s3 is None, reason="Skipping test because moto.mock_s3 is not available") +class TestS3TaskHandler: @conf_vars({('logging', 'remote_log_conn_id'): 'aws_default'}) - def setUp(self): - super().setUp() + @pytest.fixture(autouse=True) + def setup(self, create_log_template): self.remote_log_base = 's3://bucket/remote/log/location' self.remote_log_location = 's3://bucket/remote/log/location/1.log' self.remote_log_key = 'remote/log/location/1.log' self.local_log_location = 'local/log/location' - self.filename_template = '{try_number}.log' - self.s3_task_handler = S3TaskHandler( - self.local_log_location, self.remote_log_base, self.filename_template - ) + create_log_template('{try_number}.log') + self.s3_task_handler = S3TaskHandler(self.local_log_location, self.remote_log_base) # Vivfy the hook now with the config override assert self.s3_task_handler.hook is not None date = datetime(2016, 1, 1) self.dag = DAG('dag_for_testing_s3_task_handler', start_date=date) task = EmptyOperator(task_id='task_for_testing_s3_log_handler', dag=self.dag) - dag_run = DagRun(dag_id=self.dag.dag_id, execution_date=date, run_id="test") - self.ti = TaskInstance(task=task) + dag_run = DagRun(dag_id=self.dag.dag_id, execution_date=date, run_id="test", 
run_type="manual") + with create_session() as session: + session.add(dag_run) + session.commit() + session.refresh(dag_run) + + self.ti = TaskInstance(task=task, run_id=dag_run.run_id) self.ti.dag_run = dag_run self.ti.try_number = 1 self.ti.state = State.RUNNING - self.addCleanup(self.dag.clear) self.conn = boto3.client('s3') # We need to create the bucket since this is all in Moto's 'virtual' @@ -73,7 +80,13 @@ def setUp(self): moto.moto_api._internal.models.moto_api_backend.reset() self.conn.create_bucket(Bucket="bucket") - def tearDown(self): + yield + + self.dag.clear() + + with create_session() as session: + session.query(DagRun).delete() + if self.s3_task_handler.handler: try: os.remove(self.s3_task_handler.handler.baseFilename) @@ -86,7 +99,7 @@ def test_hook(self): @conf_vars({('logging', 'remote_log_conn_id'): 'aws_default'}) def test_hook_raises(self): - handler = S3TaskHandler(self.local_log_location, self.remote_log_base, self.filename_template) + handler = S3TaskHandler(self.local_log_location, self.remote_log_base) with mock.patch.object(handler.log, 'error') as mock_error: with mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook") as mock_hook: mock_hook.side_effect = Exception('Failed to connect') diff --git a/tests/providers/elasticsearch/log/test_es_task_handler.py b/tests/providers/elasticsearch/log/test_es_task_handler.py index 03eab3dbb78a6..e26a78fe77e83 100644 --- a/tests/providers/elasticsearch/log/test_es_task_handler.py +++ b/tests/providers/elasticsearch/log/test_es_task_handler.py @@ -58,9 +58,11 @@ class TestElasticsearchTaskHandler: EXECUTION_DATE = datetime(2016, 1, 1) LOG_ID = f'{DAG_ID}-{TASK_ID}-2016-01-01T00:00:00+00:00-1' JSON_LOG_ID = f'{DAG_ID}-{TASK_ID}-{ElasticsearchTaskHandler._clean_date(EXECUTION_DATE)}-1' + FILENAME_TEMPLATE = '{try_number}.log' @pytest.fixture() - def ti(self, create_task_instance): + def ti(self, create_task_instance, create_log_template): + create_log_template(self.FILENAME_TEMPLATE, '{dag_id}-{task_id}-{execution_date}-{try_number}') yield get_ti( dag_id=self.DAG_ID, task_id=self.TASK_ID, @@ -73,8 +75,6 @@ def ti(self, create_task_instance): @elasticmock def setup(self): self.local_log_location = 'local/log/location' - self.filename_template = '{try_number}.log' - self.log_id_template = '{dag_id}-{task_id}-{execution_date}-{try_number}' self.end_of_log_mark = 'end_of_log\n' self.write_stdout = False self.json_format = False @@ -82,15 +82,13 @@ def setup(self): self.host_field = 'host' self.offset_field = 'offset' self.es_task_handler = ElasticsearchTaskHandler( - self.local_log_location, - self.filename_template, - self.log_id_template, - self.end_of_log_mark, - self.write_stdout, - self.json_format, - self.json_fields, - self.host_field, - self.offset_field, + base_log_folder=self.local_log_location, + end_of_log_mark=self.end_of_log_mark, + write_stdout=self.write_stdout, + json_format=self.json_format, + json_fields=self.json_fields, + host_field=self.host_field, + offset_field=self.offset_field, ) self.es = elasticsearch.Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200}]) @@ -115,15 +113,13 @@ def test_client_with_config(self): assert es_conf == expected_dict # ensure creating with configs does not fail ElasticsearchTaskHandler( - self.local_log_location, - self.filename_template, - self.log_id_template, - self.end_of_log_mark, - self.write_stdout, - self.json_format, - self.json_fields, - self.host_field, - self.offset_field, + base_log_folder=self.local_log_location, + end_of_log_mark=self.end_of_log_mark, 
+ write_stdout=self.write_stdout, + json_format=self.json_format, + json_fields=self.json_fields, + host_field=self.host_field, + offset_field=self.offset_field, es_kwargs=es_conf, ) @@ -395,7 +391,7 @@ def test_close(self, ti): self.es_task_handler.set_context(ti) self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) + os.path.join(self.local_log_location, self.FILENAME_TEMPLATE.format(try_number=1)) ) as log_file: # end_of_log_mark may contain characters like '\n' which is needed to # have the log uploaded but will not be stored in elasticsearch. @@ -409,7 +405,7 @@ def test_close_no_mark_end(self, ti): self.es_task_handler.set_context(ti) self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) + os.path.join(self.local_log_location, self.FILENAME_TEMPLATE.format(try_number=1)) ) as log_file: assert self.end_of_log_mark not in log_file.read() assert self.es_task_handler.closed @@ -419,7 +415,7 @@ def test_close_closed(self, ti): self.es_task_handler.set_context(ti) self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) + os.path.join(self.local_log_location, self.FILENAME_TEMPLATE.format(try_number=1)) ) as log_file: assert 0 == len(log_file.read()) @@ -428,7 +424,7 @@ def test_close_with_no_handler(self, ti): self.es_task_handler.handler = None self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) + os.path.join(self.local_log_location, self.FILENAME_TEMPLATE.format(try_number=1)) ) as log_file: assert 0 == len(log_file.read()) assert self.es_task_handler.closed @@ -438,7 +434,7 @@ def test_close_with_no_stream(self, ti): self.es_task_handler.handler.stream = None self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) + os.path.join(self.local_log_location, self.FILENAME_TEMPLATE.format(try_number=1)) ) as log_file: assert self.end_of_log_mark in log_file.read() assert self.es_task_handler.closed @@ -447,7 +443,7 @@ def test_close_with_no_stream(self, ti): self.es_task_handler.handler.stream.close() self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) + os.path.join(self.local_log_location, self.FILENAME_TEMPLATE.format(try_number=1)) ) as log_file: assert self.end_of_log_mark in log_file.read() assert self.es_task_handler.closed @@ -478,15 +474,13 @@ def test_clean_date(self): ) def test_get_external_log_url(self, ti, json_format, es_frontend, expected_url): es_task_handler = ElasticsearchTaskHandler( - self.local_log_location, - self.filename_template, - self.log_id_template, - self.end_of_log_mark, - self.write_stdout, - json_format, - self.json_fields, - self.host_field, - self.offset_field, + base_log_folder=self.local_log_location, + end_of_log_mark=self.end_of_log_mark, + write_stdout=self.write_stdout, + json_format=json_format, + json_fields=self.json_fields, + host_field=self.host_field, + offset_field=self.offset_field, frontend=es_frontend, ) url = es_task_handler.get_external_log_url(ti, ti.try_number) @@ -508,8 +502,6 @@ def test_dynamic_offset(self, stdout_mock, ti): # arrange handler = ElasticsearchTaskHandler( base_log_folder=self.local_log_location, - filename_template=self.filename_template, - log_id_template=self.log_id_template, 
end_of_log_mark=self.end_of_log_mark, write_stdout=True, json_format=True, diff --git a/tests/providers/google/cloud/log/test_gcs_task_handler.py b/tests/providers/google/cloud/log/test_gcs_task_handler.py index 6517be8f31245..b443a9f8ec8ef 100644 --- a/tests/providers/google/cloud/log/test_gcs_task_handler.py +++ b/tests/providers/google/cloud/log/test_gcs_task_handler.py @@ -49,13 +49,11 @@ def local_log_location(self): yield td @pytest.fixture(autouse=True) - def gcs_task_handler(self, local_log_location): - self.remote_log_base = "gs://bucket/remote/log/location" - self.filename_template = "{try_number}.log" + def gcs_task_handler(self, create_log_template, local_log_location): + create_log_template("{try_number}.log") self.gcs_task_handler = GCSTaskHandler( base_log_folder=local_log_location, - gcs_log_folder=self.remote_log_base, - filename_template=self.filename_template, + gcs_log_folder="gs://bucket/remote/log/location", ) yield self.gcs_task_handler diff --git a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py b/tests/providers/microsoft/azure/log/test_wasb_task_handler.py index 4fe967671729d..3c92aa78aaa28 100644 --- a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py +++ b/tests/providers/microsoft/azure/log/test_wasb_task_handler.py @@ -22,23 +22,25 @@ from airflow.providers.microsoft.azure.hooks.wasb import WasbHook from airflow.providers.microsoft.azure.log.wasb_task_handler import WasbTaskHandler -from airflow.utils.state import State +from airflow.utils.state import TaskInstanceState from airflow.utils.timezone import datetime from tests.test_utils.config import conf_vars from tests.test_utils.db import clear_db_dags, clear_db_runs +DEFAULT_DATE = datetime(2020, 8, 10) + class TestWasbTaskHandler: @pytest.fixture(autouse=True) - def ti(self, create_task_instance): - date = datetime(2020, 8, 10) + def ti(self, create_task_instance, create_log_template): + create_log_template("{try_number}.log") ti = create_task_instance( dag_id='dag_for_testing_wasb_task_handler', task_id='task_for_testing_wasb_log_handler', - execution_date=date, - start_date=date, - dagrun_state=State.RUNNING, - state=State.RUNNING, + execution_date=DEFAULT_DATE, + start_date=DEFAULT_DATE, + dagrun_state=TaskInstanceState.RUNNING, + state=TaskInstanceState.RUNNING, ) ti.try_number = 1 ti.hostname = 'localhost' @@ -52,12 +54,10 @@ def setup_method(self): self.remote_log_location = 'remote/log/location/1.log' self.local_log_location = 'local/log/location' self.container_name = "wasb-container" - self.filename_template = '{try_number}.log' self.wasb_task_handler = WasbTaskHandler( base_log_folder=self.local_log_location, wasb_log_folder=self.wasb_log_folder, wasb_container=self.container_name, - filename_template=self.filename_template, delete_local_copy=True, ) @@ -68,9 +68,7 @@ def test_hook(self, mock_service): @conf_vars({('logging', 'remote_log_conn_id'): 'wasb_default'}) def test_hook_raises(self): - handler = WasbTaskHandler( - self.local_log_location, self.wasb_log_folder, self.container_name, self.filename_template, True - ) + handler = self.wasb_task_handler with mock.patch.object(handler.log, 'error') as mock_error: with mock.patch("airflow.providers.microsoft.azure.hooks.wasb.WasbHook") as mock_hook: mock_hook.side_effect = AzureHttpError("failed to connect", 404) @@ -120,15 +118,14 @@ def test_wasb_read(self, mock_hook, ti): [{'end_of_log': True}], ) - def test_wasb_read_raises(self): - handler = WasbTaskHandler( - self.local_log_location, self.wasb_log_folder, 
self.container_name, self.filename_template, True - ) + @mock.patch( + "airflow.providers.microsoft.azure.hooks.wasb.WasbHook", + **{"return_value.read_file.side_effect": AzureHttpError("failed to connect", 404)}, + ) + def test_wasb_read_raises(self, mock_hook): + handler = self.wasb_task_handler with mock.patch.object(handler.log, 'error') as mock_error: - with mock.patch("airflow.providers.microsoft.azure.hooks.wasb.WasbHook") as mock_hook: - mock_hook.return_value.read_file.side_effect = AzureHttpError("failed to connect", 404) - - handler.wasb_read(self.remote_log_location, return_error=True) + handler.wasb_read(self.remote_log_location, return_error=True) mock_error.assert_called_once_with( 'Could not read logs from remote/log/location/1.log', exc_info=True, @@ -164,9 +161,7 @@ def test_write_when_append_is_false(self, mock_hook): ) def test_write_raises(self): - handler = WasbTaskHandler( - self.local_log_location, self.wasb_log_folder, self.container_name, self.filename_template, True - ) + handler = self.wasb_task_handler with mock.patch.object(handler.log, 'error') as mock_error: with mock.patch("airflow.providers.microsoft.azure.hooks.wasb.WasbHook") as mock_hook: mock_hook.return_value.load_string.side_effect = AzureHttpError("failed to connect", 404) diff --git a/tests/task/task_runner/test_task_runner.py b/tests/task/task_runner/test_task_runner.py index ab140e05f598f..fc5f3cc894650 100644 --- a/tests/task/task_runner/test_task_runner.py +++ b/tests/task/task_runner/test_task_runner.py @@ -36,6 +36,7 @@ def test_should_have_valid_imports(self, import_path): def test_should_support_core_task_runner(self, mock_subprocess): ti = mock.MagicMock(map_index=-1, run_as_user=None) ti.get_template_context.return_value = {"ti": ti} + ti.get_dagrun.return_value.get_log_template.return_value.filename = "blah" local_task_job = mock.MagicMock(task_instance=ti) task_runner = get_task_runner(local_task_job) diff --git a/tests/utils/log/test_log_reader.py b/tests/utils/log/test_log_reader.py index 4e6e942741df8..9a76ada725d33 100644 --- a/tests/utils/log/test_log_reader.py +++ b/tests/utils/log/test_log_reader.py @@ -29,6 +29,7 @@ from airflow import settings from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG from airflow.models import DagRun +from airflow.models.tasklog import LogTemplate from airflow.operators.python import PythonOperator from airflow.timetables.base import DataInterval from airflow.utils import timezone @@ -44,6 +45,7 @@ class TestLogView: DAG_ID = "dag_log_reader" TASK_ID = "task_log_reader" DEFAULT_DATE = timezone.datetime(2017, 9, 1) + FILENAME_TEMPLATE = "{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts | replace(':', '.') }}/{{ try_number }}.log" @pytest.fixture(autouse=True) def log_dir(self): @@ -70,9 +72,7 @@ def settings_folder(self): def configure_loggers(self, log_dir, settings_folder): logging_config = copy.deepcopy(DEFAULT_LOGGING_CONFIG) logging_config["handlers"]["task"]["base_log_folder"] = log_dir - logging_config["handlers"]["task"][ - "filename_template" - ] = "{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts | replace(':', '.') }}/{{ try_number }}.log" + logging_config["handlers"]["task"]["filename_template"] = self.FILENAME_TEMPLATE settings_file = os.path.join(settings_folder, "airflow_local_settings.py") with open(settings_file, "w") as handle: new_logging_file = f"LOGGING_CONFIG = {logging_config}" @@ -93,6 +93,10 @@ def prepare_log_files(self, log_dir): @pytest.fixture(autouse=True) def prepare_db(self, create_task_instance): + session = 
settings.Session() + log_template = LogTemplate(filename=self.FILENAME_TEMPLATE, elasticsearch_id="") + session.add(log_template) + session.commit() ti = create_task_instance( dag_id=self.DAG_ID, task_id=self.TASK_ID, @@ -107,6 +111,8 @@ def prepare_db(self, create_task_instance): yield clear_db_runs() clear_db_dags() + session.delete(log_template) + session.commit() def test_test_read_log_chunks_should_read_one_try(self): task_log_reader = TaskLogReader() diff --git a/tests/utils/test_log_handlers.py b/tests/utils/test_log_handlers.py index f4b4f7b2e31d7..28b9c7cf1aa22 100644 --- a/tests/utils/test_log_handlers.py +++ b/tests/utils/test_log_handlers.py @@ -21,8 +21,6 @@ import os import re -import pytest - from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG from airflow.models import DAG, DagRun, TaskInstance from airflow.operators.python import PythonOperator @@ -218,34 +216,37 @@ def task_callable(ti): os.remove(log_filename) -@pytest.fixture() -def filename_rendering_ti(session, create_task_instance): - return create_task_instance( - dag_id='dag_for_testing_filename_rendering', - task_id='task_for_testing_filename_rendering', - run_type=DagRunType.SCHEDULED, - execution_date=DEFAULT_DATE, - session=session, - ) - - class TestFilenameRendering: - def test_python_formatting(self, filename_rendering_ti): - expected_filename = ( - f'dag_for_testing_filename_rendering/task_for_testing_filename_rendering/' - f'{DEFAULT_DATE.isoformat()}/42.log' + def test_python_formatting(self, create_log_template, create_task_instance): + create_log_template("{dag_id}/{task_id}/{execution_date}/{try_number}.log") + filename_rendering_ti = create_task_instance( + dag_id="dag_for_testing_filename_rendering", + task_id="task_for_testing_filename_rendering", + run_type=DagRunType.SCHEDULED, + execution_date=DEFAULT_DATE, ) - fth = FileTaskHandler('', '{dag_id}/{task_id}/{execution_date}/{try_number}.log') + expected_filename = ( + f"dag_for_testing_filename_rendering/task_for_testing_filename_rendering/" + f"{DEFAULT_DATE.isoformat()}/42.log" + ) + fth = FileTaskHandler("") rendered_filename = fth._render_filename(filename_rendering_ti, 42) assert expected_filename == rendered_filename - def test_jinja_rendering(self, filename_rendering_ti): - expected_filename = ( - f'dag_for_testing_filename_rendering/task_for_testing_filename_rendering/' - f'{DEFAULT_DATE.isoformat()}/42.log' + def test_jinja_rendering(self, create_log_template, create_task_instance): + create_log_template("{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log") + filename_rendering_ti = create_task_instance( + dag_id="dag_for_testing_filename_rendering", + task_id="task_for_testing_filename_rendering", + run_type=DagRunType.SCHEDULED, + execution_date=DEFAULT_DATE, ) - fth = FileTaskHandler('', '{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log') + expected_filename = ( + f"dag_for_testing_filename_rendering/task_for_testing_filename_rendering/" + f"{DEFAULT_DATE.isoformat()}/42.log" + ) + fth = FileTaskHandler("") rendered_filename = fth._render_filename(filename_rendering_ti, 42) assert expected_filename == rendered_filename diff --git a/tests/www/test_logs/dag_for_testing_log_view/scheduled__2017-09-01T00:00:00+00:00/task_for_testing_log_view/1.log b/tests/www/test_logs/dag_for_testing_log_view/scheduled__2017-09-01T00:00:00+00:00/task_for_testing_log_view/1.log new file mode 100644 index 0000000000000..bc10ef7880290 --- /dev/null +++ 
b/tests/www/test_logs/dag_for_testing_log_view/scheduled__2017-09-01T00:00:00+00:00/task_for_testing_log_view/1.log @@ -0,0 +1 @@ +Log for testing. diff --git a/tests/www/test_logs/dag_id=dag_for_testing_log_view/run_id=scheduled__2017-09-01T00:00:00+00:00/task_id=task_for_testing_log_view/attempt=1.log b/tests/www/test_logs/dag_id=dag_for_testing_log_view/run_id=scheduled__2017-09-01T00:00:00+00:00/task_id=task_for_testing_log_view/attempt=1.log new file mode 100644 index 0000000000000..bc10ef7880290 --- /dev/null +++ b/tests/www/test_logs/dag_id=dag_for_testing_log_view/run_id=scheduled__2017-09-01T00:00:00+00:00/task_id=task_for_testing_log_view/attempt=1.log @@ -0,0 +1 @@ +Log for testing. diff --git a/tests/www/views/test_views_log.py b/tests/www/views/test_views_log.py index 988d28593649c..fd136351cf1ad 100644 --- a/tests/www/views/test_views_log.py +++ b/tests/www/views/test_views_log.py @@ -85,9 +85,6 @@ def factory(): logging_config['handlers']['task']['base_log_folder'] = str( pathlib.Path(__file__, "..", "..", "test_logs").resolve(), ) - logging_config['handlers']['task'][ - 'filename_template' - ] = '{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts | replace(":", ".") }}/{{ try_number }}.log' with tempfile.TemporaryDirectory() as settings_dir: local_settings = pathlib.Path(settings_dir, "airflow_local_settings.py") From d54af711c42b62d23b84ce7377da64e90bbb7c4e Mon Sep 17 00:00:00 2001 From: Ash Berlin-Taylor Date: Fri, 10 Jun 2022 15:35:38 +0100 Subject: [PATCH 088/118] Add tests for the grid_data endpoint (#24375) The one fix/change here was to include the JSON content response here so that `resp.json` works in the test. (cherry picked from commit 2b2d97068fa45881672dab6f2134becae246f3f3) --- airflow/www/views.py | 5 +- tests/www/views/test_views_grid.py | 238 +++++++++++++++++++++++++++++ 2 files changed, 242 insertions(+), 1 deletion(-) create mode 100644 tests/www/views/test_views_grid.py diff --git a/airflow/www/views.py b/airflow/www/views.py index fe4217c4525a6..7e1a80da1bc96 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -3544,7 +3544,10 @@ def grid_data(self): 'dag_runs': encoded_runs, } # avoid spaces to reduce payload size - return htmlsafe_json_dumps(data, separators=(',', ':')) + return ( + htmlsafe_json_dumps(data, separators=(',', ':')), + {'Content-Type': 'application/json; charset=utf-8'}, + ) @expose('/robots.txt') @action_logging diff --git a/tests/www/views/test_views_grid.py b/tests/www/views/test_views_grid.py new file mode 100644 index 0000000000000..e5d29be8a2410 --- /dev/null +++ b/tests/www/views/test_views_grid.py @@ -0,0 +1,238 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import freezegun +import pendulum +import pytest + +from airflow.models import DagBag +from airflow.operators.empty import EmptyOperator +from airflow.utils.state import DagRunState, TaskInstanceState +from airflow.utils.task_group import TaskGroup +from airflow.utils.types import DagRunType +from tests.test_utils.mock_operators import MockOperator + +DAG_ID = 'test' +CURRENT_TIME = pendulum.DateTime(2021, 9, 7) + + +@pytest.fixture(autouse=True, scope="module") +def examples_dag_bag(): + # Speed up: We don't want example dags for this module + return DagBag(include_examples=False, read_dags_from_db=True) + + +@pytest.fixture +def dag_without_runs(dag_maker, session, app, monkeypatch): + with monkeypatch.context() as m: + # Remove global operator links for this test + m.setattr('airflow.plugins_manager.global_operator_extra_links', []) + m.setattr('airflow.plugins_manager.operator_extra_links', []) + m.setattr('airflow.plugins_manager.registered_operator_link_classes', {}) + + with dag_maker(dag_id=DAG_ID, serialized=True, session=session): + EmptyOperator(task_id="task1") + with TaskGroup(group_id='group'): + MockOperator.partial(task_id='mapped').expand(arg1=['a', 'b', 'c']) + + m.setattr(app, 'dag_bag', dag_maker.dagbag) + yield dag_maker + + +@pytest.fixture +def dag_with_runs(dag_without_runs): + with freezegun.freeze_time(CURRENT_TIME): + date = dag_without_runs.dag.start_date + run_1 = dag_without_runs.create_dagrun( + run_id='run_1', state=DagRunState.SUCCESS, run_type=DagRunType.SCHEDULED, execution_date=date + ) + run_2 = dag_without_runs.create_dagrun( + run_id='run_2', + run_type=DagRunType.SCHEDULED, + execution_date=dag_without_runs.dag.next_dagrun_info(date).logical_date, + ) + + yield run_1, run_2 + + +def test_no_runs(admin_client, dag_without_runs): + resp = admin_client.get(f'/object/grid_data?dag_id={DAG_ID}', follow_redirects=True) + assert resp.status_code == 200, resp.json + assert resp.json == { + 'dag_runs': [], + 'groups': { + 'children': [ + { + 'extra_links': [], + 'id': 'task1', + 'instances': [], + 'is_mapped': False, + 'label': 'task1', + }, + { + 'children': [ + { + 'extra_links': [], + 'id': 'group.mapped', + 'instances': [], + 'is_mapped': True, + 'label': 'mapped', + } + ], + 'id': 'group', + 'instances': [], + 'label': 'group', + 'tooltip': '', + }, + ], + 'id': None, + 'instances': [], + 'label': None, + 'tooltip': '', + }, + } + + +def test_one_run(admin_client, dag_with_runs, session): + run1, run2 = dag_with_runs + + for ti in run1.task_instances: + ti.state = TaskInstanceState.SUCCESS + + session.flush() + + resp = admin_client.get(f'/object/grid_data?dag_id={DAG_ID}', follow_redirects=True) + assert resp.status_code == 200, resp.json + assert resp.json == { + 'dag_runs': [ + { + 'data_interval_end': '2016-01-02T00:00:00+00:00', + 'data_interval_start': '2016-01-01T00:00:00+00:00', + 'end_date': '2021-09-07T00:00:00+00:00', + 'execution_date': '2016-01-01T00:00:00+00:00', + 'last_scheduling_decision': None, + 'run_id': 'run_1', + 'run_type': 'scheduled', + 'start_date': '2016-01-01T00:00:00+00:00', + 'state': 'success', + }, + { + 'data_interval_end': '2016-01-03T00:00:00+00:00', + 'data_interval_start': '2016-01-02T00:00:00+00:00', + 'end_date': None, + 'execution_date': '2016-01-02T00:00:00+00:00', + 'last_scheduling_decision': None, + 'run_id': 'run_2', + 'run_type': 'scheduled', + 'start_date': '2016-01-01T00:00:00+00:00', + 'state': 'running', + }, + ], + 'groups': { + 'children': [ + { + 'extra_links': [], + 'id': 'task1', + 'instances': [ 
+ { + 'end_date': None, + 'map_index': -1, + 'run_id': 'run_1', + 'start_date': None, + 'state': 'success', + 'task_id': 'task1', + 'try_number': 1, + }, + { + 'end_date': None, + 'map_index': -1, + 'run_id': 'run_2', + 'start_date': None, + 'state': None, + 'task_id': 'task1', + 'try_number': 1, + }, + ], + 'is_mapped': False, + 'label': 'task1', + }, + { + 'children': [ + { + 'extra_links': [], + 'id': 'group.mapped', + 'instances': [ + { + 'end_date': None, + 'mapped_states': ['success', 'success', 'success'], + 'run_id': 'run_1', + 'start_date': None, + 'state': 'success', + 'task_id': 'group.mapped', + 'try_number': 1, + }, + { + 'end_date': None, + 'mapped_states': [None, None, None], + 'run_id': 'run_2', + 'start_date': None, + 'state': None, + 'task_id': 'group.mapped', + 'try_number': 1, + }, + ], + 'is_mapped': True, + 'label': 'mapped', + }, + ], + 'id': 'group', + 'instances': [ + { + 'end_date': None, + 'run_id': 'run_1', + 'start_date': None, + 'state': 'success', + 'task_id': 'group', + }, + { + 'end_date': None, + 'run_id': 'run_2', + 'start_date': None, + 'state': None, + 'task_id': 'group', + }, + ], + 'label': 'group', + 'tooltip': '', + }, + ], + 'id': None, + 'instances': [ + { + 'end_date': None, + 'run_id': 'run_1', + 'start_date': None, + 'state': 'success', + 'task_id': None, + }, + {'end_date': None, 'run_id': 'run_2', 'start_date': None, 'state': None, 'task_id': None}, + ], + 'label': None, + 'tooltip': '', + }, + } From 627488f67f3fedc2ea4a8abc7efe82d2dfc34e9a Mon Sep 17 00:00:00 2001 From: Ash Berlin-Taylor Date: Wed, 15 Jun 2022 13:02:23 +0100 Subject: [PATCH 089/118] Speed up grid_data endpoint by 10x (#24284) * Speed up grid_data endpoint by 10x These changes make the endpoint go from almost 20s down to 1.5s and the changes are two fold: 1. Keep datetimes as objects for as long as possible Previously we were converting start/end dates for a task group to a string, and then in the parent parsing it back to a datetime to find the min and max of all the child nodes. The fix for that was to leave it as a datetime (or a pendulum.DateTime technically) and use the existing `AirflowJsonEncoder` class to "correctly" encode these objects on output. 2. Reduce the number of DB queries from 1 per task to 1. The removed `get_task_summaries` function was called for each task, and was making a query to the database to find info for the given DagRuns. The helper function now makes just a single DB query for all tasks/runs and constructs a dict to efficiently look up the ti by run_id. * Add support for mapped tasks in the grid data * Don't fail when not all tasks have a finish date. Note that this possibly has incorrect behaviour, in that the end_date of a TaskGroup is set to the max of all the children's end dates, even if some are still running. 
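As an illustration of point 2, here is a minimal sketch of the "single query plus dict lookup" pattern described above. The function name, arguments, and exact filters are hypothetical and are not the view code touched by this patch; they only show the shape of the optimization:

    from collections import defaultdict

    from airflow.models.taskinstance import TaskInstance


    def build_ti_index(session, dag_id, run_ids):
        """Fetch all task instances for the given runs in one query and index
        them by (task_id, run_id) so later per-task lookups are dict accesses.

        Illustrative only: not the actual grid_data implementation.
        """
        tis = (
            session.query(TaskInstance)
            .filter(TaskInstance.dag_id == dag_id, TaskInstance.run_id.in_(run_ids))
            .all()
        )
        index = defaultdict(list)
        for ti in tis:
            index[(ti.task_id, ti.run_id)].append(ti)
        return index

Grouping by (task_id, run_id) keeps each cell of the grid an O(1) dict lookup instead of issuing one database query per task.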
(This is the existing behaviour and is not changed or altered by this change - limiting it to just performance fixes) (cherry picked from commit 451a6f4d9ff8b744075e2f25099046c77f28179e) --- airflow/utils/json.py | 6 +- .../js/grid/components/InstanceTooltip.jsx | 14 +- .../grid/components/InstanceTooltip.test.jsx | 2 +- .../details/content/taskInstance/Details.jsx | 14 +- airflow/www/utils.py | 49 ----- airflow/www/views.py | 186 +++++++++++++----- tests/utils/test_json.py | 7 +- tests/www/views/test_views_grid.py | 77 ++++++-- 8 files changed, 223 insertions(+), 132 deletions(-) diff --git a/airflow/utils/json.py b/airflow/utils/json.py index 326a2f1b4379d..99ab607c03200 100644 --- a/airflow/utils/json.py +++ b/airflow/utils/json.py @@ -22,6 +22,8 @@ from flask.json import JSONEncoder +from airflow.utils.timezone import convert_to_utc, is_naive + try: import numpy as np except ImportError: @@ -48,7 +50,9 @@ def __init__(self, *args, **kwargs): def _default(obj): """Convert dates and numpy objects in a json serializable format.""" if isinstance(obj, datetime): - return obj.strftime('%Y-%m-%dT%H:%M:%SZ') + if is_naive(obj): + obj = convert_to_utc(obj) + return obj.isoformat() elif isinstance(obj, date): return obj.strftime('%Y-%m-%d') elif isinstance(obj, Decimal): diff --git a/airflow/www/static/js/grid/components/InstanceTooltip.jsx b/airflow/www/static/js/grid/components/InstanceTooltip.jsx index ebcecc5341b32..8898f5af53cf8 100644 --- a/airflow/www/static/js/grid/components/InstanceTooltip.jsx +++ b/airflow/www/static/js/grid/components/InstanceTooltip.jsx @@ -35,6 +35,7 @@ const InstanceTooltip = ({ const summary = []; const numMap = finalStatesMap(); + let numMapped = 0; if (isGroup) { group.children.forEach((child) => { const taskInstance = child.instances.find((ti) => ti.runId === runId); @@ -44,9 +45,10 @@ const InstanceTooltip = ({ } }); } else if (isMapped && mappedStates) { - mappedStates.forEach((s) => { - const stateKey = s || 'no_status'; - if (numMap.has(stateKey)) numMap.set(stateKey, numMap.get(stateKey) + 1); + Object.keys(mappedStates).forEach((stateKey) => { + const num = mappedStates[stateKey]; + numMapped += num; + numMap.set(stateKey || 'no_status', num); }); } @@ -68,12 +70,12 @@ const InstanceTooltip = ({ {group.tooltip && ( {group.tooltip} )} - {isMapped && !!mappedStates.length && ( + {isMapped && numMapped > 0 && ( - {mappedStates.length} + {numMapped} {' '} mapped task - {mappedStates.length > 1 && 's'} + {numMapped > 1 && 's'} )} diff --git a/airflow/www/static/js/grid/components/InstanceTooltip.test.jsx b/airflow/www/static/js/grid/components/InstanceTooltip.test.jsx index fc6ab848c958d..eb1abe8ba4a79 100644 --- a/airflow/www/static/js/grid/components/InstanceTooltip.test.jsx +++ b/airflow/www/static/js/grid/components/InstanceTooltip.test.jsx @@ -49,7 +49,7 @@ describe('Test Task InstanceTooltip', () => { const { getByText } = render( , { wrapper: Wrapper }, ); diff --git a/airflow/www/static/js/grid/details/content/taskInstance/Details.jsx b/airflow/www/static/js/grid/details/content/taskInstance/Details.jsx index 55ea09951fd2e..e82d2b63a1fbb 100644 --- a/airflow/www/static/js/grid/details/content/taskInstance/Details.jsx +++ b/airflow/www/static/js/grid/details/content/taskInstance/Details.jsx @@ -50,6 +50,7 @@ const Details = ({ instance, group, operator }) => { } = group; const numMap = finalStatesMap(); + let numMapped = 0; if (isGroup) { children.forEach((child) => { const taskInstance = child.instances.find((ti) => ti.runId === runId); @@ -59,9 
+60,10 @@ const Details = ({ instance, group, operator }) => { } }); } else if (isMapped && mappedStates) { - mappedStates.forEach((s) => { - const stateKey = s || 'no_status'; - if (numMap.has(stateKey)) numMap.set(stateKey, numMap.get(stateKey) + 1); + Object.keys(mappedStates).forEach((stateKey) => { + const num = mappedStates[stateKey]; + numMapped += num; + numMap.set(stateKey || 'no_status', num); }); } @@ -92,11 +94,11 @@ const Details = ({ instance, group, operator }) => {
)} - {mappedStates && mappedStates.length > 0 && ( + {mappedStates && numMapped > 0 && ( - {mappedStates.length} + {numMapped} {' '} - {mappedStates.length === 1 ? 'Task ' : 'Tasks '} + {numMapped === 1 ? 'Task ' : 'Tasks '} Mapped )} diff --git a/airflow/www/utils.py b/airflow/www/utils.py index feeedd0cefce3..c8b97ec901538 100644 --- a/airflow/www/utils.py +++ b/airflow/www/utils.py @@ -36,11 +36,9 @@ from pygments import highlight, lexers from pygments.formatters import HtmlFormatter from sqlalchemy.ext.associationproxy import AssociationProxy -from sqlalchemy.orm import Session from airflow import models from airflow.models import errors -from airflow.models.dagrun import DagRun from airflow.models.taskinstance import TaskInstance from airflow.utils import timezone from airflow.utils.code_utils import get_python_source @@ -129,53 +127,6 @@ def get_mapped_summary(parent_instance, task_instances): } -def get_task_summaries(task, dag_runs: List[DagRun], session: Session) -> List[Dict[str, Any]]: - tis = ( - session.query( - TaskInstance.dag_id, - TaskInstance.task_id, - TaskInstance.run_id, - TaskInstance.map_index, - TaskInstance.state, - TaskInstance.start_date, - TaskInstance.end_date, - TaskInstance._try_number, - ) - .filter( - TaskInstance.dag_id == task.dag_id, - TaskInstance.run_id.in_([dag_run.run_id for dag_run in dag_runs]), - TaskInstance.task_id == task.task_id, - # Only get normal task instances or the first mapped task - TaskInstance.map_index <= 0, - ) - .order_by(TaskInstance.run_id.asc()) - ) - - def _get_summary(task_instance): - if task_instance.map_index > -1: - return get_mapped_summary( - task_instance, task_instances=get_mapped_instances(task_instance, session) - ) - - try_count = ( - task_instance._try_number - if task_instance._try_number != 0 or task_instance.state in State.running - else task_instance._try_number + 1 - ) - - return { - 'task_id': task_instance.task_id, - 'run_id': task_instance.run_id, - 'map_index': task_instance.map_index, - 'state': task_instance.state, - 'start_date': datetime_to_string(task_instance.start_date), - 'end_date': datetime_to_string(task_instance.end_date), - 'try_number': try_count, - } - - return [_get_summary(ti) for ti in tis] - - def encode_dag_run(dag_run: Optional[models.DagRun]) -> Optional[Dict[str, Any]]: if not dag_run: return None diff --git a/airflow/www/views.py b/airflow/www/views.py index 7e1a80da1bc96..52111f1405ffa 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -18,6 +18,7 @@ # import collections import copy +import itertools import json import logging import math @@ -252,64 +253,151 @@ def _safe_parse_datetime(v): abort(400, f"Invalid datetime: {v!r}") -def task_group_to_grid(task_item_or_group, dag, dag_runs, session): +def dag_to_grid(dag, dag_runs, session): """ - Create a nested dict representation of this TaskGroup and its children used to construct - the Graph. + Create a nested dict representation of the DAG's TaskGroup and its children + used to construct the Graph and Grid views. 
""" - if isinstance(task_item_or_group, AbstractOperator): - return { - 'id': task_item_or_group.task_id, - 'instances': wwwutils.get_task_summaries(task_item_or_group, dag_runs, session), - 'label': task_item_or_group.label, - 'extra_links': task_item_or_group.extra_links, - 'is_mapped': task_item_or_group.is_mapped, - } + query = ( + session.query( + TaskInstance.task_id, + TaskInstance.run_id, + TaskInstance.state, + sqla.func.count(sqla.func.coalesce(TaskInstance.state, sqla.literal('no_status'))).label( + 'state_count' + ), + sqla.func.min(TaskInstance.start_date).label('start_date'), + sqla.func.max(TaskInstance.end_date).label('end_date'), + sqla.func.max(TaskInstance._try_number).label('_try_number'), + ) + .filter( + TaskInstance.dag_id == dag.dag_id, + TaskInstance.run_id.in_([dag_run.run_id for dag_run in dag_runs]), + ) + .group_by(TaskInstance.task_id, TaskInstance.run_id, TaskInstance.state) + .order_by(TaskInstance.task_id, TaskInstance.run_id) + ) - # Task Group - task_group = task_item_or_group + grouped_tis = {task_id: list(tis) for task_id, tis in itertools.groupby(query, key=lambda ti: ti.task_id)} + + def task_group_to_grid(item, dag_runs, grouped_tis): + if isinstance(item, AbstractOperator): + + def _get_summary(task_instance): + try_count = ( + task_instance._try_number + if task_instance._try_number != 0 or task_instance.state in State.running + else task_instance._try_number + 1 + ) + + return { + 'task_id': task_instance.task_id, + 'run_id': task_instance.run_id, + 'state': task_instance.state, + 'start_date': task_instance.start_date, + 'end_date': task_instance.end_date, + 'try_number': try_count, + } + + def _mapped_summary(ti_summaries): + run_id = None + record = None + + def set_overall_state(record): + for state in wwwutils.priority: + if state in record['mapped_states']: + record['state'] = state + break + if None in record['mapped_states']: + # When turnong the dict into JSON we can't have None as a key, so use the string that + # the UI does + record['mapped_states']['no_status'] = record['mapped_states'].pop(None) + + for ti_summary in ti_summaries: + if ti_summary.state is None: + ti_summary.state == 'no_status' + if run_id != ti_summary.run_id: + run_id = ti_summary.run_id + if record: + set_overall_state(record) + yield record + record = { + 'task_id': ti_summary.task_id, + 'run_id': run_id, + 'start_date': ti_summary.start_date, + 'end_date': ti_summary.end_date, + 'mapped_states': {ti_summary.state: ti_summary.state_count}, + 'state': None, # We change this before yielding + } + continue + record['start_date'] = min( + filter(None, [record['start_date'], ti_summary.start_date]), default=None + ) + record['end_date'] = max( + filter(None, [record['end_date'], ti_summary.end_date]), default=None + ) + record['mapped_states'][ti_summary.state] = ti_summary.state_count + if record: + set_overall_state(record) + yield record + + if item.is_mapped: + instances = list(_mapped_summary(grouped_tis.get(item.task_id, []))) + else: + instances = list(map(_get_summary, grouped_tis.get(item.task_id, []))) + + return { + 'id': item.task_id, + 'instances': instances, + 'label': item.label, + 'extra_links': item.extra_links, + 'is_mapped': item.is_mapped, + } - children = [task_group_to_grid(child, dag, dag_runs, session) for child in task_group.topological_sort()] + # Task Group + task_group = item - def get_summary(dag_run, children): - child_instances = [child['instances'] for child in children if 'instances' in child] - child_instances = [ - item for sublist 
in child_instances for item in sublist if item['run_id'] == dag_run.run_id + children = [ + task_group_to_grid(child, dag_runs, grouped_tis) for child in task_group.topological_sort() ] - children_start_dates = [item['start_date'] for item in child_instances if item] - children_end_dates = [item['end_date'] for item in child_instances if item] - children_states = [item['state'] for item in child_instances if item] - - group_state = None - for state in wwwutils.priority: - if state in children_states: - group_state = state - break - group_start_date = wwwutils.datetime_to_string( - min((timezone.parse(date) for date in children_start_dates if date), default=None) - ) - group_end_date = wwwutils.datetime_to_string( - max((timezone.parse(date) for date in children_end_dates if date), default=None) - ) + def get_summary(dag_run, children): + child_instances = [child['instances'] for child in children if 'instances' in child] + child_instances = [ + item for sublist in child_instances for item in sublist if item['run_id'] == dag_run.run_id + ] + + children_start_dates = (item['start_date'] for item in child_instances if item) + children_end_dates = (item['end_date'] for item in child_instances if item) + children_states = {item['state'] for item in child_instances if item} + + group_state = None + for state in wwwutils.priority: + if state in children_states: + group_state = state + break + group_start_date = min(filter(None, children_start_dates), default=None) + group_end_date = max(filter(None, children_end_dates), default=None) + + return { + 'task_id': task_group.group_id, + 'run_id': dag_run.run_id, + 'state': group_state, + 'start_date': group_start_date, + 'end_date': group_end_date, + } + + group_summaries = [get_summary(dr, children) for dr in dag_runs] return { - 'task_id': task_group.group_id, - 'run_id': dag_run.run_id, - 'state': group_state, - 'start_date': group_start_date, - 'end_date': group_end_date, + 'id': task_group.group_id, + 'label': task_group.label, + 'children': children, + 'tooltip': task_group.tooltip, + 'instances': group_summaries, } - group_summaries = [get_summary(dr, children) for dr in dag_runs] - - return { - 'id': task_group.group_id, - 'label': task_group.label, - 'children': children, - 'tooltip': task_group.tooltip, - 'instances': group_summaries, - } + return task_group_to_grid(dag.task_group, dag_runs, grouped_tis) def task_group_to_dict(task_item_or_group): @@ -3540,12 +3628,12 @@ def grid_data(self): dag_runs.reverse() encoded_runs = [wwwutils.encode_dag_run(dr) for dr in dag_runs] data = { - 'groups': task_group_to_grid(dag.task_group, dag, dag_runs, session), + 'groups': dag_to_grid(dag, dag_runs, session), 'dag_runs': encoded_runs, } # avoid spaces to reduce payload size return ( - htmlsafe_json_dumps(data, separators=(',', ':')), + htmlsafe_json_dumps(data, separators=(',', ':'), cls=utils_json.AirflowJsonEncoder), {'Content-Type': 'application/json; charset=utf-8'}, ) diff --git a/tests/utils/test_json.py b/tests/utils/test_json.py index f34d90b79cee7..54d4902a2c73d 100644 --- a/tests/utils/test_json.py +++ b/tests/utils/test_json.py @@ -23,6 +23,7 @@ import numpy as np import parameterized +import pendulum import pytest from airflow.utils import json as utils_json @@ -31,7 +32,11 @@ class TestAirflowJsonEncoder(unittest.TestCase): def test_encode_datetime(self): obj = datetime.strptime('2017-05-21 00:00:00', '%Y-%m-%d %H:%M:%S') - assert json.dumps(obj, cls=utils_json.AirflowJsonEncoder) == '"2017-05-21T00:00:00Z"' + assert json.dumps(obj, 
cls=utils_json.AirflowJsonEncoder) == '"2017-05-21T00:00:00+00:00"' + + def test_encode_pendulum(self): + obj = pendulum.datetime(2017, 5, 21, tz='Asia/Kolkata') + assert json.dumps(obj, cls=utils_json.AirflowJsonEncoder) == '"2017-05-21T00:00:00+05:30"' def test_encode_date(self): assert json.dumps(date(2017, 5, 21), cls=utils_json.AirflowJsonEncoder) == '"2017-05-21"' diff --git a/tests/www/views/test_views_grid.py b/tests/www/views/test_views_grid.py index e5d29be8a2410..81aa0e757bcac 100644 --- a/tests/www/views/test_views_grid.py +++ b/tests/www/views/test_views_grid.py @@ -16,15 +16,21 @@ # specific language governing permissions and limitations # under the License. +from typing import List + import freezegun import pendulum import pytest from airflow.models import DagBag +from airflow.models.dagrun import DagRun from airflow.operators.empty import EmptyOperator from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.task_group import TaskGroup from airflow.utils.types import DagRunType +from airflow.www.views import dag_to_grid +from tests.test_utils.asserts import assert_queries_count +from tests.test_utils.db import clear_db_runs from tests.test_utils.mock_operators import MockOperator DAG_ID = 'test' @@ -37,6 +43,13 @@ def examples_dag_bag(): return DagBag(include_examples=False, read_dags_from_db=True) +@pytest.fixture(autouse=True) +def clean(): + clear_db_runs() + yield + clear_db_runs() + + @pytest.fixture def dag_without_runs(dag_maker, session, app, monkeypatch): with monkeypatch.context() as m: @@ -48,7 +61,7 @@ def dag_without_runs(dag_maker, session, app, monkeypatch): with dag_maker(dag_id=DAG_ID, serialized=True, session=session): EmptyOperator(task_id="task1") with TaskGroup(group_id='group'): - MockOperator.partial(task_id='mapped').expand(arg1=['a', 'b', 'c']) + MockOperator.partial(task_id='mapped').expand(arg1=['a', 'b', 'c', 'd']) m.setattr(app, 'dag_bag', dag_maker.dagbag) yield dag_maker @@ -108,11 +121,29 @@ def test_no_runs(admin_client, dag_without_runs): } -def test_one_run(admin_client, dag_with_runs, session): +def test_one_run(admin_client, dag_with_runs: List[DagRun], session): + """ + Test a DAG with complex interaction of states: + - One run successful + - One run partly success, partly running + - One TI not yet finished + """ run1, run2 = dag_with_runs for ti in run1.task_instances: ti.state = TaskInstanceState.SUCCESS + for ti in sorted(run2.task_instances, key=lambda ti: (ti.task_id, ti.map_index)): + if ti.task_id == "task1": + ti.state = TaskInstanceState.SUCCESS + elif ti.task_id == "group.mapped": + if ti.map_index == 0: + ti.state = TaskInstanceState.SUCCESS + ti.start_date = pendulum.DateTime(2021, 7, 1, 1, 0, 0, tzinfo=pendulum.UTC) + ti.end_date = pendulum.DateTime(2021, 7, 1, 1, 2, 3, tzinfo=pendulum.UTC) + elif ti.map_index == 1: + ti.state = TaskInstanceState.RUNNING + ti.start_date = pendulum.DateTime(2021, 7, 1, 2, 3, 4, tzinfo=pendulum.UTC) + ti.end_date = None session.flush() @@ -150,20 +181,18 @@ def test_one_run(admin_client, dag_with_runs, session): 'id': 'task1', 'instances': [ { - 'end_date': None, - 'map_index': -1, 'run_id': 'run_1', 'start_date': None, + 'end_date': None, 'state': 'success', 'task_id': 'task1', 'try_number': 1, }, { - 'end_date': None, - 'map_index': -1, 'run_id': 'run_2', 'start_date': None, - 'state': None, + 'end_date': None, + 'state': 'success', 'task_id': 'task1', 'try_number': 1, }, @@ -178,22 +207,20 @@ def test_one_run(admin_client, dag_with_runs, session): 'id': 
'group.mapped', 'instances': [ { - 'end_date': None, - 'mapped_states': ['success', 'success', 'success'], 'run_id': 'run_1', + 'mapped_states': {'success': 4}, 'start_date': None, + 'end_date': None, 'state': 'success', 'task_id': 'group.mapped', - 'try_number': 1, }, { - 'end_date': None, - 'mapped_states': [None, None, None], 'run_id': 'run_2', - 'start_date': None, - 'state': None, + 'mapped_states': {'no_status': 2, 'running': 1, 'success': 1}, + 'start_date': '2021-07-01T01:00:00+00:00', + 'end_date': '2021-07-01T01:02:03+00:00', + 'state': 'running', 'task_id': 'group.mapped', - 'try_number': 1, }, ], 'is_mapped': True, @@ -210,10 +237,10 @@ def test_one_run(admin_client, dag_with_runs, session): 'task_id': 'group', }, { - 'end_date': None, 'run_id': 'run_2', - 'start_date': None, - 'state': None, + 'start_date': '2021-07-01T01:00:00+00:00', + 'end_date': '2021-07-01T01:02:03+00:00', + 'state': 'running', 'task_id': 'group', }, ], @@ -230,9 +257,21 @@ def test_one_run(admin_client, dag_with_runs, session): 'state': 'success', 'task_id': None, }, - {'end_date': None, 'run_id': 'run_2', 'start_date': None, 'state': None, 'task_id': None}, + { + 'end_date': '2021-07-01T01:02:03+00:00', + 'run_id': 'run_2', + 'start_date': '2021-07-01T01:00:00+00:00', + 'state': 'running', + 'task_id': None, + }, ], 'label': None, 'tooltip': '', }, } + + +def test_query_count(dag_with_runs, session): + run1, run2 = dag_with_runs + with assert_queries_count(1): + dag_to_grid(run1.dag, (run1, run2), session) From 4e654037769f4225c7bd1098deedd2c85e834b20 Mon Sep 17 00:00:00 2001 From: Brent Bovenzi Date: Wed, 18 May 2022 21:52:02 -0400 Subject: [PATCH 090/118] Maintain grid view selection on filtering upstream (#23779) * Maintain grid selection on filter upstream The grid view selection was being cleared when clicking "Filter Upstream". The selection should persist. 
Also, added a left margin to the "Reset root" button * fix linting (cherry picked from commit 9db2271fad751b51127ac0c2dc0477dc95b271d5) --- airflow/www/static/js/grid/ResetRoot.jsx | 2 +- .../www/static/js/grid/details/content/taskInstance/Nav.jsx | 4 +++- .../www/static/js/grid/details/content/taskInstance/index.jsx | 1 + 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/airflow/www/static/js/grid/ResetRoot.jsx b/airflow/www/static/js/grid/ResetRoot.jsx index c69e8dbb84cf7..bd6a89e3b7d20 100644 --- a/airflow/www/static/js/grid/ResetRoot.jsx +++ b/airflow/www/static/js/grid/ResetRoot.jsx @@ -33,7 +33,7 @@ const ResetRoot = () => ( variant="outline" href={url} colorScheme="blue" - mr={2} + mx={2} title="Reset root to show the whole DAG" > Reset Root diff --git a/airflow/www/static/js/grid/details/content/taskInstance/Nav.jsx b/airflow/www/static/js/grid/details/content/taskInstance/Nav.jsx index 93ee175b69da3..08c33d1e65756 100644 --- a/airflow/www/static/js/grid/details/content/taskInstance/Nav.jsx +++ b/airflow/www/static/js/grid/details/content/taskInstance/Nav.jsx @@ -47,7 +47,7 @@ const LinkButton = ({ children, ...rest }) => ( ); const Nav = ({ - taskId, executionDate, operator, isMapped, + runId, taskId, executionDate, operator, isMapped, }) => { const params = new URLSearchParams({ task_id: taskId, @@ -68,6 +68,8 @@ const Nav = ({ }).toString(); const filterParams = new URLSearchParams({ + task_id: taskId, + dag_run_id: runId, base_date: baseDate, num_runs: numRuns, root: taskId, diff --git a/airflow/www/static/js/grid/details/content/taskInstance/index.jsx b/airflow/www/static/js/grid/details/content/taskInstance/index.jsx index b6aad7d4d3ddc..90ef2e839e2ef 100644 --- a/airflow/www/static/js/grid/details/content/taskInstance/index.jsx +++ b/airflow/www/static/js/grid/details/content/taskInstance/index.jsx @@ -83,6 +83,7 @@ const TaskInstance = ({ taskId, runId }) => { {!isGroup && ( Date: Fri, 17 Jun 2022 12:31:28 -0400 Subject: [PATCH 091/118] Do not calculate grid root instances (#24528) * do not calculate grid root instances * fix tests (cherry picked from commit 8b054efba736b985c8fdb7a278f01d5f4934f857) --- airflow/www/views.py | 9 +++++++++ tests/www/views/test_views_grid.py | 19 +------------------ 2 files changed, 10 insertions(+), 18 deletions(-) diff --git a/airflow/www/views.py b/airflow/www/views.py index 52111f1405ffa..1b401db31b1ac 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -387,6 +387,15 @@ def get_summary(dag_run, children): 'end_date': group_end_date, } + # We don't need to calculate summaries for the root + if task_group.group_id is None: + return { + 'id': task_group.group_id, + 'label': task_group.label, + 'children': children, + 'instances': [], + } + group_summaries = [get_summary(dr, children) for dr in dag_runs] return { diff --git a/tests/www/views/test_views_grid.py b/tests/www/views/test_views_grid.py index 81aa0e757bcac..a64a0452bb8a0 100644 --- a/tests/www/views/test_views_grid.py +++ b/tests/www/views/test_views_grid.py @@ -116,7 +116,6 @@ def test_no_runs(admin_client, dag_without_runs): 'id': None, 'instances': [], 'label': None, - 'tooltip': '', }, } @@ -249,24 +248,8 @@ def test_one_run(admin_client, dag_with_runs: List[DagRun], session): }, ], 'id': None, - 'instances': [ - { - 'end_date': None, - 'run_id': 'run_1', - 'start_date': None, - 'state': 'success', - 'task_id': None, - }, - { - 'end_date': '2021-07-01T01:02:03+00:00', - 'run_id': 'run_2', - 'start_date': '2021-07-01T01:00:00+00:00', - 'state': 'running', 
- 'task_id': None, - }, - ], + 'instances': [], 'label': None, - 'tooltip': '', }, } From 0cc91d618e3c98f62d72d9638e06fcb5db84ce9a Mon Sep 17 00:00:00 2001 From: Brent Bovenzi Date: Thu, 16 Jun 2022 14:35:32 -0400 Subject: [PATCH 092/118] Upgrade to webpack 5 (#24485) * update webpack-cli, eslint, stylelint, babel * revert stylelint changes * update more plugins * update to webpack 5 * remove all resolutions (cherry picked from commit 127f8f4de02422ade8f2c84f84d3262d6efde185) --- airflow/www/.eslintrc | 14 +- airflow/www/package.json | 56 +- airflow/www/static/js/dags.js | 6 +- airflow/www/static/js/grid/Grid.jsx | 18 +- airflow/www/static/js/grid/LegendRow.test.jsx | 6 +- .../www/static/js/grid/components/Table.jsx | 1 + .../js/grid/components/TaskName.test.jsx | 12 +- .../www/static/js/grid/components/Tooltip.tsx | 4 +- .../static/js/grid/context/autorefresh.jsx | 50 +- .../www/static/js/grid/context/selection.jsx | 56 - .../www/static/js/grid/context/timezone.jsx | 8 +- .../www/static/js/grid/dagRuns/index.test.jsx | 14 +- .../www/static/js/grid/utils/useFilters.js | 6 +- airflow/www/static/js/ti_log.js | 6 +- airflow/www/webpack.config.js | 36 +- airflow/www/yarn.lock | 4927 +++++------------ 16 files changed, 1469 insertions(+), 3751 deletions(-) delete mode 100644 airflow/www/static/js/grid/context/selection.jsx diff --git a/airflow/www/.eslintrc b/airflow/www/.eslintrc index 818b1ccf31459..b1a446a31b818 100644 --- a/airflow/www/.eslintrc +++ b/airflow/www/.eslintrc @@ -1,6 +1,12 @@ { "extends": ["airbnb", "airbnb/hooks"], - "parser": "babel-eslint", + "parser": "@babel/eslint-parser", + "parserOptions": { + "babelOptions": { + "presets": ["@babel/preset-env", "@babel/preset-react", "@babel/preset-typescript"], + "plugins": ["@babel/plugin-transform-runtime"] + } + }, "plugins": [ "html", "react" ], "rules": { "no-param-reassign": 1, @@ -23,6 +29,12 @@ "optionalDependencies": false, "peerDependencies": false } + ], + "react/function-component-definition": [ + 0, + { + "namedComponents": "function-declaration" + } ] }, "settings": { diff --git a/airflow/www/package.json b/airflow/www/package.json index ad646770f8e77..85665962b1c84 100644 --- a/airflow/www/package.json +++ b/airflow/www/package.json @@ -2,9 +2,9 @@ "description": "Apache Airflow is a platform to programmatically author, schedule and monitor workflows.", "scripts": { "test": "jest", - "dev": "NODE_ENV=dev webpack --watch --colors --progress --debug --output-pathinfo --devtool eval-cheap-source-map --mode development", - "prod": "NODE_ENV=production node --max_old_space_size=4096 ./node_modules/webpack/bin/webpack.js -p --colors --progress", - "build": "NODE_ENV=production webpack --colors --progress", + "dev": "NODE_ENV=development webpack --watch --progress --devtool eval-cheap-source-map --mode development", + "prod": "NODE_ENV=production node --max_old_space_size=4096 ./node_modules/webpack/bin/webpack.js --mode production --progress", + "build": "NODE_ENV=production webpack --progress --mode production", "lint": "eslint --ignore-path=.eslintignore --ext .js,.jsx,.ts,.tsx . && tsc --noEmit", "lint:fix": "eslint --fix --ignore-path=.eslintignore --ext .js,.jsx,.ts,.tsx . 
&& tsc --noEmit" }, @@ -26,6 +26,8 @@ "flask" ], "devDependencies": { + "@babel/core": "^7.18.5", + "@babel/eslint-parser": "^7.18.2", "@babel/plugin-transform-runtime": "^7.16.0", "@babel/preset-env": "^7.16.0", "@babel/preset-react": "^7.16.0", @@ -37,45 +39,40 @@ "@types/react-dom": "^18.0.5", "@typescript-eslint/eslint-plugin": "^5.13.0", "@typescript-eslint/parser": "^5.0.0", - "babel": "^6.23.0", - "babel-core": "^6.26.3", - "babel-eslint": "^10.1.0", "babel-jest": "^27.3.1", "babel-loader": "^8.1.0", - "babel-plugin-css-modules-transform": "^1.6.1", - "babel-polyfill": "^6.26.0", "clean-webpack-plugin": "^3.0.0", "copy-webpack-plugin": "^6.0.3", - "css-loader": "^3.4.2", - "eslint": "^7.2.0", - "eslint-config-airbnb": "18.2.1", + "css-loader": "5.2.7", + "css-minimizer-webpack-plugin": "^4.0.0", + "eslint": "^8.6.0", + "eslint-config-airbnb": "^19.0.4", "eslint-config-airbnb-typescript": "^17.0.0", "eslint-plugin-html": "^6.0.2", - "eslint-plugin-import": "^2.22.1", - "eslint-plugin-jsx-a11y": "^6.4.1", + "eslint-plugin-import": "^2.25.3", + "eslint-plugin-jsx-a11y": "^6.5.0", "eslint-plugin-node": "^11.1.0", "eslint-plugin-promise": "^4.2.1", - "eslint-plugin-react": "^7.21.5", - "eslint-plugin-react-hooks": "^1.7.0", + "eslint-plugin-react": "^7.30.0", + "eslint-plugin-react-hooks": "^4.5.0", "eslint-plugin-standard": "^4.0.1", "file-loader": "^6.0.0", "imports-loader": "^1.1.0", "jest": "^27.3.1", - "mini-css-extract-plugin": "1.6.0", + "mini-css-extract-plugin": "^1.6.2", "moment": "^2.29.3", "moment-locales-webpack-plugin": "^1.2.0", "nock": "^13.2.4", - "optimize-css-assets-webpack-plugin": "6.0.0", "style-loader": "^1.2.1", "stylelint": "^13.6.1", "stylelint-config-standard": "^20.0.0", "terser-webpack-plugin": "<5.0.0", "typescript": "^4.6.3", "url-loader": "4.1.0", - "webpack": "4.44.2", - "webpack-cli": "^3.3.12", + "webpack": "^5.73.0", + "webpack-cli": "^4.0.0", "webpack-license-plugin": "^4.2.1", - "webpack-manifest-plugin": "^2.2.0" + "webpack-manifest-plugin": "^4.0.0" }, "dependencies": { "@chakra-ui/react": "^2.2.0", @@ -102,23 +99,10 @@ "react": "^18.0.0", "react-dom": "^18.0.0", "react-icons": "^4.3.1", - "react-query": "^3.34.16", + "react-query": "^3.39.1", "react-router-dom": "^6.3.0", - "react-table": "^7.7.0", - "redoc": "^2.0.0-rc.63", + "react-table": "^7.8.0", + "redoc": "^2.0.0-rc.72", "url-search-params-polyfill": "^8.1.0" - }, - "resolutions": { - "lodash": "^4.17.21", - "css-what": ">=5.0.1", - "trim-newlines": ">=3.0.1", - "y18n": ">=5.0.5", - "postcss": ">=8.2.10", - "glob-parent": ">=5.1.2", - "ssri": ">= 8.0.1", - "jquery": ">=3.5.0", - "browserslist": ">=4.16.5", - "tar": ">=6.1.9", - "node-fetch": ">=2.6.7" } } diff --git a/airflow/www/static/js/dags.js b/airflow/www/static/js/dags.js index 56afe8b48ee5f..cdf6557cc96be 100644 --- a/airflow/www/static/js/dags.js +++ b/airflow/www/static/js/dags.js @@ -108,14 +108,16 @@ $.each($('[id^=toggle]'), function toggleId() { $('.typeahead').typeahead({ source(query, callback) { - return $.ajax(autocompleteUrl, + return $.ajax( + autocompleteUrl, { data: { query: encodeURIComponent(query), status: statusFilter, }, success: callback, - }); + }, + ); }, autoSelect: false, afterSelect(value) { diff --git a/airflow/www/static/js/grid/Grid.jsx b/airflow/www/static/js/grid/Grid.jsx index 9bb510778a6fe..17101cef4f028 100644 --- a/airflow/www/static/js/grid/Grid.jsx +++ b/airflow/www/static/js/grid/Grid.jsx @@ -56,15 +56,15 @@ const Grid = ({ isPanelOpen = false, onPanelToggle, hoveredTaskState }) => { 
setOpenGroupIds(groupIds); }; - const scrollOnResize = new ResizeObserver(() => { - const runsContainer = scrollRef.current; - // Set scroll to top right if it is scrollable - if (runsContainer && runsContainer.scrollWidth > runsContainer.clientWidth) { - runsContainer.scrollBy(tableRef.current.offsetWidth, 0); - } - }); - useEffect(() => { + const scrollOnResize = new ResizeObserver(() => { + const runsContainer = scrollRef.current; + // Set scroll to top right if it is scrollable + if (runsContainer && runsContainer.scrollWidth > runsContainer.clientWidth) { + runsContainer.scrollBy(tableRef.current.offsetWidth, 0); + } + }); + if (tableRef && tableRef.current) { const table = tableRef.current; @@ -74,7 +74,7 @@ const Grid = ({ isPanelOpen = false, onPanelToggle, hoveredTaskState }) => { }; } return () => {}; - }, [tableRef, scrollOnResize]); + }, [tableRef]); return ( { { state: 'success', expectedSetValue: 'success' }, { state: 'failed', expectedSetValue: 'failed' }, { state: 'no_status', expectedSetValue: null }, - ])('Hovering $state badge should trigger setHoverdTaskState function with $expectedSetValue', + ])( + 'Hovering $state badge should trigger setHoverdTaskState function with $expectedSetValue', async ({ state, expectedSetValue }) => { const setHoveredTaskState = jest.fn(); const { getByText } = render( @@ -52,5 +53,6 @@ describe('Test LegendRow', () => { expect(setHoveredTaskState).toHaveBeenCalledWith(expectedSetValue); fireEvent.mouseLeave(successElement); expect(setHoveredTaskState).toHaveBeenLastCalledWith(); - }); + }, + ); }); diff --git a/airflow/www/static/js/grid/components/Table.jsx b/airflow/www/static/js/grid/components/Table.jsx index 570becf12aa9e..57bef0a911e5d 100644 --- a/airflow/www/static/js/grid/components/Table.jsx +++ b/airflow/www/static/js/grid/components/Table.jsx @@ -79,6 +79,7 @@ const Table = ({ hooks.visibleColumns.push((cols) => [ { id: 'selection', + // eslint-disable-next-line react/no-unstable-nested-components Cell: ({ row }) => (
diff --git a/airflow/www/static/js/grid/components/TaskName.test.jsx b/airflow/www/static/js/grid/components/TaskName.test.jsx index 9a403735ba885..a9760da303592 100644 --- a/airflow/www/static/js/grid/components/TaskName.test.jsx +++ b/airflow/www/static/js/grid/components/TaskName.test.jsx @@ -27,25 +27,19 @@ import { ChakraWrapper } from '../utils/testUtils'; describe('Test TaskName', () => { test('Displays a normal task name', () => { - const { getByText } = render( - , { wrapper: ChakraWrapper }, - ); + const { getByText } = render(, { wrapper: ChakraWrapper }); expect(getByText('test')).toBeDefined(); }); test('Displays a mapped task name', () => { - const { getByText } = render( - , { wrapper: ChakraWrapper }, - ); + const { getByText } = render(, { wrapper: ChakraWrapper }); expect(getByText('test [ ]')).toBeDefined(); }); test('Displays a group task name', () => { - const { getByText, getByTestId } = render( - , { wrapper: ChakraWrapper }, - ); + const { getByText, getByTestId } = render(, { wrapper: ChakraWrapper }); expect(getByText('test')).toBeDefined(); expect(getByTestId('closed-group')).toBeDefined(); diff --git a/airflow/www/static/js/grid/components/Tooltip.tsx b/airflow/www/static/js/grid/components/Tooltip.tsx index df7a5557c66e3..da4af5b529439 100644 --- a/airflow/www/static/js/grid/components/Tooltip.tsx +++ b/airflow/www/static/js/grid/components/Tooltip.tsx @@ -41,7 +41,7 @@ export interface TooltipProps * The React component to use as the * trigger for the tooltip */ - children: React.ReactNode + children: React.ReactElement /** * The label of the tooltip */ @@ -128,7 +128,7 @@ const Tooltip = forwardRef((props, ref) => { * Let's simply return the children */ if (!label) { - return <>{children}; + return children; } return ( diff --git a/airflow/www/static/js/grid/context/autorefresh.jsx b/airflow/www/static/js/grid/context/autorefresh.jsx index bba7f6cb06fcb..35df9b7daf920 100644 --- a/airflow/www/static/js/grid/context/autorefresh.jsx +++ b/airflow/www/static/js/grid/context/autorefresh.jsx @@ -19,7 +19,9 @@ /* global localStorage, document */ -import React, { useContext, useState, useEffect } from 'react'; +import React, { + useMemo, useContext, useState, useEffect, useCallback, +} from 'react'; import { getMetaValue } from '../../utils'; const autoRefreshKey = 'disabledAutoRefresh'; @@ -36,22 +38,32 @@ export const AutoRefreshProvider = ({ children }) => { const [isRefreshOn, setRefresh] = useState(initialState); - const onToggle = () => setRefresh(!isRefreshOn); + const onToggle = useCallback( + () => setRefresh(!isRefreshOn), + [isRefreshOn], + ); const stopRefresh = () => setRefresh(false); - const startRefresh = () => isRefreshAllowed && setRefresh(true); - const toggleRefresh = (updateStorage = false) => { - if (updateStorage) { - if (isRefreshOn) { - localStorage.setItem(autoRefreshKey, 'true'); - } else { - localStorage.removeItem(autoRefreshKey); + const startRefresh = useCallback( + () => isRefreshAllowed && setRefresh(true), + [isRefreshAllowed, setRefresh], + ); + + const toggleRefresh = useCallback( + (updateStorage = false) => { + if (updateStorage) { + if (isRefreshOn) { + localStorage.setItem(autoRefreshKey, 'true'); + } else { + localStorage.removeItem(autoRefreshKey); + } + onToggle(); + } else if (isRefreshAllowed) { + onToggle(); } - onToggle(); - } else if (isRefreshAllowed) { - onToggle(); - } - }; + }, + [isRefreshAllowed, isRefreshOn, onToggle], + ); useEffect(() => { const handleChange = (e) => { @@ -67,12 +79,12 @@ export const 
AutoRefreshProvider = ({ children }) => { }; }); + const value = useMemo(() => ({ + isRefreshOn, toggleRefresh, stopRefresh, startRefresh, isPaused, + }), [isPaused, isRefreshOn, startRefresh, toggleRefresh]); + return ( - + {children} ); diff --git a/airflow/www/static/js/grid/context/selection.jsx b/airflow/www/static/js/grid/context/selection.jsx deleted file mode 100644 index 29fcece22370a..0000000000000 --- a/airflow/www/static/js/grid/context/selection.jsx +++ /dev/null @@ -1,56 +0,0 @@ -/*! - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import React, { useContext, useReducer } from 'react'; - -const SelectionContext = React.createContext(null); - -const SELECT = 'SELECT'; -const DESELECT = 'DESELECT'; - -const selectionReducer = (state, { type, payload }) => { - switch (type) { - case SELECT: - // Deselect if it is the same selection - if (payload.taskId === state.taskId && payload.runId === state.runId) { - return {}; - } - return payload; - case DESELECT: - return {}; - default: - return state; - } -}; - -// Expose the grid selection to any react component instead of passing around lots of props -export const SelectionProvider = ({ children }) => { - const [selected, dispatch] = useReducer(selectionReducer, {}); - - const clearSelection = () => dispatch({ type: DESELECT }); - const onSelect = (payload) => dispatch({ type: SELECT, payload }); - - return ( - - {children} - - ); -}; - -export const useSelection = () => useContext(SelectionContext); diff --git a/airflow/www/static/js/grid/context/timezone.jsx b/airflow/www/static/js/grid/context/timezone.jsx index acde934a48241..99fee1a0fe64a 100644 --- a/airflow/www/static/js/grid/context/timezone.jsx +++ b/airflow/www/static/js/grid/context/timezone.jsx @@ -19,7 +19,9 @@ /* global moment, document */ -import React, { useContext, useEffect, useState } from 'react'; +import React, { + useContext, useEffect, useState, useMemo, +} from 'react'; import { TimezoneEvent } from '../../datetime_utils'; const TimezoneContext = React.createContext({ timezone: 'UTC' }); @@ -38,8 +40,10 @@ export const TimezoneProvider = ({ children }) => { }; }); + const value = useMemo(() => ({ timezone }), [timezone]); + return ( - + {children} ); diff --git a/airflow/www/static/js/grid/dagRuns/index.test.jsx b/airflow/www/static/js/grid/dagRuns/index.test.jsx index 0420d7aba8792..3507153eb0331 100644 --- a/airflow/www/static/js/grid/dagRuns/index.test.jsx +++ b/airflow/www/static/js/grid/dagRuns/index.test.jsx @@ -61,9 +61,9 @@ describe('Test DagRuns', () => { const spy = jest.spyOn(useGridDataModule, 'default').mockImplementation(() => ({ data, })); - const { queryAllByTestId, getByText, queryByText } = render( - , { wrapper: TableWrapper }, - ); + const { + queryAllByTestId, getByText, queryByText, 
+ } = render(, { wrapper: TableWrapper }); expect(queryAllByTestId('run')).toHaveLength(2); expect(queryAllByTestId('manual-run')).toHaveLength(1); @@ -104,18 +104,14 @@ describe('Test DagRuns', () => { const spy = jest.spyOn(useGridDataModule, 'default').mockImplementation(() => ({ data, })); - const { getByText } = render( - , { wrapper: TableWrapper }, - ); + const { getByText } = render(, { wrapper: TableWrapper }); expect(getByText(moment.utc(dagRuns[0].executionDate).format('MMM DD, HH:mm'))).toBeInTheDocument(); spy.mockRestore(); }); test('Handles empty data correctly', () => { global.autoRefreshInterval = 0; - const { queryByTestId } = render( - , { wrapper: TableWrapper }, - ); + const { queryByTestId } = render(, { wrapper: TableWrapper }); expect(queryByTestId('run')).toBeNull(); }); }); diff --git a/airflow/www/static/js/grid/utils/useFilters.js b/airflow/www/static/js/grid/utils/useFilters.js index aa207e23539d9..78f51d6e518fb 100644 --- a/airflow/www/static/js/grid/utils/useFilters.js +++ b/airflow/www/static/js/grid/utils/useFilters.js @@ -50,8 +50,10 @@ const useFilters = () => { setSearchParams(params); }; - const onBaseDateChange = makeOnChangeFn(BASE_DATE_PARAM, - (localDate) => moment(localDate).utc().format()); + const onBaseDateChange = makeOnChangeFn( + BASE_DATE_PARAM, + (localDate) => moment(localDate).utc().format(), + ); const onNumRunsChange = makeOnChangeFn(NUM_RUNS_PARAM); const onRunTypeChange = makeOnChangeFn(RUN_TYPE_PARAM); const onRunStateChange = makeOnChangeFn(RUN_STATE_PARAM); diff --git a/airflow/www/static/js/ti_log.js b/airflow/www/static/js/ti_log.js index 7a3b9ae54c0d3..1bf6b501a659c 100644 --- a/airflow/www/static/js/ti_log.js +++ b/airflow/www/static/js/ti_log.js @@ -33,7 +33,7 @@ const ANIMATION_SPEED = parseInt(getMetaValue('animation_speed'), 10); const TOTAL_ATTEMPTS = parseInt(getMetaValue('total_attempts'), 10); function recurse(delay = DELAY) { - return new Promise((resolve) => setTimeout(resolve, delay)); + return new Promise((resolve) => { setTimeout(resolve, delay); }); } // Enable auto tailing only when users scroll down to the bottom @@ -134,9 +134,7 @@ function autoTailingLog(tryNumber, metadata = null, autoTailing = false) { document.getElementById(`loading-${tryNumber}`).style.display = 'none'; return; } - recurse().then(() => autoTailingLog( - tryNumber, res.metadata, autoTailing, - )); + recurse().then(() => autoTailingLog(tryNumber, res.metadata, autoTailing)); }); } diff --git a/airflow/www/webpack.config.js b/airflow/www/webpack.config.js index 9fc9252f30ec0..e48b01c687736 100644 --- a/airflow/www/webpack.config.js +++ b/airflow/www/webpack.config.js @@ -19,12 +19,12 @@ const webpack = require('webpack'); const path = require('path'); -const ManifestPlugin = require('webpack-manifest-plugin'); +const { WebpackManifestPlugin } = require('webpack-manifest-plugin'); const cwplg = require('clean-webpack-plugin'); const CopyWebpackPlugin = require('copy-webpack-plugin'); const MiniCssExtractPlugin = require('mini-css-extract-plugin'); const MomentLocalesPlugin = require('moment-locales-webpack-plugin'); -const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin'); +const CssMinimizerPlugin = require('css-minimizer-webpack-plugin'); const LicensePlugin = require('webpack-license-plugin'); const TerserPlugin = require('terser-webpack-plugin'); @@ -84,6 +84,7 @@ const config = { chunkFilename: '[name].[chunkhash].js', library: ['Airflow', '[name]'], libraryTarget: 'umd', + publicPath: '', }, resolve: { extensions: 
[ @@ -98,15 +99,23 @@ const config = { rules: [ { test: /datatables\.net.*/, - loader: 'imports-loader?define=>false', + use: [ + { + loader: 'imports-loader?define=>false', + }, + ], }, { test: /\.[j|t]sx?$/, exclude: /node_modules/, - loader: 'babel-loader', - options: { - presets: ['@babel/preset-react', '@babel/preset-typescript'], - }, + use: [ + { + loader: 'babel-loader', + options: { + presets: ['@babel/preset-react', '@babel/preset-typescript'], + }, + }, + ], }, // Extract css files { @@ -149,12 +158,19 @@ const config = { }, { test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/, - loader: 'file-loader', + use: [ + { + loader: 'file-loader', + }, + ], }, ], }, plugins: [ - new ManifestPlugin(), + new WebpackManifestPlugin({ + // d3-tip is named index.js in its dist folder which was confusing the manifest + map: (file) => (file.path === 'd3-tip.js' ? { ...file, name: 'd3-tip.js' } : file), + }), new cwplg.CleanWebpackPlugin({ verbose: true, }), @@ -252,7 +268,7 @@ const config = { optimization: { minimize: process.env.NODE_ENV === 'production', minimizer: [ - new OptimizeCSSAssetsPlugin({}), + new CssMinimizerPlugin({}), new TerserPlugin(), ], }, diff --git a/airflow/www/yarn.lock b/airflow/www/yarn.lock index 2293dfddbc50f..2c015b08d18b2 100644 --- a/airflow/www/yarn.lock +++ b/airflow/www/yarn.lock @@ -9,14 +9,15 @@ dependencies: "@jridgewell/trace-mapping" "^0.3.0" -"@babel/code-frame@7.12.11": - version "7.12.11" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f" - integrity sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw== +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== dependencies: - "@babel/highlight" "^7.10.4" + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.14.5": +"@babel/code-frame@^7.0.0": version "7.14.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.14.5.tgz#23b08d740e83f49c5e59945fbf1b43e80bbf4edb" integrity sha512-9pzDqyc6OLDaqe+zbACgFkb6fKMNG6CObKpnYXChRsvYGyEdc7CA2BaqeOM+vOtCS5ndmJicPJhKAwYRI6UfFw== @@ -42,36 +43,15 @@ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.16.0.tgz#ea269d7f78deb3a7826c39a4048eecda541ebdaa" integrity sha512-DGjt2QZse5SGd9nfOSqO4WLJ8NN/oHkijbXbPrxuoJO3oIPJL3TciZs9FX+cOHNiY9E9l0opL8g7BmLe3T+9ew== -"@babel/compat-data@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.14.5.tgz#8ef4c18e58e801c5c95d3c1c0f2874a2680fadea" - integrity sha512-kixrYn4JwfAVPa0f2yfzc2AWti6WRRyO3XjWW5PJAvtE11qhSayrrcrEnee05KAtNaPC+EwehE8Qt1UedEVB8w== - "@babel/compat-data@^7.16.4": version "7.17.0" resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.17.0.tgz#86850b8597ea6962089770952075dcaabb8dba34" integrity sha512-392byTlpGWXMv4FbyWw3sAZ/FrW/DrwqLGXpy0mbyNe9Taqv1mg9yON5/o0cnr8XYCkFTZbC1eV+c+LAROgrng== -"@babel/core@>=7.9.0": - version "7.14.6" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.14.6.tgz#e0814ec1a950032ff16c13a2721de39a8416fcab" - integrity sha512-gJnOEWSqTk96qG5BoIrl5bVtc23DCycmIePPYnamY9RboYdI4nFy5vAQMSl81O5K/W0sLDWfGysnOECC+KUUCA== - dependencies: - "@babel/code-frame" "^7.14.5" - 
"@babel/generator" "^7.14.5" - "@babel/helper-compilation-targets" "^7.14.5" - "@babel/helper-module-transforms" "^7.14.5" - "@babel/helpers" "^7.14.6" - "@babel/parser" "^7.14.6" - "@babel/template" "^7.14.5" - "@babel/traverse" "^7.14.5" - "@babel/types" "^7.14.5" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.1.2" - semver "^6.3.0" - source-map "^0.5.0" +"@babel/compat-data@^7.17.10": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.18.5.tgz#acac0c839e317038c73137fbb6ef71a1d6238471" + integrity sha512-BxhE40PVCBxVEJsSBhB6UWyAuqJRxGsAw8BdHMJ3AKGydcwuWW4kOO3HmqBQAdcq/OP+/DlTVxLvsCzRTnZuGg== "@babel/core@^7.1.0", "@babel/core@^7.12.3", "@babel/core@^7.7.2": version "7.16.0" @@ -94,6 +74,27 @@ semver "^6.3.0" source-map "^0.5.0" +"@babel/core@^7.17.9", "@babel/core@^7.18.5": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.18.5.tgz#c597fa680e58d571c28dda9827669c78cdd7f000" + integrity sha512-MGY8vg3DxMnctw0LdvSEojOsumc70g0t18gNyUdAZqB1Rpd1Bqo/svHGvt+UJ6JcGX+DIekGFDxxIWofBxLCnQ== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.18.2" + "@babel/helper-compilation-targets" "^7.18.2" + "@babel/helper-module-transforms" "^7.18.0" + "@babel/helpers" "^7.18.2" + "@babel/parser" "^7.18.5" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.18.5" + "@babel/types" "^7.18.4" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + "@babel/core@^7.8.0": version "7.17.2" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.17.2.tgz#2c77fc430e95139d816d39b113b31bf40fb22337" @@ -115,14 +116,14 @@ json5 "^2.1.2" semver "^6.3.0" -"@babel/generator@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.14.5.tgz#848d7b9f031caca9d0cd0af01b063f226f52d785" - integrity sha512-y3rlP+/G25OIX3mYKKIOlQRcqj7YgrvHxOLbVmyLJ9bPmi5ttvUmpydVjcFjZphOktWuA7ovbx91ECloWTfjIA== +"@babel/eslint-parser@^7.18.2": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.18.2.tgz#e14dee36c010edfb0153cf900c2b0815e82e3245" + integrity sha512-oFQYkE8SuH14+uR51JVAmdqwKYXGRjEXx7s+WiagVjqQ+HPE+nnwyF2qlVG8evUsUHmPcA+6YXMEDbIhEyQc5A== dependencies: - "@babel/types" "^7.14.5" - jsesc "^2.5.1" - source-map "^0.5.0" + eslint-scope "^5.1.1" + eslint-visitor-keys "^2.1.0" + semver "^6.3.0" "@babel/generator@^7.16.0", "@babel/generator@^7.7.2": version "7.16.0" @@ -183,16 +184,6 @@ browserslist "^4.16.6" semver "^6.3.0" -"@babel/helper-compilation-targets@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.14.5.tgz#7a99c5d0967911e972fe2c3411f7d5b498498ecf" - integrity sha512-v+QtZqXEiOnpO6EYvlImB6zCD2Lel06RzOPzmkz/D/XgQiUu3C/Jb1LOqSt/AIA34TYi/Q+KlT8vTQrgdxkbLw== - dependencies: - "@babel/compat-data" "^7.14.5" - "@babel/helper-validator-option" "^7.14.5" - browserslist "^4.16.6" - semver "^6.3.0" - "@babel/helper-compilation-targets@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.16.7.tgz#06e66c5f299601e6c7da350049315e83209d551b" @@ -203,6 +194,16 @@ browserslist "^4.17.5" semver "^6.3.0" +"@babel/helper-compilation-targets@^7.18.2": + version "7.18.2" + resolved 
"https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.18.2.tgz#67a85a10cbd5fc7f1457fec2e7f45441dc6c754b" + integrity sha512-s1jnPotJS9uQnzFtiZVBUxe67CuBa679oWFHpxYYnTpRL/1ffhyX44R9uYiXoa/pLXcY9H2moJta0iaanlk/rQ== + dependencies: + "@babel/compat-data" "^7.17.10" + "@babel/helper-validator-option" "^7.16.7" + browserslist "^4.20.2" + semver "^6.3.0" + "@babel/helper-create-class-features-plugin@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.16.0.tgz#090d4d166b342a03a9fec37ef4fd5aeb9c7c6a4b" @@ -269,15 +270,6 @@ dependencies: "@babel/types" "^7.16.0" -"@babel/helper-function-name@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.14.5.tgz#89e2c474972f15d8e233b52ee8c480e2cfcd50c4" - integrity sha512-Gjna0AsXWfFvrAuX+VKcN/aNNWonizBj39yGwUzVDVTlMYJMK2Wp6xdpy72mfArFq5uK+NOuexfzZlzI1z9+AQ== - dependencies: - "@babel/helper-get-function-arity" "^7.14.5" - "@babel/template" "^7.14.5" - "@babel/types" "^7.14.5" - "@babel/helper-function-name@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.16.0.tgz#b7dd0797d00bbfee4f07e9c4ea5b0e30c8bb1481" @@ -304,13 +296,6 @@ "@babel/template" "^7.16.7" "@babel/types" "^7.17.0" -"@babel/helper-get-function-arity@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.14.5.tgz#25fbfa579b0937eee1f3b805ece4ce398c431815" - integrity sha512-I1Db4Shst5lewOM4V+ZKJzQ0JGGaZ6VY1jYvMghRjqs6DWgxLCIyFt30GlnKkfUeFLpJt2vzbMVEXVSXlIFYUg== - dependencies: - "@babel/types" "^7.14.5" - "@babel/helper-get-function-arity@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.16.0.tgz#0088c7486b29a9cb5d948b1a1de46db66e089cfa" @@ -325,13 +310,6 @@ dependencies: "@babel/types" "^7.16.7" -"@babel/helper-hoist-variables@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.14.5.tgz#e0dd27c33a78e577d7c8884916a3e7ef1f7c7f8d" - integrity sha512-R1PXiz31Uc0Vxy4OEOm07x0oSjKAdPPCh3tPivn/Eo8cvz6gveAeuyUUPB21Hoiif0uoPQSSdhIPS3352nvdyQ== - dependencies: - "@babel/types" "^7.14.5" - "@babel/helper-hoist-variables@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.0.tgz#4c9023c2f1def7e28ff46fc1dbcd36a39beaa81a" @@ -346,13 +324,6 @@ dependencies: "@babel/types" "^7.16.7" -"@babel/helper-member-expression-to-functions@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.14.5.tgz#d5c70e4ad13b402c95156c7a53568f504e2fb7b8" - integrity sha512-UxUeEYPrqH1Q/k0yRku1JE7dyfyehNwT6SVkMHvYvPDv4+uu627VXBckVj891BO8ruKBkiDoGnZf4qPDD8abDQ== - dependencies: - "@babel/types" "^7.14.5" - "@babel/helper-member-expression-to-functions@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.16.0.tgz#29287040efd197c77636ef75188e81da8bccd5a4" @@ -367,7 +338,7 @@ dependencies: "@babel/types" "^7.17.0" -"@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.14.5": +"@babel/helper-module-imports@^7.12.13": version "7.14.5" resolved 
"https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.14.5.tgz#6d1a44df6a38c957aa7c312da076429f11b422f3" integrity sha512-SwrNHu5QWS84XlHwGYPDtCxcA0hrSlL2yhWYLgeOc0w7ccOl2qv4s/nARI0aYZW+bSwAL5CukeXA47B/1NKcnQ== @@ -388,20 +359,6 @@ dependencies: "@babel/types" "^7.16.7" -"@babel/helper-module-transforms@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.14.5.tgz#7de42f10d789b423eb902ebd24031ca77cb1e10e" - integrity sha512-iXpX4KW8LVODuAieD7MzhNjmM6dzYY5tfRqT+R9HDXWl0jPn/djKmA+G9s/2C2T9zggw5tK1QNqZ70USfedOwA== - dependencies: - "@babel/helper-module-imports" "^7.14.5" - "@babel/helper-replace-supers" "^7.14.5" - "@babel/helper-simple-access" "^7.14.5" - "@babel/helper-split-export-declaration" "^7.14.5" - "@babel/helper-validator-identifier" "^7.14.5" - "@babel/template" "^7.14.5" - "@babel/traverse" "^7.14.5" - "@babel/types" "^7.14.5" - "@babel/helper-module-transforms@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.16.0.tgz#1c82a8dd4cb34577502ebd2909699b194c3e9bb5" @@ -430,12 +387,19 @@ "@babel/traverse" "^7.16.7" "@babel/types" "^7.16.7" -"@babel/helper-optimise-call-expression@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.14.5.tgz#f27395a8619e0665b3f0364cddb41c25d71b499c" - integrity sha512-IqiLIrODUOdnPU9/F8ib1Fx2ohlgDhxnIDU7OEVi+kAbEZcyiF7BLU8W6PfvPi9LzztjS7kcbzbmL7oG8kD6VA== +"@babel/helper-module-transforms@^7.18.0": + version "7.18.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.18.0.tgz#baf05dec7a5875fb9235bd34ca18bad4e21221cd" + integrity sha512-kclUYSUBIjlvnzN2++K9f2qzYKFgjmnmjwL4zlmU5f8ZtzgWe8s0rUPSTGy2HmK4P8T52MQsS+HTQAgZd3dMEA== dependencies: - "@babel/types" "^7.14.5" + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-simple-access" "^7.17.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/helper-validator-identifier" "^7.16.7" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.18.0" + "@babel/types" "^7.18.0" "@babel/helper-optimise-call-expression@^7.16.0": version "7.16.0" @@ -470,16 +434,6 @@ "@babel/helper-wrap-function" "^7.16.0" "@babel/types" "^7.16.0" -"@babel/helper-replace-supers@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.14.5.tgz#0ecc0b03c41cd567b4024ea016134c28414abb94" - integrity sha512-3i1Qe9/8x/hCHINujn+iuHy+mMRLoc77b2nI9TB0zjH1hvn9qGlXjWlggdwUcju36PkPCy/lpM7LLUdcTyH4Ow== - dependencies: - "@babel/helper-member-expression-to-functions" "^7.14.5" - "@babel/helper-optimise-call-expression" "^7.14.5" - "@babel/traverse" "^7.14.5" - "@babel/types" "^7.14.5" - "@babel/helper-replace-supers@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.16.0.tgz#73055e8d3cf9bcba8ddb55cad93fedc860f68f17" @@ -501,13 +455,6 @@ "@babel/traverse" "^7.18.2" "@babel/types" "^7.18.2" -"@babel/helper-simple-access@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.14.5.tgz#66ea85cf53ba0b4e588ba77fc813f53abcaa41c4" - integrity sha512-nfBN9xvmCt6nrMZjfhkl7i0oTV3yxR4/FztsbOASyTvVcoYd0TRHh7eMLdlEcCqobydC0LAF3LtC92Iwxo0wyw== - dependencies: - "@babel/types" 
"^7.14.5" - "@babel/helper-simple-access@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.16.0.tgz#21d6a27620e383e37534cf6c10bba019a6f90517" @@ -522,6 +469,13 @@ dependencies: "@babel/types" "^7.16.7" +"@babel/helper-simple-access@^7.17.7": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.18.2.tgz#4dc473c2169ac3a1c9f4a51cfcd091d1c36fcff9" + integrity sha512-7LIrjYzndorDY88MycupkpQLKS1AFfsVRm2k/9PtKScSy5tZq0McZTj+DiMRynboZfIqOKvo03pmhTaUgiD6fQ== + dependencies: + "@babel/types" "^7.18.2" + "@babel/helper-skip-transparent-expression-wrappers@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.16.0.tgz#0ee3388070147c3ae051e487eca3ebb0e2e8bb09" @@ -529,13 +483,6 @@ dependencies: "@babel/types" "^7.16.0" -"@babel/helper-split-export-declaration@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.14.5.tgz#22b23a54ef51c2b7605d851930c1976dd0bc693a" - integrity sha512-hprxVPu6e5Kdp2puZUmvOGjaLv9TCe58E/Fl6hRq4YiVQxIcNvuq6uTM2r1mT/oPskuS9CgR+I94sqAYv0NGKA== - dependencies: - "@babel/types" "^7.14.5" - "@babel/helper-split-export-declaration@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.0.tgz#29672f43663e936df370aaeb22beddb3baec7438" @@ -585,15 +532,6 @@ "@babel/traverse" "^7.16.0" "@babel/types" "^7.16.0" -"@babel/helpers@^7.14.6": - version "7.14.6" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.14.6.tgz#5b58306b95f1b47e2a0199434fa8658fa6c21635" - integrity sha512-yesp1ENQBiLI+iYHSJdoZKUtRpfTlL1grDIX9NRlAVppljLw/4tTyYupIB7uIYmC3stW/imAv8EqaKaS/ibmeA== - dependencies: - "@babel/template" "^7.14.5" - "@babel/traverse" "^7.14.5" - "@babel/types" "^7.14.5" - "@babel/helpers@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.16.0.tgz#875519c979c232f41adfbd43a3b0398c2e388183" @@ -612,14 +550,14 @@ "@babel/traverse" "^7.17.0" "@babel/types" "^7.17.0" -"@babel/highlight@^7.10.4": - version "7.17.12" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.17.12.tgz#257de56ee5afbd20451ac0a75686b6b404257351" - integrity sha512-7yykMVF3hfZY2jsHZEEgLc+3x4o1O+fYyULu11GynEUQNwB6lua+IIQn1FiJxNucd5UlyJryrwsOh8PL9Sn8Qg== +"@babel/helpers@^7.18.2": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.18.2.tgz#970d74f0deadc3f5a938bfa250738eb4ac889384" + integrity sha512-j+d+u5xT5utcQSzrh9p+PaJX94h++KN+ng9b9WEJq7pkUPAd61FGqhjuUEdfknb3E/uDBb7ruwEeKkIxNJPIrg== dependencies: - "@babel/helper-validator-identifier" "^7.16.7" - chalk "^2.0.0" - js-tokens "^4.0.0" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.18.2" + "@babel/types" "^7.18.2" "@babel/highlight@^7.14.5": version "7.14.5" @@ -653,11 +591,6 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.16.2.tgz#3723cd5c8d8773eef96ce57ea1d9b7faaccd12ac" integrity sha512-RUVpT0G2h6rOZwqLDTrKk7ksNv7YpAilTnYe1/Q+eDjxEceRMKVWbCsX7t8h6C1qCFi/1Y8WZjcEPBAFG27GPw== -"@babel/parser@^7.14.5", "@babel/parser@^7.14.6", "@babel/parser@^7.7.0": - version "7.14.6" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.6.tgz#d85cc68ca3cac84eae384c06f032921f5227f4b2" - integrity 
sha512-oG0ej7efjEXxb4UgE+klVx+3j4MVo+A2vCzm7OUN4CLo6WhQ+vSOD2yJ8m7B+DghObxtLxt3EfgMWpq+AsWehQ== - "@babel/parser@^7.16.7", "@babel/parser@^7.17.0": version "7.17.0" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.17.0.tgz#f0ac33eddbe214e4105363bb17c3341c5ffcc43c" @@ -668,6 +601,11 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.18.4.tgz#6774231779dd700e0af29f6ad8d479582d7ce5ef" integrity sha512-FDge0dFazETFcxGw/EXzOkN8uJp0PC7Qbm+Pe9T+av2zlBpOgunFHkQPPn+eRuClU73JF+98D531UgayY89tow== +"@babel/parser@^7.18.5": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.18.5.tgz#337062363436a893a2d22faa60be5bb37091c83c" + integrity sha512-YZWVaglMiplo7v8f1oMQ5ZPQr0vn7HPeZXxXWsxXJRjGVrzUFn9OxFQl1sb5wzfootjA/yChhW84BV+383FSOw== + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.16.0": version "7.16.2" resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.16.2.tgz#2977fca9b212db153c195674e57cfab807733183" @@ -1381,7 +1319,7 @@ dependencies: regenerator-runtime "^0.13.4" -"@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2": +"@babel/runtime@^7.10.2": version "7.15.4" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.15.4.tgz#fd17d16bfdf878e6dd02d19753a39fa8a8d9c84a" integrity sha512-99catp6bHCaxr4sJ/DbTGgHS4+Rs2RVd2g7iOap6SLGPDknRK9ztKNsE/Fg6QhSeh1FGE5f6gHGQmvvn3I3xhw== @@ -1409,6 +1347,13 @@ dependencies: regenerator-runtime "^0.13.4" +"@babel/runtime@^7.16.3": + version "7.18.3" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.18.3.tgz#c7b654b57f6f63cf7f8b418ac9ca04408c4579f4" + integrity sha512-38Y8f7YUhce/K7RMwTp7m0uCumpv9hZkitCbBClqQIow1qSbCvGkcegKOXpEWCQLfWmevgRiWokZ1GkpfhbZug== + dependencies: + regenerator-runtime "^0.13.4" + "@babel/runtime@^7.7.6": version "7.17.9" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.17.9.tgz#d19fbf802d01a8cb6cf053a64e472d42c434ba72" @@ -1416,15 +1361,6 @@ dependencies: regenerator-runtime "^0.13.4" -"@babel/template@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.14.5.tgz#a9bc9d8b33354ff6e55a9c60d1109200a68974f4" - integrity sha512-6Z3Po85sfxRGachLULUhOmvAaOo7xCvqGQtxINai2mEGPFm6pQ4z5QInFnUrRpfoSV60BnjyF5F3c+15fxFV1g== - dependencies: - "@babel/code-frame" "^7.14.5" - "@babel/parser" "^7.14.5" - "@babel/types" "^7.14.5" - "@babel/template@^7.16.0", "@babel/template@^7.3.3": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.0.tgz#d16a35ebf4cd74e202083356fab21dd89363ddd6" @@ -1458,21 +1394,6 @@ debug "^4.1.0" globals "^11.1.0" -"@babel/traverse@^7.14.5", "@babel/traverse@^7.7.0": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.14.5.tgz#c111b0f58afab4fea3d3385a406f692748c59870" - integrity sha512-G3BiS15vevepdmFqmUc9X+64y0viZYygubAMO8SvBmKARuF6CPSZtH4Ng9vi/lrWlZFGe3FWdXNy835akH8Glg== - dependencies: - "@babel/code-frame" "^7.14.5" - "@babel/generator" "^7.14.5" - "@babel/helper-function-name" "^7.14.5" - "@babel/helper-hoist-variables" "^7.14.5" - "@babel/helper-split-export-declaration" "^7.14.5" - "@babel/parser" "^7.14.5" - "@babel/types" "^7.14.5" - debug "^4.1.0" - globals "^11.1.0" - "@babel/traverse@^7.16.7", "@babel/traverse@^7.17.0": version "7.17.0" resolved 
"https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.17.0.tgz#3143e5066796408ccc880a33ecd3184f3e75cd30" @@ -1489,6 +1410,22 @@ debug "^4.1.0" globals "^11.1.0" +"@babel/traverse@^7.18.0", "@babel/traverse@^7.18.5": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.18.5.tgz#94a8195ad9642801837988ab77f36e992d9a20cd" + integrity sha512-aKXj1KT66sBj0vVzk6rEeAO6Z9aiiQ68wfDgge3nHhA/my6xMM/7HGQUNumKZaoa2qUPQ5whJG9aAifsxUKfLA== + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.18.2" + "@babel/helper-environment-visitor" "^7.18.2" + "@babel/helper-function-name" "^7.17.9" + "@babel/helper-hoist-variables" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/parser" "^7.18.5" + "@babel/types" "^7.18.4" + debug "^4.1.0" + globals "^11.1.0" + "@babel/traverse@^7.18.2": version "7.18.2" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.18.2.tgz#b77a52604b5cc836a9e1e08dca01cba67a12d2e8" @@ -1513,7 +1450,7 @@ "@babel/helper-validator-identifier" "^7.15.7" to-fast-properties "^2.0.0" -"@babel/types@^7.14.5", "@babel/types@^7.7.0": +"@babel/types@^7.14.5": version "7.14.5" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.14.5.tgz#3bb997ba829a2104cedb20689c4a5b8121d383ff" integrity sha512-M/NzBpEL95I5Hh4dwhin5JlE7EzO5PHMAuzjxss3tiOBD46KfQvVedN/3jEPZvdRvtsK2222XfdHogNIttFgcg== @@ -1529,7 +1466,7 @@ "@babel/helper-validator-identifier" "^7.16.7" to-fast-properties "^2.0.0" -"@babel/types@^7.18.2": +"@babel/types@^7.18.0", "@babel/types@^7.18.2", "@babel/types@^7.18.4": version "7.18.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.18.4.tgz#27eae9b9fd18e9dccc3f9d6ad051336f307be354" integrity sha512-ThN1mBcMq5pG/Vm2IcBmPPfyPXbd8S02rS+OBIDENdufvqC7Z/jHPCv9IcP01277aKtDI8g/2XysBN4hA8niiw== @@ -2099,6 +2036,11 @@ resolved "https://registry.yarnpkg.com/@ctrl/tinycolor/-/tinycolor-3.4.0.tgz#c3c5ae543c897caa9c2a68630bed355be5f9990f" integrity sha512-JZButFdZ1+/xAfpguQHoabIXkcqRRKpMrWKBkpEZZyxfY9C1DpADFB8PEqGSTeFr135SaTRfKqGKx5xSCLI7ZQ== +"@discoveryjs/json-ext@^0.5.0": + version "0.5.7" + resolved "https://registry.yarnpkg.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70" + integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw== + "@emotion/babel-plugin@^11.3.0": version "11.3.0" resolved "https://registry.yarnpkg.com/@emotion/babel-plugin/-/babel-plugin-11.3.0.tgz#3a16850ba04d8d9651f07f3fb674b3436a4fb9d7" @@ -2241,19 +2183,19 @@ resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz#8eed982e2ee6f7f4e44c253e12962980791efd46" integrity sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA== -"@eslint/eslintrc@^0.4.3": - version "0.4.3" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.3.tgz#9e42981ef035beb3dd49add17acb96e8ff6f394c" - integrity sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw== +"@eslint/eslintrc@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.3.0.tgz#29f92c30bb3e771e4a2048c95fa6855392dfac4f" + integrity sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw== dependencies: ajv "^6.12.4" - debug "^4.1.1" - espree "^7.3.0" - globals "^13.9.0" - ignore "^4.0.6" + debug "^4.3.2" + espree "^9.3.2" + globals "^13.15.0" + ignore "^5.2.0" import-fresh "^3.2.1" - 
js-yaml "^3.13.1" - minimatch "^3.0.4" + js-yaml "^4.1.0" + minimatch "^3.1.2" strip-json-comments "^3.1.1" "@exodus/schemasafe@^1.0.0-rc.2": @@ -2261,16 +2203,16 @@ resolved "https://registry.yarnpkg.com/@exodus/schemasafe/-/schemasafe-1.0.0-rc.3.tgz#dda2fbf3dafa5ad8c63dadff7e01d3fdf4736025" integrity sha512-GoXw0U2Qaa33m3eUcxuHnHpNvHjNlLo0gtV091XBpaRINaB4X6FGCG5XKxSFNFiPpugUDqNruHzaqpTdDm4AOg== -"@humanwhocodes/config-array@^0.5.0": - version "0.5.0" - resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.5.0.tgz#1407967d4c6eecd7388f83acf1eaf4d0c6e58ef9" - integrity sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg== +"@humanwhocodes/config-array@^0.9.2": + version "0.9.5" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.9.5.tgz#2cbaf9a89460da24b5ca6531b8bbfc23e1df50c7" + integrity sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw== dependencies: - "@humanwhocodes/object-schema" "^1.2.0" + "@humanwhocodes/object-schema" "^1.2.1" debug "^4.1.1" minimatch "^3.0.4" -"@humanwhocodes/object-schema@^1.2.0": +"@humanwhocodes/object-schema@^1.2.1": version "1.2.1" resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== @@ -2492,6 +2434,14 @@ "@types/yargs" "^16.0.0" chalk "^4.0.0" +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/gen-mapping@^0.3.0": version "0.3.1" resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.1.tgz#cf92a983c83466b8c0ce9124fadeaf09f7c66ea9" @@ -2511,6 +2461,14 @@ resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea" integrity sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ== +"@jridgewell/source-map@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" + integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/sourcemap-codec@^1.4.10": version "1.4.11" resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.11.tgz#771a1d8d744eeb71b6adb35808e1a6c7b9b8c8ec" @@ -2524,7 +2482,7 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" -"@jridgewell/trace-mapping@^0.3.9": +"@jridgewell/trace-mapping@^0.3.7", "@jridgewell/trace-mapping@^0.3.9": version "0.3.13" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.13.tgz#dcfe3e95f224c8fe97a87a5235defec999aa92ea" integrity sha512-o1xbKhp9qnIAoHJSWd6KlCZfqslL4valSF81H8ImioOAxluWYWOpWkpyktY2vnt4tbrX9XYaxovq6cgowaJp2w== @@ -2576,10 +2534,10 @@ require-from-string "^2.0.2" uri-js "^4.2.2" -"@redocly/openapi-core@^1.0.0-beta.54": - version "1.0.0-beta.81" - resolved 
"https://registry.yarnpkg.com/@redocly/openapi-core/-/openapi-core-1.0.0-beta.81.tgz#c3684b8c4fcf4d4d25d23b46964809a3f0097dcb" - integrity sha512-cLmGZD+7U3NB2u5q6E8gPKENEKlQmvD1D4m+eIixgM5yQ2K3w3VaD84eNPhvRlvgu7yPrDX9mmTj1jVp254SRA== +"@redocly/openapi-core@^1.0.0-beta.97": + version "1.0.0-beta.102" + resolved "https://registry.yarnpkg.com/@redocly/openapi-core/-/openapi-core-1.0.0-beta.102.tgz#e1cd049979f05812c594063fec71e618201319c4" + integrity sha512-3Fr3fg+9VEF4+4uoyvOOk+9ipmX2GYhlb18uZbpC4v3cUgGpkTRGZM2Qetfah7Tgx2LgqLuw8A1icDD6Zed2Gw== dependencies: "@redocly/ajv" "^8.6.4" "@types/node" "^14.11.8" @@ -2587,16 +2545,11 @@ js-levenshtein "^1.1.6" js-yaml "^4.1.0" lodash.isequal "^4.5.0" - minimatch "^3.0.4" + minimatch "^5.0.1" node-fetch "^2.6.1" pluralize "^8.0.0" yaml-ast-parser "0.0.43" -"@redocly/react-dropdown-aria@^2.0.11": - version "2.0.12" - resolved "https://registry.yarnpkg.com/@redocly/react-dropdown-aria/-/react-dropdown-aria-2.0.12.tgz#2e3af2b1b8e9123487109400d6117f0d4a8445a6" - integrity sha512-feQEZlyBvQsbT/fvpJ4jJ5OLGaUPpnskHYDsY8DGpPymN+HUeDQrqkBEbbKRwMKidFTI2cxk2kJNNTnvdS9jyw== - "@sinonjs/commons@^1.7.0": version "1.8.3" resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" @@ -2612,11 +2565,11 @@ "@sinonjs/commons" "^1.7.0" "@stylelint/postcss-css-in-js@^0.37.2": - version "0.37.2" - resolved "https://registry.yarnpkg.com/@stylelint/postcss-css-in-js/-/postcss-css-in-js-0.37.2.tgz#7e5a84ad181f4234a2480803422a47b8749af3d2" - integrity sha512-nEhsFoJurt8oUmieT8qy4nk81WRHmJynmVwn/Vts08PL9fhgIsMhk1GId5yAN643OzqEEb5S/6At2TZW7pqPDA== + version "0.37.3" + resolved "https://registry.yarnpkg.com/@stylelint/postcss-css-in-js/-/postcss-css-in-js-0.37.3.tgz#d149a385e07ae365b0107314c084cb6c11adbf49" + integrity sha512-scLk3cSH1H9KggSniseb2KNAU5D9FWc3H7BxCSAIdtU9OWIyw0zkEZ9qEKHryRM+SExYXRKNb7tOOVNAsQ3iwg== dependencies: - "@babel/core" ">=7.9.0" + "@babel/core" "^7.17.9" "@stylelint/postcss-markdown@^0.36.2": version "0.36.2" @@ -2677,10 +2630,10 @@ resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== -"@trysound/sax@0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.1.1.tgz#3348564048e7a2d7398c935d466c0414ebb6a669" - integrity sha512-Z6DoceYb/1xSg5+e+ZlPZ9v0N16ZvZ+wYMraFue4HYrE4ttONKtsvruIRf6t9TBR0YvSOfi1hUU0fJfBLCDYow== +"@trysound/sax@0.2.0": + version "0.2.0" + resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" + integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== "@types/aria-query@^4.2.0": version "4.2.2" @@ -2720,6 +2673,27 @@ dependencies: "@babel/types" "^7.3.0" +"@types/eslint-scope@^3.7.3": + version "3.7.3" + resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.3.tgz#125b88504b61e3c8bc6f870882003253005c3224" + integrity sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g== + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*": + version "8.4.3" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.4.3.tgz#5c92815a3838b1985c90034cd85f26f59d9d0ece" + integrity sha512-YP1S7YJRMPs+7KZKDb9G63n8YejIwW9BALq7a5j2+H4yl6iOv9CB29edho+cuFRrvmJbbaH2yiVChKLJVysDGw== + dependencies: + 
"@types/estree" "*" + "@types/json-schema" "*" + +"@types/estree@*", "@types/estree@^0.0.51": + version "0.0.51" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" + integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== + "@types/glob@^7.1.1": version "7.1.3" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.3.tgz#e6ba80f36b7daad2c685acd9266382e68985c183" @@ -2762,16 +2736,16 @@ jest-diff "^27.0.0" pretty-format "^27.0.0" +"@types/json-schema@*", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": + version "7.0.11" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" + integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.6", "@types/json-schema@^7.0.7": version "7.0.7" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.7.tgz#98a993516c859eb0d5c4c8f098317a9ea68db9ad" integrity sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA== -"@types/json-schema@^7.0.9": - version "7.0.11" - resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" - integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== - "@types/json5@^0.0.29": version "0.0.29" resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" @@ -2790,9 +2764,9 @@ integrity sha512-0d5Wd09ItQWH1qFbEyQ7oTQ3GZrMfth5JkbN3EvTKLXcHLRDSXeLnlvlOn0wvxVIwK5o2M8JzP/OWz7T3NRsbw== "@types/mdast@^3.0.0": - version "3.0.3" - resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.3.tgz#2d7d671b1cd1ea3deb306ea75036c2a0407d2deb" - integrity sha512-SXPBMnFVQg1s00dlMCc/jCdvPqdE4mXaMMCeRlxLDmTAEoegHT53xKtkDnzDTOcmMHUfcjyf36/YYZ6SxRdnsw== + version "3.0.10" + resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.10.tgz#4724244a82a4598884cbbe9bcfd73dff927ee8af" + integrity sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA== dependencies: "@types/unist" "*" @@ -2896,9 +2870,9 @@ source-map "^0.6.1" "@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2": - version "2.0.3" - resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e" - integrity sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ== + version "2.0.6" + resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d" + integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ== "@types/webpack-sources@*": version "2.1.0" @@ -3013,150 +2987,143 @@ "@typescript-eslint/types" "5.27.1" eslint-visitor-keys "^3.3.0" -"@webassemblyjs/ast@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.9.0.tgz#bd850604b4042459a5a41cd7d338cbed695ed964" - integrity sha512-C6wW5L+b7ogSDVqymbkkvuW9kruN//YisMED04xzeBBqjHa2FYnmvOlS6Xj68xWQRgWvI9cIglsjFowH/RJyEA== - dependencies: - "@webassemblyjs/helper-module-context" "1.9.0" - "@webassemblyjs/helper-wasm-bytecode" "1.9.0" - "@webassemblyjs/wast-parser" "1.9.0" - -"@webassemblyjs/floating-point-hex-parser@1.9.0": - version "1.9.0" - 
resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.9.0.tgz#3c3d3b271bddfc84deb00f71344438311d52ffb4" - integrity sha512-TG5qcFsS8QB4g4MhrxK5TqfdNe7Ey/7YL/xN+36rRjl/BlGE/NcBvJcqsRgCP6Z92mRE+7N50pRIi8SmKUbcQA== - -"@webassemblyjs/helper-api-error@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.9.0.tgz#203f676e333b96c9da2eeab3ccef33c45928b6a2" - integrity sha512-NcMLjoFMXpsASZFxJ5h2HZRcEhDkvnNFOAKneP5RbKRzaWJN36NC4jqQHKwStIhGXu5mUWlUUk7ygdtrO8lbmw== - -"@webassemblyjs/helper-buffer@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.9.0.tgz#a1442d269c5feb23fcbc9ef759dac3547f29de00" - integrity sha512-qZol43oqhq6yBPx7YM3m9Bv7WMV9Eevj6kMi6InKOuZxhw+q9hOkvq5e/PpKSiLfyetpaBnogSbNCfBwyB00CA== - -"@webassemblyjs/helper-code-frame@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.9.0.tgz#647f8892cd2043a82ac0c8c5e75c36f1d9159f27" - integrity sha512-ERCYdJBkD9Vu4vtjUYe8LZruWuNIToYq/ME22igL+2vj2dQ2OOujIZr3MEFvfEaqKoVqpsFKAGsRdBSBjrIvZA== - dependencies: - "@webassemblyjs/wast-printer" "1.9.0" - -"@webassemblyjs/helper-fsm@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.9.0.tgz#c05256b71244214671f4b08ec108ad63b70eddb8" - integrity sha512-OPRowhGbshCb5PxJ8LocpdX9Kl0uB4XsAjl6jH/dWKlk/mzsANvhwbiULsaiqT5GZGT9qinTICdj6PLuM5gslw== - -"@webassemblyjs/helper-module-context@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.9.0.tgz#25d8884b76839871a08a6c6f806c3979ef712f07" - integrity sha512-MJCW8iGC08tMk2enck1aPW+BE5Cw8/7ph/VGZxwyvGbJwjktKkDK7vy7gAmMDx88D7mhDTCNKAW5tED+gZ0W8g== - dependencies: - "@webassemblyjs/ast" "1.9.0" - -"@webassemblyjs/helper-wasm-bytecode@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.9.0.tgz#4fed8beac9b8c14f8c58b70d124d549dd1fe5790" - integrity sha512-R7FStIzyNcd7xKxCZH5lE0Bqy+hGTwS3LJjuv1ZVxd9O7eHCedSdrId/hMOd20I+v8wDXEn+bjfKDLzTepoaUw== - -"@webassemblyjs/helper-wasm-section@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.9.0.tgz#5a4138d5a6292ba18b04c5ae49717e4167965346" - integrity sha512-XnMB8l3ek4tvrKUUku+IVaXNHz2YsJyOOmz+MMkZvh8h1uSJpSen6vYnw3IoQ7WwEuAhL8Efjms1ZWjqh2agvw== - dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-buffer" "1.9.0" - "@webassemblyjs/helper-wasm-bytecode" "1.9.0" - "@webassemblyjs/wasm-gen" "1.9.0" - -"@webassemblyjs/ieee754@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.9.0.tgz#15c7a0fbaae83fb26143bbacf6d6df1702ad39e4" - integrity sha512-dcX8JuYU/gvymzIHc9DgxTzUUTLexWwt8uCTWP3otys596io0L5aW02Gb1RjYpx2+0Jus1h4ZFqjla7umFniTg== +"@webassemblyjs/ast@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" + integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== + dependencies: + "@webassemblyjs/helper-numbers" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + +"@webassemblyjs/floating-point-hex-parser@1.11.1": + version "1.11.1" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" + integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== + +"@webassemblyjs/helper-api-error@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" + integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== + +"@webassemblyjs/helper-buffer@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" + integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== + +"@webassemblyjs/helper-numbers@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" + integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== + dependencies: + "@webassemblyjs/floating-point-hex-parser" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@xtuc/long" "4.2.2" + +"@webassemblyjs/helper-wasm-bytecode@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" + integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== + +"@webassemblyjs/helper-wasm-section@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" + integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + +"@webassemblyjs/ieee754@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" + integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== dependencies: "@xtuc/ieee754" "^1.2.0" -"@webassemblyjs/leb128@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.9.0.tgz#f19ca0b76a6dc55623a09cffa769e838fa1e1c95" - integrity sha512-ENVzM5VwV1ojs9jam6vPys97B/S65YQtv/aanqnU7D8aSoHFX8GyhGg0CMfyKNIHBuAVjy3tlzd5QMMINa7wpw== +"@webassemblyjs/leb128@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" + integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== dependencies: "@xtuc/long" "4.2.2" -"@webassemblyjs/utf8@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.9.0.tgz#04d33b636f78e6a6813227e82402f7637b6229ab" - integrity sha512-GZbQlWtopBTP0u7cHrEx+73yZKrQoBMpwkGEIqlacljhXCkVM1kMQge/Mf+csMJAjEdSwhOyLAS0AoR3AG5P8w== - -"@webassemblyjs/wasm-edit@1.9.0": - version "1.9.0" - resolved 
"https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.9.0.tgz#3fe6d79d3f0f922183aa86002c42dd256cfee9cf" - integrity sha512-FgHzBm80uwz5M8WKnMTn6j/sVbqilPdQXTWraSjBwFXSYGirpkSWE2R9Qvz9tNiTKQvoKILpCuTjBKzOIm0nxw== - dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-buffer" "1.9.0" - "@webassemblyjs/helper-wasm-bytecode" "1.9.0" - "@webassemblyjs/helper-wasm-section" "1.9.0" - "@webassemblyjs/wasm-gen" "1.9.0" - "@webassemblyjs/wasm-opt" "1.9.0" - "@webassemblyjs/wasm-parser" "1.9.0" - "@webassemblyjs/wast-printer" "1.9.0" - -"@webassemblyjs/wasm-gen@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.9.0.tgz#50bc70ec68ded8e2763b01a1418bf43491a7a49c" - integrity sha512-cPE3o44YzOOHvlsb4+E9qSqjc9Qf9Na1OO/BHFy4OI91XDE14MjFN4lTMezzaIWdPqHnsTodGGNP+iRSYfGkjA== - dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-wasm-bytecode" "1.9.0" - "@webassemblyjs/ieee754" "1.9.0" - "@webassemblyjs/leb128" "1.9.0" - "@webassemblyjs/utf8" "1.9.0" - -"@webassemblyjs/wasm-opt@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.9.0.tgz#2211181e5b31326443cc8112eb9f0b9028721a61" - integrity sha512-Qkjgm6Anhm+OMbIL0iokO7meajkzQD71ioelnfPEj6r4eOFuqm4YC3VBPqXjFyyNwowzbMD+hizmprP/Fwkl2A== - dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-buffer" "1.9.0" - "@webassemblyjs/wasm-gen" "1.9.0" - "@webassemblyjs/wasm-parser" "1.9.0" - -"@webassemblyjs/wasm-parser@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.9.0.tgz#9d48e44826df4a6598294aa6c87469d642fff65e" - integrity sha512-9+wkMowR2AmdSWQzsPEjFU7njh8HTO5MqO8vjwEHuM+AMHioNqSBONRdr0NQQ3dVQrzp0s8lTcYqzUdb7YgELA== - dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-api-error" "1.9.0" - "@webassemblyjs/helper-wasm-bytecode" "1.9.0" - "@webassemblyjs/ieee754" "1.9.0" - "@webassemblyjs/leb128" "1.9.0" - "@webassemblyjs/utf8" "1.9.0" - -"@webassemblyjs/wast-parser@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.9.0.tgz#3031115d79ac5bd261556cecc3fa90a3ef451914" - integrity sha512-qsqSAP3QQ3LyZjNC/0jBJ/ToSxfYJ8kYyuiGvtn/8MK89VrNEfwj7BPQzJVHi0jGTRK2dGdJ5PRqhtjzoww+bw== - dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/floating-point-hex-parser" "1.9.0" - "@webassemblyjs/helper-api-error" "1.9.0" - "@webassemblyjs/helper-code-frame" "1.9.0" - "@webassemblyjs/helper-fsm" "1.9.0" +"@webassemblyjs/utf8@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" + integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== + +"@webassemblyjs/wasm-edit@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" + integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-wasm-section" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-opt" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + "@webassemblyjs/wast-printer" "1.11.1" + +"@webassemblyjs/wasm-gen@1.11.1": + version "1.11.1" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" + integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wasm-opt@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" + integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + +"@webassemblyjs/wasm-parser@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" + integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wast-printer@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" + integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== + dependencies: + "@webassemblyjs/ast" "1.11.1" "@xtuc/long" "4.2.2" -"@webassemblyjs/wast-printer@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.9.0.tgz#4935d54c85fef637b00ce9f52377451d00d47899" - integrity sha512-2J0nE95rHXHyQ24cWjMKJ1tqB/ds8z/cyeOZxJhcb+rW+SQASVjuznUSmdz5GpVJTzU8JkhYut0D3siFDD6wsA== +"@webpack-cli/configtest@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@webpack-cli/configtest/-/configtest-1.2.0.tgz#7b20ce1c12533912c3b217ea68262365fa29a6f5" + integrity sha512-4FB8Tj6xyVkyqjj1OaTqCjXYULB9FMkqQ8yGrZjRDrYh0nOE+7Lhs45WioWQQMV+ceFlE368Ukhe6xdvJM9Egg== + +"@webpack-cli/info@^1.5.0": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@webpack-cli/info/-/info-1.5.0.tgz#6c78c13c5874852d6e2dd17f08a41f3fe4c261b1" + integrity sha512-e8tSXZpw2hPl2uMJY6fsMswaok5FdlGNRTktvFk2sD8RjH0hE2+XistawJx1vmKteh4NmGmNUrp+Tb2w+udPcQ== dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/wast-parser" "1.9.0" - "@xtuc/long" "4.2.2" + envinfo "^7.7.3" + +"@webpack-cli/serve@^1.7.0": + version "1.7.0" + resolved "https://registry.yarnpkg.com/@webpack-cli/serve/-/serve-1.7.0.tgz#e1993689ac42d2b16e9194376cfb6753f6254db1" + integrity sha512-oxnCNGj88fL+xzV+dacXs44HcDwf1ovs3AuEzvP7mqXw7fQntqIhQ1BRmynh4qEKQSSSRSWVyXRjmTbZIX9V2Q== "@xtuc/ieee754@^1.2.0": version "1.2.0" @@ -3186,7 +3153,12 @@ acorn-globals@^6.0.0: acorn "^7.1.1" acorn-walk "^7.1.1" -acorn-jsx@^5.3.1: +acorn-import-assertions@^1.7.6: + version "1.8.0" + resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" + integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== + +acorn-jsx@^5.3.2: version 
"5.3.2" resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== @@ -3196,12 +3168,7 @@ acorn-walk@^7.1.1: resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== -acorn@^6.4.1: - version "6.4.2" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6" - integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ== - -acorn@^7.1.1, acorn@^7.4.0: +acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== @@ -3211,6 +3178,11 @@ acorn@^8.2.4: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.5.0.tgz#4512ccb99b3698c752591e9bb4472e38ad43cee2" integrity sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q== +acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.1: + version "8.7.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" + integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== + agent-base@6: version "6.0.2" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" @@ -3226,17 +3198,26 @@ aggregate-error@^3.0.0: clean-stack "^2.0.0" indent-string "^4.0.0" -ajv-errors@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" - integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== +ajv-formats@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== + dependencies: + ajv "^8.0.0" -ajv-keywords@^3.1.0, ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: +ajv-keywords@^3.5.2: version "3.5.2" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== -ajv@^6.1.0, ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.4, ajv@^6.12.5: +ajv-keywords@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" + integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== + dependencies: + fast-deep-equal "^3.1.3" + +ajv@^6.10.0, ajv@^6.12.4, ajv@^6.12.5: version "6.12.6" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -3246,6 +3227,16 @@ ajv@^6.1.0, ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.4, ajv@^6.12.5: json-schema-traverse "^0.4.1" uri-js "^4.2.2" +ajv@^8.0.0, ajv@^8.8.0: + version "8.11.0" + resolved 
"https://registry.yarnpkg.com/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" + integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + ajv@^8.0.1: version "8.6.0" resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.6.0.tgz#60cc45d9c46a477d80d92c48076d972c342e5720" @@ -3256,16 +3247,6 @@ ajv@^8.0.1: require-from-string "^2.0.2" uri-js "^4.2.2" -alphanum-sort@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/alphanum-sort/-/alphanum-sort-1.0.2.tgz#97a1119649b211ad33691d9f9f486a8ec9fbe0a3" - integrity sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM= - -ansi-colors@^4.1.1: - version "4.1.3" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.3.tgz#37611340eb2243e70cc604cad35d63270d48781b" - integrity sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw== - ansi-escapes@^4.2.1: version "4.3.2" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" @@ -3273,32 +3254,12 @@ ansi-escapes@^4.2.1: dependencies: type-fest "^0.21.3" -ansi-regex@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" - integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= - -ansi-regex@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" - integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== - -ansi-regex@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75" - integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg== - -ansi-regex@^5.0.1: +ansi-regex@^5.0.0, ansi-regex@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-styles@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" - integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= - -ansi-styles@^3.2.0, ansi-styles@^3.2.1: +ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== @@ -3317,15 +3278,7 @@ ansi-styles@^5.0.0: resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== -anymatch@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" - integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== - dependencies: - micromatch "^3.1.4" - normalize-path "^2.1.1" - -anymatch@^3.0.0, anymatch@^3.0.3, anymatch@~3.1.1: +anymatch@^3.0.0, anymatch@^3.0.3: version "3.1.2" resolved 
"https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== @@ -3333,11 +3286,6 @@ anymatch@^3.0.0, anymatch@^3.0.3, anymatch@~3.1.1: normalize-path "^3.0.0" picomatch "^2.0.4" -aproba@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" - integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== - argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -3370,22 +3318,7 @@ aria-query@^5.0.0: resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.0.0.tgz#210c21aaf469613ee8c9a62c7f86525e058db52c" integrity sha512-V+SM7AbUwJ+EBnB8+DXs0hPZHO0W6pqBcc0dW90OwtVG02PswOu/teuARoLQjdDOH+t9pJgGnW5/Qmouf3gPJg== -arr-diff@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" - integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= - -arr-flatten@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" - integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== - -arr-union@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" - integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= - -array-includes@^3.1.1, array-includes@^3.1.3: +array-includes@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.3.tgz#c7f619b382ad2afaf5326cddfdc0afc61af7690a" integrity sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A== @@ -3396,6 +3329,17 @@ array-includes@^3.1.1, array-includes@^3.1.3: get-intrinsic "^1.1.1" is-string "^1.0.5" +array-includes@^3.1.4, array-includes@^3.1.5: + version "3.1.5" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" + integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + get-intrinsic "^1.1.1" + is-string "^1.0.7" + array-union@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" @@ -3413,58 +3357,31 @@ array-uniq@^1.0.1: resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= -array-unique@^0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" - integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= - -array.prototype.flat@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz#6ef638b43312bd401b4c6199fdec7e2dc9e9a123" - integrity sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg== +array.prototype.flat@^1.2.5: + version "1.3.0" + resolved 
"https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" + integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== dependencies: - call-bind "^1.0.0" + call-bind "^1.0.2" define-properties "^1.1.3" - es-abstract "^1.18.0-next.1" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" -array.prototype.flatmap@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.2.4.tgz#94cfd47cc1556ec0747d97f7c7738c58122004c9" - integrity sha512-r9Z0zYoxqHz60vvQbWEdXIEtCwHF0yxaWfno9qzXeNHvfyl3BZqygmGzb84dsubyaXLH4husF+NFgMSdpZhk2Q== +array.prototype.flatmap@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f" + integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg== dependencies: - call-bind "^1.0.0" + call-bind "^1.0.2" define-properties "^1.1.3" - es-abstract "^1.18.0-next.1" - function-bind "^1.1.1" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" arrify@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" integrity sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0= -asn1.js@^5.2.0: - version "5.4.1" - resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-5.4.1.tgz#11a980b84ebb91781ce35b0fdc2ee294e3783f07" - integrity sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA== - dependencies: - bn.js "^4.0.0" - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - safer-buffer "^2.1.0" - -assert@^1.1.1: - version "1.5.0" - resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" - integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== - dependencies: - object-assign "^4.1.1" - util "0.10.3" - -assign-symbols@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" - integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= - ast-types-flow@^0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" @@ -3475,11 +3392,6 @@ astral-regex@^2.0.0: resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== -async-each@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" - integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== - asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" @@ -3491,22 +3403,22 @@ atob@^2.1.2: integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== autoprefixer@^9.8.6: - version "9.8.6" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.8.6.tgz#3b73594ca1bf9266320c5acf1588d74dea74210f" - integrity sha512-XrvP4VVHdRBCdX1S3WXVD8+RyG9qeb1D5Sn1DeLiG2xfSpzellk5k54xbUERJ3M5DggQxes39UGOTP8CFrEGbg== + version "9.8.8" + resolved 
"https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.8.8.tgz#fd4bd4595385fa6f06599de749a4d5f7a474957a" + integrity sha512-eM9d/swFopRt5gdJ7jrpCwgvEMIayITpojhkkSMRsFHYuH5bkSQ4p/9qTEHtmNudUZh22Tehu7I6CxAW0IXTKA== dependencies: browserslist "^4.12.0" caniuse-lite "^1.0.30001109" - colorette "^1.2.1" normalize-range "^0.1.2" num2fraction "^1.2.2" + picocolors "^0.2.1" postcss "^7.0.32" postcss-value-parser "^4.1.0" -axe-core@^4.0.2: - version "4.3.3" - resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.3.3.tgz#b55cd8e8ddf659fe89b064680e1c6a4dceab0325" - integrity sha512-/lqqLAmuIPi79WYfRpy2i8z+x+vxU3zX2uAm0gs1q52qTuKwolOj1P8XbufpXcsydrpKx2yGn2wzAnxCMV86QA== +axe-core@^4.3.5: + version "4.4.2" + resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.4.2.tgz#dcf7fb6dea866166c3eab33d68208afe4d5f670c" + integrity sha512-LVAaGp/wkkgYJcjmHsoKx4juT1aQvJyPcW09MLCjVTh3V2cc6PnyempiLMNH5iMdfIX/zdbjUx2KDjMLCTdPeA== axios@^0.26.0: version "0.26.0" @@ -3520,78 +3432,10 @@ axobject-query@^2.2.0: resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== -babel-code-frame@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" - integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= - dependencies: - chalk "^1.1.3" - esutils "^2.0.2" - js-tokens "^3.0.2" - -babel-core@^6.26.0, babel-core@^6.26.3: - version "6.26.3" - resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-6.26.3.tgz#b2e2f09e342d0f0c88e2f02e067794125e75c207" - integrity sha512-6jyFLuDmeidKmUEb3NM+/yawG0M2bDZ9Z1qbZP59cyHLz8kYGKYwpJP0UwUKKUiTRNvxfLesJnTedqczP7cTDA== - dependencies: - babel-code-frame "^6.26.0" - babel-generator "^6.26.0" - babel-helpers "^6.24.1" - babel-messages "^6.23.0" - babel-register "^6.26.0" - babel-runtime "^6.26.0" - babel-template "^6.26.0" - babel-traverse "^6.26.0" - babel-types "^6.26.0" - babylon "^6.18.0" - convert-source-map "^1.5.1" - debug "^2.6.9" - json5 "^0.5.1" - lodash "^4.17.4" - minimatch "^3.0.4" - path-is-absolute "^1.0.1" - private "^0.1.8" - slash "^1.0.0" - source-map "^0.5.7" - -babel-eslint@^10.1.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-10.1.0.tgz#6968e568a910b78fb3779cdd8b6ac2f479943232" - integrity sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg== - dependencies: - "@babel/code-frame" "^7.0.0" - "@babel/parser" "^7.7.0" - "@babel/traverse" "^7.7.0" - "@babel/types" "^7.7.0" - eslint-visitor-keys "^1.0.0" - resolve "^1.12.0" - -babel-generator@^6.26.0: - version "6.26.1" - resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" - integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== - dependencies: - babel-messages "^6.23.0" - babel-runtime "^6.26.0" - babel-types "^6.26.0" - detect-indent "^4.0.0" - jsesc "^1.3.0" - lodash "^4.17.4" - source-map "^0.5.7" - trim-right "^1.0.1" - -babel-helpers@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helpers/-/babel-helpers-6.24.1.tgz#3471de9caec388e5c850e597e58a26ddf37602b2" - integrity sha1-NHHenK7DiOXIUOWX5Yom3fN2ArI= - dependencies: - babel-runtime "^6.22.0" - babel-template "^6.24.1" - -babel-jest@^27.3.1: - version "27.3.1" 
- resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.3.1.tgz#0636a3404c68e07001e434ac4956d82da8a80022" - integrity sha512-SjIF8hh/ir0peae2D6S6ZKRhUy7q/DnpH7k/V6fT4Bgs/LXXUztOpX4G2tCgq8mLo5HA9mN6NmlFMeYtKmIsTQ== +babel-jest@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.3.1.tgz#0636a3404c68e07001e434ac4956d82da8a80022" + integrity sha512-SjIF8hh/ir0peae2D6S6ZKRhUy7q/DnpH7k/V6fT4Bgs/LXXUztOpX4G2tCgq8mLo5HA9mN6NmlFMeYtKmIsTQ== dependencies: "@jest/transform" "^27.3.1" "@jest/types" "^27.2.5" @@ -3626,21 +3470,6 @@ babel-loader@^8.1.0: make-dir "^3.1.0" schema-utils "^2.6.5" -babel-messages@^6.23.0: - version "6.23.0" - resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" - integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4= - dependencies: - babel-runtime "^6.22.0" - -babel-plugin-css-modules-transform@^1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/babel-plugin-css-modules-transform/-/babel-plugin-css-modules-transform-1.6.1.tgz#5af9483bd62d09af18eeebdc7e6c4370e5125eed" - integrity sha512-Iv98dKRgQnhuHFcn2heHH1OpHo9LGyoKjlsAkj6/Q3wkwpVyHrNfVua/WHnrwe2f7EHy1KANnOSg+q4AJ6ZzaQ== - dependencies: - css-modules-require-hook "^4.0.6" - mkdirp "^0.5.1" - babel-plugin-dynamic-import-node@^2.3.3: version "2.3.3" resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" @@ -3712,15 +3541,6 @@ babel-plugin-polyfill-regenerator@^0.2.3: dependencies: "@babel/helper-define-polyfill-provider" "^0.2.4" -babel-polyfill@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-polyfill/-/babel-polyfill-6.26.0.tgz#379937abc67d7895970adc621f284cd966cf2153" - integrity sha1-N5k3q8Z9eJWXCtxiHyhM2WbPIVM= - dependencies: - babel-runtime "^6.26.0" - core-js "^2.5.0" - regenerator-runtime "^0.10.5" - babel-preset-current-node-syntax@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" @@ -3755,73 +3575,6 @@ babel-preset-jest@^27.5.1: babel-plugin-jest-hoist "^27.5.1" babel-preset-current-node-syntax "^1.0.0" -babel-register@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-register/-/babel-register-6.26.0.tgz#6ed021173e2fcb486d7acb45c6009a856f647071" - integrity sha1-btAhFz4vy0htestFxgCahW9kcHE= - dependencies: - babel-core "^6.26.0" - babel-runtime "^6.26.0" - core-js "^2.5.0" - home-or-tmp "^2.0.0" - lodash "^4.17.4" - mkdirp "^0.5.1" - source-map-support "^0.4.15" - -babel-runtime@^6.22.0, babel-runtime@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" - integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= - dependencies: - core-js "^2.4.0" - regenerator-runtime "^0.11.0" - -babel-template@^6.24.1, babel-template@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" - integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI= - dependencies: - babel-runtime "^6.26.0" - babel-traverse "^6.26.0" - babel-types "^6.26.0" - babylon "^6.18.0" - lodash "^4.17.4" - -babel-traverse@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" - 
integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4= - dependencies: - babel-code-frame "^6.26.0" - babel-messages "^6.23.0" - babel-runtime "^6.26.0" - babel-types "^6.26.0" - babylon "^6.18.0" - debug "^2.6.8" - globals "^9.18.0" - invariant "^2.2.2" - lodash "^4.17.4" - -babel-types@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" - integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= - dependencies: - babel-runtime "^6.26.0" - esutils "^2.0.2" - lodash "^4.17.4" - to-fast-properties "^1.0.3" - -babel@^6.23.0: - version "6.23.0" - resolved "https://registry.yarnpkg.com/babel/-/babel-6.23.0.tgz#d0d1e7d803e974765beea3232d4e153c0efb90f4" - integrity sha1-0NHn2APpdHZb7qMjLU4VPA77kPQ= - -babylon@^6.18.0: - version "6.18.0" - resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" - integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== - bail@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/bail/-/bail-1.0.5.tgz#b6fa133404a392cbc1f8c4bf63f5953351e7a776" @@ -3837,71 +3590,16 @@ balanced-match@^2.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-2.0.0.tgz#dc70f920d78db8b858535795867bf48f820633d9" integrity sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA== -base64-js@^1.0.2: - version "1.5.1" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" - integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== - -base@^0.11.1: - version "0.11.2" - resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" - integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== - dependencies: - cache-base "^1.0.1" - class-utils "^0.3.5" - component-emitter "^1.2.1" - define-property "^1.0.0" - isobject "^3.0.1" - mixin-deep "^1.2.0" - pascalcase "^0.1.1" - big-integer@^1.6.16: version "1.6.51" resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.51.tgz#0df92a5d9880560d3ff2d5fd20245c889d130686" integrity sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg== -big.js@^3.1.3: - version "3.2.0" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" - integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== - big.js@^5.2.2: version "5.2.2" resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== -binary-extensions@^1.0.0: - version "1.13.1" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" - integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== - -binary-extensions@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" - integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== - -bindings@^1.5.0: - version "1.5.0" - resolved 
"https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" - integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== - dependencies: - file-uri-to-path "1.0.0" - -bluebird@^3.5.5: - version "3.7.2" - resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" - integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== - -bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.9: - version "4.12.0" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" - integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== - -bn.js@^5.0.0, bn.js@^5.1.1: - version "5.2.0" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.0.tgz#358860674396c6997771a9d051fcc1b57d4ae002" - integrity sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw== - boolbase@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" @@ -3925,23 +3623,14 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" -braces@^2.3.1, braces@^2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" - integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== dependencies: - arr-flatten "^1.1.0" - array-unique "^0.3.2" - extend-shallow "^2.0.1" - fill-range "^4.0.0" - isobject "^3.0.1" - repeat-element "^1.1.2" - snapdragon "^0.8.1" - snapdragon-node "^2.0.1" - split-string "^3.0.2" - to-regex "^3.0.1" + balanced-match "^1.0.0" -braces@^3.0.1, braces@~3.0.2: +braces@^3.0.1: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== @@ -3962,78 +3651,23 @@ broadcast-channel@^3.4.1: rimraf "3.0.2" unload "2.2.0" -brorand@^1.0.1, brorand@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" - integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= - browser-process-hrtime@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== -browserify-aes@^1.0.0, browserify-aes@^1.0.4: - version "1.2.0" - resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" - integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== - dependencies: - buffer-xor "^1.0.3" - cipher-base "^1.0.0" - create-hash "^1.1.0" - evp_bytestokey "^1.0.3" - inherits "^2.0.1" - safe-buffer "^5.0.1" - -browserify-cipher@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" - integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== - dependencies: - browserify-aes "^1.0.4" - browserify-des "^1.0.0" - evp_bytestokey "^1.0.0" - -browserify-des@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" - integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== - dependencies: - cipher-base "^1.0.1" - des.js "^1.0.0" - inherits "^2.0.1" - safe-buffer "^5.1.2" - -browserify-rsa@^4.0.0, browserify-rsa@^4.0.1: - version "4.1.0" - resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.1.0.tgz#b2fd06b5b75ae297f7ce2dc651f918f5be158c8d" - integrity sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog== - dependencies: - bn.js "^5.0.0" - randombytes "^2.0.1" - -browserify-sign@^4.0.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.2.1.tgz#eaf4add46dd54be3bb3b36c0cf15abbeba7956c3" - integrity sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg== - dependencies: - bn.js "^5.1.1" - browserify-rsa "^4.0.1" - create-hash "^1.2.0" - create-hmac "^1.1.7" - elliptic "^6.5.3" - inherits "^2.0.4" - parse-asn1 "^5.1.5" - readable-stream "^3.6.0" - safe-buffer "^5.2.0" - -browserify-zlib@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" - integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== +browserslist@^4.0.0, browserslist@^4.12.0, browserslist@^4.14.5, browserslist@^4.17.5, browserslist@^4.17.6, browserslist@^4.20.2, browserslist@^4.20.3: + version "4.20.4" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.20.4.tgz#98096c9042af689ee1e0271333dbc564b8ce4477" + integrity sha512-ok1d+1WpnU24XYN7oC3QWgTyMhY/avPJ/r9T00xxvUOIparA/gc+UPUMaod3i+G6s+nI2nUb9xZ5k794uIwShw== dependencies: - pako "~1.0.5" + caniuse-lite "^1.0.30001349" + electron-to-chromium "^1.4.147" + escalade "^3.1.1" + node-releases "^2.0.5" + picocolors "^1.0.0" -browserslist@>=4.16.5, browserslist@^4.0.0, browserslist@^4.12.0, browserslist@^4.16.0, browserslist@^4.16.6, browserslist@^4.17.5, browserslist@^4.17.6: +browserslist@^4.16.6: version "4.16.6" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2" integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== @@ -4056,46 +3690,6 @@ buffer-from@^1.0.0: resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== -buffer-xor@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" - integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= - -buffer@^4.3.0: - version "4.9.2" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" - integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== - dependencies: 
- base64-js "^1.0.2" - ieee754 "^1.1.4" - isarray "^1.0.0" - -builtin-status-codes@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" - integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= - -cacache@^12.0.2: - version "12.0.4" - resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.4.tgz#668bcbd105aeb5f1d92fe25570ec9525c8faa40c" - integrity sha512-a0tMB40oefvuInr4Cwb3GerbL9xTj1D5yg0T5xrjGCGyfvbxseIXX7BAO/u/hIXdafzOI5JC3wDwHyf24buOAQ== - dependencies: - bluebird "^3.5.5" - chownr "^1.1.1" - figgy-pudding "^3.5.1" - glob "^7.1.4" - graceful-fs "^4.1.15" - infer-owner "^1.0.3" - lru-cache "^5.1.1" - mississippi "^3.0.0" - mkdirp "^0.5.1" - move-concurrently "^1.0.1" - promise-inflight "^1.0.1" - rimraf "^2.6.3" - ssri "^6.0.1" - unique-filename "^1.1.1" - y18n "^4.0.0" - cacache@^15.0.5: version "15.2.0" resolved "https://registry.yarnpkg.com/cacache/-/cacache-15.2.0.tgz#73af75f77c58e72d8c630a7a2858cb18ef523389" @@ -4119,21 +3713,6 @@ cacache@^15.0.5: tar "^6.0.2" unique-filename "^1.1.1" -cache-base@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" - integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== - dependencies: - collection-visit "^1.0.0" - component-emitter "^1.2.1" - get-value "^2.0.6" - has-value "^1.0.0" - isobject "^3.0.1" - set-value "^2.0.0" - to-object-path "^0.3.0" - union-value "^1.0.0" - unset-value "^1.0.0" - call-bind@^1.0.0, call-bind@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" @@ -4171,7 +3750,7 @@ camelcase-keys@^7.0.0: quick-lru "^5.1.1" type-fest "^1.2.1" -camelcase@^5.0.0, camelcase@^5.3.1: +camelcase@^5.3.1: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== @@ -4191,23 +3770,22 @@ caniuse-api@^3.0.0: lodash.memoize "^4.1.2" lodash.uniq "^4.5.0" -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001109, caniuse-lite@^1.0.30001219: +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001109: + version "1.0.30001355" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001355.tgz#e240b7177443ed0198c737a7f609536976701c77" + integrity sha512-Sd6pjJHF27LzCB7pT7qs+kuX2ndurzCzkpJl6Qct7LPSZ9jn0bkOA8mdgMgmqnQAWLVOOGjLpc+66V57eLtb1g== + +caniuse-lite@^1.0.30001219: version "1.0.30001312" resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001312.tgz" integrity sha512-Wiz1Psk2MEK0pX3rUzWaunLTZzqS2JYZFzNKqAiJGiuxIjRPLgV6+VDPOg6lQOUxmDwhTlh198JsTTi8Hzw6aQ== -chalk@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" - integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= - dependencies: - ansi-styles "^2.2.1" - escape-string-regexp "^1.0.2" - has-ansi "^2.0.0" - strip-ansi "^3.0.0" - supports-color "^2.0.0" +caniuse-lite@^1.0.30001349: + version "1.0.30001354" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001354.tgz#95c5efdb64148bb4870771749b9a619304755ce5" + integrity sha512-mImKeCkyGDAHNywYFA4bqnLAzTUvVkqPvhY4DV47X+Gl2c5Z8c3KNETnXp14GQt11LvxE8AwjzGxJ+rsikiOzg== -chalk@^2.0.0, chalk@^2.4.2: +chalk@^2.0.0: version "2.4.2" resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -4224,7 +3802,7 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1: +chalk@^4.0.0, chalk@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.1.tgz#c80b3fab28bf6371e6863325eee67e618b77e6ad" integrity sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg== @@ -4232,6 +3810,19 @@ chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1: ansi-styles "^4.1.0" supports-color "^7.1.0" +chalk@^4.1.1: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chalk@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.0.1.tgz#ca57d71e82bb534a296df63bbacc4a1c22b2a4b6" + integrity sha512-Fo07WOYGqMfCWHOzSXOt2CxDbC6skS/jO9ynEcmpANMoPrD+W1r1K6Vx7iNm+AQmETU1Xr2t+n8nzkV9t6xh3w== + char-regex@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" @@ -4252,45 +3843,6 @@ character-reference-invalid@^1.0.0: resolved "https://registry.yarnpkg.com/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz#083329cda0eae272ab3dbbf37e9a382c13af1560" integrity sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg== -chokidar@^2.1.8: - version "2.1.8" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" - integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== - dependencies: - anymatch "^2.0.0" - async-each "^1.0.1" - braces "^2.3.2" - glob-parent "^3.1.0" - inherits "^2.0.3" - is-binary-path "^1.0.0" - is-glob "^4.0.0" - normalize-path "^3.0.0" - path-is-absolute "^1.0.0" - readdirp "^2.2.1" - upath "^1.1.1" - optionalDependencies: - fsevents "^1.2.7" - -chokidar@^3.4.1: - version "3.5.1" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.1.tgz#ee9ce7bbebd2b79f49f304799d5468e31e14e68a" - integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw== - dependencies: - anymatch "~3.1.1" - braces "~3.0.2" - glob-parent "~5.1.0" - is-binary-path "~2.1.0" - is-glob "~4.0.1" - normalize-path "~3.0.0" - readdirp "~3.5.0" - optionalDependencies: - fsevents "~2.3.1" - -chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - chownr@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" @@ -4306,29 +3858,11 @@ ci-info@^3.2.0: resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.2.0.tgz#2876cb948a498797b5236f0095bc057d0dca38b6" integrity sha512-dVqRX7fLUm8J6FgHJ418XuIgDLZDkYcDFTeL6TA2gt5WlIZUQrrH6EZrNClwT/H0FateUsZkGIOPRrLbP+PR9A== -cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: - version "1.0.4" - resolved 
"https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" - integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - cjs-module-lexer@^1.0.0: version "1.2.2" resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== -class-utils@^0.3.5: - version "0.3.6" - resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" - integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== - dependencies: - arr-union "^3.1.0" - define-property "^0.2.5" - isobject "^3.0.0" - static-extend "^0.1.1" - classnames@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.1.tgz#dfcfa3891e306ec1dad105d0e88f4417b8535e8e" @@ -4355,15 +3889,6 @@ cli@~1.0.0: exit "0.1.2" glob "^7.1.1" -cliui@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" - integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== - dependencies: - string-width "^3.1.0" - strip-ansi "^5.2.0" - wrap-ansi "^5.1.0" - cliui@^7.0.2: version "7.0.4" resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" @@ -4373,6 +3898,15 @@ cliui@^7.0.2: strip-ansi "^6.0.0" wrap-ansi "^7.0.0" +clone-deep@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" + integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== + dependencies: + is-plain-object "^2.0.4" + kind-of "^6.0.2" + shallow-clone "^3.0.0" + clone-regexp@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/clone-regexp/-/clone-regexp-2.2.0.tgz#7d65e00885cd8796405c35a737e7a86b7429e36f" @@ -4400,14 +3934,6 @@ collect-v8-coverage@^1.0.0: resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== -collection-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" - integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= - dependencies: - map-visit "^1.0.0" - object-visit "^1.0.0" - color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" @@ -4432,16 +3958,21 @@ color-name@~1.1.4: resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== -colord@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/colord/-/colord-2.0.1.tgz#1e7fb1f9fa1cf74f42c58cb9c20320bab8435aa0" - integrity sha512-vm5YpaWamD0Ov6TSG0GGmUIwstrWcfKQV/h2CmbR7PbNu41+qdB5PW9lpzhjedrpm08uuYvcXi0Oel1RLZIJuA== +colord@^2.9.1: + version "2.9.2" + resolved 
"https://registry.yarnpkg.com/colord/-/colord-2.9.2.tgz#25e2bacbbaa65991422c07ea209e2089428effb1" + integrity sha512-Uqbg+J445nc1TKn4FoDPS6ZZqAvEDnwrH42yo8B40JSOgSLxMZ/gt3h4nmCtPLQeXhjJJkqBx7SCY35WnIixaQ== -colorette@^1.2.0, colorette@^1.2.1, colorette@^1.2.2: +colorette@^1.2.0, colorette@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== +colorette@^2.0.14: + version "2.0.19" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798" + integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== + combined-stream@^1.0.8: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" @@ -4454,7 +3985,7 @@ commander@2, commander@^2.20.0: resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== -commander@^7.1.0: +commander@^7.0.0, commander@^7.2.0: version "7.2.0" resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== @@ -4464,11 +3995,6 @@ commondir@^1.0.1: resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= -component-emitter@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" - integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== - compute-scroll-into-view@1.0.14: version "1.0.14" resolved "https://registry.yarnpkg.com/compute-scroll-into-view/-/compute-scroll-into-view-1.0.14.tgz#80e3ebb25d6aa89f42e533956cb4b16a04cfe759" @@ -4479,16 +4005,6 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= -concat-stream@^1.5.0: - version "1.6.2" - resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" - integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== - dependencies: - buffer-from "^1.0.0" - inherits "^2.0.3" - readable-stream "^2.2.2" - typedarray "^0.0.6" - confusing-browser-globals@^1.0.10: version "1.0.10" resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.10.tgz#30d1e7f3d1b882b25ec4933d1d1adac353d20a59" @@ -4501,16 +4017,6 @@ console-browserify@1.1.x: dependencies: date-now "^0.1.4" -console-browserify@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" - integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== - -constants-browserify@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" - integrity 
sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= - convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.6.0: version "1.8.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" @@ -4518,30 +4024,13 @@ convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.6.0: dependencies: safe-buffer "~5.1.1" -convert-source-map@^1.5.1, convert-source-map@^1.7.0: +convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== dependencies: safe-buffer "~5.1.1" -copy-concurrently@^1.0.0: - version "1.0.5" - resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" - integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== - dependencies: - aproba "^1.1.1" - fs-write-stream-atomic "^1.0.8" - iferr "^0.1.5" - mkdirp "^0.5.1" - rimraf "^2.5.4" - run-queue "^1.0.0" - -copy-descriptor@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" - integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= - copy-to-clipboard@3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz#115aa1a9998ffab6196f93076ad6da3b913662ae" @@ -4579,11 +4068,6 @@ core-js-pure@^3.16.0: resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.18.0.tgz#e5187347bae66448c9e2d67c01c34c4df3261dc5" integrity sha512-ZnK+9vyuMhKulIGqT/7RHGRok8RtkHMEX/BGPHkHx+ouDkq+MUvf9mfIgdqhpmPDu8+V5UtRn/CbCRc9I4lX4w== -core-js@^2.4.0, core-js@^2.5.0: - version "2.6.12" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.12.tgz#d9333dfa7b065e347cc5682219d6f690859cc2ec" - integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ== - core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" @@ -4611,48 +4095,6 @@ cosmiconfig@^7.0.0: path-type "^4.0.0" yaml "^1.10.0" -create-ecdh@^4.0.0: - version "4.0.4" - resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e" - integrity sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A== - dependencies: - bn.js "^4.1.0" - elliptic "^6.5.3" - -create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" - integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== - dependencies: - cipher-base "^1.0.1" - inherits "^2.0.1" - md5.js "^1.3.4" - ripemd160 "^2.0.1" - sha.js "^2.4.0" - -create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7: - version "1.1.7" - resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" - integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== - dependencies: - cipher-base "^1.0.3" - create-hash "^1.1.0" - inherits "^2.0.1" - ripemd160 "^2.0.0" - safe-buffer "^5.0.1" - sha.js 
"^2.4.8" - -cross-spawn@^6.0.5: - version "6.0.5" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" - integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== - dependencies: - nice-try "^1.0.4" - path-key "^2.0.1" - semver "^5.5.0" - shebang-command "^1.2.0" - which "^1.2.9" - cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" @@ -4662,23 +4104,6 @@ cross-spawn@^7.0.2, cross-spawn@^7.0.3: shebang-command "^2.0.0" which "^2.0.1" -crypto-browserify@^3.11.0: - version "3.12.0" - resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" - integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== - dependencies: - browserify-cipher "^1.0.0" - browserify-sign "^4.0.0" - create-ecdh "^4.0.0" - create-hash "^1.1.0" - create-hmac "^1.1.0" - diffie-hellman "^5.0.0" - inherits "^2.0.1" - pbkdf2 "^3.0.3" - public-encrypt "^4.0.0" - randombytes "^2.0.0" - randomfill "^1.0.3" - css-box-model@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/css-box-model/-/css-box-model-1.2.1.tgz#59951d3b81fd6b2074a62d49444415b0d2b4d7c1" @@ -4686,80 +4111,51 @@ css-box-model@1.2.1: dependencies: tiny-invariant "^1.0.6" -css-color-names@^0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-0.0.4.tgz#808adc2e79cf84738069b646cb20ec27beb629e0" - integrity sha1-gIrcLnnPhHOAabZGyyDsJ762KeA= - -css-color-names@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-1.0.1.tgz#6ff7ee81a823ad46e020fa2fd6ab40a887e2ba67" - integrity sha512-/loXYOch1qU1biStIFsHH8SxTmOseh1IJqFvy8IujXOm1h+QjUdDhkzOrR5HG8K8mlxREj0yfi8ewCHx0eMxzA== - -css-declaration-sorter@^6.0.3: - version "6.0.3" - resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.0.3.tgz#9dfd8ea0df4cc7846827876fafb52314890c21a9" - integrity sha512-52P95mvW1SMzuRZegvpluT6yEv0FqQusydKQPZsNN5Q7hh8EwQvN8E2nwuJ16BBvNN6LcoIZXu/Bk58DAhrrxw== - dependencies: - timsort "^0.3.0" +css-declaration-sorter@^6.2.2: + version "6.3.0" + resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.3.0.tgz#72ebd995c8f4532ff0036631f7365cce9759df14" + integrity sha512-OGT677UGHJTAVMRhPO+HJ4oKln3wkBTwtDFH0ojbqm+MJm6xuDMHp2nkhh/ThaBqq20IbraBQSWKfSLNHQO9Og== -css-loader@^3.4.2: - version "3.6.0" - resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-3.6.0.tgz#2e4b2c7e6e2d27f8c8f28f61bffcd2e6c91ef645" - integrity sha512-M5lSukoWi1If8dhQAUCvj4H8vUt3vOnwbQBH9DdTm/s4Ym2B/3dPMtYZeJmq7Q3S3Pa+I94DcZ7pc9bP14cWIQ== +css-loader@5.2.7: + version "5.2.7" + resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-5.2.7.tgz#9b9f111edf6fb2be5dc62525644cbc9c232064ae" + integrity sha512-Q7mOvpBNBG7YrVGMxRxcBJZFL75o+cH2abNASdibkj/fffYD8qWbInZrD0S9ccI6vZclF3DsHE7njGlLtaHbhg== dependencies: - camelcase "^5.3.1" - cssesc "^3.0.0" - icss-utils "^4.1.1" - loader-utils "^1.2.3" - normalize-path "^3.0.0" - postcss "^7.0.32" - postcss-modules-extract-imports "^2.0.0" - postcss-modules-local-by-default "^3.0.2" - postcss-modules-scope "^2.2.0" - postcss-modules-values "^3.0.0" + icss-utils "^5.1.0" + loader-utils "^2.0.0" + postcss "^8.2.15" + postcss-modules-extract-imports "^3.0.0" + 
postcss-modules-local-by-default "^4.0.0" + postcss-modules-scope "^3.0.0" + postcss-modules-values "^4.0.0" postcss-value-parser "^4.1.0" - schema-utils "^2.7.0" - semver "^6.3.0" + schema-utils "^3.0.0" + semver "^7.3.5" -css-modules-require-hook@^4.0.6: - version "4.2.3" - resolved "https://registry.yarnpkg.com/css-modules-require-hook/-/css-modules-require-hook-4.2.3.tgz#6792ca412b15e23e6f9be6a07dcef7f577ff904d" - integrity sha1-Z5LKQSsV4j5vm+agfc739Xf/kE0= - dependencies: - debug "^2.2.0" - generic-names "^1.0.1" - glob-to-regexp "^0.3.0" - icss-replace-symbols "^1.0.2" - lodash "^4.3.0" - postcss "^6.0.1" - postcss-modules-extract-imports "^1.0.0" - postcss-modules-local-by-default "^1.0.1" - postcss-modules-resolve-imports "^1.3.0" - postcss-modules-scope "^1.0.0" - postcss-modules-values "^1.1.1" - seekout "^1.0.1" - -css-select@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/css-select/-/css-select-3.1.2.tgz#d52cbdc6fee379fba97fb0d3925abbd18af2d9d8" - integrity sha512-qmss1EihSuBNWNNhHjxzxSfJoFBM/lERB/Q4EnsJQQC62R2evJDW481091oAdOr9uh46/0n4nrg0It5cAnj1RA== +css-minimizer-webpack-plugin@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-4.0.0.tgz#e11800388c19c2b7442c39cc78ac8ae3675c9605" + integrity sha512-7ZXXRzRHvofv3Uac5Y+RkWRNo0ZMlcg8e9/OtrqUYmwDWJo+qs67GvdeFrXLsFb7czKNwjQhPkM0avlIYl+1nA== dependencies: - boolbase "^1.0.0" - css-what "^4.0.0" - domhandler "^4.0.0" - domutils "^2.4.3" - nth-check "^2.0.0" + cssnano "^5.1.8" + jest-worker "^27.5.1" + postcss "^8.4.13" + schema-utils "^4.0.0" + serialize-javascript "^6.0.0" + source-map "^0.6.1" -css-selector-tokenizer@^0.7.0: - version "0.7.3" - resolved "https://registry.yarnpkg.com/css-selector-tokenizer/-/css-selector-tokenizer-0.7.3.tgz#735f26186e67c749aaf275783405cf0661fae8f1" - integrity sha512-jWQv3oCEL5kMErj4wRnK/OPoBi0D+P1FR2cDCKYPaMeD2eW3/mttav8HT4hT1CKopiJI/psEULjkClhvJo4Lvg== +css-select@^4.1.3: + version "4.3.0" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" + integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== dependencies: - cssesc "^3.0.0" - fastparse "^1.1.2" + boolbase "^1.0.0" + css-what "^6.0.1" + domhandler "^4.3.1" + domutils "^2.8.0" + nth-check "^2.0.1" -css-tree@^1.1.2: +css-tree@^1.1.2, css-tree@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== @@ -4767,10 +4163,10 @@ css-tree@^1.1.2: mdn-data "2.0.14" source-map "^0.6.1" -css-what@>=5.0.1, css-what@^4.0.0: - version "5.0.1" - resolved "https://registry.yarnpkg.com/css-what/-/css-what-5.0.1.tgz#3efa820131f4669a8ac2408f9c32e7c7de9f4cad" - integrity sha512-FYDTSHb/7KXsWICVsxdmiExPjCfRC4qRFBdVwv7Ax9hMnvMmEjP9RfxTEZ3qPZGmADDn2vAKSo9UcN1jKVYscg== +css-what@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== css.escape@^1.5.1: version "1.5.1" @@ -4791,54 +4187,54 @@ cssesc@^3.0.0: resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity 
sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== -cssnano-preset-default@^5.1.3: - version "5.1.3" - resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-5.1.3.tgz#caa54183a8c8df03124a9e23f374ab89df5a9a99" - integrity sha512-qo9tX+t4yAAZ/yagVV3b+QBKeLklQbmgR3wI7mccrDcR+bEk9iHgZN1E7doX68y9ThznLya3RDmR+nc7l6/2WQ== - dependencies: - css-declaration-sorter "^6.0.3" - cssnano-utils "^2.0.1" - postcss-calc "^8.0.0" - postcss-colormin "^5.2.0" - postcss-convert-values "^5.0.1" - postcss-discard-comments "^5.0.1" - postcss-discard-duplicates "^5.0.1" - postcss-discard-empty "^5.0.1" - postcss-discard-overridden "^5.0.1" - postcss-merge-longhand "^5.0.2" - postcss-merge-rules "^5.0.2" - postcss-minify-font-values "^5.0.1" - postcss-minify-gradients "^5.0.1" - postcss-minify-params "^5.0.1" - postcss-minify-selectors "^5.1.0" - postcss-normalize-charset "^5.0.1" - postcss-normalize-display-values "^5.0.1" - postcss-normalize-positions "^5.0.1" - postcss-normalize-repeat-style "^5.0.1" - postcss-normalize-string "^5.0.1" - postcss-normalize-timing-functions "^5.0.1" - postcss-normalize-unicode "^5.0.1" - postcss-normalize-url "^5.0.2" - postcss-normalize-whitespace "^5.0.1" - postcss-ordered-values "^5.0.2" - postcss-reduce-initial "^5.0.1" - postcss-reduce-transforms "^5.0.1" - postcss-svgo "^5.0.2" - postcss-unique-selectors "^5.0.1" - -cssnano-utils@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-2.0.1.tgz#8660aa2b37ed869d2e2f22918196a9a8b6498ce2" - integrity sha512-i8vLRZTnEH9ubIyfdZCAdIdgnHAUeQeByEeQ2I7oTilvP9oHO6RScpeq3GsFUVqeB8uZgOQ9pw8utofNn32hhQ== +cssnano-preset-default@^5.2.11: + version "5.2.11" + resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-5.2.11.tgz#28350471bc1af9df14052472b61340347f453a53" + integrity sha512-4PadR1NtuaIK8MvLNuY7MznK4WJteldGlzCiMaaTiOUP+apeiIvUDIXykzUOoqgOOUAHrU64ncdD90NfZR3LSQ== + dependencies: + css-declaration-sorter "^6.2.2" + cssnano-utils "^3.1.0" + postcss-calc "^8.2.3" + postcss-colormin "^5.3.0" + postcss-convert-values "^5.1.2" + postcss-discard-comments "^5.1.2" + postcss-discard-duplicates "^5.1.0" + postcss-discard-empty "^5.1.1" + postcss-discard-overridden "^5.1.0" + postcss-merge-longhand "^5.1.5" + postcss-merge-rules "^5.1.2" + postcss-minify-font-values "^5.1.0" + postcss-minify-gradients "^5.1.1" + postcss-minify-params "^5.1.3" + postcss-minify-selectors "^5.2.1" + postcss-normalize-charset "^5.1.0" + postcss-normalize-display-values "^5.1.0" + postcss-normalize-positions "^5.1.0" + postcss-normalize-repeat-style "^5.1.0" + postcss-normalize-string "^5.1.0" + postcss-normalize-timing-functions "^5.1.0" + postcss-normalize-unicode "^5.1.0" + postcss-normalize-url "^5.1.0" + postcss-normalize-whitespace "^5.1.1" + postcss-ordered-values "^5.1.2" + postcss-reduce-initial "^5.1.0" + postcss-reduce-transforms "^5.1.0" + postcss-svgo "^5.1.0" + postcss-unique-selectors "^5.1.1" + +cssnano-utils@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861" + integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== -cssnano@^5.0.2: - version "5.0.6" - resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.0.6.tgz#2a91ad34c6521ae31eab3da9c90108ea3093535d" - integrity 
sha512-NiaLH/7yqGksFGsFNvSRe2IV/qmEBAeDE64dYeD8OBrgp6lE8YoMeQJMtsv5ijo6MPyhuoOvFhI94reahBRDkw== +cssnano@^5.1.8: + version "5.1.11" + resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.1.11.tgz#3bb003380718c7948ce3813493370e8946caf04b" + integrity sha512-2nx+O6LvewPo5EBtYrKc8762mMkZRk9cMGIOP4UlkmxHm7ObxH+zvsJJ+qLwPkUc4/yumL/qJkavYi9NlodWIQ== dependencies: - cosmiconfig "^7.0.0" - cssnano-preset-default "^5.1.3" - is-resolvable "^1.1.0" + cssnano-preset-default "^5.2.11" + lilconfig "^2.0.3" + yaml "^1.10.2" csso@^4.2.0: version "4.2.0" @@ -4874,11 +4270,6 @@ csstype@^3.0.2: resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.8.tgz#d2266a792729fb227cd216fb572f43728e1ad340" integrity sha512-jXKhWqXPmlUeoQnF/EhTtTl4C9SnrxSH/jZUih3jmO6lBKr99rP3/+FmrMj4EFpOXzMtXHAZkd3x0E6h6Fgflw== -cyclist@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" - integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= - d3-array@1, d3-array@^1.1.1, d3-array@^1.2.0: version "1.2.4" resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-1.2.4.tgz#635ce4d5eea759f6f605863dbcfc30edc737f71f" @@ -5170,15 +4561,10 @@ dagre@^0.8.5: graphlib "^2.1.8" lodash "^4.17.15" -damerau-levenshtein@^1.0.6: - version "1.0.7" - resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.7.tgz#64368003512a1a6992593741a09a9d31a836f55d" - integrity sha512-VvdQIPGdWP0SqFXghj79Wf/5LArmreyMsGLa6FG6iC4t3j7j5s71TrwWmT/4akbDQIqjfACkLZmjXhA7g2oUZw== - -data-uri-to-buffer@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-4.0.0.tgz#b5db46aea50f6176428ac05b73be39a57701a64b" - integrity sha512-Vr3mLBA8qWmcuschSLAOogKgQ/Jwxulv3RNE4FXnYWRGujzrRWQI4m12fQqRkwX06C0KanhLr4hK+GydchZsaA== +damerau-levenshtein@^1.0.7: + version "1.0.8" + resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== data-urls@^2.0.0: version "2.0.0" @@ -5216,7 +4602,7 @@ debug@4: dependencies: ms "2.1.2" -debug@^2.2.0, debug@^2.3.3, debug@^2.6.8, debug@^2.6.9: +debug@^2.6.9: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== @@ -5230,20 +4616,20 @@ debug@^3.2.6, debug@^3.2.7: dependencies: ms "^2.1.1" -debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: - version "4.3.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" - integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== - dependencies: - ms "2.1.2" - -debug@^4.0.1, debug@^4.3.4: +debug@^4.0.0, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" +debug@^4.1.0, debug@^4.1.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" + integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== + dependencies: + ms "2.1.2" + 
decamelize-keys@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/decamelize-keys/-/decamelize-keys-1.1.0.tgz#d171a87933252807eb3cb61dc1c1445d078df2d9" @@ -5302,28 +4688,6 @@ define-properties@^1.1.4: has-property-descriptors "^1.0.0" object-keys "^1.1.1" -define-property@^0.2.5: - version "0.2.5" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" - integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= - dependencies: - is-descriptor "^0.1.0" - -define-property@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" - integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= - dependencies: - is-descriptor "^1.0.0" - -define-property@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" - integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== - dependencies: - is-descriptor "^1.0.2" - isobject "^3.0.1" - del@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" @@ -5342,26 +4706,6 @@ delayed-stream@~1.0.0: resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= -des.js@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" - integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== - dependencies: - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - -detect-file@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" - integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= - -detect-indent@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" - integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg= - dependencies: - repeating "^2.0.0" - detect-newline@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" @@ -5387,15 +4731,6 @@ diff-sequences@^27.5.1: resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ== -diffie-hellman@^5.0.0: - version "5.0.3" - resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" - integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== - dependencies: - bn.js "^4.1.0" - miller-rabin "^4.0.0" - randombytes "^2.0.0" - dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" @@ -5439,11 +4774,6 @@ dom-serializer@^1.0.1: domhandler "^4.2.0" entities "^2.0.0" -domain-browser@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" - integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== - 
domelementtype@1, domelementtype@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" @@ -5482,6 +4812,13 @@ domhandler@^4.0.0, domhandler@^4.2.0: dependencies: domelementtype "^2.2.0" +domhandler@^4.3.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" + integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== + dependencies: + domelementtype "^2.2.0" + dompurify@^2.2.8: version "2.2.9" resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.2.9.tgz#4b42e244238032d9286a0d2c87b51313581d9624" @@ -5503,7 +4840,7 @@ domutils@^1.5.1: dom-serializer "0" domelementtype "1" -domutils@^2.4.3, domutils@^2.5.2: +domutils@^2.5.2: version "2.7.0" resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.7.0.tgz#8ebaf0c41ebafcf55b0b72ec31c56323712c5442" integrity sha512-8eaHa17IwJUPAiB+SoTYBo5mCdeMgdcAoXJ59m6DT1vw+5iLS3gNoqYaRowaBKtGVrOF1Jz4yDTgYKLK2kvfJg== @@ -5512,86 +4849,52 @@ domutils@^2.4.3, domutils@^2.5.2: domelementtype "^2.2.0" domhandler "^4.2.0" -duplexify@^3.4.2, duplexify@^3.6.0: - version "3.7.1" - resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" - integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== +domutils@^2.8.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" + integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== dependencies: - end-of-stream "^1.0.0" - inherits "^2.0.1" - readable-stream "^2.0.0" - stream-shift "^1.0.0" + dom-serializer "^1.0.1" + domelementtype "^2.2.0" + domhandler "^4.2.0" electron-to-chromium@^1.3.723: version "1.3.752" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.752.tgz#0728587f1b9b970ec9ffad932496429aef750d09" integrity sha512-2Tg+7jSl3oPxgsBsWKh5H83QazTkmWG/cnNwJplmyZc7KcN61+I10oUgaXSVk/NwfvN3BdkKDR4FYuRBQQ2v0A== -elliptic@^6.5.3: - version "6.5.4" - resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" - integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== - dependencies: - bn.js "^4.11.9" - brorand "^1.1.0" - hash.js "^1.0.0" - hmac-drbg "^1.0.1" - inherits "^2.0.4" - minimalistic-assert "^1.0.1" - minimalistic-crypto-utils "^1.0.1" +electron-to-chromium@^1.4.147: + version "1.4.156" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.156.tgz#fc398e1bfbe586135351ebfaf198473a82923af5" + integrity sha512-/Wj5NC7E0wHaMCdqxWz9B0lv7CcycDTiHyXCtbbu3pXM9TV2AOp8BtMqkVuqvJNdEvltBG6LxT2Q+BxY4LUCIA== emittery@^0.8.1: version "0.8.1" resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== -emoji-regex@^7.0.1: - version "7.0.3" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" - integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== - emoji-regex@^8.0.0: version "8.0.0" resolved 
"https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== -emoji-regex@^9.0.0: +emoji-regex@^9.2.2: version "9.2.2" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== -emojis-list@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" - integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= - emojis-list@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== -end-of-stream@^1.0.0, end-of-stream@^1.1.0: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - -enhanced-resolve@^4.1.1, enhanced-resolve@^4.3.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.5.0.tgz#2f3cfd84dbe3b487f18f2db2ef1e064a571ca5ec" - integrity sha512-Nv9m36S/vxpsI+Hc4/ZGRs0n9mXqSWGGq49zxb/cJfPAQMbUtttJAlNPS4AQzaBdw/pKskw5bMbekT/Y7W/Wlg== - dependencies: - graceful-fs "^4.1.2" - memory-fs "^0.5.0" - tapable "^1.0.0" - -enquirer@^2.3.5: - version "2.3.6" - resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" - integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== +enhanced-resolve@^5.9.3: + version "5.9.3" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.9.3.tgz#44a342c012cbc473254af5cc6ae20ebd0aae5d88" + integrity sha512-Bq9VSor+kjvW3f9/MiiR4eE3XYgOl7/rS8lnSxbRbF3kS0B2r+Y9w5krBWxZgDxASVZbdYrn5wT4j/Wb0J9qow== dependencies: - ansi-colors "^4.1.1" + graceful-fs "^4.2.4" + tapable "^2.2.0" entities@1.0: version "1.0.0" @@ -5608,6 +4911,11 @@ entities@^2.0.0: resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== +envinfo@^7.7.3: + version "7.8.1" + resolved "https://registry.yarnpkg.com/envinfo/-/envinfo-7.8.1.tgz#06377e3e5f4d379fea7ac592d5ad8927e0c4d475" + integrity sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw== + eonasdan-bootstrap-datetimepicker@^4.17.47: version "4.17.49" resolved "https://registry.yarnpkg.com/eonasdan-bootstrap-datetimepicker/-/eonasdan-bootstrap-datetimepicker-4.17.49.tgz#5534ba581c1e7eb988dbf773e2fed8a7f48cc76a" @@ -5618,13 +4926,6 @@ eonasdan-bootstrap-datetimepicker@^4.17.47: moment "^2.10" moment-timezone "^0.4.0" -errno@^0.1.3, errno@~0.1.7: - version "0.1.8" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f" - integrity sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A== - dependencies: - prr "~1.0.1" - error-ex@^1.3.1: version "1.3.2" resolved 
"https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" @@ -5632,7 +4933,7 @@ error-ex@^1.3.1: dependencies: is-arrayish "^0.2.1" -es-abstract@^1.18.0-next.1, es-abstract@^1.18.0-next.2, es-abstract@^1.18.2: +es-abstract@^1.18.0-next.2: version "1.18.3" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.3.tgz#25c4c3380a27aa203c44b2b685bba94da31b63e0" integrity sha512-nQIr12dxV7SSxE6r6f1l3DtAeEYdsGpps13dR0TwJg1S8gyp4ZPgy3FZcHBgbiQqnoqSTb+oC+kO4UQ0C/J8vw== @@ -5654,31 +4955,7 @@ es-abstract@^1.18.0-next.1, es-abstract@^1.18.0-next.2, es-abstract@^1.18.2: string.prototype.trimstart "^1.0.4" unbox-primitive "^1.0.1" -es-abstract@^1.18.1: - version "1.18.6" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.6.tgz#2c44e3ea7a6255039164d26559777a6d978cb456" - integrity sha512-kAeIT4cku5eNLNuUKhlmtuk1/TRZvQoYccn6TO0cSVdf1kzB0T7+dYuVK9MWM7l+/53W2Q8M7N2c6MQvhXFcUQ== - dependencies: - call-bind "^1.0.2" - es-to-primitive "^1.2.1" - function-bind "^1.1.1" - get-intrinsic "^1.1.1" - get-symbol-description "^1.0.0" - has "^1.0.3" - has-symbols "^1.0.2" - internal-slot "^1.0.3" - is-callable "^1.2.4" - is-negative-zero "^2.0.1" - is-regex "^1.1.4" - is-string "^1.0.7" - object-inspect "^1.11.0" - object-keys "^1.1.1" - object.assign "^4.1.2" - string.prototype.trimend "^1.0.4" - string.prototype.trimstart "^1.0.4" - unbox-primitive "^1.0.1" - -es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.5: +es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5: version "1.20.1" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.1.tgz#027292cd6ef44bd12b1913b828116f54787d1814" integrity sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA== @@ -5707,6 +4984,18 @@ es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.5: string.prototype.trimstart "^1.0.5" unbox-primitive "^1.0.2" +es-module-lexer@^0.9.0: + version "0.9.3" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" + integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== + +es-shim-unscopables@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: + has "^1.0.3" + es-to-primitive@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" @@ -5726,7 +5015,7 @@ escalade@^3.1.1: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: +escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= @@ -5753,15 +5042,6 @@ escodegen@^2.0.0: optionalDependencies: source-map "~0.6.1" -eslint-config-airbnb-base@^14.2.1: - version "14.2.1" - resolved 
"https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-14.2.1.tgz#8a2eb38455dc5a312550193b319cdaeef042cd1e" - integrity sha512-GOrQyDtVEc1Xy20U7vsB2yAoB4nBlfH5HZJeatRXHleO+OS5Ot+MWij4Dpltw4/DyIkqUfqz1epfhVR5XWWQPA== - dependencies: - confusing-browser-globals "^1.0.10" - object.assign "^4.1.2" - object.entries "^1.1.2" - eslint-config-airbnb-base@^15.0.0: version "15.0.0" resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz#6b09add90ac79c2f8d723a2580e07f3925afd236" @@ -5779,14 +5059,14 @@ eslint-config-airbnb-typescript@^17.0.0: dependencies: eslint-config-airbnb-base "^15.0.0" -eslint-config-airbnb@18.2.1: - version "18.2.1" - resolved "https://registry.yarnpkg.com/eslint-config-airbnb/-/eslint-config-airbnb-18.2.1.tgz#b7fe2b42f9f8173e825b73c8014b592e449c98d9" - integrity sha512-glZNDEZ36VdlZWoxn/bUR1r/sdFKPd1mHPbqUtkctgNG4yT2DLLtJ3D+yCV+jzZCc2V1nBVkmdknOJBZ5Hc0fg== +eslint-config-airbnb@^19.0.4: + version "19.0.4" + resolved "https://registry.yarnpkg.com/eslint-config-airbnb/-/eslint-config-airbnb-19.0.4.tgz#84d4c3490ad70a0ffa571138ebcdea6ab085fdc3" + integrity sha512-T75QYQVQX57jiNgpF9r1KegMICE94VYwoFQyMGhrvc+lB8YF2E/M/PYDaQe1AJcWaEgqLE+ErXV1Og/+6Vyzew== dependencies: - eslint-config-airbnb-base "^14.2.1" + eslint-config-airbnb-base "^15.0.0" object.assign "^4.1.2" - object.entries "^1.1.2" + object.entries "^1.1.5" eslint-import-resolver-node@^0.3.6: version "0.3.6" @@ -5796,13 +5076,13 @@ eslint-import-resolver-node@^0.3.6: debug "^3.2.7" resolve "^1.20.0" -eslint-module-utils@^2.6.2: - version "2.6.2" - resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.6.2.tgz#94e5540dd15fe1522e8ffa3ec8db3b7fa7e7a534" - integrity sha512-QG8pcgThYOuqxupd06oYTZoNOGaUdTY1PqK+oS6ElF6vs4pBdk/aYxFVQQXzcrAqp9m7cl7lb2ubazX+g16k2Q== +eslint-module-utils@^2.7.3: + version "2.7.3" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.3.tgz#ad7e3a10552fdd0642e1e55292781bd6e34876ee" + integrity sha512-088JEC7O3lDZM9xGe0RerkOMd0EjFl+Yvd1jPWIkMT5u3H9+HC34mWWPnqPrN13gieT9pBOO+Qt07Nb/6TresQ== dependencies: debug "^3.2.7" - pkg-dir "^2.0.0" + find-up "^2.1.0" eslint-plugin-es@^3.0.0: version "3.0.1" @@ -5819,43 +5099,42 @@ eslint-plugin-html@^6.0.2: dependencies: htmlparser2 "^6.0.1" -eslint-plugin-import@^2.22.1: - version "2.24.2" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.24.2.tgz#2c8cd2e341f3885918ee27d18479910ade7bb4da" - integrity sha512-hNVtyhiEtZmpsabL4neEj+6M5DCLgpYyG9nzJY8lZQeQXEn5UPW1DpUdsMHMXsq98dbNm7nt1w9ZMSVpfJdi8Q== +eslint-plugin-import@^2.25.3: + version "2.26.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" + integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== dependencies: - array-includes "^3.1.3" - array.prototype.flat "^1.2.4" + array-includes "^3.1.4" + array.prototype.flat "^1.2.5" debug "^2.6.9" doctrine "^2.1.0" eslint-import-resolver-node "^0.3.6" - eslint-module-utils "^2.6.2" - find-up "^2.0.0" + eslint-module-utils "^2.7.3" has "^1.0.3" - is-core-module "^2.6.0" - minimatch "^3.0.4" - object.values "^1.1.4" - pkg-up "^2.0.0" - read-pkg-up "^3.0.0" - resolve "^1.20.0" - tsconfig-paths "^3.11.0" + is-core-module "^2.8.1" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.values "^1.1.5" + resolve "^1.22.0" + tsconfig-paths "^3.14.1" 
-eslint-plugin-jsx-a11y@^6.4.1: - version "6.4.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.4.1.tgz#a2d84caa49756942f42f1ffab9002436391718fd" - integrity sha512-0rGPJBbwHoGNPU73/QCLP/vveMlM1b1Z9PponxO87jfr6tuH5ligXbDT6nHSSzBC8ovX2Z+BQu7Bk5D/Xgq9zg== +eslint-plugin-jsx-a11y@^6.5.0: + version "6.5.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.5.1.tgz#cdbf2df901040ca140b6ec14715c988889c2a6d8" + integrity sha512-sVCFKX9fllURnXT2JwLN5Qgo24Ug5NF6dxhkmxsMEUZhXRcGg+X3e1JbJ84YePQKBl5E0ZjAH5Q4rkdcGY99+g== dependencies: - "@babel/runtime" "^7.11.2" + "@babel/runtime" "^7.16.3" aria-query "^4.2.2" - array-includes "^3.1.1" + array-includes "^3.1.4" ast-types-flow "^0.0.7" - axe-core "^4.0.2" + axe-core "^4.3.5" axobject-query "^2.2.0" - damerau-levenshtein "^1.0.6" - emoji-regex "^9.0.0" + damerau-levenshtein "^1.0.7" + emoji-regex "^9.2.2" has "^1.0.3" - jsx-ast-utils "^3.1.0" + jsx-ast-utils "^3.2.1" language-tags "^1.0.5" + minimatch "^3.0.4" eslint-plugin-node@^11.1.0: version "11.1.0" @@ -5874,45 +5153,37 @@ eslint-plugin-promise@^4.2.1: resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-4.3.1.tgz#61485df2a359e03149fdafc0a68b0e030ad2ac45" integrity sha512-bY2sGqyptzFBDLh/GMbAxfdJC+b0f23ME63FOE4+Jao0oZ3E1LEwFtWJX/1pGMJLiTtrSSern2CRM/g+dfc0eQ== -eslint-plugin-react-hooks@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-1.7.0.tgz#6210b6d5a37205f0b92858f895a4e827020a7d04" - integrity sha512-iXTCFcOmlWvw4+TOE8CLWj6yX1GwzT0Y6cUfHHZqWnSk144VmVIRcVGtUAzrLES7C798lmvnt02C7rxaOX1HNA== +eslint-plugin-react-hooks@^4.5.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" + integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== -eslint-plugin-react@^7.21.5: - version "7.26.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.26.0.tgz#3ae019a35d542b98e5af9e2f96b89c232c74b55b" - integrity sha512-dceliS5itjk4EZdQYtLMz6GulcsasguIs+VTXuiC7Q5IPIdGTkyfXVdmsQOqEhlD9MciofH4cMcT1bw1WWNxCQ== +eslint-plugin-react@^7.30.0: + version "7.30.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.30.0.tgz#8e7b1b2934b8426ac067a0febade1b13bd7064e3" + integrity sha512-RgwH7hjW48BleKsYyHK5vUAvxtE9SMPDKmcPRQgtRCYaZA0XQPt5FSkrU3nhz5ifzMZcA8opwmRJ2cmOO8tr5A== dependencies: - array-includes "^3.1.3" - array.prototype.flatmap "^1.2.4" + array-includes "^3.1.5" + array.prototype.flatmap "^1.3.0" doctrine "^2.1.0" - estraverse "^5.2.0" + estraverse "^5.3.0" jsx-ast-utils "^2.4.1 || ^3.0.0" - minimatch "^3.0.4" - object.entries "^1.1.4" - object.fromentries "^2.0.4" - object.hasown "^1.0.0" - object.values "^1.1.4" - prop-types "^15.7.2" + minimatch "^3.1.2" + object.entries "^1.1.5" + object.fromentries "^2.0.5" + object.hasown "^1.1.1" + object.values "^1.1.5" + prop-types "^15.8.1" resolve "^2.0.0-next.3" semver "^6.3.0" - string.prototype.matchall "^4.0.5" + string.prototype.matchall "^4.0.7" eslint-plugin-standard@^4.0.1: version "4.1.0" resolved "https://registry.yarnpkg.com/eslint-plugin-standard/-/eslint-plugin-standard-4.1.0.tgz#0c3bf3a67e853f8bbbc580fb4945fbf16f41b7c5" integrity sha512-ZL7+QRixjTR6/528YNGyDotyffm5OQst/sGxKDwGb9Uqs4In5Egi4+jbobhqJoyoCM6/7v/1A5fhQ7ScMtDjaQ== -eslint-scope@^4.0.3: - 
version "4.0.3" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" - integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== - dependencies: - esrecurse "^4.1.0" - estraverse "^4.1.1" - -eslint-scope@^5.1.1: +eslint-scope@5.1.1, eslint-scope@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== @@ -5920,7 +5191,15 @@ eslint-scope@^5.1.1: esrecurse "^4.3.0" estraverse "^4.1.1" -eslint-utils@^2.0.0, eslint-utils@^2.1.0: +eslint-scope@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" + integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + +eslint-utils@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== @@ -5934,12 +5213,12 @@ eslint-utils@^3.0.0: dependencies: eslint-visitor-keys "^2.0.0" -eslint-visitor-keys@^1.0.0, eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: +eslint-visitor-keys@^1.1.0: version "1.3.0" resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== -eslint-visitor-keys@^2.0.0: +eslint-visitor-keys@^2.0.0, eslint-visitor-keys@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== @@ -5949,60 +5228,55 @@ eslint-visitor-keys@^3.3.0: resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== -eslint@^7.2.0: - version "7.32.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.32.0.tgz#c6d328a14be3fb08c8d1d21e12c02fdb7a2a812d" - integrity sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA== +eslint@^8.6.0: + version "8.17.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.17.0.tgz#1cfc4b6b6912f77d24b874ca1506b0fe09328c21" + integrity sha512-gq0m0BTJfci60Fz4nczYxNAlED+sMcihltndR8t9t1evnU/azx53x3t2UHXC/uRjcbvRw/XctpaNygSTcQD+Iw== dependencies: - "@babel/code-frame" "7.12.11" - "@eslint/eslintrc" "^0.4.3" - "@humanwhocodes/config-array" "^0.5.0" + "@eslint/eslintrc" "^1.3.0" + "@humanwhocodes/config-array" "^0.9.2" ajv "^6.10.0" chalk "^4.0.0" cross-spawn "^7.0.2" - debug "^4.0.1" + debug "^4.3.2" doctrine "^3.0.0" - enquirer "^2.3.5" escape-string-regexp "^4.0.0" - eslint-scope "^5.1.1" - eslint-utils "^2.1.0" - eslint-visitor-keys "^2.0.0" - espree "^7.3.1" + eslint-scope "^7.1.1" + eslint-utils "^3.0.0" + eslint-visitor-keys "^3.3.0" + espree "^9.3.2" esquery "^1.4.0" esutils "^2.0.2" fast-deep-equal "^3.1.3" file-entry-cache "^6.0.1" 
functional-red-black-tree "^1.0.1" - glob-parent "^5.1.2" - globals "^13.6.0" - ignore "^4.0.6" + glob-parent "^6.0.1" + globals "^13.15.0" + ignore "^5.2.0" import-fresh "^3.0.0" imurmurhash "^0.1.4" is-glob "^4.0.0" - js-yaml "^3.13.1" + js-yaml "^4.1.0" json-stable-stringify-without-jsonify "^1.0.1" levn "^0.4.1" lodash.merge "^4.6.2" - minimatch "^3.0.4" + minimatch "^3.1.2" natural-compare "^1.4.0" optionator "^0.9.1" - progress "^2.0.0" - regexpp "^3.1.0" - semver "^7.2.1" - strip-ansi "^6.0.0" + regexpp "^3.2.0" + strip-ansi "^6.0.1" strip-json-comments "^3.1.0" - table "^6.0.9" text-table "^0.2.0" v8-compile-cache "^2.0.3" -espree@^7.3.0, espree@^7.3.1: - version "7.3.1" - resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" - integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== +espree@^9.3.2: + version "9.3.2" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.3.2.tgz#f58f77bd334731182801ced3380a8cc859091596" + integrity sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA== dependencies: - acorn "^7.4.0" - acorn-jsx "^5.3.1" - eslint-visitor-keys "^1.3.0" + acorn "^8.7.1" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.3.0" esprima@^4.0.0, esprima@^4.0.1: version "4.0.1" @@ -6016,7 +5290,7 @@ esquery@^1.4.0: dependencies: estraverse "^5.1.0" -esrecurse@^4.1.0, esrecurse@^4.3.0: +esrecurse@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== @@ -6028,7 +5302,7 @@ estraverse@^4.1.1: resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== -estraverse@^5.1.0: +estraverse@^5.1.0, estraverse@^5.3.0: version "5.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== @@ -6048,19 +5322,11 @@ eventemitter3@^4.0.7: resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== -events@^3.0.0: +events@^3.2.0: version "3.3.0" resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== -evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" - integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== - dependencies: - md5.js "^1.3.4" - safe-buffer "^5.1.1" - execa@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" @@ -6088,26 +5354,6 @@ exit@0.1.2, exit@0.1.x, exit@^0.1.2: resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" integrity sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= -expand-brackets@^2.1.4: - version "2.1.4" - 
resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" - integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= - dependencies: - debug "^2.3.3" - define-property "^0.2.5" - extend-shallow "^2.0.1" - posix-character-classes "^0.1.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -expand-tilde@^2.0.0, expand-tilde@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" - integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= - dependencies: - homedir-polyfill "^1.0.1" - expect@^27.5.1: version "27.5.1" resolved "https://registry.yarnpkg.com/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" @@ -6118,58 +5364,17 @@ expect@^27.5.1: jest-matcher-utils "^27.5.1" jest-message-util "^27.5.1" -extend-shallow@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" - integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= - dependencies: - is-extendable "^0.1.0" - -extend-shallow@^3.0.0, extend-shallow@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" - integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= - dependencies: - assign-symbols "^1.0.0" - is-extendable "^1.0.1" - extend@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== -extglob@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" - integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== - dependencies: - array-unique "^0.3.2" - define-property "^1.0.0" - expand-brackets "^2.1.4" - extend-shallow "^2.0.1" - fragment-cache "^0.2.1" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== -fast-glob@^3.1.1, fast-glob@^3.2.5: - version "3.2.5" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.5.tgz#7939af2a656de79a4f1901903ee8adcaa7cb9661" - integrity sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.0" - merge2 "^1.3.0" - micromatch "^4.0.2" - picomatch "^2.2.1" - -fast-glob@^3.2.4, fast-glob@^3.2.9: +fast-glob@^3.2.4, fast-glob@^3.2.5, fast-glob@^3.2.9: version "3.2.11" resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== @@ -6200,11 +5405,6 @@ fastest-levenshtein@^1.0.12: resolved "https://registry.yarnpkg.com/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz#9990f7d3a88cc5a9ffd1f1745745251700d497e2" integrity sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow== -fastparse@^1.1.2: - version "1.1.2" - resolved 
"https://registry.yarnpkg.com/fastparse/-/fastparse-1.1.2.tgz#91728c5a5942eced8531283c79441ee4122c35a9" - integrity sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ== - fastq@^1.6.0: version "1.11.0" resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.11.0.tgz#bb9fb955a07130a918eb63c1f5161cc32a5d0858" @@ -6219,19 +5419,6 @@ fb-watchman@^2.0.0: dependencies: bser "2.1.1" -fetch-blob@^3.1.2, fetch-blob@^3.1.4: - version "3.1.4" - resolved "https://registry.yarnpkg.com/fetch-blob/-/fetch-blob-3.1.4.tgz#e8c6567f80ad7fc22fd302e7dcb72bafde9c1717" - integrity sha512-Eq5Xv5+VlSrYWEqKrusxY1C3Hm/hjeAsCGVG3ft7pZahlUAChpGZT/Ms1WmSLnEAisEXszjzu/s+ce6HZB2VHA== - dependencies: - node-domexception "^1.0.0" - web-streams-polyfill "^3.0.3" - -figgy-pudding@^3.5.1: - version "3.5.2" - resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" - integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== - file-entry-cache@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" @@ -6247,21 +5434,6 @@ file-loader@^6.0.0: loader-utils "^2.0.0" schema-utils "^3.0.0" -file-uri-to-path@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" - integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== - -fill-range@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" - integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= - dependencies: - extend-shallow "^2.0.1" - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range "^2.1.0" - fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" @@ -6269,15 +5441,6 @@ fill-range@^7.0.1: dependencies: to-regex-range "^5.0.1" -find-cache-dir@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" - integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== - dependencies: - commondir "^1.0.1" - make-dir "^2.0.0" - pkg-dir "^3.0.0" - find-cache-dir@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.1.tgz#89b33fad4a4670daa94f855f7fbe31d6d84fe880" @@ -6292,20 +5455,13 @@ find-root@^1.1.0: resolved "https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4" integrity sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng== -find-up@^2.0.0, find-up@^2.1.0: +find-up@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= dependencies: locate-path "^2.0.0" -find-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== - dependencies: - locate-path "^3.0.0" - find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" resolved 
"https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" @@ -6314,16 +5470,6 @@ find-up@^4.0.0, find-up@^4.1.0: locate-path "^5.0.0" path-exists "^4.0.0" -findup-sync@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" - integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== - dependencies: - detect-file "^1.0.0" - is-glob "^4.0.0" - micromatch "^3.0.4" - resolve-dir "^1.0.1" - flat-cache@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" @@ -6337,14 +5483,6 @@ flatted@^3.1.0: resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.1.1.tgz#c4b489e80096d9df1dfc97c79871aea7c617c469" integrity sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA== -flush-write-stream@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" - integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== - dependencies: - inherits "^2.0.3" - readable-stream "^2.3.6" - focus-lock@^0.11.2: version "0.11.2" resolved "https://registry.yarnpkg.com/focus-lock/-/focus-lock-0.11.2.tgz#aeef3caf1cea757797ac8afdebaec8fd9ab243ed" @@ -6357,11 +5495,6 @@ follow-redirects@^1.14.8: resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.9.tgz#dd4ea157de7bfaf9ea9b3fbd85aa16951f78d8d7" integrity sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w== -for-in@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= - foreach@^2.0.4: version "2.0.5" resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" @@ -6376,20 +5509,6 @@ form-data@^3.0.0: combined-stream "^1.0.8" mime-types "^2.1.12" -formdata-polyfill@^4.0.10: - version "4.0.10" - resolved "https://registry.yarnpkg.com/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz#24807c31c9d402e002ab3d8c720144ceb8848423" - integrity sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g== - dependencies: - fetch-blob "^3.1.2" - -fragment-cache@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" - integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= - dependencies: - map-cache "^0.2.2" - framer-motion@4.1.17: version "4.1.17" resolved "https://registry.yarnpkg.com/framer-motion/-/framer-motion-4.1.17.tgz#4029469252a62ea599902e5a92b537120cc89721" @@ -6410,23 +5529,6 @@ framesync@5.3.0: dependencies: tslib "^2.1.0" -from2@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" - integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= - dependencies: - inherits "^2.0.1" - readable-stream "^2.0.0" - -fs-extra@^7.0.0: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" - integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== - dependencies: - graceful-fs "^4.1.2" - jsonfile "^4.0.0" 
- universalify "^0.1.0" - fs-minipass@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" @@ -6434,30 +5536,12 @@ fs-minipass@^2.0.0: dependencies: minipass "^3.0.0" -fs-write-stream-atomic@^1.0.8: - version "1.0.10" - resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" - integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= - dependencies: - graceful-fs "^4.1.2" - iferr "^0.1.5" - imurmurhash "^0.1.4" - readable-stream "1 || 2" - fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= -fsevents@^1.2.7: - version "1.2.13" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.13.tgz#f325cb0455592428bcf11b383370ef70e3bfcc38" - integrity sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw== - dependencies: - bindings "^1.5.0" - nan "^2.12.1" - -fsevents@^2.3.2, fsevents@~2.3.1: +fsevents@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== @@ -6487,19 +5571,12 @@ functions-have-names@^1.2.2: resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== -generic-names@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/generic-names/-/generic-names-1.0.3.tgz#2d786a121aee508876796939e8e3bff836c20917" - integrity sha1-LXhqEhruUIh2eWk56OO/+DbCCRc= - dependencies: - loader-utils "^0.2.16" - gensync@^1.0.0-beta.2: version "1.0.0-beta.2" resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== -get-caller-file@^2.0.1, get-caller-file@^2.0.5: +get-caller-file@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== @@ -6548,22 +5625,24 @@ get-symbol-description@^1.0.0: call-bind "^1.0.2" get-intrinsic "^1.1.1" -get-value@^2.0.3, get-value@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" - integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= - -glob-parent@>=5.1.2, glob-parent@^3.1.0, glob-parent@^5.1.0, glob-parent@^5.1.1, glob-parent@^5.1.2, glob-parent@~5.1.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.0.tgz#f851b59b388e788f3a44d63fab50382b2859c33c" - integrity sha512-Hdd4287VEJcZXUwv1l8a+vXC1GjOQqXe+VS30w/ypihpcnu9M1n3xeYeJu5CBpeEQj2nAab2xxz28GuA3vp4Ww== +glob-parent@^5.1.1, glob-parent@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== dependencies: is-glob "^4.0.1" 
-glob-to-regexp@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.3.0.tgz#8c5a1494d2066c570cc3bfe4496175acc4d502ab" - integrity sha1-jFoUlNIGbFcMw7/kSWF1rMTVAqs= +glob-parent@^6.0.1: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob-to-regexp@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== glob@^7.0.3, glob@^7.1.1, glob@^7.1.3, glob@^7.1.4: version "7.1.7" @@ -6589,15 +5668,6 @@ glob@^7.1.2: once "^1.3.0" path-is-absolute "^1.0.0" -global-modules@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" - integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== - dependencies: - global-prefix "^1.0.1" - is-windows "^1.0.1" - resolve-dir "^1.0.0" - global-modules@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" @@ -6605,17 +5675,6 @@ global-modules@^2.0.0: dependencies: global-prefix "^3.0.0" -global-prefix@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" - integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= - dependencies: - expand-tilde "^2.0.2" - homedir-polyfill "^1.0.1" - ini "^1.3.4" - is-windows "^1.0.1" - which "^1.2.14" - global-prefix@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" @@ -6630,19 +5689,14 @@ globals@^11.1.0: resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== -globals@^13.6.0, globals@^13.9.0: +globals@^13.15.0: version "13.15.0" resolved "https://registry.yarnpkg.com/globals/-/globals-13.15.0.tgz#38113218c907d2f7e98658af246cef8b77e90bac" integrity sha512-bpzcOlgDhMG070Av0Vy5Owklpv1I6+j96GhUI7Rh7IzDCKLzboflLrrfqMu8NquDbiR4EOQk7XzJwqVJxicxog== dependencies: type-fest "^0.20.2" -globals@^9.18.0: - version "9.18.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" - integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== - -globby@^11.0.1, globby@^11.1.0: +globby@^11.0.1, globby@^11.0.3, globby@^11.1.0: version "11.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== @@ -6654,18 +5708,6 @@ globby@^11.0.1, globby@^11.1.0: merge2 "^1.4.1" slash "^3.0.0" -globby@^11.0.3: - version "11.0.3" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb" - integrity sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg== - dependencies: - 
array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" - globby@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" @@ -6689,7 +5731,7 @@ gonzales-pe@^4.3.0: dependencies: minimist "^1.2.5" -graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: +graceful-fs@^4.1.2: version "4.2.6" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== @@ -6716,13 +5758,6 @@ hard-rejection@^2.1.0: resolved "https://registry.yarnpkg.com/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883" integrity sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA== -has-ansi@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" - integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= - dependencies: - ansi-regex "^2.0.0" - has-bigints@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" @@ -6767,37 +5802,6 @@ has-tostringtag@^1.0.0: dependencies: has-symbols "^1.0.2" -has-value@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" - integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= - dependencies: - get-value "^2.0.3" - has-values "^0.1.4" - isobject "^2.0.0" - -has-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" - integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= - dependencies: - get-value "^2.0.6" - has-values "^1.0.0" - isobject "^3.0.0" - -has-values@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" - integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= - -has-values@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" - integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= - dependencies: - is-number "^3.0.0" - kind-of "^4.0.0" - has@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" @@ -6805,28 +5809,6 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" -hash-base@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33" - integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA== - dependencies: - inherits "^2.0.4" - readable-stream "^3.6.0" - safe-buffer "^5.2.0" - -hash.js@^1.0.0, hash.js@^1.0.3: - version "1.1.7" - resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" - integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== - dependencies: - inherits "^2.0.3" - minimalistic-assert "^1.0.1" - -hex-color-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/hex-color-regex/-/hex-color-regex-1.1.0.tgz#4c06fccb4602fe2602b3c93df82d7e7dbf1a8a8e" - integrity 
sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ== - hey-listen@^1.0.8: version "1.0.8" resolved "https://registry.yarnpkg.com/hey-listen/-/hey-listen-1.0.8.tgz#8e59561ff724908de1aa924ed6ecc84a56a9aa68" @@ -6839,15 +5821,6 @@ history@^5.2.0: dependencies: "@babel/runtime" "^7.7.6" -hmac-drbg@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" - integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= - dependencies: - hash.js "^1.0.3" - minimalistic-assert "^1.0.0" - minimalistic-crypto-utils "^1.0.1" - hoist-non-react-statics@^3.3.1: version "3.3.2" resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45" @@ -6855,21 +5828,6 @@ hoist-non-react-statics@^3.3.1: dependencies: react-is "^16.7.0" -home-or-tmp@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8" - integrity sha1-42w/LSyufXRqhX440Y1fMqeILbg= - dependencies: - os-homedir "^1.0.0" - os-tmpdir "^1.0.1" - -homedir-polyfill@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" - integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== - dependencies: - parse-passwd "^1.0.0" - hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" @@ -6882,16 +5840,6 @@ hosted-git-info@^4.0.1: dependencies: lru-cache "^6.0.0" -hsl-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/hsl-regex/-/hsl-regex-1.0.0.tgz#d49330c789ed819e276a4c0d272dffa30b18fe6e" - integrity sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4= - -hsla-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/hsla-regex/-/hsla-regex-1.0.0.tgz#c1ce7a3168c8c6614033a4b5f7877f3b225f9c38" - integrity sha1-wc56MWjIxmFAM6S194d/OyJfnDg= - html-encoding-sniffer@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" @@ -6905,9 +5853,9 @@ html-escaper@^2.0.0: integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== html-tags@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-3.1.0.tgz#7b5e6f7e665e9fb41f30007ed9e0d41e97fb2140" - integrity sha512-1qYz89hW3lFDEazhjW0yVAV87lw8lVkrJocr72XmBkMKsoSVJCQx3W8BXsC7hO2qAt8BoVjYjtAcZ9perqGnNg== + version "3.2.0" + resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-3.2.0.tgz#dbb3518d20b726524e4dd43de397eb0a95726961" + integrity sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg== htmlparser2@3.8.x: version "3.8.3" @@ -6956,11 +5904,6 @@ http2-client@^1.2.5: resolved "https://registry.yarnpkg.com/http2-client/-/http2-client-1.3.3.tgz#90fc15d646cca86956b156d07c83947d57d659a9" integrity sha512-nUxLymWQ9pzkzTmir24p2RtsgruLmhje7lH3hLX1IpwvyTg77fW+1brenPPP3USAR+rQ36p5sTA/x7sjCJVkAA== -https-browserify@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" - integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= - https-proxy-agent@^5.0.0: version "5.0.0" resolved 
"https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" @@ -6981,46 +5924,17 @@ iconv-lite@0.4, iconv-lite@0.4.24, iconv-lite@^0.4.4: dependencies: safer-buffer ">= 2.1.2 < 3" -icss-replace-symbols@^1.0.2, icss-replace-symbols@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" - integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= - -icss-utils@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-3.0.1.tgz#ee70d3ae8cac38c6be5ed91e851b27eed343ad0f" - integrity sha1-7nDTroysOMa+XtkehRsn7tNDrQ8= - dependencies: - postcss "^6.0.2" - -icss-utils@^4.0.0, icss-utils@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467" - integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== - dependencies: - postcss "^7.0.14" - -ieee754@^1.1.4: - version "1.2.1" - resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" - integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== - -iferr@^0.1.5: - version "0.1.5" - resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" - integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= - -ignore@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" - integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== +icss-utils@^5.0.0, icss-utils@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" + integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== -ignore@^5.1.1, ignore@^5.1.4, ignore@^5.1.8: +ignore@^5.1.1: version "5.1.8" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57" integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== -ignore@^5.2.0: +ignore@^5.1.8, ignore@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== @@ -7038,14 +5952,6 @@ import-lazy@^4.0.0: resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-4.0.0.tgz#e8eb627483a0a43da3c03f3e35548be5cb0cc153" integrity sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw== -import-local@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" - integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== - dependencies: - pkg-dir "^3.0.0" - resolve-cwd "^2.0.0" - import-local@^3.0.2: version "3.0.3" resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.0.3.tgz#4d51c2c495ca9393da259ec66b62e022920211e0" @@ -7074,7 +5980,7 @@ indent-string@^4.0.0: resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" integrity 
sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== -infer-owner@^1.0.3, infer-owner@^1.0.4: +infer-owner@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== @@ -7087,22 +5993,12 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: +inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== -inherits@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" - integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= - -inherits@2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= - -ini@^1.3.4, ini@^1.3.5: +ini@^1.3.5: version "1.3.8" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== @@ -7116,37 +6012,18 @@ internal-slot@^1.0.3: has "^1.0.3" side-channel "^1.0.4" -interpret@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" - integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== +interpret@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9" + integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw== -invariant@^2.2.2, invariant@^2.2.4: +invariant@^2.2.4: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" -is-absolute-url@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" - integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== - -is-accessor-descriptor@^0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" - integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= - dependencies: - kind-of "^3.0.2" - -is-accessor-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" - integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== - dependencies: - kind-of "^6.0.0" - is-alphabetical@^1.0.0: version "1.0.4" resolved "https://registry.yarnpkg.com/is-alphabetical/-/is-alphabetical-1.0.4.tgz#9e7d6b94916be22153745d184c298cbf986a686d" @@ -7170,20 +6047,6 @@ is-bigint@^1.0.1: 
resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.2.tgz#ffb381442503235ad245ea89e45b3dbff040ee5a" integrity sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA== -is-binary-path@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" - integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= - dependencies: - binary-extensions "^1.0.0" - -is-binary-path@~2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" - integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== - dependencies: - binary-extensions "^2.0.0" - is-boolean-object@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.1.tgz#3c0878f035cb821228d350d2e1e36719716a3de8" @@ -7191,11 +6054,6 @@ is-boolean-object@^1.1.0: dependencies: call-bind "^1.0.2" -is-buffer@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" - integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== - is-buffer@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" @@ -7211,18 +6069,6 @@ is-callable@^1.2.4: resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945" integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w== -is-color-stop@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-color-stop/-/is-color-stop-1.1.0.tgz#cfff471aee4dd5c9e158598fbe12967b5cdad345" - integrity sha1-z/9HGu5N1cnhWFmPvhKWe1za00U= - dependencies: - css-color-names "^0.0.4" - hex-color-regex "^1.1.0" - hsl-regex "^1.0.0" - hsla-regex "^1.0.0" - rgb-regex "^1.0.1" - rgba-regex "^1.0.0" - is-core-module@^2.2.0: version "2.4.0" resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.4.0.tgz#8e9fc8e15027b011418026e98f0e6f4d86305cc1" @@ -7230,27 +6076,13 @@ is-core-module@^2.2.0: dependencies: has "^1.0.3" -is-core-module@^2.6.0: - version "2.6.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.6.0.tgz#d7553b2526fe59b92ba3e40c8df757ec8a709e19" - integrity sha512-wShG8vs60jKfPWpF2KZRaAtvt3a20OAn7+IJ6hLPECpSABLcKtFKTTI4ZtH5QcBruBHlq+WsdHWyz0BCZW7svQ== +is-core-module@^2.8.1: + version "2.9.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69" + integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A== dependencies: has "^1.0.3" -is-data-descriptor@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" - integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= - dependencies: - kind-of "^3.0.2" - -is-data-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" - integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== - dependencies: - kind-of "^6.0.0" - is-date-object@^1.0.1: version "1.0.4" resolved 
"https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.4.tgz#550cfcc03afada05eea3dd30981c7b09551f73e5" @@ -7261,50 +6093,10 @@ is-decimal@^1.0.0: resolved "https://registry.yarnpkg.com/is-decimal/-/is-decimal-1.0.4.tgz#65a3a5958a1c5b63a706e1b333d7cd9f630d3fa5" integrity sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw== -is-descriptor@^0.1.0: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" - integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== - dependencies: - is-accessor-descriptor "^0.1.6" - is-data-descriptor "^0.1.4" - kind-of "^5.0.0" - -is-descriptor@^1.0.0, is-descriptor@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" - integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== - dependencies: - is-accessor-descriptor "^1.0.0" - is-data-descriptor "^1.0.0" - kind-of "^6.0.2" - -is-extendable@^0.1.0, is-extendable@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" - integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= - -is-extendable@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" - integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== - dependencies: - is-plain-object "^2.0.4" - -is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= - -is-finite@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.1.0.tgz#904135c77fb42c0641d6aa1bcdbc4daa8da082f3" - integrity sha512-cdyMtqX/BOqqNBBiKlIVkytNHm49MtMlYyn1zxzvJKWmFMlGzm+ry5BBfYyeY9YmNKbRSo/o7OX9w9ale0wg3w== - -is-fullwidth-code-point@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" - integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-fullwidth-code-point@^3.0.0: version "3.0.0" @@ -7316,14 +6108,14 @@ is-generator-fn@^2.0.0: resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: +is-glob@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" -is-glob@^4.0.3: +is-glob@^4.0.1, is-glob@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== @@ -7350,13 
+6142,6 @@ is-number-object@^1.0.4: resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.5.tgz#6edfaeed7950cff19afedce9fbfca9ee6dd289eb" integrity sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw== -is-number@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" - integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= - dependencies: - kind-of "^3.0.2" - is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" @@ -7391,7 +6176,7 @@ is-plain-obj@^2.0.0: resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== -is-plain-object@^2.0.3, is-plain-object@^2.0.4: +is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== @@ -7424,11 +6209,6 @@ is-regexp@^2.0.0: resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-2.1.0.tgz#cd734a56864e23b956bf4e7c66c396a4c0b22c2d" integrity sha512-OZ4IlER3zmRIoB9AqNhEggVxqIH4ofDns5nRrPS6yQxXE1TPCUpFznBfRQmQa8uC+pXqjMnukiJBxCisIxiLGA== -is-resolvable@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" - integrity sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg== - is-shared-array-buffer@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" @@ -7477,39 +6257,17 @@ is-weakref@^1.0.2: dependencies: call-bind "^1.0.2" -is-windows@^1.0.1, is-windows@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" - integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== - -is-wsl@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" - integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= - isarray@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= -isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= - isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= -isobject@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" - integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= - dependencies: - isarray "1.0.0" - -isobject@^3.0.0, isobject@^3.0.1: +isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= 
@@ -8021,7 +6779,7 @@ jest-worker@^27.3.1: merge-stream "^2.0.0" supports-color "^8.0.0" -jest-worker@^27.5.1: +jest-worker@^27.4.5, jest-worker@^27.5.1: version "27.5.1" resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== @@ -8059,11 +6817,6 @@ js-sha3@0.8.0: resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-tokens@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" - integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= - js-yaml@^3.13.1: version "3.14.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" @@ -8112,11 +6865,6 @@ jsdom@^16.6.0: ws "^7.4.6" xml-name-validator "^3.0.0" -jsesc@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" - integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s= - jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" @@ -8140,20 +6888,15 @@ jshint@^2.13.4: minimatch "~3.0.2" strip-json-comments "1.0.x" -json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" - integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== - -json-parse-even-better-errors@^2.3.0: +json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== -json-pointer@^0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/json-pointer/-/json-pointer-0.6.1.tgz#3c6caa6ac139e2599f5a1659d39852154015054d" - integrity sha512-3OvjqKdCBvH41DLpV4iSt6v2XhZXV1bPB4OROuknvUXI7ZQNofieCPkmE26stEJ9zdQuvIxDHCuYhfgxFAAs+Q== +json-pointer@0.6.2, json-pointer@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/json-pointer/-/json-pointer-0.6.2.tgz#f97bd7550be5e9ea901f8c9264c9d436a22a93cd" + integrity sha512-vLWcKbOaXlO+jvRy4qNd+TI1QUPZzfJj1tpJ3vAXDych5XJf93ftpUKe5pKCrzyIIwgBJcOcCVRUfqQP25afBw== dependencies: foreach "^2.0.4" @@ -8177,11 +6920,6 @@ json-stringify-safe@^5.0.1: resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA== -json5@^0.5.0, json5@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" - integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= - json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" @@ -8196,14 +6934,12 @@ json5@^2.1.2: dependencies: minimist "^1.2.5" -jsonfile@^4.0.0: - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" - integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= - optionalDependencies: - graceful-fs "^4.1.6" +json5@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== -"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.1.0: +"jsx-ast-utils@^2.4.1 || ^3.0.0": version "3.2.1" resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.2.1.tgz#720b97bfe7d901b927d87c3773637ae8ea48781b" integrity sha512-uP5vu8xfy2F9A6LGC22KO7e2/vGTS1MhP+18f++ZNlf0Ohaxbc9nIEwHAsejlJKyzfZzU5UIhe5ItYkitcZnZA== @@ -8211,26 +6947,15 @@ jsonfile@^4.0.0: array-includes "^3.1.3" object.assign "^4.1.2" -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: - version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" - integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= - dependencies: - is-buffer "^1.1.5" - -kind-of@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" - integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= +jsx-ast-utils@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.0.tgz#e624f259143b9062c92b6413ff92a164c80d3ccb" + integrity sha512-XzO9luP6L0xkxwhIJMTJQpZo/eeN60K08jHdexfD569AGxeNug6UketeHXEhROoM8aR7EcUoOQmIhcJQjcuq8Q== dependencies: - is-buffer "^1.1.5" - -kind-of@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" - integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== + array-includes "^3.1.4" + object.assign "^4.1.2" -kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: +kind-of@^6.0.2, kind-of@^6.0.3: version "6.0.3" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== @@ -8257,14 +6982,6 @@ language-tags@^1.0.5: dependencies: language-subtag-registry "~0.3.2" -last-call-webpack-plugin@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/last-call-webpack-plugin/-/last-call-webpack-plugin-3.0.0.tgz#9742df0e10e3cf46e5c0381c2de90d3a7a2d7555" - integrity sha512-7KI2l2GIZa9p2spzPIVZBYyNKkN+e/SQPpnjlTiPhdbDW3F86tdKKELxKpzJ5sgU19wQWsACULZmpTPYHeWO5w== - dependencies: - lodash "^4.17.5" - webpack-sources "^1.1.0" - leven@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" @@ -8286,37 +7003,22 @@ levn@~0.3.0: prelude-ls "~1.1.2" type-check "~0.3.2" +lilconfig@^2.0.3: + version "2.0.5" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.5.tgz#19e57fd06ccc3848fd1891655b5a447092225b25" + integrity sha512-xaYmXZtTHPAw5m+xLN8ab9C+3a8YmV3asNSPOATITbtwrfbwaLJj8h66H1WMIpALCkqsIzK3h7oQ+PdX+LQ9Eg== + lines-and-columns@^1.1.6: version "1.1.6" resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" integrity sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA= -load-json-file@^4.0.0: - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b" - integrity sha1-L19Fq5HjMhYjT9U62rZo607AmTs= - dependencies: - graceful-fs "^4.1.2" - parse-json "^4.0.0" - pify "^3.0.0" - strip-bom "^3.0.0" - -loader-runner@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" - integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== - -loader-utils@^0.2.16: - version "0.2.17" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" - integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= - dependencies: - big.js "^3.1.3" - emojis-list "^2.0.0" - json5 "^0.5.0" - object-assign "^4.0.1" +loader-runner@^4.2.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" + integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== -loader-utils@^1.2.3, loader-utils@^1.4.0: +loader-utils@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.0.tgz#c579b5e34cb34b1a74edc6c1fb36bfa371d5a613" integrity sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA== @@ -8342,14 +7044,6 @@ locate-path@^2.0.0: p-locate "^2.0.0" path-exists "^3.0.0" -locate-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" - integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== - dependencies: - p-locate "^3.0.0" - path-exists "^3.0.0" - locate-path@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" @@ -8357,11 +7051,6 @@ locate-path@^5.0.0: dependencies: p-locate "^4.1.0" -lodash.clonedeep@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" - integrity sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8= - lodash.debounce@^4.0.8: version "4.0.8" resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" @@ -8380,7 +7069,7 @@ lodash.isequal@^4.5.0: lodash.memoize@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" - integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= + integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== lodash.merge@^4.6.2: version "4.6.2" @@ -8405,9 +7094,9 @@ lodash.truncate@^4.4.2: lodash.uniq@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" - integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= + integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== -"lodash@>=3.5 <5", lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.4, lodash@^4.17.5, lodash@^4.3.0, lodash@^4.7.0, lodash@~4.17.21: +lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0, lodash@~4.17.21: version "4.17.21" resolved 
"https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -8432,13 +7121,6 @@ loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.4.0: dependencies: js-tokens "^3.0.0 || ^4.0.0" -lru-cache@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" - integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== - dependencies: - yallist "^3.0.2" - lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" @@ -8456,14 +7138,6 @@ lz-string@^1.4.4: resolved "https://registry.yarnpkg.com/lz-string/-/lz-string-1.4.4.tgz#c0d8eaf36059f705796e1e344811cf4c498d3a26" integrity sha1-wNjq82BZ9wV5bh40SBHPTEmNOiY= -make-dir@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" - integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== - dependencies: - pify "^4.0.1" - semver "^5.6.0" - make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" @@ -8478,11 +7152,6 @@ makeerror@1.0.12: dependencies: tmpl "1.0.5" -map-cache@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" - integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= - map-obj@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" @@ -8498,22 +7167,15 @@ map-obj@^4.1.0: resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-4.3.0.tgz#9304f906e93faae70880da102a9f1df0ea8bb05a" integrity sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ== -map-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" - integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= - dependencies: - object-visit "^1.0.0" - mark.js@^8.11.1: version "8.11.1" resolved "https://registry.yarnpkg.com/mark.js/-/mark.js-8.11.1.tgz#180f1f9ebef8b0e638e4166ad52db879beb2ffc5" integrity sha1-GA8fnr74sOY45BZq1S24eb6y/8U= -marked@^4.0.10: - version "4.0.12" - resolved "https://registry.yarnpkg.com/marked/-/marked-4.0.12.tgz#2262a4e6fd1afd2f13557726238b69a48b982f7d" - integrity sha512-hgibXWrEDNBWgGiK18j/4lkS6ihTe9sxtV4Q1OQppb/0zzyPSzoFANBa5MfsG/zgsWklmNnhm0XACZOH/0HBiQ== +marked@^4.0.15: + version "4.0.17" + resolved "https://registry.yarnpkg.com/marked/-/marked-4.0.17.tgz#1186193d85bb7882159cdcfc57d1dfccaffb3fe9" + integrity sha512-Wfk0ATOK5iPxM4ptrORkFemqroz0ZDxp5MWfYA7H/F+wO17NRWV5Ypxi6p3g2Xmw2bKeiYOl6oVnLHKxBA0VhA== match-sorter@^6.0.2: version "6.3.1" @@ -8528,15 +7190,6 @@ mathml-tag-names@^2.1.3: resolved "https://registry.yarnpkg.com/mathml-tag-names/-/mathml-tag-names-2.1.3.tgz#4ddadd67308e780cf16a47685878ee27b736a0a3" integrity sha512-APMBEanjybaPzUrfqU0IMU5I0AswKMH7k8OTLs0vvV4KZpExkTkY87nR/zpbuTPj+gARop7aGUbl11pnDfW6xg== -md5.js@^1.3.4: - version "1.3.5" - resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" - integrity 
sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - safe-buffer "^5.1.2" - mdast-util-from-markdown@^0.8.0: version "0.8.5" resolved "https://registry.yarnpkg.com/mdast-util-from-markdown/-/mdast-util-from-markdown-0.8.5.tgz#d1ef2ca42bc377ecb0463a987910dae89bd9a28c" @@ -8570,22 +7223,6 @@ mdn-data@2.0.14: resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== -memory-fs@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" - integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= - dependencies: - errno "^0.1.3" - readable-stream "^2.0.1" - -memory-fs@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" - integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== - dependencies: - errno "^0.1.3" - readable-stream "^2.0.1" - meow@^9.0.0: version "9.0.0" resolved "https://registry.yarnpkg.com/meow/-/meow-9.0.0.tgz#cd9510bc5cac9dee7d03c73ee1f9ad959f4ea364" @@ -8622,26 +7259,7 @@ micromark@~2.11.0: debug "^4.0.0" parse-entities "^2.0.0" -micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: - version "3.1.10" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" - integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - braces "^2.3.1" - define-property "^2.0.2" - extend-shallow "^3.0.2" - extglob "^2.0.4" - fragment-cache "^0.2.1" - kind-of "^6.0.2" - nanomatch "^1.2.9" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.2" - -micromatch@^4.0.2, micromatch@^4.0.4: +micromatch@^4.0.4: version "4.0.4" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg== @@ -8654,14 +7272,6 @@ microseconds@0.2.0: resolved "https://registry.yarnpkg.com/microseconds/-/microseconds-0.2.0.tgz#233b25f50c62a65d861f978a4a4f8ec18797dc39" integrity sha512-n7DHHMjR1avBbSpsTBj6fmMGh2AGrifVV4e+WYc3Q9lO+xnSZ3NyhcBND3vzzatt05LFhoKFRxrIyklmLlUtyA== -miller-rabin@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" - integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== - dependencies: - bn.js "^4.0.0" - brorand "^1.0.1" - mime-db@1.48.0: version "1.48.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.48.0.tgz#e35b31045dd7eada3aaad537ed88a33afbef2d1d" @@ -8672,6 +7282,11 @@ mime-db@1.50.0: resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.50.0.tgz#abd4ac94e98d3c0e185016c67ab45d5fde40c11f" integrity sha512-9tMZCDlYHqeERXEHO9f/hKfNXhre5dK2eE/krIvUjZbS2KPcqGDfNShIWS1uW9XOTKQKqK6qbeOci18rbfW77A== +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity 
sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + mime-types@^2.1.12: version "2.1.33" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.33.tgz#1fa12a904472fafd068e48d9e8401f74d3f70edb" @@ -8686,6 +7301,13 @@ mime-types@^2.1.26: dependencies: mime-db "1.48.0" +mime-types@^2.1.27: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -8696,25 +7318,15 @@ min-indent@^1.0.0: resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== -mini-css-extract-plugin@1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-1.6.0.tgz#b4db2525af2624899ed64a23b0016e0036411893" - integrity sha512-nPFKI7NSy6uONUo9yn2hIfb9vyYvkFu95qki0e21DQ9uaqNKDP15DGpK0KnV6wDroWxPHtExrdEwx/yDQ8nVRw== +mini-css-extract-plugin@^1.6.2: + version "1.6.2" + resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-1.6.2.tgz#83172b4fd812f8fc4a09d6f6d16f924f53990ca8" + integrity sha512-WhDvO3SjGm40oV5y26GjMJYjd2UMqrLAGKy5YS2/3QKJy2F7jgynuHTir/tgUUOiNQu5saXHdc8reo7YuhhT4Q== dependencies: loader-utils "^2.0.0" schema-utils "^3.0.0" webpack-sources "^1.1.0" -minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" - integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== - -minimalistic-crypto-utils@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" - integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= - minimatch@^3.0.4, minimatch@~3.0.2: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" @@ -8722,6 +7334,20 @@ minimatch@^3.0.4, minimatch@~3.0.2: dependencies: brace-expansion "^1.1.7" +minimatch@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^5.0.1: + version "5.1.0" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7" + integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg== + dependencies: + brace-expansion "^2.0.1" + minimist-options@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619" @@ -8731,7 +7357,7 @@ minimist-options@4.1.0: is-plain-obj "^1.1.0" kind-of "^6.0.3" -minimist@^1.2.0, minimist@^1.2.5: +minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: version "1.2.6" resolved 
"https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== @@ -8772,37 +7398,6 @@ minizlib@^2.1.1: minipass "^3.0.0" yallist "^4.0.0" -mississippi@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" - integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== - dependencies: - concat-stream "^1.5.0" - duplexify "^3.4.2" - end-of-stream "^1.1.0" - flush-write-stream "^1.0.0" - from2 "^2.1.0" - parallel-transform "^1.1.0" - pump "^3.0.0" - pumpify "^1.3.3" - stream-each "^1.1.0" - through2 "^2.0.0" - -mixin-deep@^1.2.0: - version "1.3.2" - resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" - integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== - dependencies: - for-in "^1.0.2" - is-extendable "^1.0.1" - -mkdirp@^0.5.1, mkdirp@^0.5.3: - version "0.5.5" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" - integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== - dependencies: - minimist "^1.2.5" - mkdirp@^1.0.3, mkdirp@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" @@ -8851,18 +7446,6 @@ moment-timezone@^0.5.34: resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.3.tgz#edd47411c322413999f7a5940d526de183c031f3" integrity sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw== -move-concurrently@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" - integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= - dependencies: - aproba "^1.1.1" - copy-concurrently "^1.0.0" - fs-write-stream-atomic "^1.0.8" - mkdirp "^0.5.1" - rimraf "^2.5.4" - run-queue "^1.0.3" - ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" @@ -8878,11 +7461,6 @@ ms@^2.1.1: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== -nan@^2.12.1: - version "2.14.2" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.2.tgz#f5376400695168f4cc694ac9393d0c9585eeea19" - integrity sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ== - nano-time@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/nano-time/-/nano-time-1.0.0.tgz#b0554f69ad89e22d0907f7a12b0993a5d96137ef" @@ -8895,22 +7473,10 @@ nanoid@^3.1.23: resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.2.tgz#c89622fafb4381cd221421c69ec58547a1eec557" integrity sha512-CuHBogktKwpm5g2sRgv83jEy2ijFzBwMoYA60orPDR7ynsLijJDqgsi4RDGj3OJpy3Ieb+LYwiRmIOGyytgITA== -nanomatch@^1.2.9: - version "1.2.13" - resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" - integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - 
define-property "^2.0.2" - extend-shallow "^3.0.2" - fragment-cache "^0.2.1" - is-windows "^1.0.2" - kind-of "^6.0.2" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" +nanoid@^3.3.4: + version "3.3.4" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" + integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== natural-compare@^1.4.0: version "1.4.0" @@ -8926,16 +7492,11 @@ needle@^2.2.4: iconv-lite "^0.4.4" sax "^1.2.4" -neo-async@^2.5.0, neo-async@^2.6.1: +neo-async@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== -nice-try@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" - integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== - nock@^13.2.4: version "13.2.4" resolved "https://registry.yarnpkg.com/nock/-/nock-13.2.4.tgz#43a309d93143ee5cdcca91358614e7bde56d20e1" @@ -8946,11 +7507,6 @@ nock@^13.2.4: lodash.set "^4.3.2" propagate "^2.0.0" -node-domexception@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" - integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== - node-fetch-h2@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/node-fetch-h2/-/node-fetch-h2-2.3.0.tgz#c6188325f9bd3d834020bf0f2d6dc17ced2241ac" @@ -8958,49 +7514,18 @@ node-fetch-h2@^2.3.0: dependencies: http2-client "^1.2.5" -node-fetch@>=2.6.7, node-fetch@^2.6.1: - version "3.2.0" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.0.tgz#59390db4e489184fa35d4b74caf5510e8dfbaf3b" - integrity sha512-8xeimMwMItMw8hRrOl3C9/xzU49HV/yE6ORew/l+dxWimO5A4Ra8ld2rerlJvc/O7et5Z1zrWsPX43v1QBjCxw== +node-fetch@^2.6.1: + version "2.6.7" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== dependencies: - data-uri-to-buffer "^4.0.0" - fetch-blob "^3.1.4" - formdata-polyfill "^4.0.10" + whatwg-url "^5.0.0" node-int64@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= -node-libs-browser@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" - integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== - dependencies: - assert "^1.1.1" - browserify-zlib "^0.2.0" - buffer "^4.3.0" - console-browserify "^1.1.0" - constants-browserify "^1.0.0" - crypto-browserify "^3.11.0" - domain-browser "^1.1.1" - events "^3.0.0" - https-browserify "^1.0.0" - os-browserify "^0.3.0" - path-browserify "0.0.1" - process "^0.11.10" - punycode "^1.2.4" - querystring-es3 "^0.2.0" - readable-stream "^2.3.3" - stream-browserify "^2.0.1" - stream-http "^2.7.2" - string_decoder "^1.0.0" - timers-browserify "^2.0.4" - tty-browserify "0.0.0" - url "^0.11.0" - util "^0.11.0" - 
vm-browserify "^1.0.1" - node-modules-regexp@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40" @@ -9018,7 +7543,12 @@ node-releases@^1.1.71: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.73.tgz#dd4e81ddd5277ff846b80b52bb40c49edf7a7b20" integrity sha512-uW7fodD6pyW2FZNZnp/Z3hvWKeEW1Y8R1+1CnErE8cXFXzl5blBOoVB41CvMer6P6Q0S5FXDwcHgFd1Wj0U9zg== -normalize-package-data@^2.3.2, normalize-package-data@^2.5.0: +node-releases@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.5.tgz#280ed5bc3eba0d96ce44897d8aee478bfb3d9666" + integrity sha512-U9h1NLROZTq9uE1SNffn6WuPDg8icmi3ns4rEl/oTfIle4iLjTliCzgTsbaIFMq/Xn078/lfY/BL0GWZ+psK4Q== + +normalize-package-data@^2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== @@ -9038,14 +7568,7 @@ normalize-package-data@^3.0.0: semver "^7.3.4" validate-npm-package-license "^3.0.1" -normalize-path@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" - integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= - dependencies: - remove-trailing-separator "^1.0.1" - -normalize-path@^3.0.0, normalize-path@~3.0.0: +normalize-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== @@ -9053,7 +7576,7 @@ normalize-path@^3.0.0, normalize-path@~3.0.0: normalize-range@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" - integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= + integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== normalize-registry-url@^1.0.0: version "1.0.0" @@ -9063,12 +7586,12 @@ normalize-registry-url@^1.0.0: normalize-selector@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/normalize-selector/-/normalize-selector-0.2.0.tgz#d0b145eb691189c63a78d201dc4fdb1293ef0c03" - integrity sha1-0LFF62kRicY6eNIB3E/bEpPvDAM= + integrity sha512-dxvWdI8gw6eAvk9BlPffgEoGfM7AdijoCwOEJge3e3ulT2XLgmU7KvvxprOaCu05Q1uGRHmOhHe1r6emZoKyFw== normalize-url@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.0.1.tgz#a4f27f58cf8c7b287b440b8a8201f42d0b00d256" - integrity sha512-VU4pzAuh7Kip71XEmO9aNREYAdMHFGTVj/i+CaTImS8x0i1d3jUZkXhqluy/PRgjPLMgsLQulYY3PJ/aSbSjpQ== + version "6.1.0" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== npm-run-path@^4.0.1: version "4.0.1" @@ -9077,17 +7600,17 @@ npm-run-path@^4.0.1: dependencies: path-key "^3.0.0" -nth-check@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.0.0.tgz#1bb4f6dac70072fc313e8c9cd1417b5074c0a125" - integrity sha512-i4sc/Kj8htBrAiH1viZ0TgU8Y5XqCaV/FziYK6TBczxmeKm3AEFWqqF3195yKudrarqy7Zu80Ra5dobFjn9X/Q== +nth-check@^2.0.1: + version "2.1.1" + 
resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" + integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== dependencies: boolbase "^1.0.0" num2fraction@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" - integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= + integrity sha512-Y1wZESM7VUThYY+4W+X4ySH2maqcA+p7UR+w8VWNWVAd6lwuXXWz/w/Cz43J/dI2I+PS6wD5N+bJUF+gjWvIqg== nvd3@^1.8.6: version "1.8.6" @@ -9150,42 +7673,26 @@ object-assign@^4.0.1, object-assign@^4.1.1: resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== -object-copy@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" - integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= - dependencies: - copy-descriptor "^0.1.0" - define-property "^0.2.5" - kind-of "^3.0.3" - object-inspect@^1.10.3: version "1.10.3" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.10.3.tgz#c2aa7d2d09f50c99375704f7a0adf24c5782d369" integrity sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw== -object-inspect@^1.11.0, object-inspect@^1.9.0: - version "1.11.0" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.11.0.tgz#9dceb146cedd4148a0d9e51ab88d34cf509922b1" - integrity sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg== - object-inspect@^1.12.0: version "1.12.2" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== +object-inspect@^1.9.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.11.0.tgz#9dceb146cedd4148a0d9e51ab88d34cf509922b1" + integrity sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg== + object-keys@^1.0.12, object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== -object-visit@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" - integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= - dependencies: - isobject "^3.0.0" - object.assign@^4.1.0, object.assign@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" @@ -9196,15 +7703,6 @@ object.assign@^4.1.0, object.assign@^4.1.2: has-symbols "^1.0.1" object-keys "^1.1.1" -object.entries@^1.1.0, object.entries@^1.1.2, object.entries@^1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.4.tgz#43ccf9a50bc5fd5b649d45ab1a579f24e088cafd" - integrity sha512-h4LWKWE+wKQGhtMjZEBud7uLGhqyLwj8fpHOarZhD2uY3C9cRtk57VQ89ke3moByLXMedqs3XCHzyb4AmA2DjA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.18.2" - 
object.entries@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" @@ -9214,46 +7712,38 @@ object.entries@^1.1.5: define-properties "^1.1.3" es-abstract "^1.19.1" -object.fromentries@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.4.tgz#26e1ba5c4571c5c6f0890cef4473066456a120b8" - integrity sha512-EsFBshs5RUUpQEY1D4q/m59kMfz4YJvxuNCJcv/jWwOJr34EaVnG11ZrZa0UHB3wnzV1wx8m58T4hQL8IuNXlQ== +object.fromentries@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" + integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== dependencies: call-bind "^1.0.2" define-properties "^1.1.3" - es-abstract "^1.18.0-next.2" - has "^1.0.3" - -object.hasown@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.0.0.tgz#bdbade33cfacfb25d7f26ae2b6cb870bf99905c2" - integrity sha512-qYMF2CLIjxxLGleeM0jrcB4kiv3loGVAjKQKvH8pSU/i2VcRRvUNmxbD+nEMmrXRfORhuVJuH8OtSYCZoue3zA== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.18.1" + es-abstract "^1.19.1" -object.pick@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" - integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= +object.hasown@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.1.tgz#ad1eecc60d03f49460600430d97f23882cf592a3" + integrity sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A== dependencies: - isobject "^3.0.1" + define-properties "^1.1.4" + es-abstract "^1.19.5" -object.values@^1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.4.tgz#0d273762833e816b693a637d30073e7051535b30" - integrity sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg== +object.values@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== dependencies: call-bind "^1.0.2" define-properties "^1.1.3" - es-abstract "^1.18.2" + es-abstract "^1.19.1" oblivious-set@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/oblivious-set/-/oblivious-set-1.0.0.tgz#c8316f2c2fb6ff7b11b6158db3234c49f733c566" integrity sha512-z+pI07qxo4c2CulUHCDf9lcqDlMSo72N/4rLUpRXf6fu+q8vjt8y0xS+Tlf8NTJDdTXHbdeO1n3MlbctwEoXZw== -once@^1.3.0, once@^1.3.1, once@^1.4.0: +once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= @@ -9267,22 +7757,13 @@ onetime@^5.1.2: dependencies: mimic-fn "^2.1.0" -openapi-sampler@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/openapi-sampler/-/openapi-sampler-1.2.0.tgz#2e82e4e32c1506a42f370be726d759d5275a9d4d" - integrity sha512-Y0tFg2iH7NWnNHYnesxhMfkXc7wWXyJXYMUTTxkGkfpl0U9u/ZOf6BxrdEXBD4sgs9uMlVWsbWLDLesVmSUU7Q== +openapi-sampler@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/openapi-sampler/-/openapi-sampler-1.3.0.tgz#5b99ceb4156b00d2aa3f860e52ccb768a5695793" + integrity 
sha512-2QfjK1oM9Sv0q82Ae1RrUe3yfFmAyjF548+6eAeb+h/cL1Uj51TW4UezraBEvwEdzoBgfo4AaTLVFGTKj+yYDw== dependencies: "@types/json-schema" "^7.0.7" - json-pointer "^0.6.1" - -optimize-css-assets-webpack-plugin@6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/optimize-css-assets-webpack-plugin/-/optimize-css-assets-webpack-plugin-6.0.0.tgz#00acd99d420715ad96ed3d8ad65a8a4df1be233b" - integrity sha512-XKVxJuCBSslP1Eyuf1uVtZT3Pkp6jEIkmg7BMcNU/pq6XAnDXTINkYFWmiQWt8+j//FO4dIDd4v+gn0m5VWJIw== - dependencies: - cssnano "^5.0.2" - last-call-webpack-plugin "^3.0.0" - postcss "^8.2.1" + json-pointer "0.6.2" optionator@^0.8.1: version "0.8.3" @@ -9308,21 +7789,6 @@ optionator@^0.9.1: type-check "^0.4.0" word-wrap "^1.2.3" -os-browserify@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" - integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= - -os-homedir@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" - integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= - -os-tmpdir@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" - integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= - p-limit@^1.1.0: version "1.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" @@ -9330,7 +7796,7 @@ p-limit@^1.1.0: dependencies: p-try "^1.0.0" -p-limit@^2.0.0, p-limit@^2.2.0: +p-limit@^2.2.0: version "2.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== @@ -9351,13 +7817,6 @@ p-locate@^2.0.0: dependencies: p-limit "^1.1.0" -p-locate@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" - integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== - dependencies: - p-limit "^2.0.0" - p-locate@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" @@ -9387,20 +7846,6 @@ p-try@^2.0.0: resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== -pako@~1.0.5: - version "1.0.11" - resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" - integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== - -parallel-transform@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" - integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== - dependencies: - cyclist "^1.0.1" - inherits "^2.0.3" - readable-stream "^2.1.5" - parent-module@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" @@ -9408,17 +7853,6 @@ parent-module@^1.0.0: dependencies: callsites "^3.0.0" -parse-asn1@^5.0.0, parse-asn1@^5.1.5: - version "5.1.6" - resolved 
"https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.6.tgz#385080a3ec13cb62a62d39409cb3e88844cdaed4" - integrity sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw== - dependencies: - asn1.js "^5.2.0" - browserify-aes "^1.0.0" - evp_bytestokey "^1.0.0" - pbkdf2 "^3.0.3" - safe-buffer "^5.1.1" - parse-entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/parse-entities/-/parse-entities-2.0.0.tgz#53c6eb5b9314a1f4ec99fa0fdf7ce01ecda0cbe8" @@ -9431,14 +7865,6 @@ parse-entities@^2.0.0: is-decimal "^1.0.0" is-hexadecimal "^1.0.0" -parse-json@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" - integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= - dependencies: - error-ex "^1.3.1" - json-parse-better-errors "^1.0.1" - parse-json@^5.0.0, parse-json@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" @@ -9449,26 +7875,11 @@ parse-json@^5.0.0, parse-json@^5.2.0: json-parse-even-better-errors "^2.3.0" lines-and-columns "^1.1.6" -parse-passwd@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" - integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= - parse5@6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== -pascalcase@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" - integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= - -path-browserify@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" - integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== - path-browserify@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-1.0.1.tgz#d98454a9c3753d5790860f16f68867b9e46be1fd" @@ -9484,7 +7895,7 @@ path-exists@^4.0.0: resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== -path-is-absolute@^1.0.0, path-is-absolute@^1.0.1: +path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= @@ -9494,50 +7905,37 @@ path-is-inside@^1.0.2: resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= -path-key@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" - integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= - path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== -path-parse@^1.0.6: +path-parse@^1.0.6, path-parse@^1.0.7: version "1.0.7" 
resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -path-type@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" - integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg== - dependencies: - pify "^3.0.0" - path-type@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== -pbkdf2@^3.0.3: - version "3.1.2" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.2.tgz#dd822aa0887580e52f1a039dc3eda108efae3075" - integrity sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA== - dependencies: - create-hash "^1.1.2" - create-hmac "^1.1.4" - ripemd160 "^2.0.1" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - perfect-scrollbar@^1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/perfect-scrollbar/-/perfect-scrollbar-1.5.1.tgz#8ee5b3ca06ce9c3f7338fd4ab67a55248a6cf3be" integrity sha512-MrSImINnIh3Tm1hdPT6bji6fmIeRorVEegQvyUnhqko2hDGTHhmjPefHXfxG/Jb8xVbfCwgmUIlIajERGXjVXQ== -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3: +picocolors@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" + integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== + +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.3: version "2.3.0" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.0.tgz#f1f061de8f6a4bf022892e2d128234fb98302972" integrity sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw== @@ -9547,11 +7945,6 @@ pify@^2.0.0: resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= - pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" @@ -9581,20 +7974,6 @@ pirates@^4.0.4: resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== -pkg-dir@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" - integrity sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s= - dependencies: - find-up "^2.1.0" - -pkg-dir@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" - integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== - dependencies: - find-up "^3.0.0" - 
pkg-dir@^4.1.0, pkg-dir@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" @@ -9602,13 +7981,6 @@ pkg-dir@^4.1.0, pkg-dir@^4.2.0: dependencies: find-up "^4.0.0" -pkg-up@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-2.0.0.tgz#c819ac728059a461cab1c3889a2be3c49a004d7f" - integrity sha1-yBmscoBZpGHKscOImivjxJoATX8= - dependencies: - find-up "^2.1.0" - pluralize@^8.0.0: version "8.0.0" resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-8.0.0.tgz#1a6fa16a38d12a1901e0320fa017051c539ce3b1" @@ -9631,55 +8003,51 @@ popmotion@9.3.6: style-value-types "4.1.4" tslib "^2.1.0" -posix-character-classes@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" - integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= - -postcss-calc@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-8.0.0.tgz#a05b87aacd132740a5db09462a3612453e5df90a" - integrity sha512-5NglwDrcbiy8XXfPM11F3HeC6hoT9W7GUH/Zi5U/p7u3Irv4rHhdDcIZwG0llHXV4ftsBjpfWMXAnXNl4lnt8g== +postcss-calc@^8.2.3: + version "8.2.4" + resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5" + integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== dependencies: - postcss-selector-parser "^6.0.2" - postcss-value-parser "^4.0.2" + postcss-selector-parser "^6.0.9" + postcss-value-parser "^4.2.0" -postcss-colormin@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-5.2.0.tgz#2b620b88c0ff19683f3349f4cf9e24ebdafb2c88" - integrity sha512-+HC6GfWU3upe5/mqmxuqYZ9B2Wl4lcoUUNkoaX59nEWV4EtADCMiBqui111Bu8R8IvaZTmqmxrqOAqjbHIwXPw== +postcss-colormin@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-5.3.0.tgz#3cee9e5ca62b2c27e84fce63affc0cfb5901956a" + integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg== dependencies: browserslist "^4.16.6" caniuse-api "^3.0.0" - colord "^2.0.1" - postcss-value-parser "^4.1.0" + colord "^2.9.1" + postcss-value-parser "^4.2.0" -postcss-convert-values@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-5.0.1.tgz#4ec19d6016534e30e3102fdf414e753398645232" - integrity sha512-C3zR1Do2BkKkCgC0g3sF8TS0koF2G+mN8xxayZx3f10cIRmTaAnpgpRQZjNekTZxM2ciSPoh2IWJm0VZx8NoQg== +postcss-convert-values@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz#31586df4e184c2e8890e8b34a0b9355313f503ab" + integrity sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g== dependencies: - postcss-value-parser "^4.1.0" + browserslist "^4.20.3" + postcss-value-parser "^4.2.0" -postcss-discard-comments@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-5.0.1.tgz#9eae4b747cf760d31f2447c27f0619d5718901fe" - integrity sha512-lgZBPTDvWrbAYY1v5GYEv8fEO/WhKOu/hmZqmCYfrpD6eyDWWzAOsl2rF29lpvziKO02Gc5GJQtlpkTmakwOWg== +postcss-discard-comments@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz#8df5e81d2925af2780075840c1526f0660e53696" + 
integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== -postcss-discard-duplicates@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-5.0.1.tgz#68f7cc6458fe6bab2e46c9f55ae52869f680e66d" - integrity sha512-svx747PWHKOGpAXXQkCc4k/DsWo+6bc5LsVrAsw+OU+Ibi7klFZCyX54gjYzX4TH+f2uzXjRviLARxkMurA2bA== +postcss-discard-duplicates@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848" + integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== -postcss-discard-empty@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-5.0.1.tgz#ee136c39e27d5d2ed4da0ee5ed02bc8a9f8bf6d8" - integrity sha512-vfU8CxAQ6YpMxV2SvMcMIyF2LX1ZzWpy0lqHDsOdaKKLQVQGVP1pzhrI9JlsO65s66uQTfkQBKBD/A5gp9STFw== +postcss-discard-empty@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c" + integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== -postcss-discard-overridden@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-5.0.1.tgz#454b41f707300b98109a75005ca4ab0ff2743ac6" - integrity sha512-Y28H7y93L2BpJhrdUR2SR2fnSsT+3TVx1NmVQLbcnZWwIUpJ7mfcTC6Za9M2PG6w8j7UQRfzxqn8jU2VwFxo3Q== +postcss-discard-overridden@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e" + integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== postcss-html@^0.36.0: version "0.36.0" @@ -9700,224 +8068,169 @@ postcss-media-query-parser@^0.2.3: resolved "https://registry.yarnpkg.com/postcss-media-query-parser/-/postcss-media-query-parser-0.2.3.tgz#27b39c6f4d94f81b1a73b8f76351c609e5cef244" integrity sha1-J7Ocb02U+Bsac7j3Y1HGCeXO8kQ= -postcss-merge-longhand@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-5.0.2.tgz#277ada51d9a7958e8ef8cf263103c9384b322a41" - integrity sha512-BMlg9AXSI5G9TBT0Lo/H3PfUy63P84rVz3BjCFE9e9Y9RXQZD3+h3YO1kgTNsNJy7bBc1YQp8DmSnwLIW5VPcw== +postcss-merge-longhand@^5.1.5: + version "5.1.5" + resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-5.1.5.tgz#b0e03bee3b964336f5f33c4fc8eacae608e91c05" + integrity sha512-NOG1grw9wIO+60arKa2YYsrbgvP6tp+jqc7+ZD5/MalIw234ooH2C6KlR6FEn4yle7GqZoBxSK1mLBE9KPur6w== dependencies: - css-color-names "^1.0.1" - postcss-value-parser "^4.1.0" - stylehacks "^5.0.1" + postcss-value-parser "^4.2.0" + stylehacks "^5.1.0" -postcss-merge-rules@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-5.0.2.tgz#d6e4d65018badbdb7dcc789c4f39b941305d410a" - integrity sha512-5K+Md7S3GwBewfB4rjDeol6V/RZ8S+v4B66Zk2gChRqLTCC8yjnHQ601omj9TKftS19OPGqZ/XzoqpzNQQLwbg== +postcss-merge-rules@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz#7049a14d4211045412116d79b751def4484473a5" + integrity 
sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ== dependencies: browserslist "^4.16.6" caniuse-api "^3.0.0" - cssnano-utils "^2.0.1" + cssnano-utils "^3.1.0" postcss-selector-parser "^6.0.5" - vendors "^1.0.3" -postcss-minify-font-values@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-5.0.1.tgz#a90cefbfdaa075bd3dbaa1b33588bb4dc268addf" - integrity sha512-7JS4qIsnqaxk+FXY1E8dHBDmraYFWmuL6cgt0T1SWGRO5bzJf8sUoelwa4P88LEWJZweHevAiDKxHlofuvtIoA== +postcss-minify-font-values@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b" + integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== dependencies: - postcss-value-parser "^4.1.0" + postcss-value-parser "^4.2.0" -postcss-minify-gradients@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-5.0.1.tgz#2dc79fd1a1afcb72a9e727bc549ce860f93565d2" - integrity sha512-odOwBFAIn2wIv+XYRpoN2hUV3pPQlgbJ10XeXPq8UY2N+9ZG42xu45lTn/g9zZ+d70NKSQD6EOi6UiCMu3FN7g== +postcss-minify-gradients@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c" + integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== dependencies: - cssnano-utils "^2.0.1" - is-color-stop "^1.1.0" - postcss-value-parser "^4.1.0" + colord "^2.9.1" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" -postcss-minify-params@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-5.0.1.tgz#371153ba164b9d8562842fdcd929c98abd9e5b6c" - integrity sha512-4RUC4k2A/Q9mGco1Z8ODc7h+A0z7L7X2ypO1B6V8057eVK6mZ6xwz6QN64nHuHLbqbclkX1wyzRnIrdZehTEHw== +postcss-minify-params@^5.1.3: + version "5.1.3" + resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz#ac41a6465be2db735099bbd1798d85079a6dc1f9" + integrity sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg== dependencies: - alphanum-sort "^1.0.2" - browserslist "^4.16.0" - cssnano-utils "^2.0.1" - postcss-value-parser "^4.1.0" - uniqs "^2.0.0" + browserslist "^4.16.6" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" -postcss-minify-selectors@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-5.1.0.tgz#4385c845d3979ff160291774523ffa54eafd5a54" - integrity sha512-NzGBXDa7aPsAcijXZeagnJBKBPMYLaJJzB8CQh6ncvyl2sIndLVWfbcDi0SBjRWk5VqEjXvf8tYwzoKf4Z07og== +postcss-minify-selectors@^5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz#d4e7e6b46147b8117ea9325a915a801d5fe656c6" + integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== dependencies: - alphanum-sort "^1.0.2" postcss-selector-parser "^6.0.5" -postcss-modules-extract-imports@^1.0.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-1.2.1.tgz#dc87e34148ec7eab5f791f7cd5849833375b741a" - integrity 
sha512-6jt9XZwUhwmRUhb/CkyJY020PYaPJsCyt3UjbaWo6XEbH/94Hmv6MP7fG2C5NDU/BcHzyGYxNtHvM+LTf9HrYw== - dependencies: - postcss "^6.0.1" - -postcss-modules-extract-imports@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e" - integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== - dependencies: - postcss "^7.0.5" - -postcss-modules-local-by-default@^1.0.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-1.2.0.tgz#f7d80c398c5a393fa7964466bd19500a7d61c069" - integrity sha1-99gMOYxaOT+nlkRmvRlQCn1hwGk= - dependencies: - css-selector-tokenizer "^0.7.0" - postcss "^6.0.1" +postcss-modules-extract-imports@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" + integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== -postcss-modules-local-by-default@^3.0.2: - version "3.0.3" - resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-3.0.3.tgz#bb14e0cc78279d504dbdcbfd7e0ca28993ffbbb0" - integrity sha512-e3xDq+LotiGesympRlKNgaJ0PCzoUIdpH0dj47iWAui/kyTgh3CiAr1qP54uodmJhl6p9rN6BoNcdEDVJx9RDw== +postcss-modules-local-by-default@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c" + integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== dependencies: - icss-utils "^4.1.1" - postcss "^7.0.32" + icss-utils "^5.0.0" postcss-selector-parser "^6.0.2" postcss-value-parser "^4.1.0" -postcss-modules-resolve-imports@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/postcss-modules-resolve-imports/-/postcss-modules-resolve-imports-1.3.0.tgz#398d3000b95ae969420cdf4cd83fa8067f1c5eae" - integrity sha1-OY0wALla6WlCDN9M2D+oBn8cXq4= - dependencies: - css-selector-tokenizer "^0.7.0" - icss-utils "^3.0.1" - minimist "^1.2.0" - -postcss-modules-scope@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-1.1.0.tgz#d6ea64994c79f97b62a72b426fbe6056a194bb90" - integrity sha1-1upkmUx5+XtipytCb75gVqGUu5A= - dependencies: - css-selector-tokenizer "^0.7.0" - postcss "^6.0.1" - -postcss-modules-scope@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.2.0.tgz#385cae013cc7743f5a7d7602d1073a89eaae62ee" - integrity sha512-YyEgsTMRpNd+HmyC7H/mh3y+MeFWevy7V1evVhJWewmMbjDHIbZbOXICC2y+m1xI1UVfIT1HMW/O04Hxyu9oXQ== - dependencies: - postcss "^7.0.6" - postcss-selector-parser "^6.0.0" - -postcss-modules-values@^1.1.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-1.3.0.tgz#ecffa9d7e192518389f42ad0e83f72aec456ea20" - integrity sha1-7P+p1+GSUYOJ9CrQ6D9yrsRW6iA= +postcss-modules-scope@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" + integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== dependencies: - 
icss-replace-symbols "^1.1.0" - postcss "^6.0.1" + postcss-selector-parser "^6.0.4" -postcss-modules-values@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-3.0.0.tgz#5b5000d6ebae29b4255301b4a3a54574423e7f10" - integrity sha512-1//E5jCBrZ9DmRX+zCtmQtRSV6PV42Ix7Bzj9GbwJceduuf7IqP8MgeTXuRDHOWj2m0VzZD5+roFWDuU8RQjcg== +postcss-modules-values@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" + integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== dependencies: - icss-utils "^4.0.0" - postcss "^7.0.6" + icss-utils "^5.0.0" -postcss-normalize-charset@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-5.0.1.tgz#121559d1bebc55ac8d24af37f67bd4da9efd91d0" - integrity sha512-6J40l6LNYnBdPSk+BHZ8SF+HAkS4q2twe5jnocgd+xWpz/mx/5Sa32m3W1AA8uE8XaXN+eg8trIlfu8V9x61eg== +postcss-normalize-charset@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed" + integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== -postcss-normalize-display-values@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-5.0.1.tgz#62650b965981a955dffee83363453db82f6ad1fd" - integrity sha512-uupdvWk88kLDXi5HEyI9IaAJTE3/Djbcrqq8YgjvAVuzgVuqIk3SuJWUisT2gaJbZm1H9g5k2w1xXilM3x8DjQ== +postcss-normalize-display-values@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8" + integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== dependencies: - cssnano-utils "^2.0.1" - postcss-value-parser "^4.1.0" + postcss-value-parser "^4.2.0" -postcss-normalize-positions@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-5.0.1.tgz#868f6af1795fdfa86fbbe960dceb47e5f9492fe5" - integrity sha512-rvzWAJai5xej9yWqlCb1OWLd9JjW2Ex2BCPzUJrbaXmtKtgfL8dBMOOMTX6TnvQMtjk3ei1Lswcs78qKO1Skrg== +postcss-normalize-positions@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-5.1.0.tgz#902a7cb97cf0b9e8b1b654d4a43d451e48966458" + integrity sha512-8gmItgA4H5xiUxgN/3TVvXRoJxkAWLW6f/KKhdsH03atg0cB8ilXnrB5PpSshwVu/dD2ZsRFQcR1OEmSBDAgcQ== dependencies: - postcss-value-parser "^4.1.0" + postcss-value-parser "^4.2.0" -postcss-normalize-repeat-style@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.0.1.tgz#cbc0de1383b57f5bb61ddd6a84653b5e8665b2b5" - integrity sha512-syZ2itq0HTQjj4QtXZOeefomckiV5TaUO6ReIEabCh3wgDs4Mr01pkif0MeVwKyU/LHEkPJnpwFKRxqWA/7O3w== +postcss-normalize-repeat-style@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.0.tgz#f6d6fd5a54f51a741cc84a37f7459e60ef7a6398" + integrity sha512-IR3uBjc+7mcWGL6CtniKNQ4Rr5fTxwkaDHwMBDGGs1x9IVRkYIT/M4NelZWkAOBdV6v3Z9S46zqaKGlyzHSchw== dependencies: - cssnano-utils "^2.0.1" - postcss-value-parser 
"^4.1.0" + postcss-value-parser "^4.2.0" -postcss-normalize-string@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-5.0.1.tgz#d9eafaa4df78c7a3b973ae346ef0e47c554985b0" - integrity sha512-Ic8GaQ3jPMVl1OEn2U//2pm93AXUcF3wz+OriskdZ1AOuYV25OdgS7w9Xu2LO5cGyhHCgn8dMXh9bO7vi3i9pA== +postcss-normalize-string@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228" + integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== dependencies: - postcss-value-parser "^4.1.0" + postcss-value-parser "^4.2.0" -postcss-normalize-timing-functions@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.0.1.tgz#8ee41103b9130429c6cbba736932b75c5e2cb08c" - integrity sha512-cPcBdVN5OsWCNEo5hiXfLUnXfTGtSFiBU9SK8k7ii8UD7OLuznzgNRYkLZow11BkQiiqMcgPyh4ZqXEEUrtQ1Q== +postcss-normalize-timing-functions@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb" + integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== dependencies: - cssnano-utils "^2.0.1" - postcss-value-parser "^4.1.0" + postcss-value-parser "^4.2.0" -postcss-normalize-unicode@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-5.0.1.tgz#82d672d648a411814aa5bf3ae565379ccd9f5e37" - integrity sha512-kAtYD6V3pK0beqrU90gpCQB7g6AOfP/2KIPCVBKJM2EheVsBQmx/Iof+9zR9NFKLAx4Pr9mDhogB27pmn354nA== +postcss-normalize-unicode@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz#3d23aede35e160089a285e27bf715de11dc9db75" + integrity sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ== dependencies: - browserslist "^4.16.0" - postcss-value-parser "^4.1.0" + browserslist "^4.16.6" + postcss-value-parser "^4.2.0" -postcss-normalize-url@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-5.0.2.tgz#ddcdfb7cede1270740cf3e4dfc6008bd96abc763" - integrity sha512-k4jLTPUxREQ5bpajFQZpx8bCF2UrlqOTzP9kEqcEnOfwsRshWs2+oAFIHfDQB8GO2PaUaSE0NlTAYtbluZTlHQ== +postcss-normalize-url@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc" + integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== dependencies: - is-absolute-url "^3.0.3" normalize-url "^6.0.1" - postcss-value-parser "^4.1.0" + postcss-value-parser "^4.2.0" -postcss-normalize-whitespace@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.0.1.tgz#b0b40b5bcac83585ff07ead2daf2dcfbeeef8e9a" - integrity sha512-iPklmI5SBnRvwceb/XH568yyzK0qRVuAG+a1HFUsFRf11lEJTiQQa03a4RSCQvLKdcpX7XsI1Gen9LuLoqwiqA== +postcss-normalize-whitespace@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa" + integrity 
sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== dependencies: - postcss-value-parser "^4.1.0" + postcss-value-parser "^4.2.0" -postcss-ordered-values@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-5.0.2.tgz#1f351426977be00e0f765b3164ad753dac8ed044" - integrity sha512-8AFYDSOYWebJYLyJi3fyjl6CqMEG/UVworjiyK1r573I56kb3e879sCJLGvR3merj+fAdPpVplXKQZv+ey6CgQ== +postcss-ordered-values@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-5.1.2.tgz#daffacd4abf327d52d5ac570b59dfbcf4b836614" + integrity sha512-wr2avRbW4HS2XE2ZCqpfp4N/tDC6GZKZ+SVP8UBTOVS8QWrc4TD8MYrebJrvVVlGPKszmiSCzue43NDiVtgDmg== dependencies: - cssnano-utils "^2.0.1" - postcss-value-parser "^4.1.0" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" -postcss-reduce-initial@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-5.0.1.tgz#9d6369865b0f6f6f6b165a0ef5dc1a4856c7e946" - integrity sha512-zlCZPKLLTMAqA3ZWH57HlbCjkD55LX9dsRyxlls+wfuRfqCi5mSlZVan0heX5cHr154Dq9AfbH70LyhrSAezJw== +postcss-reduce-initial@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz#fc31659ea6e85c492fb2a7b545370c215822c5d6" + integrity sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw== dependencies: - browserslist "^4.16.0" + browserslist "^4.16.6" caniuse-api "^3.0.0" -postcss-reduce-transforms@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-5.0.1.tgz#93c12f6a159474aa711d5269923e2383cedcf640" - integrity sha512-a//FjoPeFkRuAguPscTVmRQUODP+f3ke2HqFNgGPwdYnpeC29RZdCBvGRGTsKpMURb/I3p6jdKoBQ2zI+9Q7kA== +postcss-reduce-transforms@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9" + integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== dependencies: - cssnano-utils "^2.0.1" - postcss-value-parser "^4.1.0" + postcss-value-parser "^4.2.0" postcss-resolve-nested-selector@^0.1.1: version "0.1.1" @@ -9946,7 +8259,7 @@ postcss-scss@^2.1.1: dependencies: postcss "^7.0.6" -postcss-selector-parser@^6.0.0, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5: +postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5: version "6.0.6" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.6.tgz#2c5bba8174ac2f6981ab631a42ab0ee54af332ea" integrity sha512-9LXrvaaX3+mcv5xkg5kFwqSzSH1JIObIx51PrndZwlmznwXRfxMddDvo9gve3gVR8ZTKgoFDdWkbRFmEhT4PMg== @@ -9954,34 +8267,53 @@ postcss-selector-parser@^6.0.0, postcss-selector-parser@^6.0.2, postcss-selector cssesc "^3.0.0" util-deprecate "^1.0.2" -postcss-svgo@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-5.0.2.tgz#bc73c4ea4c5a80fbd4b45e29042c34ceffb9257f" - integrity sha512-YzQuFLZu3U3aheizD+B1joQ94vzPfE6BNUcSYuceNxlVnKKsOtdo6hL9/zyC168Q8EwfLSgaDSalsUGa9f2C0A== +postcss-selector-parser@^6.0.9: + version "6.0.10" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" + integrity 
sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== dependencies: - postcss-value-parser "^4.1.0" - svgo "^2.3.0" + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-svgo@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d" + integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== + dependencies: + postcss-value-parser "^4.2.0" + svgo "^2.7.0" postcss-syntax@^0.36.2: version "0.36.2" resolved "https://registry.yarnpkg.com/postcss-syntax/-/postcss-syntax-0.36.2.tgz#f08578c7d95834574e5593a82dfbfa8afae3b51c" integrity sha512-nBRg/i7E3SOHWxF3PpF5WnJM/jQ1YpY9000OaVXlAQj6Zp/kIqJxEDWIZ67tAd7NLuk7zqN4yqe9nc0oNAOs1w== -postcss-unique-selectors@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-5.0.1.tgz#3be5c1d7363352eff838bd62b0b07a0abad43bfc" - integrity sha512-gwi1NhHV4FMmPn+qwBNuot1sG1t2OmacLQ/AX29lzyggnjd+MnVD5uqQmpXO3J17KGL2WAxQruj1qTd3H0gG/w== +postcss-unique-selectors@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6" + integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== dependencies: - alphanum-sort "^1.0.2" postcss-selector-parser "^6.0.5" - uniqs "^2.0.0" -postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0: +postcss-value-parser@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz#443f6a20ced6481a2bda4fa8532a6e55d789a2cb" integrity sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ== -postcss@>=8.2.10, postcss@^6.0.1, postcss@^6.0.2, postcss@^7.0.14, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.26, postcss@^7.0.32, postcss@^7.0.35, postcss@^7.0.5, postcss@^7.0.6, postcss@^8.2.1: +postcss-value-parser@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== + +postcss@^7.0.14, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.26, postcss@^7.0.32, postcss@^7.0.35, postcss@^7.0.6: + version "7.0.39" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" + integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== + dependencies: + picocolors "^0.2.1" + source-map "^0.6.1" + +postcss@^8.2.15: version "8.3.4" resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.3.4.tgz#41ece1c43f2f7c74dc7d90144047ce052757b822" integrity sha512-/tZY0PXExXXnNhKv3TOvZAOUYRyuqcCbBm2c17YMDK0PlVII3K7/LKdt3ScHL+hhouddjUWi+1sKDf9xXW+8YA== @@ -9990,6 +8322,15 @@ postcss@>=8.2.10, postcss@^6.0.1, postcss@^6.0.2, postcss@^7.0.14, postcss@^7.0. 
nanoid "^3.1.23" source-map-js "^0.6.2" +postcss@^8.4.13: + version "8.4.14" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.14.tgz#ee9274d5622b4858c1007a74d76e42e56fd21caf" + integrity sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig== + dependencies: + nanoid "^3.3.4" + picocolors "^1.0.0" + source-map-js "^1.0.2" + prelude-ls@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" @@ -10019,30 +8360,10 @@ pretty-format@^27.5.1: ansi-styles "^5.0.0" react-is "^17.0.1" -prismjs@^1.24.1: - version "1.27.0" - resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.27.0.tgz#bb6ee3138a0b438a3653dd4d6ce0cc6510a45057" - integrity sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA== - -private@^0.1.8: - version "0.1.8" - resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" - integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== - -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - -process@^0.11.10: - version "0.11.10" - resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" - integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= - -progress@^2.0.0: - version "2.0.3" - resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" - integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== +prismjs@^1.27.0: + version "1.28.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.28.0.tgz#0d8f561fa0f7cf6ebca901747828b149147044b6" + integrity sha512-8aaXdYvl1F7iC7Xm1spqSaY/OJBpYW3v+KJ+F17iYxvdc8sfjW194COK5wVhMZX45tGteiBQgdvD/nhxcRwylw== promise-inflight@^1.0.1: version "1.0.1" @@ -10066,7 +8387,7 @@ prop-types@^15.5.0: object-assign "^4.1.1" react-is "^16.8.1" -prop-types@^15.6.2, prop-types@^15.7.2: +prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: version "15.8.1" resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== @@ -10080,78 +8401,16 @@ propagate@^2.0.0: resolved "https://registry.yarnpkg.com/propagate/-/propagate-2.0.1.tgz#40cdedab18085c792334e64f0ac17256d38f9a45" integrity sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag== -prr@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" - integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= - psl@^1.1.33: version "1.8.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== -public-encrypt@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" - integrity 
sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== - dependencies: - bn.js "^4.1.0" - browserify-rsa "^4.0.0" - create-hash "^1.1.0" - parse-asn1 "^5.0.0" - randombytes "^2.0.1" - safe-buffer "^5.1.2" - -pump@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" - integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -pumpify@^1.3.3: - version "1.5.1" - resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" - integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== - dependencies: - duplexify "^3.6.0" - inherits "^2.0.3" - pump "^2.0.0" - -punycode@1.3.2: - version "1.3.2" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" - integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= - -punycode@^1.2.4: - version "1.4.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" - integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= - punycode@^2.1.0, punycode@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== -querystring-es3@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" - integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= - -querystring@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" - integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= - queue-microtask@^1.2.2: version "1.2.3" resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" @@ -10167,21 +8426,13 @@ quick-lru@^5.1.1: resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== -randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.1.0: +randombytes@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" -randomfill@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" - integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== - dependencies: - randombytes "^2.0.5" - safe-buffer "^5.1.0" - react-clientside-effect@^1.2.6: version "1.2.6" resolved 
"https://registry.yarnpkg.com/react-clientside-effect/-/react-clientside-effect-1.2.6.tgz#29f9b14e944a376b03fb650eed2a754dd128ea3a" @@ -10236,10 +8487,10 @@ react-is@^17.0.1: resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== -react-query@^3.34.16: - version "3.34.16" - resolved "https://registry.yarnpkg.com/react-query/-/react-query-3.34.16.tgz#279ea180bcaeaec49c7864b29d1711ee9f152594" - integrity sha512-7FvBvjgEM4YQ8nPfmAr+lJfbW95uyW/TVjFoi2GwCkF33/S8ajx45tuPHPFGWs4qYwPy1mzwxD4IQfpUDrefNQ== +react-query@^3.39.1: + version "3.39.1" + resolved "https://registry.yarnpkg.com/react-query/-/react-query-3.39.1.tgz#3876c0fdac7a3b5a84e195534e5fa8fbdd628847" + integrity sha512-qYKT1bavdDiQZbngWZyPotlBVzcBjDYEJg5RQLBa++5Ix5jjfbEYJmHSZRZD+USVHUSvl/ey9Hu+QfF1QAK80A== dependencies: "@babel/runtime" "^7.5.5" broadcast-channel "^3.4.1" @@ -10288,10 +8539,10 @@ react-style-singleton@^2.2.1: invariant "^2.2.4" tslib "^2.0.0" -react-table@^7.7.0: - version "7.7.0" - resolved "https://registry.yarnpkg.com/react-table/-/react-table-7.7.0.tgz#e2ce14d7fe3a559f7444e9ecfe8231ea8373f912" - integrity sha512-jBlj70iBwOTvvImsU9t01LjFjy4sXEtclBovl3mTiqjz23Reu0DKnRza4zlLtOPACx6j2/7MrQIthIK1Wi+LIA== +react-table@^7.8.0: + version "7.8.0" + resolved "https://registry.yarnpkg.com/react-table/-/react-table-7.8.0.tgz#07858c01c1718c09f7f1aed7034fcfd7bda907d2" + integrity sha512-hNaz4ygkZO4bESeFfnfOft73iBUj8K5oKi1EcSHPAibEydfsX2MyU6Z8KCr3mv3C9Kqqh71U+DhZkFvibbnPbA== react-tabs@^3.2.2: version "3.2.2" @@ -10308,14 +8559,6 @@ react@^18.0.0: dependencies: loose-envify "^1.1.0" -read-pkg-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-3.0.0.tgz#3ed496685dba0f8fe118d0691dc51f4a1ff96f07" - integrity sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc= - dependencies: - find-up "^2.0.0" - read-pkg "^3.0.0" - read-pkg-up@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" @@ -10325,15 +8568,6 @@ read-pkg-up@^7.0.1: read-pkg "^5.2.0" type-fest "^0.8.1" -read-pkg@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-3.0.0.tgz#9cbc686978fee65d16c00e2b19c237fcf6e38389" - integrity sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k= - dependencies: - load-json-file "^4.0.0" - normalize-package-data "^2.3.2" - path-type "^3.0.0" - read-pkg@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" @@ -10344,19 +8578,6 @@ read-pkg@^5.2.0: parse-json "^5.0.0" type-fest "^0.6.0" -"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: - version "2.3.7" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - readable-stream@1.1: version "1.1.13" resolved 
"https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.13.tgz#f6eef764f514c89e2b9e23146a75ba106756d23e" @@ -10367,7 +8588,7 @@ readable-stream@1.1: isarray "0.0.1" string_decoder "~0.10.x" -readable-stream@^3.1.1, readable-stream@^3.6.0: +readable-stream@^3.1.1: version "3.6.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== @@ -10376,21 +8597,12 @@ readable-stream@^3.1.1, readable-stream@^3.6.0: string_decoder "^1.1.1" util-deprecate "^1.0.1" -readdirp@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" - integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== - dependencies: - graceful-fs "^4.1.11" - micromatch "^3.1.10" - readable-stream "^2.0.2" - -readdirp@~3.5.0: - version "3.5.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.5.0.tgz#9ba74c019b15d365278d2e91bb8c48d7b4d42c9e" - integrity sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ== +rechoir@^0.7.0: + version "0.7.1" + resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.7.1.tgz#9478a96a1ca135b5e88fc027f03ee92d6c645686" + integrity sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg== dependencies: - picomatch "^2.2.1" + resolve "^1.9.0" redent@^3.0.0: version "3.0.0" @@ -10400,27 +8612,26 @@ redent@^3.0.0: indent-string "^4.0.0" strip-indent "^3.0.0" -redoc@^2.0.0-rc.63: - version "2.0.0-rc.63" - resolved "https://registry.yarnpkg.com/redoc/-/redoc-2.0.0-rc.63.tgz#d1bea51d3fdf2a6f5371a9bf7f06253ac5c1a487" - integrity sha512-PsoPqRyNqHi7+jKUyFBwJhHrzjMl4N5vieTeBloRGbhWuY3PPH2DJ3ihgrLfdEV0glzq/LMTaqfarm8WLqCc4Q== +redoc@^2.0.0-rc.72: + version "2.0.0-rc.72" + resolved "https://registry.yarnpkg.com/redoc/-/redoc-2.0.0-rc.72.tgz#9eee22104d652b4a90e19ca50009b0b623a7b5b3" + integrity sha512-IX/WvVh4N3zwo4sAjnQFz6ffIUd6G47hcflxPtrpxblJaeOy0MBSzzY8f179WjssWPYcSmmndP5v0hgEXFiimg== dependencies: - "@redocly/openapi-core" "^1.0.0-beta.54" - "@redocly/react-dropdown-aria" "^2.0.11" + "@redocly/openapi-core" "^1.0.0-beta.97" classnames "^2.3.1" decko "^1.2.0" dompurify "^2.2.8" eventemitter3 "^4.0.7" - json-pointer "^0.6.1" + json-pointer "^0.6.2" lunr "^2.3.9" mark.js "^8.11.1" - marked "^4.0.10" + marked "^4.0.15" mobx-react "^7.2.0" - openapi-sampler "^1.1.1" + openapi-sampler "^1.3.0" path-browserify "^1.0.1" perfect-scrollbar "^1.5.1" polished "^4.1.3" - prismjs "^1.24.1" + prismjs "^1.27.0" prop-types "^15.7.2" react-tabs "^3.2.2" slugify "~1.4.7" @@ -10446,16 +8657,6 @@ regenerate@^1.4.2: resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== -regenerator-runtime@^0.10.5: - version "0.10.5" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.10.5.tgz#336c3efc1220adcedda2c9fab67b5a7955a33658" - integrity sha1-M2w+/BIgrc7dosn6tntaeVWjNlg= - -regenerator-runtime@^0.11.0: - version "0.11.1" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" - integrity 
sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== - regenerator-runtime@^0.13.4: version "0.13.9" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" @@ -10468,23 +8669,7 @@ regenerator-transform@^0.14.2: dependencies: "@babel/runtime" "^7.8.4" -regex-not@^1.0.0, regex-not@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" - integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== - dependencies: - extend-shallow "^3.0.2" - safe-regex "^1.1.0" - -regexp.prototype.flags@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.3.1.tgz#7ef352ae8d159e758c0eadca6f8fcb4eef07be26" - integrity sha512-JiBdRBq91WlY7uRJ0ds7R+dU02i6LKi8r3BuQhNXn+kmeLN+EfHhfjqMRis1zJxnlu88hq/4dx0P2OP3APRTOA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - -regexp.prototype.flags@^1.4.3: +regexp.prototype.flags@^1.4.1, regexp.prototype.flags@^1.4.3: version "1.4.3" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== @@ -10493,7 +8678,7 @@ regexp.prototype.flags@^1.4.3: define-properties "^1.1.3" functions-have-names "^1.2.2" -regexpp@^3.0.0, regexpp@^3.1.0, regexpp@^3.2.0: +regexpp@^3.0.0, regexpp@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== @@ -10550,27 +8735,10 @@ remove-accents@0.4.2: resolved "https://registry.yarnpkg.com/remove-accents/-/remove-accents-0.4.2.tgz#0a43d3aaae1e80db919e07ae254b285d9e1c7bb5" integrity sha1-CkPTqq4egNuRngeuJUsoXZ4ce7U= -remove-trailing-separator@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" - integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= - -repeat-element@^1.1.2: - version "1.1.4" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9" - integrity sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ== - -repeat-string@^1.0.0, repeat-string@^1.6.1: +repeat-string@^1.0.0: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" - integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= - -repeating@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" - integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= - dependencies: - is-finite "^1.0.0" + integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w== require-directory@^2.1.1: version "2.1.1" @@ -10579,20 +8747,8 @@ require-directory@^2.1.1: require-from-string@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" - integrity 
sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== - -require-main-filename@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" - integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== - -resolve-cwd@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" - integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= - dependencies: - resolve-from "^3.0.0" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== resolve-cwd@^3.0.0: version "3.0.0" @@ -10601,19 +8757,6 @@ resolve-cwd@^3.0.0: dependencies: resolve-from "^5.0.0" -resolve-dir@^1.0.0, resolve-dir@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" - integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= - dependencies: - expand-tilde "^2.0.0" - global-modules "^1.0.0" - -resolve-from@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" - integrity sha1-six699nWiBvItuZTM17rywoYh0g= - resolve-from@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" @@ -10624,11 +8767,6 @@ resolve-from@^5.0.0: resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== -resolve-url@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" - integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= - resolve.exports@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" @@ -10642,6 +8780,15 @@ resolve@^1.10.0, resolve@^1.10.1, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.2 is-core-module "^2.2.0" path-parse "^1.0.6" +resolve@^1.22.0, resolve@^1.9.0: + version "1.22.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.0.tgz#5e0b8c67c15df57a89bdbabe603a002f21731198" + integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw== + dependencies: + is-core-module "^2.8.1" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + resolve@^2.0.0-next.3: version "2.0.0-next.3" resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.3.tgz#d41016293d4a8586a39ca5d9b5f15cbea1f55e46" @@ -10650,26 +8797,11 @@ resolve@^2.0.0-next.3: is-core-module "^2.2.0" path-parse "^1.0.6" -ret@~0.1.10: - version "0.1.15" - resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" - integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== - reusify@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity 
sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== -rgb-regex@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/rgb-regex/-/rgb-regex-1.0.1.tgz#c0e0d6882df0e23be254a475e8edd41915feaeb1" - integrity sha1-wODWiC3w4jviVKR16O3UGRX+rrE= - -rgba-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/rgba-regex/-/rgba-regex-1.0.0.tgz#43374e2e2ca0968b0ef1523460b7d730ff22eeb3" - integrity sha1-QzdOLiyglosO8VI0YLfXMP8i7rM= - rimraf@3.0.2, rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" @@ -10677,21 +8809,13 @@ rimraf@3.0.2, rimraf@^3.0.0, rimraf@^3.0.2: dependencies: glob "^7.1.3" -rimraf@^2.5.4, rimraf@^2.6.3: +rimraf@^2.6.3: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" -ripemd160@^2.0.0, ripemd160@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" - integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - run-parallel@^1.1.9: version "1.2.0" resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" @@ -10699,36 +8823,22 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -run-queue@^1.0.0, run-queue@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" - integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= - dependencies: - aproba "^1.1.1" - rw@1: version "1.3.3" resolved "https://registry.yarnpkg.com/rw/-/rw-1.3.3.tgz#3f862dfa91ab766b14885ef4d01124bfda074fb4" integrity sha1-P4Yt+pGrdmsUiF700BEkv9oHT7Q= -safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0: +safe-buffer@^5.1.0, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== -safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" - integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= - dependencies: - ret "~0.1.10" - -"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.1.0: +"safer-buffer@>= 2.1.2 < 3": version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== @@ -10752,15 +8862,6 @@ scheduler@^0.22.0: dependencies: loose-envify "^1.1.0" -schema-utils@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" - integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== - dependencies: - ajv "^6.1.0" - ajv-errors "^1.0.0" - ajv-keywords "^3.1.0" - schema-utils@^2.6.5, schema-utils@^2.7.0: version "2.7.1" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" @@ -10779,12 +8880,26 @@ schema-utils@^3.0.0: ajv "^6.12.5" ajv-keywords "^3.5.2" -seekout@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/seekout/-/seekout-1.0.2.tgz#09ba9f1bd5b46fbb134718eb19a68382cbb1b9c9" - integrity sha1-CbqfG9W0b7sTRxjrGaaDgsuxuck= +schema-utils@^3.1.0, schema-utils@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" + integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" + integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.8.0" + ajv-formats "^2.1.1" + ajv-keywords "^5.0.0" -"semver@2 || 3 || 4 || 5", semver@^5.5.0, semver@^5.6.0: +"semver@2 || 3 || 4 || 5": version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== @@ -10799,13 +8914,6 @@ semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== -semver@^7.2.1, semver@^7.3.7: - version "7.3.7" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" - integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== - dependencies: - lru-cache "^6.0.0" - semver@^7.3.2, semver@^7.3.4: version "7.3.5" resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" @@ -10813,12 +8921,12 @@ semver@^7.3.2, semver@^7.3.4: dependencies: lru-cache "^6.0.0" -serialize-javascript@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" - integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== +semver@^7.3.5, semver@^7.3.7: + version "7.3.7" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== dependencies: - randombytes "^2.1.0" + lru-cache "^6.0.0" serialize-javascript@^5.0.1: version "5.0.1" @@ -10827,40 +8935,19 @@ serialize-javascript@^5.0.1: dependencies: randombytes "^2.1.0" -set-blocking@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" - integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= - -set-value@^2.0.0, set-value@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" - integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== - dependencies: - extend-shallow "^2.0.1" - is-extendable "^0.1.1" - is-plain-object "^2.0.3" - split-string "^3.0.1" - -setimmediate@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" - integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= - -sha.js@^2.4.0, sha.js@^2.4.8: - version "2.4.11" - resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" - integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== +serialize-javascript@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" + integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" + randombytes "^2.1.0" -shebang-command@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" - integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= +shallow-clone@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" + integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== dependencies: - shebang-regex "^1.0.0" + kind-of "^6.0.2" shebang-command@^2.0.0: version "2.0.0" @@ -10869,11 +8956,6 @@ shebang-command@^2.0.0: dependencies: shebang-regex "^3.0.0" -shebang-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" - integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= - shebang-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" @@ -10947,11 +9029,6 @@ sisteransi@^1.0.5: resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== -slash@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" - integrity sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU= - slash@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" @@ -10971,37 +9048,7 @@ slugify@~1.4.7: resolved "https://registry.yarnpkg.com/slugify/-/slugify-1.4.7.tgz#e42359d505afd84a44513280868e31202a79a628" integrity sha512-tf+h5W1IrjNm/9rKKj0JU2MDMruiopx0jjVA5zCdBtcGjfp0+c5rHw/zADLC3IeKlGHtVbHtpfzvYA0OYT+HKg== -snapdragon-node@^2.0.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" - integrity 
sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== - dependencies: - define-property "^1.0.0" - isobject "^3.0.0" - snapdragon-util "^3.0.1" - -snapdragon-util@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" - integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== - dependencies: - kind-of "^3.2.0" - -snapdragon@^0.8.1: - version "0.8.2" - resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" - integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== - dependencies: - base "^0.11.1" - debug "^2.2.0" - define-property "^0.2.5" - extend-shallow "^2.0.1" - map-cache "^0.2.2" - source-map "^0.5.6" - source-map-resolve "^0.5.0" - use "^3.1.0" - -source-list-map@^2.0.0: +source-list-map@^2.0.0, source-list-map@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== @@ -11011,16 +9058,10 @@ source-map-js@^0.6.2: resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-0.6.2.tgz#0bb5de631b41cfbda6cfba8bd05a80efdfd2385e" integrity sha512-/3GptzWzu0+0MBQFrDKzw/DvvMTUORvgY6k6jd/VS6iCR4RDTKWH6v6WPwQoUO8667uQEf9Oe38DxAYWY5F/Ug== -source-map-resolve@^0.5.0: - version "0.5.3" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" - integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== - dependencies: - atob "^2.1.2" - decode-uri-component "^0.2.0" - resolve-url "^0.2.1" - source-map-url "^0.4.0" - urix "^0.1.0" +source-map-js@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== source-map-resolve@^0.6.0: version "0.6.0" @@ -11030,13 +9071,6 @@ source-map-resolve@^0.6.0: atob "^2.1.2" decode-uri-component "^0.2.0" -source-map-support@^0.4.15: - version "0.4.18" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.18.tgz#0286a6de8be42641338594e97ccea75f0a2c585f" - integrity sha512-try0/JqxPLF9nOjvSta7tVondkP5dwgyLDjVoyMDlmjugT2lRZ1OfsrYTkCd2hkDnJTKRbO/Rl3orm8vlsUzbA== - dependencies: - source-map "^0.5.6" - source-map-support@^0.5.6, source-map-support@~0.5.20: version "0.5.20" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.20.tgz#12166089f8f5e5e8c56926b377633392dd2cb6c9" @@ -11045,20 +9079,7 @@ source-map-support@^0.5.6, source-map-support@~0.5.20: buffer-from "^1.0.0" source-map "^0.6.0" -source-map-support@~0.5.12: - version "0.5.19" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61" - integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map-url@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56" 
- integrity sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw== - -source-map@^0.5.0, source-map@^0.5.6, source-map@^0.5.7: +source-map@^0.5.0, source-map@^0.5.7: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= @@ -11111,19 +9132,12 @@ specificity@^0.4.1: resolved "https://registry.yarnpkg.com/specificity/-/specificity-0.4.1.tgz#aab5e645012db08ba182e151165738d00887b019" integrity sha512-1klA3Gi5PD1Wv9Q0wUoOQN1IWAuPu0D1U03ThXTr0cJ20+/iq2tHSDnK7Kk/0LXJ1ztUB2/1Os0wKmfyNgUQfg== -split-string@^3.0.1, split-string@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" - integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== - dependencies: - extend-shallow "^3.0.0" - sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= -"ssri@>= 8.0.1", ssri@^6.0.1, ssri@^8.0.1: +ssri@^8.0.1: version "8.0.1" resolved "https://registry.yarnpkg.com/ssri/-/ssri-8.0.1.tgz#638e4e439e2ffbd2cd289776d5ca457c4f51a2af" integrity sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ== @@ -11142,51 +9156,11 @@ stack-utils@^2.0.3: dependencies: escape-string-regexp "^2.0.0" -static-extend@^0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" - integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= - dependencies: - define-property "^0.2.5" - object-copy "^0.1.0" - stickyfill@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/stickyfill/-/stickyfill-1.1.1.tgz#39413fee9d025c74a7e59ceecb23784cc0f17f02" integrity sha1-OUE/7p0CXHSn5ZzuyyN4TMDxfwI= -stream-browserify@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" - integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== - dependencies: - inherits "~2.0.1" - readable-stream "^2.0.2" - -stream-each@^1.1.0: - version "1.2.3" - resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" - integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== - dependencies: - end-of-stream "^1.1.0" - stream-shift "^1.0.0" - -stream-http@^2.7.2: - version "2.8.3" - resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" - integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== - dependencies: - builtin-status-codes "^3.0.0" - inherits "^2.0.1" - readable-stream "^2.3.6" - to-arraybuffer "^1.0.0" - xtend "^4.0.0" - -stream-shift@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d" - integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== - string-length@^4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" @@ -11195,16 
+9169,7 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" -string-width@^3.0.0, string-width@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" - integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== - dependencies: - emoji-regex "^7.0.1" - is-fullwidth-code-point "^2.0.0" - strip-ansi "^5.1.0" - -string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2: +string-width@^4.1.0, string-width@^4.2.0: version "4.2.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" integrity sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA== @@ -11213,7 +9178,7 @@ string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.0" -string-width@^4.2.3: +string-width@^4.2.2, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -11222,18 +9187,18 @@ string-width@^4.2.3: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string.prototype.matchall@^4.0.5: - version "4.0.5" - resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.5.tgz#59370644e1db7e4c0c045277690cf7b01203c4da" - integrity sha512-Z5ZaXO0svs0M2xd/6By3qpeKpLKd9mO4v4q3oMEQrk8Ck4xOD5d5XeBOOjGrmVZZ/AHB1S0CgG4N5r1G9N3E2Q== +string.prototype.matchall@^4.0.7: + version "4.0.7" + resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d" + integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg== dependencies: call-bind "^1.0.2" define-properties "^1.1.3" - es-abstract "^1.18.2" + es-abstract "^1.19.1" get-intrinsic "^1.1.1" - has-symbols "^1.0.2" + has-symbols "^1.0.3" internal-slot "^1.0.3" - regexp.prototype.flags "^1.3.1" + regexp.prototype.flags "^1.4.1" side-channel "^1.0.4" string.prototype.trimend@^1.0.4: @@ -11270,7 +9235,7 @@ string.prototype.trimstart@^1.0.5: define-properties "^1.1.4" es-abstract "^1.19.5" -string_decoder@^1.0.0, string_decoder@^1.1.1: +string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== @@ -11282,27 +9247,6 @@ string_decoder@~0.10.x: resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -strip-ansi@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" - integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= - dependencies: - ansi-regex "^2.0.0" - -strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: - 
version "5.2.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" - integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== - dependencies: - ansi-regex "^4.1.0" - strip-ansi@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" @@ -11380,12 +9324,12 @@ style-value-types@4.1.4: hey-listen "^1.0.8" tslib "^2.1.0" -stylehacks@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-5.0.1.tgz#323ec554198520986806388c7fdaebc38d2c06fb" - integrity sha512-Es0rVnHIqbWzveU1b24kbw92HsebBepxfcqe5iix7t9j0PQqhs0IxXVXv0pY2Bxa08CgMkzD6OWql7kbGOuEdA== +stylehacks@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-5.1.0.tgz#a40066490ca0caca04e96c6b02153ddc39913520" + integrity sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q== dependencies: - browserslist "^4.16.0" + browserslist "^4.16.6" postcss-selector-parser "^6.0.4" stylelint-config-recommended@^3.0.0: @@ -11471,11 +9415,6 @@ sugarss@^2.0.0: dependencies: postcss "^7.0.2" -supports-color@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" - integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= - supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" @@ -11483,13 +9422,6 @@ supports-color@^5.3.0: dependencies: has-flag "^3.0.0" -supports-color@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" - integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== - dependencies: - has-flag "^3.0.0" - supports-color@^7.0.0, supports-color@^7.1.0: version "7.2.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" @@ -11512,22 +9444,27 @@ supports-hyperlinks@^2.0.0: has-flag "^4.0.0" supports-color "^7.0.0" +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + svg-tags@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/svg-tags/-/svg-tags-1.0.0.tgz#58f71cee3bd519b59d4b2a843b6c7de64ac04764" integrity sha1-WPcc7jvVGbWdSyqEO2x95krAR2Q= -svgo@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/svgo/-/svgo-2.3.0.tgz#6b3af81d0cbd1e19c83f5f63cec2cb98c70b5373" - integrity sha512-fz4IKjNO6HDPgIQxu4IxwtubtbSfGEAJUq/IXyTPIkGhWck/faiiwfkvsB8LnBkKLvSoyNNIY6d13lZprJMc9Q== +svgo@^2.7.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" + integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== dependencies: - "@trysound/sax" "0.1.1" - chalk "^4.1.0" - commander "^7.1.0" - css-select "^3.1.2" - css-tree "^1.1.2" + "@trysound/sax" "0.2.0" + commander "^7.2.0" + css-select "^4.1.3" + css-tree "^1.1.3" csso "^4.2.0" + picocolors "^1.0.0" stable 
"^0.1.8" swagger2openapi@^7.0.6: @@ -11552,7 +9489,7 @@ symbol-tree@^3.2.4: resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== -table@^6.0.9: +table@^6.6.0: version "6.8.0" resolved "https://registry.yarnpkg.com/table/-/table-6.8.0.tgz#87e28f14fa4321c3377ba286f07b79b281a3b3ca" integrity sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA== @@ -11563,24 +9500,12 @@ table@^6.0.9: string-width "^4.2.3" strip-ansi "^6.0.1" -table@^6.6.0: - version "6.7.1" - resolved "https://registry.yarnpkg.com/table/-/table-6.7.1.tgz#ee05592b7143831a8c94f3cee6aae4c1ccef33e2" - integrity sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg== - dependencies: - ajv "^8.0.1" - lodash.clonedeep "^4.5.0" - lodash.truncate "^4.4.2" - slice-ansi "^4.0.0" - string-width "^4.2.0" - strip-ansi "^6.0.0" - -tapable@^1.0.0, tapable@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" - integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== +tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== -tar@>=6.1.9, tar@^6.0.2: +tar@^6.0.2: version "6.1.11" resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621" integrity sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA== @@ -11615,29 +9540,16 @@ terser-webpack-plugin@<5.0.0: terser "^5.3.4" webpack-sources "^1.4.3" -terser-webpack-plugin@^1.4.3: - version "1.4.5" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.5.tgz#a217aefaea330e734ffacb6120ec1fa312d6040b" - integrity sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw== - dependencies: - cacache "^12.0.2" - find-cache-dir "^2.1.0" - is-wsl "^1.1.0" - schema-utils "^1.0.0" - serialize-javascript "^4.0.0" - source-map "^0.6.1" - terser "^4.1.2" - webpack-sources "^1.4.0" - worker-farm "^1.7.0" - -terser@^4.1.2: - version "4.8.0" - resolved "https://registry.yarnpkg.com/terser/-/terser-4.8.0.tgz#63056343d7c70bb29f3af665865a46fe03a0df17" - integrity sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw== +terser-webpack-plugin@^5.1.3: + version "5.3.3" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.3.tgz#8033db876dd5875487213e87c627bca323e5ed90" + integrity sha512-Fx60G5HNYknNTNQnzQ1VePRuu89ZVYWfjRAeT5rITuCY/1b08s49e5kSQwHDirKZWuoKOBRFS98EUUoZ9kLEwQ== dependencies: - commander "^2.20.0" - source-map "~0.6.1" - source-map-support "~0.5.12" + "@jridgewell/trace-mapping" "^0.3.7" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.0" + terser "^5.7.2" terser@^5.3.4: version "5.10.0" @@ -11648,6 +9560,16 @@ terser@^5.3.4: source-map "~0.7.2" source-map-support "~0.5.20" +terser@^5.7.2: + version "5.14.1" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.1.tgz#7c95eec36436cb11cf1902cc79ac564741d19eca" + integrity 
sha512-+ahUAE+iheqBTDxXhTisdA8hgvbEG1hHOQ9xmNjeUJSoi6DU/gMrKNcfZjHkyY6Alnuyc+ikYJaxxfHkT3+WuQ== + dependencies: + "@jridgewell/source-map" "^0.3.2" + acorn "^8.5.0" + commander "^2.20.0" + source-map-support "~0.5.20" + test-exclude@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" @@ -11667,26 +9589,6 @@ throat@^6.0.1: resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== -through2@^2.0.0: - version "2.0.5" - resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" - integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== - dependencies: - readable-stream "~2.3.6" - xtend "~4.0.1" - -timers-browserify@^2.0.4: - version "2.0.12" - resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.12.tgz#44a45c11fbf407f34f97bccd1577c652361b00ee" - integrity sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ== - dependencies: - setimmediate "^1.0.4" - -timsort@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/timsort/-/timsort-0.3.0.tgz#405411a8e7e6339fe64db9a234de11dc31e02bd4" - integrity sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q= - tiny-invariant@^1.0.6: version "1.2.0" resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.2.0.tgz#a1141f86b672a9148c72e978a19a73b9b94a15a9" @@ -11697,36 +9599,11 @@ tmpl@1.0.5: resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== -to-arraybuffer@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" - integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= - -to-fast-properties@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" - integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= - to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= -to-object-path@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" - integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= - dependencies: - kind-of "^3.0.2" - -to-regex-range@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" - integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= - dependencies: - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" @@ -11734,16 +9611,6 @@ to-regex-range@^5.0.1: dependencies: is-number "^7.0.0" -to-regex@^3.0.1, to-regex@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" - integrity 
sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== - dependencies: - define-property "^2.0.2" - extend-shallow "^3.0.2" - regex-not "^1.0.2" - safe-regex "^1.1.0" - toggle-selection@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/toggle-selection/-/toggle-selection-1.0.6.tgz#6e45b1263f2017fa0acc7d89d78b15b8bf77da32" @@ -11765,29 +9632,29 @@ tr46@^2.1.0: dependencies: punycode "^2.1.1" -trim-newlines@>=3.0.1, trim-newlines@^3.0.0: - version "4.0.2" - resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-4.0.2.tgz#d6aaaf6a0df1b4b536d183879a6b939489808c7c" - integrity sha512-GJtWyq9InR/2HRiLZgpIKv+ufIKrVrvjQWEj7PxAXNc5dwbNJkqhAUoAGgzRmULAnoOM5EIpveYd3J2VeSAIew== +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== -trim-right@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" - integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= +trim-newlines@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.1.tgz#260a5d962d8b752425b32f3a7db0dcacd176c144" + integrity sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw== trough@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/trough/-/trough-1.0.5.tgz#b8b639cefad7d0bb2abd37d433ff8293efa5f406" integrity sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA== -tsconfig-paths@^3.11.0: - version "3.11.0" - resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.11.0.tgz#954c1fe973da6339c78e06b03ce2e48810b65f36" - integrity sha512-7ecdYDnIdmv639mmDwslG6KQg1Z9STTz1j7Gcz0xa+nshh/gKDAHcPxRbWOsA3SPp0tXP2leTcY9Kw+NAkfZzA== +tsconfig-paths@^3.14.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" + integrity sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== dependencies: "@types/json5" "^0.0.29" json5 "^1.0.1" - minimist "^1.2.0" + minimist "^1.2.6" strip-bom "^3.0.0" tslib@^1.0.0, tslib@^1.8.1: @@ -11812,11 +9679,6 @@ tsutils@^3.21.0: dependencies: tslib "^1.8.1" -tty-browserify@0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" - integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= - type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" @@ -11873,11 +9735,6 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typedarray@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" - integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= - typescript@^4.6.3: version "4.7.3" resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.7.3.tgz#8364b502d5257b540f9de4c40be84c98e23a129d" @@ -11927,9 +9784,9 @@ unicode-property-aliases-ecmascript@^2.0.0: integrity sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ== unified@^9.1.0: - version "9.2.1" - resolved 
"https://registry.yarnpkg.com/unified/-/unified-9.2.1.tgz#ae18d5674c114021bfdbdf73865ca60f410215a3" - integrity sha512-juWjuI8Z4xFg8pJbnEZ41b5xjGUWGHqXALmBZ3FC3WX0PIx1CZBIIJ6mXbYMcf6Yw4Fi0rFUTA1cdz/BglbOhA== + version "9.2.2" + resolved "https://registry.yarnpkg.com/unified/-/unified-9.2.2.tgz#67649a1abfc3ab85d2969502902775eb03146975" + integrity sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ== dependencies: bail "^1.0.0" extend "^3.0.0" @@ -11938,21 +9795,6 @@ unified@^9.1.0: trough "^1.0.0" vfile "^4.0.0" -union-value@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" - integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== - dependencies: - arr-union "^3.1.0" - get-value "^2.0.6" - is-extendable "^0.1.1" - set-value "^2.0.1" - -uniqs@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/uniqs/-/uniqs-2.0.0.tgz#ffede4b36b25290696e6e165d4a59edb998e6b02" - integrity sha1-/+3ks2slKQaW5uFl1KWe25mOawI= - unique-filename@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" @@ -11986,7 +9828,7 @@ unist-util-stringify-position@^2.0.0: dependencies: "@types/unist" "^2.0.2" -universalify@^0.1.0, universalify@^0.1.2: +universalify@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== @@ -11999,19 +9841,6 @@ unload@2.2.0: "@babel/runtime" "^7.6.2" detect-node "^2.0.4" -unset-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" - integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= - dependencies: - has-value "^0.3.1" - isobject "^3.0.0" - -upath@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" - integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== - uri-js@^4.2.2: version "4.4.1" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" @@ -12019,11 +9848,6 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" -urix@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" - integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= - url-loader@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/url-loader/-/url-loader-4.1.0.tgz#c7d6b0d6b0fccd51ab3ffc58a78d32b8d89a7be2" @@ -12043,14 +9867,6 @@ url-template@^2.0.8: resolved "https://registry.yarnpkg.com/url-template/-/url-template-2.0.8.tgz#fc565a3cccbff7730c775f5641f9555791439f21" integrity sha1-/FZaPMy/93MMd19WQflVV5FDnyE= -url@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" - integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= - dependencies: - punycode "1.3.2" - querystring "0.2.0" - use-callback-ref@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/use-callback-ref/-/use-callback-ref-1.3.0.tgz#772199899b9c9a50526fedc4993fc7fa1f7e32d5" @@ -12066,31 +9882,12 @@ use-sidecar@^1.1.2: detect-node-es "^1.1.0" tslib "^2.0.0" -use@^3.1.0: - 
version "3.1.1" - resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" - integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== - -util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: +util-deprecate@^1.0.1, util-deprecate@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= -util@0.10.3: - version "0.10.3" - resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" - integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= - dependencies: - inherits "2.0.1" - -util@^0.11.0: - version "0.11.1" - resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" - integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== - dependencies: - inherits "2.0.3" - -v8-compile-cache@^2.0.3, v8-compile-cache@^2.1.1, v8-compile-cache@^2.3.0: +v8-compile-cache@^2.0.3, v8-compile-cache@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== @@ -12112,11 +9909,6 @@ validate-npm-package-license@^3.0.1: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" -vendors@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/vendors/-/vendors-1.0.4.tgz#e2b800a53e7a29b93506c3cf41100d16c4c4ad8e" - integrity sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w== - vfile-message@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-2.0.4.tgz#5b43b88171d409eae58477d13f23dd41d52c371a" @@ -12135,11 +9927,6 @@ vfile@^4.0.0: unist-util-stringify-position "^2.0.0" vfile-message "^2.0.0" -vm-browserify@^1.0.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" - integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== - w3c-hr-time@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" @@ -12161,28 +9948,18 @@ walker@^1.0.7: dependencies: makeerror "1.0.12" -watchpack-chokidar2@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/watchpack-chokidar2/-/watchpack-chokidar2-2.0.1.tgz#38500072ee6ece66f3769936950ea1771be1c957" - integrity sha512-nCFfBIPKr5Sh61s4LPpy1Wtfi0HE8isJ3d2Yb5/Ppw2P2B/3eVSEBjKfN0fmHJSK14+31KwMKmcrzs2GM4P0Ww== - dependencies: - chokidar "^2.1.8" - -watchpack@^1.7.4: - version "1.7.5" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.7.5.tgz#1267e6c55e0b9b5be44c2023aed5437a2c26c453" - integrity sha512-9P3MWk6SrKjHsGkLT2KHXdQ/9SNkyoJbabxnKOoJepsvJjJG8uYTR3yTPxPQvNDI3w4Nz1xnE0TLHK4RIVe/MQ== +watchpack@^2.3.1: + version "2.4.0" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" + integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== dependencies: + glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" - neo-async "^2.5.0" - optionalDependencies: - chokidar "^3.4.1" - watchpack-chokidar2 "^2.0.1" 
-web-streams-polyfill@^3.0.3: - version "3.2.0" - resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.2.0.tgz#a6b74026b38e4885869fb5c589e90b95ccfc7965" - integrity sha512-EqPmREeOzttaLRm5HS7io98goBgZ7IVz79aDvqjD0kYXLtFZTc0T/U6wHTPKyIjb+MdN7DFIIX6hgdBEpWmfPA== +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== webidl-conversions@^5.0.0: version "5.0.0" @@ -12194,46 +9971,53 @@ webidl-conversions@^6.1.0: resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== -webpack-cli@^3.3.12: - version "3.3.12" - resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.12.tgz#94e9ada081453cd0aa609c99e500012fd3ad2d4a" - integrity sha512-NVWBaz9k839ZH/sinurM+HcDvJOTXwSjYp1ku+5XKeOC03z8v5QitnK/x+lAxGXFyhdayoIf/GOpv85z3/xPag== - dependencies: - chalk "^2.4.2" - cross-spawn "^6.0.5" - enhanced-resolve "^4.1.1" - findup-sync "^3.0.0" - global-modules "^2.0.0" - import-local "^2.0.0" - interpret "^1.4.0" - loader-utils "^1.4.0" - supports-color "^6.1.0" - v8-compile-cache "^2.1.1" - yargs "^13.3.2" +webpack-cli@^4.0.0: + version "4.10.0" + resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-4.10.0.tgz#37c1d69c8d85214c5a65e589378f53aec64dab31" + integrity sha512-NLhDfH/h4O6UOy+0LSso42xvYypClINuMNBVVzX4vX98TmTaTUxwRbXdhucbFMd2qLaCTcLq/PdYrvi8onw90w== + dependencies: + "@discoveryjs/json-ext" "^0.5.0" + "@webpack-cli/configtest" "^1.2.0" + "@webpack-cli/info" "^1.5.0" + "@webpack-cli/serve" "^1.7.0" + colorette "^2.0.14" + commander "^7.0.0" + cross-spawn "^7.0.3" + fastest-levenshtein "^1.0.12" + import-local "^3.0.2" + interpret "^2.2.0" + rechoir "^0.7.0" + webpack-merge "^5.7.3" webpack-license-plugin@^4.2.1: - version "4.2.1" - resolved "https://registry.yarnpkg.com/webpack-license-plugin/-/webpack-license-plugin-4.2.1.tgz#92ad642205c839706145f46f3692c249d7904e8b" - integrity sha512-T5Q6P1rI4RwkLpo0lryYyTBNyJ/R7aimQfC5uGpOV8q2bCb5/Q5YJUQp/7H9CPR7k7M46XzFOo9J4wwfX0ropQ== + version "4.2.2" + resolved "https://registry.yarnpkg.com/webpack-license-plugin/-/webpack-license-plugin-4.2.2.tgz#22a1171717cee770718e0d2c28e93a4b07d19bec" + integrity sha512-OfIdm659IKurEInKlBN6Sfzrh+MNKIWkChKKg+aDCoPf3Ok1OSXBDd2RKSbuUAtxjmdW2j6LUVZWnRYRnVdOxA== dependencies: - chalk "^4.1.0" + chalk "^5.0.1" get-npm-tarball-url "^2.0.1" lodash "^4.17.20" needle "^2.2.4" spdx-expression-validate "^2.0.0" webpack-sources "^3.2.1" -webpack-manifest-plugin@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/webpack-manifest-plugin/-/webpack-manifest-plugin-2.2.0.tgz#19ca69b435b0baec7e29fbe90fb4015de2de4f16" - integrity sha512-9S6YyKKKh/Oz/eryM1RyLVDVmy3NSPV0JXMRhZ18fJsq+AwGxUY34X54VNwkzYcEmEkDwNxuEOboCZEebJXBAQ== +webpack-manifest-plugin@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" + integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== + dependencies: + tapable "^2.0.0" + webpack-sources "^2.2.0" + +webpack-merge@^5.7.3: + version "5.8.0" + resolved 
"https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-5.8.0.tgz#2b39dbf22af87776ad744c390223731d30a68f61" + integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q== dependencies: - fs-extra "^7.0.0" - lodash ">=3.5 <5" - object.entries "^1.1.0" - tapable "^1.0.0" + clone-deep "^4.0.1" + wildcard "^2.0.0" -webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1, webpack-sources@^1.4.3: +webpack-sources@^1.1.0, webpack-sources@^1.4.3: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== @@ -12241,39 +10025,48 @@ webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1, webpack- source-list-map "^2.0.0" source-map "~0.6.1" -webpack-sources@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.1.tgz#251a7d9720d75ada1469ca07dbb62f3641a05b6d" - integrity sha512-t6BMVLQ0AkjBOoRTZgqrWm7xbXMBzD+XDq2EZ96+vMfn3qKgsvdXZhbPZ4ElUOpdv4u+iiGe+w3+J75iy/bYGA== - -webpack@4.44.2: - version "4.44.2" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.44.2.tgz#6bfe2b0af055c8b2d1e90ed2cd9363f841266b72" - integrity sha512-6KJVGlCxYdISyurpQ0IPTklv+DULv05rs2hseIXer6D7KrUicRDLFb4IUM1S6LUAKypPM/nSiVSuv8jHu1m3/Q== - dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-module-context" "1.9.0" - "@webassemblyjs/wasm-edit" "1.9.0" - "@webassemblyjs/wasm-parser" "1.9.0" - acorn "^6.4.1" - ajv "^6.10.2" - ajv-keywords "^3.4.1" +webpack-sources@^2.2.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" + integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== + dependencies: + source-list-map "^2.0.1" + source-map "^0.6.1" + +webpack-sources@^3.2.1, webpack-sources@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" + integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== + +webpack@^5.73.0: + version "5.73.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.73.0.tgz#bbd17738f8a53ee5760ea2f59dce7f3431d35d38" + integrity sha512-svjudQRPPa0YiOYa2lM/Gacw0r6PvxptHj4FuEKQ2kX05ZLkjbVc5MnPs6its5j7IZljnIqSVo/OsY2X0IpHGA== + dependencies: + "@types/eslint-scope" "^3.7.3" + "@types/estree" "^0.0.51" + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/wasm-edit" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + acorn "^8.4.1" + acorn-import-assertions "^1.7.6" + browserslist "^4.14.5" chrome-trace-event "^1.0.2" - enhanced-resolve "^4.3.0" - eslint-scope "^4.0.3" - json-parse-better-errors "^1.0.2" - loader-runner "^2.4.0" - loader-utils "^1.2.3" - memory-fs "^0.4.1" - micromatch "^3.1.10" - mkdirp "^0.5.3" - neo-async "^2.6.1" - node-libs-browser "^2.2.1" - schema-utils "^1.0.0" - tapable "^1.1.3" - terser-webpack-plugin "^1.4.3" - watchpack "^1.7.4" - webpack-sources "^1.4.1" + enhanced-resolve "^5.9.3" + es-module-lexer "^0.9.0" + eslint-scope "5.1.1" + events "^3.2.0" + glob-to-regexp "^0.4.1" + graceful-fs "^4.2.9" + json-parse-even-better-errors "^2.3.1" + loader-runner "^4.2.0" + mime-types "^2.1.27" + neo-async "^2.6.2" + schema-utils "^3.1.0" + tapable "^2.1.1" + 
terser-webpack-plugin "^5.1.3" + watchpack "^2.3.1" + webpack-sources "^3.2.3" whatwg-encoding@^1.0.5: version "1.0.5" @@ -12287,6 +10080,14 @@ whatwg-mimetype@^2.3.0: resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + whatwg-url@^8.0.0, whatwg-url@^8.5.0: version "8.7.0" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" @@ -12307,12 +10108,7 @@ which-boxed-primitive@^1.0.2: is-string "^1.0.5" is-symbol "^1.0.3" -which-module@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" - integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= - -which@^1.2.14, which@^1.2.9, which@^1.3.1: +which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== @@ -12326,27 +10122,16 @@ which@^2.0.1: dependencies: isexe "^2.0.0" +wildcard@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.0.tgz#a77d20e5200c6faaac979e4b3aadc7b3dd7f8fec" + integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== + word-wrap@^1.2.3, word-wrap@~1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== -worker-farm@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" - integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== - dependencies: - errno "~0.1.7" - -wrap-ansi@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" - integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== - dependencies: - ansi-styles "^3.2.0" - string-width "^3.0.0" - strip-ansi "^5.0.0" - wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" @@ -12386,21 +10171,11 @@ xmlchars@^2.2.0: resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== -xtend@^4.0.0, xtend@~4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" - integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== - -y18n@>=5.0.5, y18n@^4.0.0, y18n@^5.0.5: +y18n@^5.0.5: version "5.0.8" resolved 
"https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== -yallist@^3.0.2: - version "3.1.1" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" - integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== - yallist@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" @@ -12411,40 +10186,16 @@ yaml-ast-parser@0.0.43: resolved "https://registry.yarnpkg.com/yaml-ast-parser/-/yaml-ast-parser-0.0.43.tgz#e8a23e6fb4c38076ab92995c5dca33f3d3d7c9bb" integrity sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A== -yaml@^1.10.0, yaml@^1.7.2: +yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: version "1.10.2" resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== -yargs-parser@^13.1.2: - version "13.1.2" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38" - integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg== - dependencies: - camelcase "^5.0.0" - decamelize "^1.2.0" - yargs-parser@^20.2.2, yargs-parser@^20.2.3: version "20.2.7" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a" integrity sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw== -yargs@^13.3.2: - version "13.3.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd" - integrity sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw== - dependencies: - cliui "^5.0.0" - find-up "^3.0.0" - get-caller-file "^2.0.1" - require-directory "^2.1.1" - require-main-filename "^2.0.0" - set-blocking "^2.0.0" - string-width "^3.0.0" - which-module "^2.0.0" - y18n "^4.0.0" - yargs-parser "^13.1.2" - yargs@^16.2.0: version "16.2.0" resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" From ca8c8cb64bfa3d1ea373233c4ffa04d267b717e2 Mon Sep 17 00:00:00 2001 From: Brent Bovenzi Date: Thu, 16 Jun 2022 15:19:56 -0400 Subject: [PATCH 093/118] Restore tooltip animation (#24503) * update webpack-cli, eslint, stylelint, babel * revert stylelint changes * update more plugins * update to webpack 5 * restore animation with framer-motion v6 (cherry picked from commit 9dd16be0c5459c5d438503ac1905716be04bad9a) --- airflow/www/package.json | 2 +- .../www/static/js/grid/components/Tooltip.tsx | 84 ++++++++++++------- airflow/www/yarn.lock | 41 +++++---- 3 files changed, 78 insertions(+), 49 deletions(-) diff --git a/airflow/www/package.json b/airflow/www/package.json index 85665962b1c84..ad11cc3a0d976 100644 --- a/airflow/www/package.json +++ b/airflow/www/package.json @@ -90,7 +90,7 @@ "datatables.net": "^1.11.4", "datatables.net-bs": "^1.11.4", "eonasdan-bootstrap-datetimepicker": "^4.17.47", - "framer-motion": "4.1.17", + "framer-motion": "^6.0.0", "jquery": ">=3.5.0", "jshint": "^2.13.4", "lodash": "^4.17.21", diff --git a/airflow/www/static/js/grid/components/Tooltip.tsx 
b/airflow/www/static/js/grid/components/Tooltip.tsx index da4af5b529439..d01aec657c1e5 100644 --- a/airflow/www/static/js/grid/components/Tooltip.tsx +++ b/airflow/www/static/js/grid/components/Tooltip.tsx @@ -32,6 +32,7 @@ import { Portal, PortalProps, } from '@chakra-ui/react'; +import { motion, AnimatePresence } from 'framer-motion'; export interface TooltipProps extends HTMLChakraProps<'div'>, @@ -69,6 +70,27 @@ export interface TooltipProps portalProps?: Pick } +const scale = { + exit: { + scale: 0.85, + opacity: 0, + transition: { + opacity: { duration: 0.15, easings: 'easeInOut' }, + scale: { duration: 0.2, easings: 'easeInOut' }, + }, + }, + enter: { + scale: 1, + opacity: 1, + transition: { + opacity: { easings: 'easeOut', duration: 0.2 }, + scale: { duration: 0.2, ease: [0.175, 0.885, 0.4, 1.1] }, + }, + }, +}; + +const StyledTooltip = chakra(motion.div); + const styles = { '--popper-arrow-bg': ['var(--tooltip-bg)'], '--tooltip-bg': 'colors.gray.700', @@ -134,42 +156,42 @@ const Tooltip = forwardRef((props, ref) => { return ( <> {trigger} - {/* TODO: put back in AnimatePresence when we can upgrade framer-motion without ts errors */} - {/* */} - {tooltip.isOpen && ( - - + + {tooltip.isOpen && ( + - {label} - {hasArrow && ( - + {label} + {hasArrow && ( - - )} + data-popper-arrow + className="chakra-tooltip__arrow-wrapper" + > + + + )} + - - - )} - {/* */} + + )} + ); }); diff --git a/airflow/www/yarn.lock b/airflow/www/yarn.lock index 2c015b08d18b2..edeb3eb35eed2 100644 --- a/airflow/www/yarn.lock +++ b/airflow/www/yarn.lock @@ -5509,15 +5509,15 @@ form-data@^3.0.0: combined-stream "^1.0.8" mime-types "^2.1.12" -framer-motion@4.1.17: - version "4.1.17" - resolved "https://registry.yarnpkg.com/framer-motion/-/framer-motion-4.1.17.tgz#4029469252a62ea599902e5a92b537120cc89721" - integrity sha512-thx1wvKzblzbs0XaK2X0G1JuwIdARcoNOW7VVwjO8BUltzXPyONGAElLu6CiCScsOQRI7FIk/45YTFtJw5Yozw== +framer-motion@^6.0.0: + version "6.3.11" + resolved "https://registry.yarnpkg.com/framer-motion/-/framer-motion-6.3.11.tgz#c304ce9728601ad9377d47d5d9264e43d741d470" + integrity sha512-xQLk+ZSklNs5QNCUmdWPpKMOuWiB8ZETsvcIOWw8xvri9K3TamuifgCI/B6XpaEDR0/V2ZQF2Wm+gUAZrXo+rw== dependencies: - framesync "5.3.0" + framesync "6.0.1" hey-listen "^1.0.8" - popmotion "9.3.6" - style-value-types "4.1.4" + popmotion "11.0.3" + style-value-types "5.0.0" tslib "^2.1.0" optionalDependencies: "@emotion/is-prop-valid" "^0.8.2" @@ -5529,6 +5529,13 @@ framesync@5.3.0: dependencies: tslib "^2.1.0" +framesync@6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/framesync/-/framesync-6.0.1.tgz#5e32fc01f1c42b39c654c35b16440e07a25d6f20" + integrity sha512-fUY88kXvGiIItgNC7wcTOl0SNRCVXMKSWW2Yzfmn7EKNc+MpCzcz9DhdHcdjbrtN3c6R4H5dTY2jiCpPdysEjA== + dependencies: + tslib "^2.1.0" + fs-minipass@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" @@ -7993,14 +8000,14 @@ polished@^4.1.3: dependencies: "@babel/runtime" "^7.14.0" -popmotion@9.3.6: - version "9.3.6" - resolved "https://registry.yarnpkg.com/popmotion/-/popmotion-9.3.6.tgz#b5236fa28f242aff3871b9e23721f093133248d1" - integrity sha512-ZTbXiu6zIggXzIliMi8LGxXBF5ST+wkpXGEjeTUDUOCdSQ356hij/xjeUdv0F8zCQNeqB1+PR5/BB+gC+QLAPw== +popmotion@11.0.3: + version "11.0.3" + resolved "https://registry.yarnpkg.com/popmotion/-/popmotion-11.0.3.tgz#565c5f6590bbcddab7a33a074bb2ba97e24b0cc9" + integrity 
sha512-Y55FLdj3UxkR7Vl3s7Qr4e9m0onSnP8W7d/xQLsoJM40vs6UKHFdygs6SWryasTZYqugMjm3BepCF4CWXDiHgA== dependencies: - framesync "5.3.0" + framesync "6.0.1" hey-listen "^1.0.8" - style-value-types "4.1.4" + style-value-types "5.0.0" tslib "^2.1.0" postcss-calc@^8.2.3: @@ -9316,10 +9323,10 @@ style-search@^0.1.0: resolved "https://registry.yarnpkg.com/style-search/-/style-search-0.1.0.tgz#7958c793e47e32e07d2b5cafe5c0bf8e12e77902" integrity sha1-eVjHk+R+MuB9K1yv5cC/jhLneQI= -style-value-types@4.1.4: - version "4.1.4" - resolved "https://registry.yarnpkg.com/style-value-types/-/style-value-types-4.1.4.tgz#80f37cb4fb024d6394087403dfb275e8bb627e75" - integrity sha512-LCJL6tB+vPSUoxgUBt9juXIlNJHtBMy8jkXzUJSBzeHWdBu6lhzHqCvLVkXFGsFIlNa2ln1sQHya/gzaFmB2Lg== +style-value-types@5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/style-value-types/-/style-value-types-5.0.0.tgz#76c35f0e579843d523187989da866729411fc8ad" + integrity sha512-08yq36Ikn4kx4YU6RD7jWEv27v4V+PUsOGa4n/as8Et3CuODMJQ00ENeAVXAeydX4Z2j1XHZF1K2sX4mGl18fA== dependencies: hey-listen "^1.0.8" tslib "^2.1.0" From 05737bccdaff5851c750049ed919b5ef0e23c473 Mon Sep 17 00:00:00 2001 From: Brent Bovenzi Date: Wed, 22 Jun 2022 15:30:24 -0400 Subject: [PATCH 094/118] Migrate jsx files that affect run/task selection to tsx (#24509) * convert all useSelection files to ts Update grid data ts, remove some anys * yarn, lint and tests * convert statusbox to ts * remove some anys, update instance tooltip * fix types * remove any, add comment for global vars * fix url selection and grid/task defaults * remove React.FC declarations * specify tsconfig file path * remove ts-loader (cherry picked from commit c3c1f7ea2851377ba913075844a8dde9bfe6376e) --- .../www/static/js/grid/{Main.jsx => Main.tsx} | 4 +- airflow/www/static/js/grid/ToggleGroups.jsx | 8 +- .../static/js/grid/api/{index.js => index.ts} | 4 +- .../api/{useGridData.js => useGridData.ts} | 64 +++++++++------- .../js/grid/api/{useTasks.js => useTasks.ts} | 18 +++-- ...ltip.test.jsx => InstanceTooltip.test.tsx} | 22 ++++-- ...nstanceTooltip.jsx => InstanceTooltip.tsx} | 20 +++-- .../{StatusBox.jsx => StatusBox.tsx} | 44 +++++++---- .../www/static/js/grid/components/Time.tsx | 2 +- .../static/js/grid/context/autorefresh.jsx | 8 +- .../{containerRef.jsx => containerRef.tsx} | 11 ++- .../js/grid/dagRuns/{index.jsx => index.tsx} | 8 +- .../static/js/grid/details/BreadcrumbText.tsx | 38 ++++++++++ .../grid/details/{Header.jsx => Header.tsx} | 25 +++---- .../taskInstance/{index.jsx => index.tsx} | 30 +++++--- .../js/grid/details/{index.jsx => index.tsx} | 14 ++-- airflow/www/static/js/grid/index.d.ts | 28 +++++++ ...kRows.test.jsx => renderTaskRows.test.tsx} | 21 ++---- ...{renderTaskRows.jsx => renderTaskRows.tsx} | 68 +++++++++++------ airflow/www/static/js/grid/types/index.ts | 75 +++++++++++++++++++ ...lection.test.jsx => useSelection.test.tsx} | 8 +- .../{useSelection.js => useSelection.ts} | 7 +- airflow/www/tsconfig.json | 4 +- airflow/www/webpack.config.js | 2 +- 24 files changed, 385 insertions(+), 148 deletions(-) rename airflow/www/static/js/grid/{Main.jsx => Main.tsx} (95%) rename airflow/www/static/js/grid/api/{index.js => index.ts} (93%) rename airflow/www/static/js/grid/api/{useGridData.js => useGridData.ts} (64%) rename airflow/www/static/js/grid/api/{useTasks.js => useTasks.ts} (77%) rename airflow/www/static/js/grid/components/{InstanceTooltip.test.jsx => InstanceTooltip.test.tsx} (80%) rename airflow/www/static/js/grid/components/{InstanceTooltip.jsx => InstanceTooltip.tsx} 
(86%) rename airflow/www/static/js/grid/components/{StatusBox.jsx => StatusBox.tsx} (71%) rename airflow/www/static/js/grid/context/{containerRef.jsx => containerRef.tsx} (80%) rename airflow/www/static/js/grid/dagRuns/{index.jsx => index.tsx} (94%) create mode 100644 airflow/www/static/js/grid/details/BreadcrumbText.tsx rename airflow/www/static/js/grid/details/{Header.jsx => Header.tsx} (86%) rename airflow/www/static/js/grid/details/content/taskInstance/{index.jsx => index.tsx} (85%) rename airflow/www/static/js/grid/details/{index.jsx => index.tsx} (82%) create mode 100644 airflow/www/static/js/grid/index.d.ts rename airflow/www/static/js/grid/{renderTaskRows.test.jsx => renderTaskRows.test.tsx} (87%) rename airflow/www/static/js/grid/{renderTaskRows.jsx => renderTaskRows.tsx} (75%) create mode 100644 airflow/www/static/js/grid/types/index.ts rename airflow/www/static/js/grid/utils/{useSelection.test.jsx => useSelection.test.tsx} (94%) rename airflow/www/static/js/grid/utils/{useSelection.js => useSelection.ts} (91%) diff --git a/airflow/www/static/js/grid/Main.jsx b/airflow/www/static/js/grid/Main.tsx similarity index 95% rename from airflow/www/static/js/grid/Main.jsx rename to airflow/www/static/js/grid/Main.tsx index 5e9a4f2a9a12d..8779b547ec16b 100644 --- a/airflow/www/static/js/grid/Main.jsx +++ b/airflow/www/static/js/grid/Main.tsx @@ -47,10 +47,10 @@ const Main = () => { const onPanelToggle = () => { if (!isOpen) { - localStorage.setItem(detailsPanelKey, false); + localStorage.setItem(detailsPanelKey, 'false'); } else { clearSelection(); - localStorage.setItem(detailsPanelKey, true); + localStorage.setItem(detailsPanelKey, 'true'); } onToggle(); }; diff --git a/airflow/www/static/js/grid/ToggleGroups.jsx b/airflow/www/static/js/grid/ToggleGroups.jsx index 3705d67d80241..2f027cd668776 100644 --- a/airflow/www/static/js/grid/ToggleGroups.jsx +++ b/airflow/www/static/js/grid/ToggleGroups.jsx @@ -34,15 +34,15 @@ const getGroupIds = (groups) => { }; const ToggleGroups = ({ groups, openGroupIds, onToggleGroups }) => { + // Don't show button if the DAG has no task groups + const hasGroups = groups.children && groups.children.find((c) => !!c.children); + if (!hasGroups) return null; + const allGroupIds = getGroupIds(groups.children); const isExpandDisabled = allGroupIds.length === openGroupIds.length; const isCollapseDisabled = !openGroupIds.length; - // Don't show button if the DAG has no task groups - const hasGroups = groups.children.find((c) => !!c.children); - if (!hasGroups) return null; - const onExpand = () => { onToggleGroups(allGroupIds); }; diff --git a/airflow/www/static/js/grid/api/index.js b/airflow/www/static/js/grid/api/index.ts similarity index 93% rename from airflow/www/static/js/grid/api/index.js rename to airflow/www/static/js/grid/api/index.ts index 3487ecd6eaff0..0ac8e4e28410d 100644 --- a/airflow/www/static/js/grid/api/index.js +++ b/airflow/www/static/js/grid/api/index.ts @@ -17,7 +17,7 @@ * under the License. */ -import axios from 'axios'; +import axios, { AxiosResponse } from 'axios'; import camelcaseKeys from 'camelcase-keys'; import useTasks from './useTasks'; @@ -35,7 +35,7 @@ import useGridData from './useGridData'; import useMappedInstances from './useMappedInstances'; axios.interceptors.response.use( - (res) => (res.data ? camelcaseKeys(res.data, { deep: true }) : res), + (res: AxiosResponse) => (res.data ? 
camelcaseKeys(res.data, { deep: true }) : res), ); axios.defaults.headers.common.Accept = 'application/json'; diff --git a/airflow/www/static/js/grid/api/useGridData.js b/airflow/www/static/js/grid/api/useGridData.ts similarity index 64% rename from airflow/www/static/js/grid/api/useGridData.js rename to airflow/www/static/js/grid/api/useGridData.ts index 38d4e00748d32..ec12ee6d601dd 100644 --- a/airflow/www/static/js/grid/api/useGridData.js +++ b/airflow/www/static/js/grid/api/useGridData.ts @@ -17,10 +17,8 @@ * under the License. */ -/* global autoRefreshInterval */ - import { useQuery } from 'react-query'; -import axios from 'axios'; +import axios, { AxiosResponse } from 'axios'; import { getMetaValue } from '../../utils'; import { useAutoRefresh } from '../context/autorefresh'; @@ -28,6 +26,7 @@ import useErrorToast from '../utils/useErrorToast'; import useFilters, { BASE_DATE_PARAM, NUM_RUNS_PARAM, RUN_STATE_PARAM, RUN_TYPE_PARAM, now, } from '../utils/useFilters'; +import type { Task, DagRun } from '../types'; const DAG_ID_PARAM = 'dag_id'; @@ -36,12 +35,21 @@ const dagId = getMetaValue(DAG_ID_PARAM); const gridDataUrl = getMetaValue('grid_data_url') || ''; const urlRoot = getMetaValue('root'); -const emptyData = { +interface GridData { + dagRuns: DagRun[]; + groups: Task; +} + +const emptyGridData: GridData = { dagRuns: [], - groups: {}, + groups: { + id: null, + label: null, + instances: [], + }, }; -export const areActiveRuns = (runs = []) => runs.filter((run) => ['queued', 'running', 'scheduled'].includes(run.state)).length > 0; +export const areActiveRuns = (runs: DagRun[] = []) => runs.filter((run) => ['queued', 'running', 'scheduled'].includes(run.state)).length > 0; const useGridData = () => { const { isRefreshOn, stopRefresh } = useAutoRefresh(); @@ -52,8 +60,9 @@ const useGridData = () => { }, } = useFilters(); - return useQuery(['gridData', baseDate, numRuns, runType, runState], async () => { - try { + const query = useQuery( + ['gridData', baseDate, numRuns, runType, runState], + async () => { const params = { root: urlRoot || undefined, [DAG_ID_PARAM]: dagId, @@ -62,24 +71,29 @@ const useGridData = () => { [RUN_TYPE_PARAM]: runType, [RUN_STATE_PARAM]: runState, }; - const newData = await axios.get(gridDataUrl, { params }); + const response = await axios.get(gridDataUrl, { params }); // turn off auto refresh if there are no active runs - if (!areActiveRuns(newData.dagRuns)) stopRefresh(); - return newData; - } catch (error) { - stopRefresh(); - errorToast({ - title: 'Auto-refresh Error', - error, - }); - throw (error); - } - }, { - placeholderData: emptyData, - // only refetch if the refresh switch is on - refetchInterval: isRefreshOn && autoRefreshInterval * 1000, - keepPreviousData: true, - }); + if (!areActiveRuns(response.dagRuns)) stopRefresh(); + return response; + }, + { + // only refetch if the refresh switch is on + refetchInterval: isRefreshOn && (autoRefreshInterval || 1) * 1000, + keepPreviousData: true, + onError: (error) => { + stopRefresh(); + errorToast({ + title: 'Auto-refresh Error', + error, + }); + throw (error); + }, + }, + ); + return { + ...query, + data: query.data ?? 
emptyGridData, + }; }; export default useGridData; diff --git a/airflow/www/static/js/grid/api/useTasks.js b/airflow/www/static/js/grid/api/useTasks.ts similarity index 77% rename from airflow/www/static/js/grid/api/useTasks.js rename to airflow/www/static/js/grid/api/useTasks.ts index c214444dcb81e..68878a78a006a 100644 --- a/airflow/www/static/js/grid/api/useTasks.js +++ b/airflow/www/static/js/grid/api/useTasks.ts @@ -17,19 +17,25 @@ * under the License. */ -import axios from 'axios'; +import axios, { AxiosResponse } from 'axios'; import { useQuery } from 'react-query'; import { getMetaValue } from '../../utils'; +interface TaskData { + tasks: any[]; + totalEntries: number; +} + export default function useTasks() { - return useQuery( + const query = useQuery( 'tasks', () => { const tasksUrl = getMetaValue('tasks_api'); - return axios.get(tasksUrl); - }, - { - initialData: { tasks: [], totalEntries: 0 }, + return axios.get(tasksUrl || ''); }, ); + return { + ...query, + data: query.data || { tasks: [], totalEntries: 0 }, + }; } diff --git a/airflow/www/static/js/grid/components/InstanceTooltip.test.jsx b/airflow/www/static/js/grid/components/InstanceTooltip.test.tsx similarity index 80% rename from airflow/www/static/js/grid/components/InstanceTooltip.test.jsx rename to airflow/www/static/js/grid/components/InstanceTooltip.test.tsx index eb1abe8ba4a79..71e1147da9453 100644 --- a/airflow/www/static/js/grid/components/InstanceTooltip.test.jsx +++ b/airflow/www/static/js/grid/components/InstanceTooltip.test.tsx @@ -24,19 +24,21 @@ import { render } from '@testing-library/react'; import InstanceTooltip from './InstanceTooltip'; import { Wrapper } from '../utils/testUtils'; +import type { TaskState } from '../types'; const instance = { - startDate: new Date(), - endDate: new Date(), - state: 'success', + startDate: new Date().toISOString(), + endDate: new Date().toISOString(), + state: 'success' as TaskState, runId: 'run', + taskId: 'task', }; describe('Test Task InstanceTooltip', () => { test('Displays a normal task', () => { const { getByText } = render( , { wrapper: Wrapper }, @@ -48,7 +50,9 @@ describe('Test Task InstanceTooltip', () => { test('Displays a mapped task with overall status', () => { const { getByText } = render( , { wrapper: Wrapper }, @@ -63,12 +67,20 @@ describe('Test Task InstanceTooltip', () => { const { getByText, queryByText } = render( { +}: Props) => { + if (!group) return null; const isGroup = !!group.children; - const { isMapped } = group; - const summary = []; + const summary: React.ReactNode[] = []; + + const isMapped = group?.isMapped; const numMap = finalStatesMap(); let numMapped = 0; - if (isGroup) { + if (isGroup && group.children) { group.children.forEach((child) => { const taskInstance = child.instances.find((ti) => ti.runId === runId); if (taskInstance) { const stateKey = taskInstance.state == null ? 
'no_status' : taskInstance.state; - if (numMap.has(stateKey)) numMap.set(stateKey, numMap.get(stateKey) + 1); + if (numMap.has(stateKey)) numMap.set(stateKey, (numMap.get(stateKey) || 0) + 1); } }); } else if (isMapped && mappedStates) { @@ -88,7 +96,7 @@ const InstanceTooltip = ({ Started: {' '} - Duration: diff --git a/airflow/www/static/js/grid/components/StatusBox.jsx b/airflow/www/static/js/grid/components/StatusBox.tsx similarity index 71% rename from airflow/www/static/js/grid/components/StatusBox.jsx rename to airflow/www/static/js/grid/components/StatusBox.tsx index 2f079949a9fb8..f9acf57402ee1 100644 --- a/airflow/www/static/js/grid/components/StatusBox.jsx +++ b/airflow/www/static/js/grid/components/StatusBox.tsx @@ -17,36 +17,48 @@ * under the License. */ -/* global stateColors */ - import React from 'react'; import { isEqual } from 'lodash'; import { Box, useTheme, + BoxProps, } from '@chakra-ui/react'; import Tooltip from './Tooltip'; import InstanceTooltip from './InstanceTooltip'; import { useContainerRef } from '../context/containerRef'; +import type { Task, TaskInstance, TaskState } from '../types'; +import type { SelectionProps } from '../utils/useSelection'; export const boxSize = 10; export const boxSizePx = `${boxSize}px`; -export const SimpleStatus = ({ state, ...rest }) => ( +interface SimpleStatusProps extends BoxProps { + state: TaskState; +} + +export const SimpleStatus = ({ state, ...rest }: SimpleStatusProps) => ( ); +interface Props { + group: Task; + instance: TaskInstance; + onSelect: (selection: SelectionProps) => void; + isActive: boolean; +} + const StatusBox = ({ group, instance, onSelect, isActive, -}) => { +}: Props) => { const containerRef = useContainerRef(); const { runId, taskId } = instance; const { colors } = useTheme(); @@ -54,15 +66,19 @@ const StatusBox = ({ // Fetch the corresponding column element and set its background color when hovering const onMouseEnter = () => { - [...containerRef.current.getElementsByClassName(`js-${runId}`)] - .forEach((e) => { - // Don't apply hover if it is already selected - if (e.getAttribute('data-selected') === 'false') e.style.backgroundColor = hoverBlue; - }); + if (containerRef && containerRef.current) { + ([...containerRef.current.getElementsByClassName(`js-${runId}`)] as HTMLElement[]) + .forEach((e) => { + // Don't apply hover if it is already selected + if (e.getAttribute('data-selected') === 'false') e.style.backgroundColor = hoverBlue; + }); + } }; const onMouseLeave = () => { - [...containerRef.current.getElementsByClassName(`js-${runId}`)] - .forEach((e) => { e.style.backgroundColor = null; }); + if (containerRef && containerRef.current) { + ([...containerRef.current.getElementsByClassName(`js-${runId}`)] as HTMLElement[]) + .forEach((e) => { e.style.backgroundColor = ''; }); + } }; const onClick = () => { @@ -97,8 +113,8 @@ const StatusBox = ({ // The default equality function is a shallow comparison and json objects will return false // This custom compare function allows us to do a deeper comparison const compareProps = ( - prevProps, - nextProps, + prevProps: Props, + nextProps: Props, ) => ( isEqual(prevProps.group, nextProps.group) && isEqual(prevProps.instance, nextProps.instance) diff --git a/airflow/www/static/js/grid/components/Time.tsx b/airflow/www/static/js/grid/components/Time.tsx index 5712d163c9877..fbc0b16e79221 100644 --- a/airflow/www/static/js/grid/components/Time.tsx +++ b/airflow/www/static/js/grid/components/Time.tsx @@ -27,7 +27,7 @@ interface Props { format?: string; } -const 
Time: React.FC = ({ dateTime, format = defaultFormatWithTZ }) => { +const Time = ({ dateTime, format = defaultFormatWithTZ }: Props) => { const { timezone } = useTimezone(); const time = moment(dateTime); diff --git a/airflow/www/static/js/grid/context/autorefresh.jsx b/airflow/www/static/js/grid/context/autorefresh.jsx index 35df9b7daf920..11c987fe34493 100644 --- a/airflow/www/static/js/grid/context/autorefresh.jsx +++ b/airflow/www/static/js/grid/context/autorefresh.jsx @@ -29,7 +29,13 @@ const autoRefreshKey = 'disabledAutoRefresh'; const initialIsPaused = getMetaValue('is_paused') === 'True'; const isRefreshDisabled = JSON.parse(localStorage.getItem(autoRefreshKey)); -const AutoRefreshContext = React.createContext(null); +const AutoRefreshContext = React.createContext({ + isRefreshOn: false, + isPaused: true, + toggleRefresh: () => {}, + stopRefresh: () => {}, + startRefresh: () => {}, +}); export const AutoRefreshProvider = ({ children }) => { const [isPaused, setIsPaused] = useState(initialIsPaused); diff --git a/airflow/www/static/js/grid/context/containerRef.jsx b/airflow/www/static/js/grid/context/containerRef.tsx similarity index 80% rename from airflow/www/static/js/grid/context/containerRef.jsx rename to airflow/www/static/js/grid/context/containerRef.tsx index 9062f907ede00..4ddc03642880c 100644 --- a/airflow/www/static/js/grid/context/containerRef.jsx +++ b/airflow/www/static/js/grid/context/containerRef.tsx @@ -19,12 +19,17 @@ import React, { useContext, useRef } from 'react'; -const ContainerRefContext = React.createContext(null); +// eslint-disable-next-line max-len +const ContainerRefContext = React.createContext | undefined>(undefined); + +interface Props { + children: React.ReactNode; +} // containerRef is necessary to render for tooltips, modals, and dialogs // This provider allows the containerRef to be accessed by any react component -export const ContainerRefProvider = ({ children }) => { - const containerRef = useRef(); +export const ContainerRefProvider = ({ children }: Props) => { + const containerRef = useRef(null); return ( diff --git a/airflow/www/static/js/grid/dagRuns/index.jsx b/airflow/www/static/js/grid/dagRuns/index.tsx similarity index 94% rename from airflow/www/static/js/grid/dagRuns/index.jsx rename to airflow/www/static/js/grid/dagRuns/index.tsx index ec588313c6150..679b7db3a5ecc 100644 --- a/airflow/www/static/js/grid/dagRuns/index.jsx +++ b/airflow/www/static/js/grid/dagRuns/index.tsx @@ -24,14 +24,16 @@ import { Text, Box, Flex, + TextProps, } from '@chakra-ui/react'; import { useGridData } from '../api'; import DagRunBar from './Bar'; import { getDuration, formatDuration } from '../../datetime_utils'; import useSelection from '../utils/useSelection'; +import type { DagRun } from '../types'; -const DurationTick = ({ children, ...rest }) => ( +const DurationTick = ({ children, ...rest }: TextProps) => ( {children} @@ -40,7 +42,7 @@ const DurationTick = ({ children, ...rest }) => ( const DagRuns = () => { const { data: { dagRuns } } = useGridData(); const { selected, onSelect } = useSelection(); - const durations = []; + const durations: number[] = []; const runs = dagRuns.map((dagRun) => { const duration = getDuration(dagRun.startDate, dagRun.endDate); durations.push(duration); @@ -91,7 +93,7 @@ const DagRuns = () => {
diff --git a/airflow/www/static/js/grid/types/index.ts b/airflow/www/static/js/grid/types/index.ts new file mode 100644 index 0000000000000..df1f873ef79c3 --- /dev/null +++ b/airflow/www/static/js/grid/types/index.ts @@ -0,0 +1,75 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +type RunState = 'success' | 'running' | 'queued' | 'failed'; + +type TaskState = RunState +| 'removed' +| 'scheduled' +| 'shutdown' +| 'restarting' +| 'up_for_retry' +| 'up_for_reschedule' +| 'upstream_failed' +| 'skipped' +| 'sensing' +| 'deferred' +| null; + +interface DagRun { + runId: string; + runType: 'manual' | 'backfill' | 'scheduled'; + state: RunState; + executionDate: string; + dataIntervalStart: string; + dataIntervalEnd: string; + startDate: string | null; + endDate: string | null; + lastSchedulingDecision: string | null; +} + +interface TaskInstance { + runId: string; + taskId: string; + startDate: string | null; + endDate: string | null; + state: TaskState | null; + mappedStates?: { + [key: string]: number; + }, + tryNumber?: number; +} + +interface Task { + id: string | null; + label: string | null; + instances: TaskInstance[]; + tooltip?: string; + children?: Task[]; + extraLinks?: string[]; + isMapped?: boolean; +} + +export type { + DagRun, + RunState, + TaskState, + TaskInstance, + Task, +}; diff --git a/airflow/www/static/js/grid/utils/useSelection.test.jsx b/airflow/www/static/js/grid/utils/useSelection.test.tsx similarity index 94% rename from airflow/www/static/js/grid/utils/useSelection.test.jsx rename to airflow/www/static/js/grid/utils/useSelection.test.tsx index 2d2eeeb4db742..19871c3ff098a 100644 --- a/airflow/www/static/js/grid/utils/useSelection.test.jsx +++ b/airflow/www/static/js/grid/utils/useSelection.test.tsx @@ -25,7 +25,11 @@ import { MemoryRouter } from 'react-router-dom'; import useSelection from './useSelection'; -const Wrapper = ({ children }) => ( +interface Props { + children: React.ReactNode; +} + +const Wrapper = ({ children }: Props) => ( {children} @@ -47,7 +51,7 @@ describe('Test useSelection hook', () => { test.each([ { taskId: 'task_1', runId: 'run_1' }, - { taskId: null, runId: 'run_1' }, + { runId: 'run_1', taskId: null }, { taskId: 'task_1', runId: null }, ])('Test onSelect() and clearSelection()', async (selected) => { const { result } = renderHook(() => useSelection(), { wrapper: Wrapper }); diff --git a/airflow/www/static/js/grid/utils/useSelection.js b/airflow/www/static/js/grid/utils/useSelection.ts similarity index 91% rename from airflow/www/static/js/grid/utils/useSelection.js rename to airflow/www/static/js/grid/utils/useSelection.ts index c90578837a2e1..c4f5290b110a4 100644 --- a/airflow/www/static/js/grid/utils/useSelection.js +++ b/airflow/www/static/js/grid/utils/useSelection.ts @@ 
-22,6 +22,11 @@ import { useSearchParams } from 'react-router-dom'; const RUN_ID = 'dag_run_id'; const TASK_ID = 'task_id'; +export interface SelectionProps { + runId?: string | null ; + taskId?: string | null; +} + const useSelection = () => { const [searchParams, setSearchParams] = useSearchParams(); @@ -32,7 +37,7 @@ const useSelection = () => { setSearchParams(searchParams); }; - const onSelect = ({ runId, taskId }) => { + const onSelect = ({ runId, taskId }: SelectionProps) => { const params = new URLSearchParams(searchParams); if (runId) params.set(RUN_ID, runId); diff --git a/airflow/www/tsconfig.json b/airflow/www/tsconfig.json index 5717d624e36b0..f264a9e3cca57 100644 --- a/airflow/www/tsconfig.json +++ b/airflow/www/tsconfig.json @@ -25,8 +25,8 @@ "strict": true, "allowJs": true, "importsNotUsedAsValues": "error", - "target": "esnext", - "module": "esnext", + "target": "ES6", + "module": "ES6", "moduleResolution": "node", "isolatedModules": true, "esModuleInterop": true, diff --git a/airflow/www/webpack.config.js b/airflow/www/webpack.config.js index e48b01c687736..b4019c3238676 100644 --- a/airflow/www/webpack.config.js +++ b/airflow/www/webpack.config.js @@ -106,7 +106,7 @@ const config = { ], }, { - test: /\.[j|t]sx?$/, + test: /\.(js|jsx|tsx|ts)$/, exclude: /node_modules/, use: [ { From 06c4e8426b59bcecd8963d9b59b22107f6697795 Mon Sep 17 00:00:00 2001 From: Brent Bovenzi Date: Tue, 28 Jun 2022 11:28:27 -0400 Subject: [PATCH 095/118] don't try to render child rows for closed groups (#24637) (cherry picked from commit 8f638bbeb9d3591a4e3c3000a542ae8edb58fb26) --- airflow/www/static/js/grid/renderTaskRows.tsx | 44 +++++++++---------- 1 file changed, 20 insertions(+), 24 deletions(-) diff --git a/airflow/www/static/js/grid/renderTaskRows.tsx b/airflow/www/static/js/grid/renderTaskRows.tsx index c7f82333f4b4f..11658e873f2de 100644 --- a/airflow/www/static/js/grid/renderTaskRows.tsx +++ b/airflow/www/static/js/grid/renderTaskRows.tsx @@ -23,7 +23,6 @@ import { Td, Box, Flex, - Collapse, useTheme, } from '@chakra-ui/react'; @@ -140,13 +139,14 @@ const Row = (props: RowProps) => { [isGroup, isOpen, task.label, openGroupIds, onToggleGroups], ); - const isFullyOpen = level === openParentCount; + // check if the group's parents are all open, if not, return null + if (level !== openParentCount) return null; return ( <> { width="100%" zIndex={1} > - - - + - {isGroup && ( + {isGroup && isOpen && ( renderTaskRows({ - ...props, level: level + 1, openParentCount: openParentCount + (isOpen ? 
1 : 0), + ...props, level: level + 1, openParentCount: openParentCount + 1, }) )} From 1306f9950e1245cfb2e5440f628b64a6b6089d4e Mon Sep 17 00:00:00 2001 From: Brent Bovenzi Date: Tue, 28 Jun 2022 11:43:28 -0400 Subject: [PATCH 096/118] Fix Grid vertical scrolling (#24684) * fix vertical scrolling * fix flex grow for panel open/close * add type checking * add duration axis component * remove details/grid width changes this should be done in a separate PR (cherry picked from commit 1429091d7cf8e16e16efc7a0a3cc00f6ae5716a1) --- .../www/static/js/grid/{Grid.jsx => Grid.tsx} | 73 ++++++++-------- .../js/grid/dagRuns/{Bar.jsx => Bar.tsx} | 29 +++++-- airflow/www/static/js/grid/dagRuns/index.tsx | 87 ++++++++++--------- airflow/www/static/js/grid/renderTaskRows.tsx | 1 - 4 files changed, 103 insertions(+), 87 deletions(-) rename airflow/www/static/js/grid/{Grid.jsx => Grid.tsx} (73%) rename airflow/www/static/js/grid/dagRuns/{Bar.jsx => Bar.tsx} (82%) diff --git a/airflow/www/static/js/grid/Grid.jsx b/airflow/www/static/js/grid/Grid.tsx similarity index 73% rename from airflow/www/static/js/grid/Grid.jsx rename to airflow/www/static/js/grid/Grid.tsx index 17101cef4f028..c32d8230ce548 100644 --- a/airflow/www/static/js/grid/Grid.jsx +++ b/airflow/www/static/js/grid/Grid.tsx @@ -40,18 +40,24 @@ import AutoRefresh from './AutoRefresh'; const dagId = getMetaValue('dag_id'); -const Grid = ({ isPanelOpen = false, onPanelToggle, hoveredTaskState }) => { - const scrollRef = useRef(); - const tableRef = useRef(); +interface Props { + isPanelOpen?: boolean; + onPanelToggle: () => void; + hoveredTaskState?: string; +} + +const Grid = ({ isPanelOpen = false, onPanelToggle, hoveredTaskState }: Props) => { + const scrollRef = useRef(null); + const tableRef = useRef(null); const { data: { groups, dagRuns } } = useGridData(); const dagRunIds = dagRuns.map((dr) => dr.runId); const openGroupsKey = `${dagId}/open-groups`; - const storedGroups = JSON.parse(localStorage.getItem(openGroupsKey)) || []; + const storedGroups = JSON.parse(localStorage.getItem(openGroupsKey) || '[]'); const [openGroupIds, setOpenGroupIds] = useState(storedGroups); - const onToggleGroups = (groupIds) => { + const onToggleGroups = (groupIds: string[]) => { localStorage.setItem(openGroupsKey, JSON.stringify(groupIds)); setOpenGroupIds(groupIds); }; @@ -60,7 +66,11 @@ const Grid = ({ isPanelOpen = false, onPanelToggle, hoveredTaskState }) => { const scrollOnResize = new ResizeObserver(() => { const runsContainer = scrollRef.current; // Set scroll to top right if it is scrollable - if (runsContainer && runsContainer.scrollWidth > runsContainer.clientWidth) { + if ( + tableRef?.current + && runsContainer + && runsContainer.scrollWidth > runsContainer.clientWidth + ) { runsContainer.scrollBy(tableRef.current.offsetWidth, 0); } }); @@ -74,26 +84,21 @@ const Grid = ({ isPanelOpen = false, onPanelToggle, hoveredTaskState }) => { }; } return () => {}; - }, [tableRef]); + }, [tableRef, isPanelOpen]); return ( @@ -110,28 +115,28 @@ const Grid = ({ isPanelOpen = false, onPanelToggle, hoveredTaskState }) => { title={`${isPanelOpen ? 'Hide ' : 'Show '} Details Panel`} aria-label={isPanelOpen ? 'Show Details' : 'Hide Details'} icon={} - transform={!isPanelOpen && 'rotateZ(180deg)'} + transform={!isPanelOpen ? 'rotateZ(180deg)' : undefined} transitionProperty="none" /> -
- {runs.map((run, i) => ( + {runs.map((run: DagRun, i: number) => ( ( + + {label} + {value} + +); + +export default BreadcrumbText; diff --git a/airflow/www/static/js/grid/details/Header.jsx b/airflow/www/static/js/grid/details/Header.tsx similarity index 86% rename from airflow/www/static/js/grid/details/Header.jsx rename to airflow/www/static/js/grid/details/Header.tsx index c158deabcaa59..db6a75b993f83 100644 --- a/airflow/www/static/js/grid/details/Header.jsx +++ b/airflow/www/static/js/grid/details/Header.tsx @@ -22,8 +22,6 @@ import { Breadcrumb, BreadcrumbItem, BreadcrumbLink, - Box, - Heading, Text, } from '@chakra-ui/react'; import { MdPlayArrow, MdOutlineSchedule } from 'react-icons/md'; @@ -33,20 +31,15 @@ import { getMetaValue } from '../../utils'; import useSelection from '../utils/useSelection'; import Time from '../components/Time'; import { useTasks, useGridData } from '../api'; +import BreadcrumbText from './BreadcrumbText'; const dagId = getMetaValue('dag_id'); -const LabelValue = ({ label, value }) => ( - - {label} - {value} - -); - const Header = () => { const { data: { dagRuns } } = useGridData(); - const { selected: { taskId, runId }, onSelect, clearSelection } = useSelection(); const { data: { tasks } } = useTasks(); + + const { selected: { taskId, runId }, onSelect, clearSelection } = useSelection(); const dagRun = dagRuns.find((r) => r.runId === runId); const task = tasks.find((t) => t.taskId === taskId); @@ -59,7 +52,7 @@ const Header = () => { }, [clearSelection, dagRun, runId]); let runLabel; - if (dagRun) { + if (dagRun && runId) { if (runId.includes('manual__') || runId.includes('scheduled__') || runId.includes('backfill__')) { runLabel = (
{ task={task} selectedRunId={selected.runId} onSelect={onSelect} - activeTaskState={hoveredTaskState} + hoveredTaskState={hoveredTaskState} />
{ width={`${dagRunIds.length * columnWidth}px`} borderBottom={0} > - - - +
- - - - {/* TODO: remove hardcoded values. 665px is roughly the total heade+footer height */} - - {renderTaskRows({ - task: groups, dagRunIds, openGroupIds, onToggleGroups, hoveredTaskState, - })} - -
+ + + + + + {/* TODO: remove hardcoded values. 665px is roughly the total heade+footer height */} + + {renderTaskRows({ + task: groups, dagRunIds, openGroupIds, onToggleGroups, hoveredTaskState, + })} + +
+
); }; diff --git a/airflow/www/static/js/grid/dagRuns/Bar.jsx b/airflow/www/static/js/grid/dagRuns/Bar.tsx similarity index 82% rename from airflow/www/static/js/grid/dagRuns/Bar.jsx rename to airflow/www/static/js/grid/dagRuns/Bar.tsx index 22163a44da05e..18399ab8852fe 100644 --- a/airflow/www/static/js/grid/dagRuns/Bar.jsx +++ b/airflow/www/static/js/grid/dagRuns/Bar.tsx @@ -35,12 +35,23 @@ import { RiArrowGoBackFill } from 'react-icons/ri'; import DagRunTooltip from './Tooltip'; import { useContainerRef } from '../context/containerRef'; import Time from '../components/Time'; +import type { SelectionProps } from '../utils/useSelection'; +import type { RunWithDuration } from '.'; const BAR_HEIGHT = 100; +interface Props { + run: RunWithDuration + max: number; + index: number; + totalRuns: number; + isSelected: boolean; + onSelect: (props: SelectionProps) => void; +} + const DagRunBar = ({ run, max, index, totalRuns, isSelected, onSelect, -}) => { +}: Props) => { const containerRef = useContainerRef(); const { colors } = useTheme(); const hoverBlue = `${colors.blue[100]}50`; @@ -48,20 +59,20 @@ const DagRunBar = ({ // Fetch the corresponding column element and set its background color when hovering const onMouseEnter = () => { if (!isSelected) { - [...containerRef.current.getElementsByClassName(`js-${run.runId}`)] - .forEach((e) => { e.style.backgroundColor = hoverBlue; }); + const els = Array.from(containerRef?.current?.getElementsByClassName(`js-${run.runId}`) as HTMLCollectionOf); + els.forEach((e) => { e.style.backgroundColor = hoverBlue; }); } }; const onMouseLeave = () => { - [...containerRef.current.getElementsByClassName(`js-${run.runId}`)] - .forEach((e) => { e.style.backgroundColor = null; }); + const els = Array.from(containerRef?.current?.getElementsByClassName(`js-${run.runId}`) as HTMLCollectionOf); + els.forEach((e) => { e.style.backgroundColor = ''; }); }; return ( - {index < totalRuns - 3 && index % 10 === 0 && ( + {(index === totalRuns - 4 || (index + 4) % 10 === 0) && (
', html=True - ), + UIAlert('Visit airflow.apache.org', html=True), UIAlert(Markup("Welcome %s") % ("John & Jane Doe",)), ] diff --git a/docs/apache-airflow/howto/define_extra_link.rst b/docs/apache-airflow/howto/define_extra_link.rst index c793a9320b887..9bae547492cea 100644 --- a/docs/apache-airflow/howto/define_extra_link.rst +++ b/docs/apache-airflow/howto/define_extra_link.rst @@ -93,12 +93,10 @@ tasks using :class:`~airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Ope operators = [GCSToS3Operator] def get_link(self, operator, *, ti_key): - return ( - "https://s3.amazonaws.com/airflow-logs/{dag_id}/{task_id}/{run_id}".format( - dag_id=operator.dag_id, - task_id=operator.task_id, - run_id=ti_key.run_id, - ) + return "https://s3.amazonaws.com/airflow-logs/{dag_id}/{task_id}/{run_id}".format( + dag_id=operator.dag_id, + task_id=operator.task_id, + run_id=ti_key.run_id, ) diff --git a/docs/apache-airflow/lineage.rst b/docs/apache-airflow/lineage.rst index 20adfb96fa205..8029d94ceeb4a 100644 --- a/docs/apache-airflow/lineage.rst +++ b/docs/apache-airflow/lineage.rst @@ -50,9 +50,7 @@ works. ) f_final = File(url="/tmp/final") - run_this_last = EmptyOperator( - task_id="run_this_last", dag=dag, inlets=AUTO, outlets=f_final - ) + run_this_last = EmptyOperator(task_id="run_this_last", dag=dag, inlets=AUTO, outlets=f_final) f_in = File(url="/tmp/whole_directory/") outlets = [] @@ -60,9 +58,7 @@ works. f_out = File(url="/tmp/{}/{{{{ data_interval_start }}}}".format(file)) outlets.append(f_out) - run_this = BashOperator( - task_id="run_me_first", bash_command="echo 1", dag=dag, inlets=f_in, outlets=outlets - ) + run_this = BashOperator(task_id="run_me_first", bash_command="echo 1", dag=dag, inlets=f_in, outlets=outlets) run_this.set_downstream(run_this_last) Inlets can be a (list of) upstream task ids or statically defined as an attr annotated object diff --git a/docs/apache-airflow/plugins.rst b/docs/apache-airflow/plugins.rst index f719cbae23454..ae011caa12350 100644 --- a/docs/apache-airflow/plugins.rst +++ b/docs/apache-airflow/plugins.rst @@ -302,9 +302,7 @@ will automatically load the registered plugins from the entrypoint list. setup( name="my-package", # ... - entry_points={ - "airflow.plugins": ["my_plugin = my_package.my_plugin:MyAirflowPlugin"] - }, + entry_points={"airflow.plugins": ["my_plugin = my_package.my_plugin:MyAirflowPlugin"]}, ) Automatic reloading webserver diff --git a/docs/apache-airflow/security/webserver.rst b/docs/apache-airflow/security/webserver.rst index cee3ed9e3b70d..fbbf938df0461 100644 --- a/docs/apache-airflow/security/webserver.rst +++ b/docs/apache-airflow/security/webserver.rst @@ -150,9 +150,7 @@ Here is an example of what you might have in your webserver_config.py: AUTH_TYPE = AUTH_OAUTH AUTH_ROLES_SYNC_AT_LOGIN = True # Checks roles on every login - AUTH_USER_REGISTRATION = ( - True # allow users who are not already in the FAB DB to register - ) + AUTH_USER_REGISTRATION = True # allow users who are not already in the FAB DB to register # Make sure to replace this with the path to your security manager class FAB_SECURITY_MANAGER_CLASS = "your_module.your_security_manager_class" AUTH_ROLES_MAPPING = { @@ -219,9 +217,7 @@ webserver_config.py itself if you wish. # In this example, the oauth provider == 'github'. 
# If you ever want to support other providers, see how it is done here: # https://github.com/dpgaspar/Flask-AppBuilder/blob/master/flask_appbuilder/security/manager.py#L550 - def get_oauth_user_info( - self, provider: str, resp: Any - ) -> Dict[str, Union[str, List[str]]]: + def get_oauth_user_info(self, provider: str, resp: Any) -> Dict[str, Union[str, List[str]]]: # Creates the user info payload from Github. # The user previously allowed your app to act on their behalf, diff --git a/docs/apache-airflow/upgrading-from-1-10/index.rst b/docs/apache-airflow/upgrading-from-1-10/index.rst index 334753e52b9d6..dd58b2f4f4b73 100644 --- a/docs/apache-airflow/upgrading-from-1-10/index.rst +++ b/docs/apache-airflow/upgrading-from-1-10/index.rst @@ -182,9 +182,7 @@ Whereas previously a user would import each individual class to build the pod as volume_config = {"persistentVolumeClaim": {"claimName": "test-volume"}} volume = Volume(name="test-volume", configs=volume_config) - volume_mount = VolumeMount( - "test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True - ) + volume_mount = VolumeMount("test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True) port = Port("http", 80) secret_file = Secret("volume", "/etc/sql_conn", "airflow-secrets", "sql_alchemy_conn") @@ -224,9 +222,7 @@ Now the user can use the ``kubernetes.client.models`` class as a single point of volume = k8s.V1Volume( name="test-volume", - persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource( - claim_name="test-volume" - ), + persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource(claim_name="test-volume"), ) port = k8s.V1ContainerPort(name="http", container_port=80) @@ -602,9 +598,7 @@ Before: from airflow.kubernetes.volume_mount import VolumeMount - volume_mount = VolumeMount( - "test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True - ) + volume_mount = VolumeMount("test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True) k = KubernetesPodOperator( namespace="default", image="ubuntu:16.04", @@ -660,9 +654,7 @@ After: volume = k8s.V1Volume( name="test-volume", - persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource( - claim_name="test-volume" - ), + persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource(claim_name="test-volume"), ) k = KubernetesPodOperator( namespace="default", @@ -737,9 +729,7 @@ After: env_vars = [ k8s.V1EnvVar( name="ENV3", - value_from=k8s.V1EnvVarSource( - field_ref=k8s.V1ObjectFieldSelector(field_path="status.podIP") - ), + value_from=k8s.V1EnvVarSource(field_ref=k8s.V1ObjectFieldSelector(field_path="status.podIP")), ) ] @@ -777,9 +767,7 @@ After: from kubernetes.client import models as k8s configmap = "test-configmap" - env_from = [ - k8s.V1EnvFromSource(config_map_ref=k8s.V1ConfigMapEnvSource(name=configmap)) - ] + env_from = [k8s.V1EnvFromSource(config_map_ref=k8s.V1ConfigMapEnvSource(name=configmap))] k = KubernetesPodOperator( namespace="default", @@ -1149,9 +1137,7 @@ non-RBAC UI (``flask-admin`` based UI), update it to use ``flask_appbuilder_view v = TestView(category="Test Plugin", name="Test View") - ml = MenuLink( - category="Test Plugin", name="Test Menu Link", url="https://airflow.apache.org/" - ) + ml = MenuLink(category="Test Plugin", name="Test Menu Link", url="https://airflow.apache.org/") class AirflowTestPlugin(AirflowPlugin): diff --git a/tests/dags_corrupted/README.md b/tests/dags_corrupted/README.md index 5c3a0278deb97..d91e568a6139a 100644 --- a/tests/dags_corrupted/README.md 
+++ b/tests/dags_corrupted/README.md @@ -25,9 +25,7 @@ Python interpreter from loading this file. To access a DAG in this folder, use the following code inside a unit test. ```python -TEST_DAG_FOLDER = os.path.join( - os.path.dirname(os.path.realpath(__file__)), "dags_corrupted" -) +TEST_DAG_FOLDER = os.path.join(os.path.dirname(os.path.realpath(__file__)), "dags_corrupted") dagbag = DagBag(dag_folder=TEST_DAG_FOLDER) dag = dagbag.get_dag(dag_id) diff --git a/tests/test_utils/perf/perf_kit/__init__.py b/tests/test_utils/perf/perf_kit/__init__.py index f9b0fe23ef7a4..13252f45692f6 100644 --- a/tests/test_utils/perf/perf_kit/__init__.py +++ b/tests/test_utils/perf/perf_kit/__init__.py @@ -75,10 +75,7 @@ def test_bulk_write_to_db(self): clear_db_dags() - dags = [ - DAG(f"dag-bulk-sync-{i}", start_date=DEFAULT_DATE, tags=["test-dag"]) - for i in range(0, 4) - ] + dags = [DAG(f"dag-bulk-sync-{i}", start_date=DEFAULT_DATE, tags=["test-dag"]) for i in range(0, 4)] with assert_queries_count(3): DAG.bulk_write_to_db(dags) @@ -101,10 +98,7 @@ def test_bulk_write_to_db(self): @trace_queries def test_bulk_write_to_db(self): clear_db_dags() - dags = [ - DAG(f"dag-bulk-sync-{i}", start_date=DEFAULT_DATE, tags=["test-dag"]) - for i in range(0, 4) - ] + dags = [DAG(f"dag-bulk-sync-{i}", start_date=DEFAULT_DATE, tags=["test-dag"]) for i in range(0, 4)] with assert_queries_count(3): DAG.bulk_write_to_db(dags) From ad78c85af7ef6812d8984ac18386b64461f8245d Mon Sep 17 00:00:00 2001 From: Ephraim Anierobi Date: Thu, 30 Jun 2022 17:14:51 +0100 Subject: [PATCH 117/118] Add release note for 2.3.3 --- RELEASE_NOTES.rst | 119 +++++++++++++++++- .../elasticsearch/example_dags/__init__.py | 16 --- .../example_elasticsearch_query.py | 50 -------- docs/spelling_wordlist.txt | 2 + newsfragments/24399.significant.rst | 31 ----- newsfragments/24519.misc.rst | 1 - 6 files changed, 120 insertions(+), 99 deletions(-) delete mode 100644 airflow/providers/elasticsearch/example_dags/__init__.py delete mode 100644 airflow/providers/elasticsearch/example_dags/example_elasticsearch_query.py delete mode 100644 newsfragments/24399.significant.rst delete mode 100644 newsfragments/24519.misc.rst diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index 2b763f7ce703b..ed9ea874af846 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -21,6 +21,124 @@ .. towncrier release notes start + +Airflow 2.3.3 (2022-07-05) +-------------------------- + +Significant Changes +^^^^^^^^^^^^^^^^^^^ + +We've upgraded Flask App Builder to a major version 4.* (#24399) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Flask App Builder is one of the important components of Airflow Webserver, as +it uses a lot of dependencies that are essential to run the webserver and integrate it +in enterprise environments - especially authentication. + +The FAB 4.* upgrades a number of dependencies to major releases, which upgrades them to versions +that have a number of security issues fixed. A lot of tests were performed to bring the dependencies +in a backwards-compatible way, however the dependencies themselves implement breaking changes in their +internals so it might be that some of those changes might impact the users in case they are using the +libraries for their own purposes. + +One important change that you likely will need to apply to Oauth configuration is to add +``server_metadata_url`` or ``jwks_uri`` and you can read about it more +in `this issue `_. 
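For illustration, here is a minimal sketch of where those keys could go in a ``webserver_config.py`` OAuth provider entry. It assumes the standard FAB ``OAUTH_PROVIDERS`` structure with a Google provider; the client id, secret, and URLs are placeholders, not values prescribed by this release.

```python
# A minimal, illustrative webserver_config.py fragment (all values are placeholders).
from flask_appbuilder.security.manager import AUTH_OAUTH

AUTH_TYPE = AUTH_OAUTH

OAUTH_PROVIDERS = [
    {
        "name": "google",
        "icon": "fa-google",
        "token_key": "access_token",
        "remote_app": {
            "client_id": "GOOGLE_CLIENT_ID",          # placeholder
            "client_secret": "GOOGLE_CLIENT_SECRET",  # placeholder
            "api_base_url": "https://www.googleapis.com/oauth2/v2/",
            "client_kwargs": {"scope": "email profile"},
            # With the authlib-based FAB 4, OIDC metadata discovery is configured here;
            # alternatively a "jwks_uri" can be set directly instead.
            "server_metadata_url": "https://accounts.google.com/.well-known/openid-configuration",
        },
    },
]
```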
+ +Here is the list of breaking changes in dependencies that comes together with FAB 4: + + * ``Flask`` from 1.X to 2.X `breaking changes `__ + + * ``flask-jwt-extended`` 3.X to 4.X `breaking changes: `__ + + * ``Jinja2`` 2.X to 3.X `breaking changes: `__ + + * ``Werkzeug`` 1.X to 2.X `breaking changes `__ + + * ``pyJWT`` 1.X to 2.X `breaking changes: `__ + + * ``Click`` 7.X to 8.X `breaking changes: `__ + + * ``itsdangerous`` 1.X to 2.X `breaking changes `__ + +Bug Fixes +^^^^^^^^^ + +- Allow for ``LOGGING_LEVEL=DEBUG`` (#23360) +- Fix grid date ticks (#24738) +- Debounce status highlighting in Grid view (#24710) +- Fix Grid vertical scrolling (#24684) +- don't try to render child rows for closed groups (#24637) +- Do not calculate grid root instances (#24528) +- Maintain grid view selection on filtering upstream (#23779) +- Speed up ``grid_data`` endpoint by 10x (#24284) +- Apply per-run log templates to log handlers (#24153) +- Don't crash scheduler if exec config has old k8s objects (#24117) +- ``TI.log_url`` fix for ``map_index`` (#24335) +- Fix migration ``0080_2_0_2`` - Replace null values before setting column not null (#24585) +- Patch ``sql_alchemy_conn`` if old Postgres schemes used (#24569) +- Seed ``log_template`` table (#24511) +- Fix deprecated ``log_id_template`` value (#24506) +- Fix toast messages (#24505) +- Add indexes for CASCADE deletes for ``task_instance`` (#24488) +- Return empty dict if Pod JSON encoding fails (#24478) +- Improve grid rendering performance with a custom tooltip (#24417, #24449) +- Check for ``run_id`` for grid group summaries (#24327) +- Optimize calendar view for cron scheduled DAGs (#24262) +- Use ``get_hostname`` instead of ``socket.getfqdn`` (#24260) +- Check that edge nodes actually exist (#24166) +- Fix ``useTasks`` crash on error (#24152) +- Do not fail re-queued TIs (#23846) +- Reduce grid view API calls (#24083) +- Rename Permissions to Permission Pairs. 
(#24065) +- Replace ``use_task_execution_date`` with ``use_task_logical_date`` (#23983) +- Grid fix details button truncated and small UI tweaks (#23934) +- Add TaskInstance State ``REMOVED`` to finished states and success states (#23797) +- Fix mapped task immutability after clear (#23667) +- Fix permission issue for dag that has dot in name (#23510) +- Fix closing connection ``dbapi.get_pandas_df`` (#23452) +- Check bag DAG ``schedule_interval`` match timetable (#23113) +- Parse error for task added to multiple groups (#23071) +- Fix flaky order of returned dag runs (#24405) +- Migrate ``jsx`` files that affect run/task selection to ``tsx`` (#24509) +- Fix links to sources for examples (#24386) +- Set proper ``Content-Type`` and ``chartset`` on ``grid_data`` endpoint (#24375) + +Doc only changes +^^^^^^^^^^^^^^^^ + +- Update templates doc to mention ``extras`` and format Airflow ``Vars`` / ``Conns`` (#24735) +- Document built in Timetables (#23099) +- Alphabetizes two tables (#23923) +- Clarify that users should not use Maria DB (#24556) +- Add imports to deferring code samples (#24544) +- Add note about image regeneration in June 2022 (#24524) +- Small cleanup of ``get_current_context()`` chapter (#24482) +- Fix default 2.2.5 ``log_id_template`` (#24455) +- Update description of installing providers separately from core (#24454) +- Mention context variables and logging (#24304) + +Misc/Internal +^^^^^^^^^^^^^ + +- Remove internet explorer support (#24495) +- Removing magic status code numbers from ``api_connexion`` (#24050) +- Upgrade FAB to ``4.1.2`` (#24619) +- Switch Markdown engine to ``markdown-it-py`` (#19702) +- Update ``rich`` to latest version across the board. (#24186) +- Get rid of ``TimedJSONWebSignatureSerializer`` (#24519) +- Update flask-appbuilder ``authlib``/ ``oauth`` dependency (#24516) +- Upgrade to ``webpack`` 5 (#24485) +- Add ``typescript`` (#24337) +- The JWT claims in the request to retrieve logs have been standardized: we use ``nbf`` and ``aud`` claims for + maturity and audience of the requests. Also "filename" payload field is used to keep log name. (#24519) +- Address all ``yarn`` test warnings (#24722) +- Upgrade to react 18 and chakra 2 (#24430) +- Refactor ``DagRun.verify_integrity`` (#24114) +- Upgrade FAB to ``4.1.1`` (#24399) +- We now need at least ``Flask-WTF 0.15`` (#24621) + + Airflow 2.3.2 (2022-06-04) -------------------------- @@ -7835,7 +7953,6 @@ There are five roles created for Airflow by default: Admin, User, Op, Viewer, an Breaking changes ~~~~~~~~~~~~~~~~ - * AWS Batch Operator renamed property queue to job_queue to prevent conflict with the internal queue from CeleryExecutor - AIRFLOW-2542 * Users created and stored in the old users table will not be migrated automatically. FAB's built-in authentication support must be reconfigured. * Airflow dag home page is now ``/home`` (instead of ``/admin``\ ). diff --git a/airflow/providers/elasticsearch/example_dags/__init__.py b/airflow/providers/elasticsearch/example_dags/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/airflow/providers/elasticsearch/example_dags/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/airflow/providers/elasticsearch/example_dags/example_elasticsearch_query.py b/airflow/providers/elasticsearch/example_dags/example_elasticsearch_query.py deleted file mode 100644 index d4a7b100f00f6..0000000000000 --- a/airflow/providers/elasticsearch/example_dags/example_elasticsearch_query.py +++ /dev/null @@ -1,50 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from datetime import datetime, timedelta - -from airflow import DAG -from airflow.decorators import task -from airflow.providers.elasticsearch.hooks.elasticsearch import ElasticsearchHook - - -@task(task_id='es_print_tables') -def show_tables(): - """ - show_tables queries elasticsearch to list available tables - """ - es = ElasticsearchHook(elasticsearch_conn_id='production-es') - - # Handle ES conn with context manager - with es.get_conn() as es_conn: - tables = es_conn.execute('SHOW TABLES') - for table, *_ in tables: - print(f"table: {table}") - return True - - -# Using a DAG context manager, you don't have to specify the dag property of each task -with DAG( - 'elasticsearch_dag', - start_date=datetime(2021, 8, 30), - max_active_runs=1, - schedule_interval=timedelta(days=1), - default_args={'retries': 1}, # Default setting applied to all tasks - catchup=False, -) as dag: - - show_tables() diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 5de2041c3314f..fd7a57694d350 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -111,6 +111,7 @@ Datasync DateFrame Datetimepicker Datetimes +Debounce Decrypt Decrypts DeidentifyContentResponse @@ -589,6 +590,7 @@ cfg cgi cgroups cgroupspy +chakra changelog charset checklicence diff --git a/newsfragments/24399.significant.rst b/newsfragments/24399.significant.rst deleted file mode 100644 index 7f1833a8e8c7e..0000000000000 --- a/newsfragments/24399.significant.rst +++ /dev/null @@ -1,31 +0,0 @@ -We've upgraded Flask Application Builder to a major version 4.*. - -Flask Application Builder is one of the important components of Airflow Webserver, as -it uses a lof of dependencies that are essential to run the webserver and integrate it -in enterprise environments - especially authentication. 
-
-The FAB 4.* upgrades a number of dependencies to major releases, which upgrades them to versions
-that have a number of security issues fixed. A lot of tests were performed to bring the dependencies
-in a backwards-compatible way, however the dependencies themselves implement breaking changes in their
-internals so it might be that some of those changes might impact the users in case they are using the
-libraries for their onw purposes.
-
-One important change that you likely will need to apply to Oauth configuration is to add
-``server_metadata_url`` or ``jwks_uri`` and you can read about it more
-in `this issue `_.
-
-Here is the list of breaking changes in dependencies that comes together with FAB 4:
-
-* Flask from 1.X to 2.X `breaking changes `_
-
-* flask-jwt-extended 3.X to 4.X `breaking changes: `_
-
-* Jinja2 2.X to 3.X `breaking changes: `_
-
-* Werkzeug 1.X to 2.X `breaking changes `_
-
-* pyJWT 1.X to 2.X `breaking changes: `_
-
-* Click 7.X to 8.X `breaking changes: `_
-
-* itsdangerous 1.X to 2.X `breaking changes `_
diff --git a/newsfragments/24519.misc.rst b/newsfragments/24519.misc.rst
deleted file mode 100644
index 799d9141d2a0a..0000000000000
--- a/newsfragments/24519.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-The JWT claims in the request to retrieve logs have been standardized: we use "nbf" and "aud" claims for maturity and audience of the requests. Also "filename" payload field is used to keep log name.

From b5202fa4ef16c97e59a8848fc04995bdf5bda418 Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Sat, 2 Jul 2022 16:43:22 +0200
Subject: [PATCH 118/118] Unified "dash-name" convention for outputs in ci
 workflows.

There were errors with retrieving the constraints branch, caused by using
different conventions for output names (sometimes dash, sometimes camelCase
as suggested by most GitHub documents). The "dash-name" form looks much
better and is far more readable, so we should unify all internal outputs to
follow it.

During that rename some old, unused outputs were removed; it also turned out
that the new selective-check can replace the previous "dynamic outputs"
written in Bash.

Additionally, the "defaults" are now retrieved via a Python script rather
than a bash script, which makes the retrieval much more readable - both
build-images.yml and ci.yml use it in the right place - before replacing the
scripts and dev folders with the version coming in from the PR in the case of
build-images.yml.
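As a concrete illustration of the naming convention described above (not part of the patch itself), a Python step could emit workflow outputs using dash-names with the legacy ``::set-output`` command that these workflows rely on; the names and values below are examples only, not the real results of ``breeze selective-check``.

```python
# Illustrative sketch only: write GitHub Actions outputs using the "dash-name"
# convention (default-python-version) rather than camelCase (defaultPythonVersion).
# The values are placeholders for illustration.
outputs = {
    "default-python-version": "3.7",
    "default-constraints-branch": "constraints-main",
    "upgrade-to-newer-dependencies": "false",
}
for name, value in outputs.items():
    # Legacy workflow-command syntax, as used elsewhere in these workflow files.
    print(f"::set-output name={name}::{value}")
```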
(cherry picked from commit 017507be1e1dbf39abcc94a44fab8869037893ea) --- .github/workflows/build-images.yml | 265 +++--- .github/workflows/ci.yml | 781 +++++++++--------- .../src/airflow_breeze/branch_defaults.py | 1 + .../airflow_breeze/utils/selective_checks.py | 4 + 4 files changed, 545 insertions(+), 506 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 2d895e3c092fd..df53d8986c809 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -27,7 +27,6 @@ env: ANSWER: "yes" CHECK_IMAGE_FOR_REBUILD: "true" SKIP_CHECK_REMOTE_IMAGE: "true" - DEBIAN_VERSION: "bullseye" DB_RESET: "true" VERBOSE: "true" GITHUB_REPOSITORY: ${{ github.repository }} @@ -51,26 +50,22 @@ jobs: name: "Build Info" runs-on: ${{ github.repository == 'apache/airflow' && 'self-hosted' || 'ubuntu-20.04' }} env: - targetBranch: ${{ github.event.pull_request.base.ref }} - DEFAULT_BRANCH: ${{ steps.selective-checks.outputs.default-constraints-branch }} - DEFAULT_CONSTRAINTS_BRANCH: ${{ steps.selective-checks.outputs.default-constraints-branch }} - DEBIAN_VERSION: ${{ steps.selective-checks.outputs.debian-version }} + TARGET_BRANCH: ${{ github.event.pull_request.base.ref }} outputs: - runsOn: ${{ github.repository == 'apache/airflow' && '["self-hosted"]' || '["ubuntu-20.04"]' }} - pythonVersions: "${{ steps.selective-checks.python-versions }}" - upgradeToNewerDependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} - allPythonVersions: ${{ steps.selective-checks.outputs.all-python-versions }} - allPythonVersionsListAsString: ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }} - defaultPythonVersion: ${{ steps.selective-checks.outputs.default-python-version }} + runs-on: ${{ github.repository == 'apache/airflow' && '["self-hosted"]' || '["ubuntu-20.04"]' }} + python-versions: "${{ steps.selective-checks.python-versions }}" + upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} + all-python-versions-list-as-string: >- + ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }} + default-python-version: ${{ steps.selective-checks.outputs.default-python-version }} run-tests: ${{ steps.selective-checks.outputs.run-tests }} run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }} - image-build: ${{ steps.dynamic-outputs.outputs.image-build }} - cacheDirective: ${{ steps.dynamic-outputs.outputs.cacheDirective }} - targetBranch: ${{ steps.dynamic-outputs.outputs.targetBranch }} - defaultBranch: ${{ steps.selective-checks.outputs.default-branch }} - defaultConstraintsBranch: ${{ steps.selective-checks.outputs.default-constraints-branch }} - debianVersion: ${{ steps.selective-checks.outputs.debian-version }} - targetCommitSha: "${{steps.discover-pr-merge-commit.outputs.targetCommitSha || + image-build: ${{ steps.selective-checks.outputs.image-build }} + cache-directive: ${{ steps.selective-checks.outputs.cache-directive }} + default-branch: ${{ steps.selective-checks.outputs.default-branch }} + default-constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }} + debian-version: ${{ steps.selective-checks.outputs.debian-version }} + target-commit-sha: "${{steps.discover-pr-merge-commit.outputs.target-commit-sha || github.event.pull_request.head.sha || github.sha }}" @@ -80,14 +75,14 @@ jobs: run: | TARGET_COMMIT_SHA="$(gh api '${{ github.event.pull_request.url }}' --jq .merge_commit_sha)" echo 
"TARGET_COMMIT_SHA=$TARGET_COMMIT_SHA" >> $GITHUB_ENV - echo "::set-output name=targetCommitSha::${TARGET_COMMIT_SHA}" + echo "::set-output name=target-commit-sha::${TARGET_COMMIT_SHA}" if: github.event_name == 'pull_request_target' # The labels in the event aren't updated when re-triggering the job, So lets hit the API to get # up-to-date values - name: Get latest PR labels id: get-latest-pr-labels run: | - echo -n "::set-output name=pullRequestLabels::" + echo -n "::set-output name=pull-request-labels::" gh api graphql --paginate -F node_id=${{github.event.pull_request.node_id}} -f query=' query($node_id: ID!, $endCursor: String) { node(id:$node_id) { @@ -109,80 +104,69 @@ jobs: ref: ${{ env.TARGET_COMMIT_SHA }} persist-credentials: false fetch-depth: 2 - - name: "Retrieve DEFAULTS from the _initialization.sh" - # We cannot "source" the script here because that would be a security problem (we cannot run - # any code that comes from the sources coming from the PR. Therefore we extract the - # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands - id: defaults - run: | - DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV - DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ - scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV - DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ - cut -d "=" -f 3 | sed s'/["}]//g') - echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV - - name: Checkout main branch to 'main-airflow' folder to use breeze from there. - uses: actions/checkout@v2 - with: - path: "main-airflow" - ref: "main" - persist-credentials: false - submodules: recursive - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: # We do not have output from selective checks yet, so we need to hardcode python python-version: 3.7 cache: 'pip' cache-dependency-path: ./dev/breeze/setup* + - name: "Retrieve defaults from branch_defaults.py" + # We cannot "execute" the branch_defaults.py python code here because that would be + # a security problem (we cannot run any code that comes from the sources coming from the PR. + # Therefore, we extract the branches via embedded Python code + # we need to do it before next step replaces checked-out breeze and scripts code coming from + # the PR, because the PR defaults have to be retrieved here. 
+ id: defaults + run: | + python - <>$GITHUB_ENV + from pathlib import Path + import re + import sys + + DEFAULTS_CONTENT = Path('dev/breeze/src/airflow_breeze/branch_defaults.py').read_text() + BRANCH_PATTERN = r'^AIRFLOW_BRANCH = "(.*)"$' + CONSTRAINTS_BRANCH_PATTERN = r'^DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH = "(.*)"$' + DEBIAN_VERSION_PATTERN = r'^DEBIAN_VERSION = "(.*)"$' + + branch = re.search(BRANCH_PATTERN, DEFAULTS_CONTENT, re.MULTILINE).group(1) + constraints_branch = re.search(CONSTRAINTS_BRANCH_PATTERN, DEFAULTS_CONTENT, re.MULTILINE).group(1) + debian_version = re.search(DEBIAN_VERSION_PATTERN, DEFAULTS_CONTENT, re.MULTILINE).group(1) + + output = f""" + DEFAULT_BRANCH={branch} + DEFAULT_CONSTRAINTS_BRANCH={constraints_branch} + DEBIAN_VERSION={debian_version} + """.strip() + + print(output) + # Stdout is redirected to GITHUB_ENV but we also print it to stderr to see it in ci log + print(output, file=sys.stderr) + EOF + - name: Checkout main branch to 'main-airflow' folder to use breeze from there. + uses: actions/checkout@v3 + with: + persist-credentials: false + submodules: recursive - run: ./scripts/ci/install_breeze.sh - name: Selective checks id: selective-checks env: - PR_LABELS: "$${{ steps.get-latest-pr-labels.outputs.pullRequestLabels }}" + PR_LABELS: "$${{ steps.get-latest-pr-labels.outputs.pull-request-labels }}" COMMIT_REF: "${{ env.TARGET_COMMIT_SHA }}" run: breeze selective-check - - name: Compute dynamic outputs - id: dynamic-outputs - run: | - set -x - if [[ "${{ github.event_name }}" == 'pull_request_target' ]]; then - echo "::set-output name=targetBranch::${targetBranch}" - else - # Direct push to branch, or scheduled build - echo "::set-output name=targetBranch::${GITHUB_REF#refs/heads/}" - fi - - if [[ "${{ github.event_name }}" == 'schedule' ]]; then - echo "::set-output name=cacheDirective::disabled" - else - echo "::set-output name=cacheDirective:registry" - fi - - if [[ "$SELECTIVE_CHECKS_IMAGE_BUILD" == "true" ]]; then - echo "::set-output name=image-build::true" - else - echo "::set-output name=image-build::false" - fi - env: - SELECTIVE_CHECKS_IMAGE_BUILD: ${{ steps.selective-checks.outputs.image-build }} - name: env run: printenv env: - dynamicOutputs: ${{ toJSON(steps.dynamic-outputs.outputs) }} - PR_LABELS: ${{ steps.get-latest-pr-labels.outputs.pullRequestLabels }} + PR_LABELS: ${{ steps.get-latest-pr-labels.outputs.pull-request-labels }} GITHUB_CONTEXT: ${{ toJson(github) }} build-ci-images: permissions: packages: write timeout-minutes: 80 - name: "Build CI images ${{ needs.build-info.outputs.allPythonVersionsListAsString }}" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + name: "Build CI images ${{ needs.build-info.outputs.all-python-versions-list-as-string }}" + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info] if: | needs.build-info.outputs.image-build == 'true' && @@ -191,30 +175,32 @@ jobs: DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on)[0] }} BACKEND: sqlite outputs: ${{toJSON(needs.build-info.outputs) }} steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: - ref: ${{ 
needs.build-info.outputs.targetCommitSha }} + ref: ${{ needs.build-info.outputs.target-commit-sha }} persist-credentials: false submodules: recursive - - name: Checkout main branch to 'main-airflow' folder to use ci/scripts from there. - uses: actions/checkout@v2 + - name: "Setup python" + uses: actions/setup-python@v4 + with: + python-version: ${{ needs.build-info.outputs.default-python-version }} + - name: > + Checkout "main branch to 'main-airflow' folder + to use ci/scripts from there. + uses: actions/checkout@v3 with: path: "main-airflow" ref: "main" persist-credentials: false submodules: recursive - - name: "Setup python" - uses: actions/setup-python@v2 - with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} - name: > - Override "scripts/ci" with the "${{ needs.build-info.outputs.targetBranch }}" branch + Override "scripts/ci" with the "main" branch so that the PR does not override it # We should not override those scripts which become part of the image as they will not be # changed in the image built - we should only override those that are executed to build @@ -224,23 +210,35 @@ jobs: rm -rfv "dev" mv -v "main-airflow/scripts/ci" "scripts" mv -v "main-airflow/dev" "." - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh - name: "Free space" run: breeze free-space - - name: > + - name: Cache pre-commit envs + uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: "pre-commit-${{steps.host-python-version.outputs.host-python-version}}-\ +${{ hashFiles('.pre-commit-config.yaml') }}" + restore-keys: pre-commit-${{steps.host-python-version.outputs.host-python-version}} + - name: "Regenerate dependencies in case they was modified manually so that we can build an image" + run: > + breeze static-checks --type update-providers-dependencies --all-files + --show-diff-on-failure --color always || true + if: needs.build-info.outputs.default-branch == 'main' + - name: >- Build & Push AMD64 CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + ${{ needs.build-info.outputs.all-python-versions-list-as-string }} run: breeze build-image --push-image --tag-as-latest --run-in-parallel env: - UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} - DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} + UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + DOCKER_CACHE: ${{ needs.build-info.outputs.cache-directive }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.all-python-versions-list-as-string }} - name: Push empty CI image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} if: failure() || cancelled() run: breeze build-image --push-image --empty-image --run-in-parallel @@ -250,7 +248,7 @@ jobs: if: failure() || cancelled() run: > breeze find-newer-dependencies --max-age 1 - --python "${{ needs.build-info.outputs.defaultPythonVersion }}" + --python "${{ needs.build-info.outputs.default-python-version }}" - name: "Fix ownership" run: breeze fix-ownership if: always() @@ -259,8 +257,8 @@ jobs: permissions: packages: write 
timeout-minutes: 80 - name: "Build PROD images ${{ needs.build-info.outputs.allPythonVersionsListAsString }}" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + name: "Build PROD images ${{ needs.build-info.outputs.all-python-versions-list-as-string }}" + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, build-ci-images] if: | needs.build-info.outputs.image-build == 'true' && @@ -269,31 +267,31 @@ jobs: DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on)[0] }} BACKEND: sqlite steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: - ref: ${{ needs.build-info.outputs.targetCommitSha }} + ref: ${{ needs.build-info.outputs.target-commit-sha }} persist-credentials: false submodules: recursive + - name: "Setup python" + uses: actions/setup-python@v4 + with: + python-version: ${{ needs.build-info.outputs.default-python-version }} - name: > - Checkout "${{ needs.build-info.outputs.targetBranch }}" branch to 'main-airflow' folder + Checkout "main" branch to 'main-airflow' folder to use ci/scripts from there. - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: path: "main-airflow" - ref: "${{ needs.build-info.outputs.targetBranch }}" + ref: "main" persist-credentials: false submodules: recursive - - name: "Setup python" - uses: actions/setup-python@v2 - with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} - name: > - Override "scripts/ci" with the "${{ needs.build-info.outputs.targetBranch }}" branch + Override "scripts/ci" with the "main" branch so that the PR does not override it # We should not override those scripts which become part of the image as they will not be # changed in the image built - we should only override those that are executed to build @@ -303,21 +301,34 @@ jobs: rm -rfv "dev" mv -v "main-airflow/scripts/ci" "scripts" mv -v "main-airflow/dev" "." 
- - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh - name: "Free space" run: breeze free-space + - name: Cache pre-commit envs + uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: "pre-commit-${{steps.host-python-version.outputs.host-python-version}}-\ +${{ hashFiles('.pre-commit-config.yaml') }}" + restore-keys: pre-commit-${{steps.host-python-version.outputs.host-python-version}} + if: needs.build-info.outputs.default-branch == 'main' + - name: "Regenerate dependencies in case they was modified manually so that we can build an image" + run: > + breeze static-checks --type update-providers-dependencies --all-files + --show-diff-on-failure --color always || true + if: needs.build-info.outputs.default-branch == 'main' - name: > Pull CI image for PROD build: - ${{ needs.build-info.outputs.defaultPythonVersion }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} + ${{ needs.build-info.outputs.default-python-version }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} run: breeze pull-image --tag-as-latest env: # Always use default Python version of CI image for preparing packages - PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }} + PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.default-python-version }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Cleanup dist and context file" run: rm -fv ./dist/* ./docker-context-files/* @@ -331,9 +342,9 @@ jobs: run: breeze prepare-airflow-package --package-format wheel --version-suffix-for-pypi dev0 - name: "Move dist packages to docker-context files" run: mv -v ./dist/*.whl ./docker-context-files - - name: > + - name: >- Build & Push PROD images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + ${{ needs.build-info.outputs.all-python-versions-list-as-string }} run: > breeze build-prod-image --run-in-parallel @@ -343,10 +354,10 @@ jobs: --disable-airflow-repo-cache --airflow-is-in-context env: - UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} - DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} + UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + DOCKER_CACHE: ${{ needs.build-info.outputs.cache-directive }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.all-python-versions-list-as-string }} - name: Push empty PROD images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} if: failure() || cancelled() run: breeze build-prod-image --cleanup-context --push-image --empty-image --run-in-parallel @@ -358,19 +369,19 @@ jobs: build-ci-images-arm: - timeout-minutes: 80 - name: "Build ARM CI images ${{ needs.build-info.outputs.allPythonVersionsListAsString }}" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + timeout-minutes: 120 + name: "Build ARM CI images ${{ needs.build-info.outputs.all-python-versions-list-as-string }}" + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, build-prod-images] if: | needs.build-info.outputs.image-build == 'true' && - needs.build-info.outputs.upgradeToNewerDependencies != 'false' && + needs.build-info.outputs.upgrade-to-newer-dependencies != 
'false' && github.event.pull_request.head.repo.full_name != 'apache/airflow' env: DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on)[0] }} BACKEND: sqlite outputs: ${{toJSON(needs.build-info.outputs) }} steps: @@ -378,20 +389,20 @@ jobs: run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - uses: actions/checkout@v2 with: - ref: ${{ needs.build-info.outputs.targetCommitSha }} + ref: ${{ needs.build-info.outputs.target-commit-sha }} persist-credentials: false submodules: recursive - name: > - Checkout "${{ needs.build-info.outputs.targetBranch }}" branch to 'main-airflow' folder + Checkout "main" branch to 'main-airflow' folder to use ci/scripts from there. uses: actions/checkout@v2 with: path: "main-airflow" - ref: "${{ needs.build-info.outputs.targetBranch }}" + ref: "main" persist-credentials: false submodules: recursive - name: > - Override "scripts/ci" with the "${{ needs.build-info.outputs.targetBranch }}" branch + Override "scripts/ci" with the "main" branch so that the PR does not override it # We should not override those scripts which become part of the image as they will not be # changed in the image built - we should only override those that are executed to build @@ -404,7 +415,7 @@ jobs: - name: "Setup python" uses: actions/setup-python@v2 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} - run: ./scripts/ci/install_breeze.sh - name: "Free space" run: breeze free-space @@ -412,15 +423,15 @@ jobs: run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh - name: > Build ARM CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + ${{ needs.build-info.outputs.all-python-versions-list-as-string }} run: > breeze build-image --run-in-parallel --parallelism 1 --builder airflow_cache --platform "linux/arm64" env: - UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} - DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} + UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + DOCKER_CACHE: ${{ needs.build-info.outputs.cache-directive }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.all-python-versions-list-as-string }} - name: "Stop ARM instance" run: ./scripts/ci/images/ci_stop_arm_instance.sh if: always() diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ad6193f1f77fa..33cbb0b7515a2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -125,36 +125,28 @@ jobs: env: GITHUB_CONTEXT: ${{ toJson(github) }} outputs: - defaultBranch: ${{ steps.selective-checks.outputs.default-branch }} - defaultConstraintsBranch: ${{ steps.selective-checks.outputs.default-constraints-branch }} - debianVersion: ${{ steps.selective-checks.outputs.debian-version }} - cacheDirective: ${{ steps.dynamic-outputs.outputs.cacheDirective }} - waitForImage: ${{ steps.wait-for-image.outputs.wait-for-image }} - allPythonVersions: ${{ steps.selective-checks.outputs.all-python-versions }} - 
upgradeToNewerDependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} - pythonVersions: ${{ steps.selective-checks.outputs.python-versions }} - pythonVersionsListAsString: ${{ steps.selective-checks.outputs.python-versions-list-as-string }} - allPythonVersionsListAsString: ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }} - defaultPythonVersion: ${{ steps.selective-checks.outputs.default-python-version }} - kubernetesVersions: ${{ steps.selective-checks.outputs.kubernetes-versions }} - kubernetesVersionsListAsString: ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }} - defaultKubernetesVersion: ${{ steps.selective-checks.outputs.default-kubernetes-version }} - kubernetesModes: ${{ steps.selective-checks.outputs.kubernetes-modes }} - defaultKubernetesMode: ${{ steps.selective-checks.outputs.default-kubernetes-mode }} - postgresVersions: ${{ steps.selective-checks.outputs.postgres-versions }} - defaultPostgresVersion: ${{ steps.selective-checks.outputs.default-postgres-version }} - mysqlVersions: ${{ steps.selective-checks.outputs.mysql-versions }} - mssqlVersions: ${{ steps.selective-checks.outputs.mssql-versions }} - defaultMySQLVersion: ${{ steps.selective-checks.outputs.default-mysql-version }} - helmVersions: ${{ steps.selective-checks.outputs.helm-versions }} - defaultHelmVersion: ${{ steps.selective-checks.outputs.default-helm-version }} - kindVersions: ${{ steps.selective-checks.outputs.kind-versions }} - defaultKindVersion: ${{ steps.selective-checks.outputs.default-kind-version }} - testTypes: ${{ steps.selective-checks.outputs.test-types }} - postgresExclude: ${{ steps.selective-checks.outputs.postgres-exclude }} - mysqlExclude: ${{ steps.selective-checks.outputs.mysql-exclude }} - mssqlExclude: ${{ steps.selective-checks.outputs.mssql-exclude }} - sqliteExclude: ${{ steps.selective-checks.outputs.sqlite-exclude }} + debian-version: ${{ steps.selective-checks.outputs.debian-version }} + cache-directive: ${{ steps.selective-checks.outputs.cache-directive }} + upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} + python-versions: ${{ steps.selective-checks.outputs.python-versions }} + python-versions-list-as-string: ${{ steps.selective-checks.outputs.python-versions-list-as-string }} + all-python-versions-list-as-string: >- + ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }} + default-python-version: ${{ steps.selective-checks.outputs.default-python-version }} + kubernetes-versions-list-as-string: >- + ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }} + postgres-versions: ${{ steps.selective-checks.outputs.postgres-versions }} + default-postgres-version: ${{ steps.selective-checks.outputs.default-postgres-version }} + mysql-versions: ${{ steps.selective-checks.outputs.mysql-versions }} + mssql-versions: ${{ steps.selective-checks.outputs.mssql-versions }} + default-mysql-version: ${{ steps.selective-checks.outputs.default-mysql-version }} + default-helm-version: ${{ steps.selective-checks.outputs.default-helm-version }} + default-kind-version: ${{ steps.selective-checks.outputs.default-kind-version }} + test-types: ${{ steps.selective-checks.outputs.test-types }} + postgres-exclude: ${{ steps.selective-checks.outputs.postgres-exclude }} + mysql-exclude: ${{ steps.selective-checks.outputs.mysql-exclude }} + mssql-exclude: ${{ steps.selective-checks.outputs.mssql-exclude }} + sqlite-exclude: ${{ 
steps.selective-checks.outputs.sqlite-exclude }} run-tests: ${{ steps.selective-checks.outputs.run-tests }} run-ui-tests: ${{ steps.selective-checks.outputs.run-ui-tests }} run-www-tests: ${{ steps.selective-checks.outputs.run-www-tests }} @@ -166,21 +158,21 @@ jobs: needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }} needs-api-codegen: ${{ steps.selective-checks.outputs.needs-api-codegen }} default-branch: ${{ steps.selective-checks.outputs.default-branch }} + default-constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }} docs-filter: ${{ steps.selective-checks.outputs.docs-filter }} skip-pre-commits: ${{ steps.selective-checks.outputs.skip-pre-commits }} - sourceHeadRepo: ${{ steps.source-run-info.outputs.sourceHeadRepo }} - pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }} - pullRequestLabels: ${{ steps.source-run-info.outputs.pullRequestLabels }} - runsOn: ${{ steps.set-runs-on.outputs.runsOn }} - runCoverage: ${{ steps.set-run-coverage.outputs.runCoverage }} - inWorkflowBuild: ${{ steps.set-in-workflow-build.outputs.inWorkflowBuild }} - buildJobDescription: ${{ steps.set-in-workflow-build.outputs.buildJobDescription }} - mergeRun: ${{ steps.set-merge-run.outputs.merge-run }} + source-head-repo: ${{ steps.source-run-info.outputs.source-head-repo }} + pull-request-labels: ${{ steps.source-run-info.outputs.pullRequestLabels }} + runs-on: ${{ steps.set-runs-on.outputs.runs-on }} + run-coverage: ${{ steps.set-run-coverage.outputs.run-coverage }} + in-workflow-build: ${{ steps.set-in-workflow-build.outputs.in-workflow-build }} + build-job-description: ${{ steps.set-in-workflow-build.outputs.build-job-description }} + merge-run: ${{ steps.set-merge-run.outputs.merge-run }} steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false submodules: recursive @@ -190,39 +182,54 @@ jobs: with: token: ${{ secrets.GITHUB_TOKEN }} - name: Fetch incoming commit ${{ github.sha }} with its parent - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: ref: ${{ github.sha }} fetch-depth: 2 persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: # We do not have output from selective checks yet, so we need to hardcode python python-version: 3.7 cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - - run: ./scripts/ci/install_breeze.sh - - name: "Retrieve DEFAULTS from the _initialization.sh" - # We cannot "source" the script here because that would be a security problem (we cannot run - # any code that comes from the sources coming from the PR. Therefore, we extract the - # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands + - name: "Retrieve defaults from branch_defaults.py" id: defaults + # We could retrieve it differently here - by just importing the variables and + # printing them from python code, however we want to have the same code as used in + # the build-images.yml (there we cannot import python code coming from the PR - we need to + # treat the python code as text and extract the variables from there. 
run: | - DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV - DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \ - scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV - DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \ - awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g') - echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV + python - <>$GITHUB_ENV + from pathlib import Path + import re + import sys + + DEFAULTS_CONTENT = Path('dev/breeze/src/airflow_breeze/branch_defaults.py').read_text() + BRANCH_PATTERN = r'^AIRFLOW_BRANCH = "(.*)"$' + CONSTRAINTS_BRANCH_PATTERN = r'^DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH = "(.*)"$' + DEBIAN_VERSION_PATTERN = r'^DEBIAN_VERSION = "(.*)"$' + + branch = re.search(BRANCH_PATTERN, DEFAULTS_CONTENT, re.MULTILINE).group(1) + constraints_branch = re.search(CONSTRAINTS_BRANCH_PATTERN, DEFAULTS_CONTENT, re.MULTILINE).group(1) + debian_version = re.search(DEBIAN_VERSION_PATTERN, DEFAULTS_CONTENT, re.MULTILINE).group(1) + + output = f""" + DEFAULT_BRANCH={branch} + DEFAULT_CONSTRAINTS_BRANCH={constraints_branch} + DEBIAN_VERSION={debian_version} + """.strip() + + print(output) + # Stdout is redirected to GITHUB_ENV but we also print it to stderr to see it in ci log + print(output, file=sys.stderr) + EOF + - run: ./scripts/ci/install_breeze.sh - name: Selective checks id: selective-checks env: - PR_LABELS: "${{ steps.source-run-info.outputs.pullRequestLabels }}" + PR_LABELS: "${{ steps.source-run-info.outputs.pull-request-labels }}" COMMIT_REF: "${{ github.sha }}" run: breeze selective-check # Avoid having to specify the runs-on logic every time. We use the custom @@ -231,22 +238,22 @@ jobs: - name: Set runs-on id: set-runs-on env: - PR_LABELS: "${{ steps.source-run-info.outputs.pullRequestLabels }}" + PR_LABELS: "${{ steps.source-run-info.outputs.pull-request-labels }}" run: | if [[ ${PR_LABELS=} == *"use public runners"* ]]; then echo "Forcing running on Public Runners via `use public runners` label" - echo "::set-output name=runsOn::\"ubuntu-20.04\"" + echo "::set-output name=runs-on::\"ubuntu-20.04\"" elif [[ ${AIRFLOW_SELF_HOSTED_RUNNER} == "" ]]; then echo "Regular PR running with Public Runner" - echo "::set-output name=runsOn::\"ubuntu-20.04\"" + echo "::set-output name=runs-on::\"ubuntu-20.04\"" else echo "Maintainer or main run running with self-hosted runner" - echo "::set-output name=runsOn::\"self-hosted\"" + echo "::set-output name=runs-on::\"self-hosted\"" fi # Avoid having to specify the coverage logic every time. 
- name: Set run coverage id: set-run-coverage - run: echo "::set-output name=runCoverage::true" + run: echo "::set-output name=run-coverage::true" if: > github.ref == 'refs/heads/main' && github.repository == 'apache/airflow' && github.event_name == 'push' && @@ -262,12 +269,12 @@ jobs: if [[ ${GITHUB_EVENT_NAME} == "push" || ${GITHUB_EVENT_NAME} == "push" || \ ${{steps.source-run-info.outputs.sourceHeadRepo}} == "apache/airflow" ]]; then echo "Images will be built in current workflow" - echo "::set-output name=inWorkflowBuild::true" - echo "::set-output name=buildJobDescription::Build" + echo "::set-output name=in-workflow-build::true" + echo "::set-output name=build-job-description::Build" else echo "Images will be built in pull_request_target workflow" - echo "::set-output name=inWorkflowBuild::false" - echo "::set-output name=buildJobDescription::Skip Build (pull_request_target)" + echo "::set-output name=in-workflow-build::false" + echo "::set-output name=build-job-description::Skip Build (pull_request_target)" fi - name: Determine if this is merge run id: set-merge-run @@ -279,147 +286,163 @@ jobs: github.ref_name == 'main' || startsWith(github.ref_name, 'v2') && endsWith(github.ref_name, 'test') ) - - name: Compute dynamic outputs - id: dynamic-outputs - run: | - set -x - if [[ "${{ github.event_name }}" == 'schedule' ]]; then - echo "::set-output name=cacheDirective::disabled" - else - echo "::set-output name=cacheDirective::registry" - fi - - if [[ "$SELECTIVE_CHECKS_IMAGE_BUILD" == "true" ]]; then - echo "::set-output name=image-build::true" - else - echo "::set-output name=image-build::false" - fi - env: - SELECTIVE_CHECKS_IMAGE_BUILD: ${{ steps.selective-checks.outputs.image-build }} - name: env run: printenv env: - dynamicOutputs: ${{ toJSON(steps.dynamic-outputs.outputs) }} - PR_LABELS: ${{ steps.get-latest-pr-labels.outputs.pullRequestLabels }} + PR_LABELS: ${{ steps.get-latest-pr-labels.outputs.pull-request-labels }} GITHUB_CONTEXT: ${{ toJson(github) }} build-ci-images: permissions: packages: write timeout-minutes: 80 - name: > - ${{needs.build-info.outputs.buildJobDescription}} CI images - ${{ needs.build-info.outputs.allPythonVersionsListAsString }} - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + name: >- + ${{needs.build-info.outputs.build-job-description}} CI images + ${{ needs.build-info.outputs.all-python-versions-list-as-string }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info] env: DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on)[0] }} steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - if: needs.build-info.outputs.inWorkflowBuild == 'true' - - uses: actions/checkout@v2 + if: needs.build-info.outputs.in-workflow-build == 'true' + - uses: actions/checkout@v3 with: ref: ${{ needs.build-info.outputs.targetCommitSha }} persist-credentials: false submodules: recursive - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} - if: 
needs.build-info.outputs.inWorkflowBuild == 'true' + python-version: ${{ needs.build-info.outputs.default-python-version }} + if: needs.build-info.outputs.in-workflow-build == 'true' - run: ./scripts/ci/install_breeze.sh - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Free space" run: breeze free-space - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' + - name: Cache pre-commit envs + uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: "pre-commit-${{steps.host-python-version.outputs.host-python-version}}-\ +${{ hashFiles('.pre-commit-config.yaml') }}" + restore-keys: pre-commit-${{steps.host-python-version.outputs.host-python-version}} + if: > + needs.build-info.outputs.in-workflow-build == 'true' && + needs.build-info.outputs.default-branch == 'main' + - name: "Regenerate dependencies in case they was modified manually so that we can build an image" + run: > + breeze static-checks --type update-providers-dependencies --all-files + --show-diff-on-failure --color always || true + if: > + needs.build-info.outputs.in-workflow-build == 'true' && + needs.build-info.outputs.default-branch == 'main' - name: > Build & Push CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + ${{ needs.build-info.outputs.all-python-versions-list-as-string }} run: breeze build-image --push-image --tag-as-latest --run-in-parallel env: - UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} - DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} + UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + DOCKER_CACHE: ${{ needs.build-info.outputs.cache-directive }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} - if: needs.build-info.outputs.inWorkflowBuild == 'true' + PYTHON_VERSIONS: ${{ needs.build-info.outputs.all-python-versions-list-as-string }} + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Candidates for pip resolver backtrack triggers" if: failure() || cancelled() run: > breeze find-newer-dependencies --max-age 1 - --python "${{ needs.build-info.outputs.defaultPythonVersion }}" + --python "${{ needs.build-info.outputs.default-python-version }}" - name: "Fix ownership" run: breeze fix-ownership - if: always() && needs.build-info.outputs.inWorkflowBuild == 'true' + if: always() && needs.build-info.outputs.in-workflow-build == 'true' build-prod-images: permissions: packages: write timeout-minutes: 80 name: > - ${{needs.build-info.outputs.buildJobDescription}} PROD images - ${{ needs.build-info.outputs.allPythonVersionsListAsString }} - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + ${{needs.build-info.outputs.build-job-description}} PROD images + ${{ needs.build-info.outputs.all-python-versions-list-as-string }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, build-ci-images] env: DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on)[0] }} BACKEND: sqlite - DOCKER_CACHE: ${{ 
needs.build-info.outputs.cacheDirective }} + DOCKER_CACHE: ${{ needs.build-info.outputs.cache-directive }} VERSION_SUFFIX_FOR_PYPI: "dev0" steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - if: needs.build-info.outputs.inWorkflowBuild == 'true' - - uses: actions/checkout@v2 + if: needs.build-info.outputs.in-workflow-build == 'true' + - uses: actions/checkout@v3 with: ref: ${{ needs.build-info.outputs.targetCommitSha }} persist-credentials: false submodules: recursive - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} - if: needs.build-info.outputs.inWorkflowBuild == 'true' + python-version: ${{ needs.build-info.outputs.default-python-version }} + if: needs.build-info.outputs.in-workflow-build == 'true' - run: ./scripts/ci/install_breeze.sh - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Free space" run: breeze free-space - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' + - name: Cache pre-commit envs + uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: "pre-commit-${{steps.host-python-version.outputs.host-python-version}}-\ +${{ hashFiles('.pre-commit-config.yaml') }}" + restore-keys: pre-commit-${{steps.host-python-version.outputs.host-python-version}} + if: > + needs.build-info.outputs.in-workflow-build == 'true' && + needs.build-info.outputs.default-branch == 'main' + - name: "Regenerate dependencies in case they was modified manually so that we can build an image" + run: > + breeze static-checks --type update-providers-dependencies --all-files + --show-diff-on-failure --color always || true + if: > + needs.build-info.outputs.in-workflow-build == 'true' && + needs.build-info.outputs.default-branch == 'main' - name: > Pull CI image for PROD build: - ${{ needs.build-info.outputs.defaultPythonVersion }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}" + ${{ needs.build-info.outputs.default-python-version }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}" run: breeze pull-image --tag-as-latest env: # Always use default Python version of CI image for preparing packages - PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }} + PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.default-python-version }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Cleanup dist and context file" run: rm -fv ./dist/* ./docker-context-files/* - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Prepare providers packages" run: > breeze prepare-provider-packages --package-list-file ./scripts/ci/installed_providers.txt --package-format wheel --version-suffix-for-pypi dev0 - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Prepare airflow package" run: breeze prepare-airflow-package --package-format wheel --version-suffix-for-pypi dev0 - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Move dist packages to docker-context 
files" run: mv -v ./dist/*.whl ./docker-context-files - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: > Build & Push PROD images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + ${{ needs.build-info.outputs.all-python-versions-list-as-string }} run: > breeze build-prod-image --tag-as-latest @@ -429,29 +452,29 @@ jobs: --disable-airflow-repo-cache --airflow-is-in-context env: - UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} - DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} + UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + DOCKER_CACHE: ${{ needs.build-info.outputs.cache-directive }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} - if: needs.build-info.outputs.inWorkflowBuild == 'true' + PYTHON_VERSIONS: ${{ needs.build-info.outputs.all-python-versions-list-as-string }} + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Fix ownership" run: breeze fix-ownership - if: always() && needs.build-info.outputs.inWorkflowBuild == 'true' + if: always() && needs.build-info.outputs.in-workflow-build == 'true' run-new-breeze-tests: timeout-minutes: 10 name: Breeze unit tests - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info] steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: persist-credentials: false - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: python -m pip install --editable ./dev/breeze/ @@ -461,18 +484,18 @@ jobs: tests-ui: timeout-minutes: 10 name: React UI tests - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info] if: needs.build-info.outputs.run-ui-tests == 'true' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup node" - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 - name: "Cache eslint" @@ -488,22 +511,22 @@ jobs: tests-www: timeout-minutes: 10 name: React WWW tests - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info] if: needs.build-info.outputs.run-www-tests == 'true' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup node" - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 - name: "Cache eslint" - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: 'airflow/www/node_modules' key: ${{ runner.os }}-ui-node-modules-${{ 
hashFiles('airflow/ui/**/yarn.lock') }} @@ -516,14 +539,14 @@ jobs: test-openapi-client-generation: timeout-minutes: 10 name: "Test OpenAPI client generation" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info] if: needs.build-info.outputs.needs-api-codegen == 'true' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 2 persist-credentials: false @@ -533,7 +556,7 @@ jobs: test-examples-of-prod-image-building: timeout-minutes: 60 name: "Test examples of production image building" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info] if: needs.build-info.outputs.image-build == 'true' steps: @@ -545,9 +568,9 @@ jobs: fetch-depth: 2 persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/requirements.txt - name: "Test examples of PROD image building" @@ -558,23 +581,23 @@ jobs: wait-for-ci-images: timeout-minutes: 120 name: "Wait for CI images" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, build-ci-images] if: needs.build-info.outputs.image-build == 'true' env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} BACKEND: sqlite steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -584,7 +607,7 @@ jobs: id: wait-for-images run: breeze pull-image --run-in-parallel --verify-image --wait-for-image --tag-as-latest env: - PYTHON_VERSIONS: ${{ needs.build-info.outputs.pythonVersionsListAsString }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.python-versions-list-as-string }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Fix ownership" run: breeze fix-ownership @@ -593,27 +616,27 @@ jobs: static-checks: timeout-minutes: 30 name: "Static checks" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-ci-images] env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} - PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} + PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.default-python-version }} if: needs.build-info.outputs.basic-checks-only == 'false' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout 
${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - name: Cache pre-commit envs - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ~/.cache/pre-commit key: "pre-commit-${{steps.host-python-version.outputs.host-python-version}}-\ @@ -647,33 +670,33 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" static-checks-basic-checks-only: timeout-minutes: 30 name: "Static checks: basic checks only" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info] env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} if: needs.build-info.outputs.basic-checks-only == 'true' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - name: Cache pre-commit envs - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ~/.cache/pre-commit key: "pre-commit-basic-${{steps.host-python-version.outputs.host-python-version}}-\ ${{ hashFiles('.pre-commit-config.yaml') }}" restore-keys: pre-commit-basic-${{steps.host-python-version.outputs.host-python-version}} - name: Fetch incoming commit ${{ github.sha }} with its parent - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: ref: ${{ github.sha }} fetch-depth: 2 @@ -701,23 +724,23 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" docs: timeout-minutes: 45 name: "Build docs" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-ci-images] if: needs.build-info.outputs.docs-build == 'true' env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} - PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} + PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.default-python-version }} steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false submodules: recursive - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: - python-version: ${{needs.build-info.outputs.defaultPythonVersion}} + python-version: ${{needs.build-info.outputs.default-python-version}} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -727,7 +750,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: breeze pull-image --tag-as-latest env: IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - - uses: actions/cache@v2 + - uses: 
actions/cache@v3 id: cache-doc-inventories with: path: ./docs/_inventory_cache/ @@ -758,24 +781,24 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" prepare-test-provider-packages-wheel: timeout-minutes: 40 name: "Build and test provider packages wheel" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-ci-images] env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} - PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} + PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.default-python-version }} if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'main' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false if: needs.build-info.outputs.default-branch == 'main' - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -799,8 +822,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" breeze verify-provider-packages --use-airflow-version wheel --use-packages-from-dist --package-format wheel env: - SKIP_CONSTRAINTS: "${{ needs.build-info.outputs.upgradeToNewerDependencies }}" - - name: "Remove airflow package and replace providers with 2.1-compliant versions" + SKIP_CONSTRAINTS: "${{ needs.build-info.outputs.upgrade-to-newer-dependencies }}" + - name: "Remove airflow package and replace providers with 2.2-compliant versions" run: | rm -vf dist/apache_airflow-*.whl \ dist/apache_airflow_providers_cncf_kubernetes*.whl \ @@ -808,12 +831,12 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" pip download --no-deps --dest dist \ apache-airflow-providers-cncf-kubernetes==3.0.0 \ apache-airflow-providers-celery==2.1.3 - - name: "Install and test provider packages and airflow on Airflow 2.1 files" + - name: "Install and test provider packages and airflow on Airflow 2.2 files" run: > - breeze verify-provider-packages --use-airflow-version 2.1.0 - --use-packages-from-dist --package-format wheel --airflow-constraints-reference constraints-2.1.0 + breeze verify-provider-packages --use-airflow-version 2.2.0 + --use-packages-from-dist --package-format wheel --airflow-constraints-reference constraints-2.2.0 env: - # The extras below are all extras that should be installed with Airflow 2.1.0 + # The extras below are all extras that should be installed with Airflow 2.2.0 AIRFLOW_EXTRAS: "airbyte,alibaba,amazon,apache.atlas.apache.beam,apache.cassandra,apache.drill,\ apache.druid,apache.hdfs,apache.hive,apache.kylin,apache.livy,apache.pig,apache.pinot,\ apache.spark,apache.sqoop,apache.webhdfs,arangodb,asana,async,\ @@ -831,26 +854,26 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" if: always() prepare-test-provider-packages-sdist: - timeout-minutes: 40 + timeout-minutes: 80 name: "Build and test provider packages sdist" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-ci-images] env: - RUNS_ON: 
${{ fromJson(needs.build-info.outputs.runsOn) }} - PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} + PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.default-python-version }} if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'main' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false if: needs.build-info.outputs.default-branch == 'main' - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -878,7 +901,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" breeze verify-provider-packages --use-airflow-version sdist --use-packages-from-dist --package-format sdist env: - SKIP_CONSTRAINTS: "${{ needs.build-info.outputs.upgradeToNewerDependencies }}" + SKIP_CONSTRAINTS: "${{ needs.build-info.outputs.upgrade-to-newer-dependencies }}" - name: "Fix ownership" run: breeze fix-ownership if: always() @@ -886,15 +909,15 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" tests-helm: timeout-minutes: 80 name: "Python unit tests for helm chart" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-ci-images] env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} MOUNT_SELECTED_LOCAL_SOURCES: "true" TEST_TYPES: "Helm" BACKEND: "" DB_RESET: "false" - PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}} + PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.default-python-version}} if: > needs.build-info.outputs.needs-helm-tests == 'true' && (github.repository == 'apache/airflow' || github.event_name != 'schedule') && @@ -903,13 +926,13 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -923,25 +946,25 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: "Tests: Helm" run: ./scripts/ci/testing/ci_run_airflow_testing.sh env: - PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Upload airflow logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: airflow-logs-helm path: "./files/airflow_logs*" retention-days: 7 - name: "Upload container logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: 
container-logs-helm path: "./files/container_logs*" retention-days: 7 - name: "Upload artifact for coverage" - uses: actions/upload-artifact@v2 - if: needs.build-info.outputs.runCoverage == 'true' + uses: actions/upload-artifact@v3 + if: needs.build-info.outputs.run-coverage == 'true' with: name: coverage-helm path: "./files/coverage*.xml" @@ -954,33 +977,33 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" timeout-minutes: 130 name: > Postgres${{matrix.postgres-version}},Py${{matrix.python-version}}: - ${{needs.build-info.outputs.testTypes}} - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + ${{needs.build-info.outputs.test-types}} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-ci-images] strategy: matrix: - python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }} - postgres-version: ${{ fromJson(needs.build-info.outputs.postgresVersions) }} - exclude: ${{ fromJson(needs.build-info.outputs.postgresExclude) }} + python-version: ${{ fromJson(needs.build-info.outputs.python-versions) }} + postgres-version: ${{ fromJson(needs.build-info.outputs.postgres-versions) }} + exclude: ${{ fromJson(needs.build-info.outputs.postgres-exclude) }} fail-fast: false env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} BACKEND: postgres POSTGRES_VERSION: ${{ matrix.postgres-version }} - TEST_TYPES: "${{needs.build-info.outputs.testTypes}}" + TEST_TYPES: "${{needs.build-info.outputs.test-types}}" PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} if: needs.build-info.outputs.run-tests == 'true' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -994,28 +1017,28 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: ./scripts/ci/testing/run_downgrade_test.sh - name: "Test Offline SQL generation" run: ./scripts/ci/testing/run_offline_sql_test.sh - - name: "Tests: ${{needs.build-info.outputs.testTypes}}" + - name: "Tests: ${{needs.build-info.outputs.test-types}}" run: ./scripts/ci/testing/ci_run_airflow_testing.sh env: - PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Upload airflow logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: airflow-logs-${{matrix.python-version}}-${{matrix.postgres-version}} path: "./files/airflow_logs*" retention-days: 7 - name: "Upload container logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: container-logs-postgres-${{matrix.python-version}}-${{matrix.postgres-version}} path: "./files/container_logs*" retention-days: 7 - name: "Upload artifact for coverage" - uses: actions/upload-artifact@v2 - if: needs.build-info.outputs.runCoverage == 'true' + uses: actions/upload-artifact@v3 + if: needs.build-info.outputs.run-coverage == 'true' with: name: 
coverage-postgres-${{matrix.python-version}}-${{matrix.postgres-version}} path: "./files/coverage*.xml" @@ -1027,33 +1050,33 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" tests-mysql: timeout-minutes: 130 name: > - MySQL${{matrix.mysql-version}}, Py${{matrix.python-version}}: ${{needs.build-info.outputs.testTypes}} - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + MySQL${{matrix.mysql-version}}, Py${{matrix.python-version}}: ${{needs.build-info.outputs.test-types}} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-ci-images] strategy: matrix: - python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }} - mysql-version: ${{ fromJson(needs.build-info.outputs.mysqlVersions) }} - exclude: ${{ fromJson(needs.build-info.outputs.mysqlExclude) }} + python-version: ${{ fromJson(needs.build-info.outputs.python-versions) }} + mysql-version: ${{ fromJson(needs.build-info.outputs.mysql-versions) }} + exclude: ${{ fromJson(needs.build-info.outputs.mysql-exclude) }} fail-fast: false env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} BACKEND: mysql MYSQL_VERSION: ${{ matrix.mysql-version }} - TEST_TYPES: "${{needs.build-info.outputs.testTypes}}" + TEST_TYPES: "${{needs.build-info.outputs.test-types}}" PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} if: needs.build-info.outputs.run-tests == 'true' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -1067,28 +1090,28 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: ./scripts/ci/testing/run_downgrade_test.sh - name: "Test Offline SQL generation" run: ./scripts/ci/testing/run_offline_sql_test.sh - - name: "Tests: ${{needs.build-info.outputs.testTypes}}" + - name: "Tests: ${{needs.build-info.outputs.test-types}}" run: ./scripts/ci/testing/ci_run_airflow_testing.sh env: - PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Upload airflow logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: airflow-logs-${{matrix.python-version}}-${{matrix.mysql-version}} path: "./files/airflow_logs*" retention-days: 7 - name: "Upload container logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: container-logs-mysql-${{matrix.python-version}}-${{matrix.mysql-version}} path: "./files/container_logs*" retention-days: 7 - name: "Upload artifact for coverage" - uses: actions/upload-artifact@v2 - if: needs.build-info.outputs.runCoverage == 'true' + uses: actions/upload-artifact@v3 + if: needs.build-info.outputs.run-coverage == 'true' with: name: coverage-mysql-${{matrix.python-version}}-${{matrix.mysql-version}} path: "./files/coverage*.xml" @@ -1100,33 +1123,33 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" tests-mssql: timeout-minutes: 130 name: > - 
MSSQL${{matrix.mssql-version}}, Py${{matrix.python-version}}: ${{needs.build-info.outputs.testTypes}} - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + MSSQL${{matrix.mssql-version}}, Py${{matrix.python-version}}: ${{needs.build-info.outputs.test-types}} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-ci-images] strategy: matrix: - python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }} - mssql-version: ${{ fromJson(needs.build-info.outputs.mssqlVersions) }} - exclude: ${{ fromJson(needs.build-info.outputs.mssqlExclude) }} + python-version: ${{ fromJson(needs.build-info.outputs.python-versions) }} + mssql-version: ${{ fromJson(needs.build-info.outputs.mssql-versions) }} + exclude: ${{ fromJson(needs.build-info.outputs.mssql-exclude) }} fail-fast: false env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} BACKEND: mssql MSSQL_VERSION: ${{ matrix.mssql-version }} - TEST_TYPES: "${{needs.build-info.outputs.testTypes}}" + TEST_TYPES: "${{needs.build-info.outputs.test-types}}" PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} if: needs.build-info.outputs.run-tests == 'true' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -1138,28 +1161,28 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Test downgrade" run: ./scripts/ci/testing/run_downgrade_test.sh - - name: "Tests: ${{needs.build-info.outputs.testTypes}}" + - name: "Tests: ${{needs.build-info.outputs.test-types}}" run: ./scripts/ci/testing/ci_run_airflow_testing.sh env: - PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Upload airflow logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: airflow-logs-${{matrix.python-version}}-${{matrix.mssql-version}} path: "./files/airflow_logs*" retention-days: 7 - name: "Upload container logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: container-logs-mssql-${{matrix.python-version}}-${{matrix.mssql-version}} path: "./files/container_logs*" retention-days: 7 - name: "Upload artifact for coverage" - uses: actions/upload-artifact@v2 - if: needs.build-info.outputs.runCoverage == 'true' + uses: actions/upload-artifact@v3 + if: needs.build-info.outputs.run-coverage == 'true' with: name: coverage-mssql-${{matrix.python-version}}-${{matrix.mssql-version}} path: "./files/coverage*.xml" @@ -1171,31 +1194,31 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" tests-sqlite: timeout-minutes: 130 name: > - Sqlite Py${{matrix.python-version}}: ${{needs.build-info.outputs.testTypes}} - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + Sqlite Py${{matrix.python-version}}: ${{needs.build-info.outputs.test-types}} + runs-on: ${{ 
fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-ci-images] strategy: matrix: - python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }} - exclude: ${{ fromJson(needs.build-info.outputs.sqliteExclude) }} + python-version: ${{ fromJson(needs.build-info.outputs.python-versions) }} + exclude: ${{ fromJson(needs.build-info.outputs.sqlite-exclude) }} fail-fast: false env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} BACKEND: sqlite - TEST_TYPES: "${{needs.build-info.outputs.testTypes}}" + TEST_TYPES: "${{needs.build-info.outputs.test-types}}" PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} if: needs.build-info.outputs.run-tests == 'true' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -1207,28 +1230,28 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Test downgrade" run: ./scripts/ci/testing/run_downgrade_test.sh - - name: "Tests: ${{needs.build-info.outputs.testTypes}}" + - name: "Tests: ${{needs.build-info.outputs.test-types}}" run: ./scripts/ci/testing/ci_run_airflow_testing.sh env: - PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Upload airflow logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: airflow-logs-${{matrix.python-version}} path: './files/airflow_logs*' retention-days: 7 - name: "Upload container logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: container-logs-sqlite-${{matrix.python-version}} path: "./files/container_logs*" retention-days: 7 - name: "Upload artifact for coverage" - uses: actions/upload-artifact@v2 - if: needs.build-info.outputs.runCoverage == 'true' + uses: actions/upload-artifact@v3 + if: needs.build-info.outputs.run-coverage == 'true' with: name: coverage-sqlite-${{matrix.python-version}} path: ./files/coverage*.xml @@ -1240,27 +1263,27 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" tests-quarantined: timeout-minutes: 60 name: "Quarantined tests" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} continue-on-error: true needs: [build-info, wait-for-ci-images] env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} - MYSQL_VERSION: ${{needs.build-info.outputs.defaultMySQLVersion}} - POSTGRES_VERSION: ${{needs.build-info.outputs.defaultPostgresVersion}} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} + MYSQL_VERSION: ${{needs.build-info.outputs.default-mysql-version}} + POSTGRES_VERSION: ${{needs.build-info.outputs.default-postgres-version}} TEST_TYPES: "Quarantined" - PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }} + PYTHON_MAJOR_MINOR_VERSION: ${{ 
needs.build-info.outputs.default-python-version }} if: needs.build-info.outputs.run-tests == 'true' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - name: "Set issue id for main" @@ -1285,31 +1308,31 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: "Tests: Quarantined" run: ./scripts/ci/testing/ci_run_quarantined_tests.sh env: - PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" - name: "Upload Quarantine test results" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: always() with: name: quarantined-tests path: "files/test_result-*.xml" retention-days: 7 - name: "Upload airflow logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: airflow-logs-quarantined-${{ matrix.backend }} path: "./files/airflow_logs*" retention-days: 7 - name: "Upload container logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() with: name: container-logs-quarantined-${{ matrix.backend }} path: "./files/container_logs*" retention-days: 7 - name: "Upload artifact for coverage" - uses: actions/upload-artifact@v2 - if: needs.build-info.outputs.runCoverage == 'true' + uses: actions/upload-artifact@v3 + if: needs.build-info.outputs.run-coverage == 'true' with: name: coverage-quarantined-${{ matrix.backend }} path: "./files/coverage*.xml" @@ -1321,7 +1344,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" upload-coverage: timeout-minutes: 15 name: "Upload coverage" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} continue-on-error: true needs: - build-info @@ -1331,19 +1354,19 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - tests-mssql - tests-quarantined env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} # Only upload coverage on merges to main - if: needs.build-info.outputs.runCoverage == 'true' + if: needs.build-info.outputs.run-coverage == 'true' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false submodules: recursive - name: "Download all artifacts from the current build" - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3 with: path: ./coverage-files - name: "Removes unnecessary artifacts" @@ -1356,31 +1379,31 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" wait-for-prod-images: timeout-minutes: 120 name: "Wait for PROD images" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-ci-images, build-prod-images] if: needs.build-info.outputs.image-build == 'true' env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ 
fromJson(needs.build-info.outputs.runs-on) }} BACKEND: sqlite - PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }} + PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.default-python-version }} steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh - name: "Free space" run: breeze free-space - name: "Cache virtualenv environment" - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: '.build/.docker_venv' key: ${{ runner.os }}-docker-venv-${{ hashFiles('scripts/ci/images/ci_run_docker_tests.py') }} @@ -1391,7 +1414,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" # For the images to be available and test them. run: breeze pull-prod-image --verify-image --wait-for-image --run-in-parallel env: - PYTHON_VERSIONS: ${{ needs.build-info.outputs.pythonVersionsListAsString }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.python-versions-list-as-string }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Fix ownership" run: breeze fix-ownership @@ -1400,23 +1423,23 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" test-docker-compose-quick-start: timeout-minutes: 60 name: "Test docker-compose quick start" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-prod-images] if: needs.build-info.outputs.image-build == 'true' env: - PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }} + PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.default-python-version }} steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 2 persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -1435,25 +1458,25 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" tests-kubernetes: timeout-minutes: 240 name: Helm Chart; ${{matrix.executor}} - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-prod-images] strategy: matrix: executor: [KubernetesExecutor, CeleryExecutor, LocalExecutor] fail-fast: false env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} BACKEND: postgres RUN_TESTS: "true" RUNTIME: "kubernetes" KUBERNETES_MODE: "image" EXECUTOR: ${{matrix.executor}} - KIND_VERSION: "${{ needs.build-info.outputs.defaultKindVersion }}" - HELM_VERSION: "${{ needs.build-info.outputs.defaultHelmVersion }}" + KIND_VERSION: "${{ needs.build-info.outputs.default-kind-version 
}}" + HELM_VERSION: "${{ needs.build-info.outputs.default-helm-version }}" CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: > - ${{needs.build-info.outputs.pythonVersionsListAsString}} + ${{needs.build-info.outputs.python-versions-list-as-string}} CURRENT_KUBERNETES_VERSIONS_AS_STRING: > - ${{needs.build-info.outputs.kubernetesVersionsListAsString}} + ${{needs.build-info.outputs.kubernetes-versions-list-as-string}} if: > ( needs.build-info.outputs.run-kubernetes-tests == 'true' || needs.build-info.outputs.needs-helm-tests == 'true' ) && @@ -1462,13 +1485,13 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -1477,22 +1500,22 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: Pull PROD images ${{ env.PYTHON_VERSIONS }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} run: breeze pull-prod-image --run-in-parallel --tag-as-latest env: - PYTHON_VERSIONS: ${{ needs.build-info.outputs.pythonVersionsListAsString }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.python-versions-list-as-string }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Cache bin folder with tools for kubernetes testing" - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ".build/kubernetes-bin" key: "kubernetes-binaries --${{ needs.build-info.outputs.defaultKindVersion }}\ --${{ needs.build-info.outputs.defaultHelmVersion }}" +-${{ needs.build-info.outputs.default-kind-version }}\ +-${{ needs.build-info.outputs.default-helm-version }}" restore-keys: "kubernetes-binaries" - name: "Kubernetes Tests" run: ./scripts/ci/kubernetes/ci_setup_clusters_and_run_kubernetes_tests_in_parallel.sh env: - PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" - name: "Upload KinD logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() || cancelled() with: name: kind-logs-${{matrix.executor}} @@ -1505,21 +1528,21 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" tests-helm-executor-upgrade: timeout-minutes: 150 name: Helm Chart Executor Upgrade - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: [build-info, wait-for-prod-images] env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} BACKEND: postgres RUN_TESTS: "true" RUNTIME: "kubernetes" KUBERNETES_MODE: "image" EXECUTOR: "KubernetesExecutor" - KIND_VERSION: "${{ needs.build-info.outputs.defaultKindVersion }}" - HELM_VERSION: "${{ needs.build-info.outputs.defaultHelmVersion }}" + KIND_VERSION: "${{ needs.build-info.outputs.default-kind-version }}" + HELM_VERSION: "${{ needs.build-info.outputs.default-helm-version }}" CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: > - ${{needs.build-info.outputs.pythonVersionsListAsString}} + ${{needs.build-info.outputs.python-versions-list-as-string}} CURRENT_KUBERNETES_VERSIONS_AS_STRING: > - 
${{needs.build-info.outputs.kubernetesVersionsListAsString}} + ${{needs.build-info.outputs.kubernetes-versions-list-as-string}} if: > needs.build-info.outputs.run-kubernetes-tests == 'true' && needs.build-info.outputs.default-branch == 'main' @@ -1527,13 +1550,13 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -1542,33 +1565,33 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: Pull PROD images ${{ env.PYTHON_VERSIONS }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} run: breeze pull-prod-image --run-in-parallel --tag-as-latest env: - PYTHON_VERSIONS: ${{ needs.build-info.outputs.pythonVersionsListAsString }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.python-versions-list-as-string }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Cache virtualenv for kubernetes testing" - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ".build/.kubernetes_venv" - key: "kubernetes-${{ needs.build-info.outputs.defaultPythonVersion }}\ - -${{needs.build-info.outputs.kubernetesVersionsListAsString}} - -${{needs.build-info.outputs.pythonVersionsListAsString}} + key: "kubernetes-${{ needs.build-info.outputs.default-python-version }}\ + -${{needs.build-info.outputs.kubernetes-versions-list-as-string}} + -${{needs.build-info.outputs.python-versions-list-as-string}} -${{ hashFiles('setup.py','setup.cfg') }}" - restore-keys: "kubernetes-${{ needs.build-info.outputs.defaultPythonVersion }}-\ - -${{needs.build-info.outputs.kubernetesVersionsListAsString}} - -${{needs.build-info.outputs.pythonVersionsListAsString}}" + restore-keys: "kubernetes-${{ needs.build-info.outputs.default-python-version }}-\ + -${{needs.build-info.outputs.kubernetes-versions-list-as-string}} + -${{needs.build-info.outputs.python-versions-list-as-string}}" - name: "Cache bin folder with tools for kubernetes testing" - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ".build/kubernetes-bin" key: "kubernetes-binaries - -${{ needs.build-info.outputs.defaultKindVersion }}\ - -${{ needs.build-info.outputs.defaultHelmVersion }}" + -${{ needs.build-info.outputs.default-kind-version }}\ + -${{ needs.build-info.outputs.default-helm-version }}" restore-keys: "kubernetes-binaries" - name: "Kubernetes Helm Chart Executor Upgrade Tests" run: ./scripts/ci/kubernetes/ci_upgrade_cluster_with_different_executors_in_parallel.sh env: - PR_LABELS: "${{ needs.build-info.outputs.pullRequestLabels }}" + PR_LABELS: "${{ needs.build-info.outputs.pull-request-labels }}" - name: "Upload KinD logs" - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 if: failure() || cancelled() with: name: kind-logs-KubernetesExecutor @@ -1583,7 +1606,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" contents: write timeout-minutes: 40 name: "Constraints" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: - build-info - wait-for-ci-images @@ -1594,20 +1617,20 @@ ${{ 
hashFiles('.pre-commit-config.yaml') }}" - tests-mssql - tests-postgres env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} - if: needs.build-info.outputs.upgradeToNewerDependencies != 'false' + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} + if: needs.build-info.outputs.upgrade-to-newer-dependencies != 'false' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false submodules: recursive - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -1616,7 +1639,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: Pull CI images ${{ env.PYTHON_VERSIONS }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }} run: breeze pull-image --run-in-parallel --tag-as-latest env: - PYTHON_VERSIONS: ${{ needs.build-info.outputs.pythonVersionsListAsString }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.python-versions-list-as-string }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Generate constraints" run: | @@ -1625,24 +1648,24 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" breeze generate-constraints --run-in-parallel --airflow-constraints-mode constraints-no-providers breeze generate-constraints --run-in-parallel --airflow-constraints-mode constraints env: - PYTHON_VERSIONS: ${{ needs.build-info.outputs.pythonVersionsListAsString }} + PYTHON_VERSIONS: ${{ needs.build-info.outputs.python-versions-list-as-string }} - name: "Set constraints branch name" id: constraints-branch run: ./scripts/ci/constraints/ci_branch_constraints.sh - if: needs.build-info.outputs.mergeRun == 'true' + if: needs.build-info.outputs.merge-run == 'true' - name: Checkout ${{ steps.constraints-branch.outputs.branch }} - uses: actions/checkout@v2 - if: needs.build-info.outputs.mergeRun == 'true' + uses: actions/checkout@v3 + if: needs.build-info.outputs.merge-run == 'true' with: path: "repo" ref: ${{ steps.constraints-branch.outputs.branch }} persist-credentials: false - - name: "Commit changed constraint files for ${{needs.build-info.outputs.pythonVersions}}" + - name: "Commit changed constraint files for ${{needs.build-info.outputs.python-versions}}" run: ./scripts/ci/constraints/ci_commit_constraints.sh - if: needs.build-info.outputs.mergeRun == 'true' + if: needs.build-info.outputs.merge-run == 'true' - name: "Push changes" uses: ./.github/actions/github-push-action - if: needs.build-info.outputs.mergeRun == 'true' + if: needs.build-info.outputs.merge-run == 'true' with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: ${{ steps.constraints-branch.outputs.branch }} @@ -1660,31 +1683,31 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" packages: write timeout-minutes: 120 name: "Push Image Cache" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: - build-info - constraints - docs - if: needs.build-info.outputs.mergeRun == 'true' + if: needs.build-info.outputs.merge-run == 'true' strategy: fail-fast: false matrix: - python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }} + python-version: ${{ 
fromJson(needs.build-info.outputs.python-versions) }} platform: ["linux/amd64", "linux/arm64"] env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on) }} PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: "Setup python" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} + python-version: ${{ needs.build-info.outputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - run: ./scripts/ci/install_breeze.sh @@ -1696,7 +1719,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: breeze pull-image --tag-as-latest env: # Always use default Python version of CI image for preparing packages - PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }} + PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.default-python-version }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - name: "Cleanup dist and context file" run: rm -fv ./dist/* ./docker-context-files/* @@ -1744,11 +1767,11 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" if: always() build-ci-arm-images: - timeout-minutes: 80 + timeout-minutes: 120 name: > - ${{needs.build-info.outputs.buildJobDescription}} CI ARM images - ${{ needs.build-info.outputs.allPythonVersionsListAsString }} - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + ${{needs.build-info.outputs.build-job-description}} CI ARM images + ${{ needs.build-info.outputs.all-python-versions-list-as-string }} + runs-on: ${{ fromJson(needs.build-info.outputs.runs-on) }} needs: - build-info - wait-for-ci-images @@ -1762,46 +1785,46 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }} DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }} DEBIAN_VERSION: ${{ needs.build-info.outputs.debian-version }} - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }} - if: needs.build-info.outputs.upgradeToNewerDependencies != 'false' + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runs-on)[0] }} + if: needs.build-info.outputs.upgrade-to-newer-dependencies != 'false' steps: - name: Cleanup repo run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - uses: actions/checkout@v2 with: ref: ${{ needs.build-info.outputs.targetCommitSha }} persist-credentials: false submodules: recursive - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Setup python" uses: actions/setup-python@v2 with: - python-version: ${{ needs.build-info.outputs.defaultPythonVersion }} - if: needs.build-info.outputs.inWorkflowBuild == 'true' + python-version: ${{ needs.build-info.outputs.default-python-version }} + if: needs.build-info.outputs.in-workflow-build == 'true' - run: ./scripts/ci/install_breeze.sh - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Free space" run: breeze free-space - if: needs.build-info.outputs.inWorkflowBuild == 'true' + 
if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Start ARM instance" run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh - if: needs.build-info.outputs.inWorkflowBuild == 'true' + if: needs.build-info.outputs.in-workflow-build == 'true' - name: > Build CI ARM images ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - ${{ needs.build-info.outputs.allPythonVersionsListAsString }} + ${{ needs.build-info.outputs.all-python-versions-list-as-string }} run: > breeze build-image --run-in-parallel --parallelism 1 --builder airflow_cache --platform "linux/arm64" env: - UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }} - DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }} + UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + DOCKER_CACHE: ${{ needs.build-info.outputs.cache-directive }} IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }} - PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }} - if: needs.build-info.outputs.inWorkflowBuild == 'true' + PYTHON_VERSIONS: ${{ needs.build-info.outputs.all-python-versions-list-as-string }} + if: needs.build-info.outputs.in-workflow-build == 'true' - name: "Stop ARM instance" run: ./scripts/ci/images/ci_stop_arm_instance.sh - if: always() && needs.build-info.outputs.inWorkflowBuild == 'true' + if: always() && needs.build-info.outputs.in-workflow-build == 'true' - name: "Fix ownership" run: breeze fix-ownership - if: always() && needs.build-info.outputs.inWorkflowBuild == 'true' + if: always() && needs.build-info.outputs.in-workflow-build == 'true' diff --git a/dev/breeze/src/airflow_breeze/branch_defaults.py b/dev/breeze/src/airflow_breeze/branch_defaults.py index ea837607501dc..f02f4812cf178 100644 --- a/dev/breeze/src/airflow_breeze/branch_defaults.py +++ b/dev/breeze/src/airflow_breeze/branch_defaults.py @@ -38,3 +38,4 @@ AIRFLOW_BRANCH = "v2-3-test" DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH = "constraints-2-3" +DEBIAN_VERSION = "bullseye" diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 57da548421b3f..f47e30ec85095 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -513,3 +513,7 @@ def docs_filter(self) -> str: @cached_property def skip_pre_commits(self) -> str: return "identity" if self._default_branch == "main" else "identity,check-airflow-2-2-compatibility" + + @cached_property + def cache_directive(self) -> str: + return "disabled" if self._github_event == GithubEvents.SCHEDULE else "registry"
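Taken together, the final hunks move hard-coded workflow logic into breeze itself: the branch and Debian defaults are now read from dev/breeze/src/airflow_breeze/branch_defaults.py by the inline Python step added to ci.yml, and the cache directive that used to be computed by the removed "Compute dynamic outputs" step is exposed as the new cache_directive property on SelectiveChecks (disabled for scheduled runs, registry cache otherwise). The sketch below only illustrates that behaviour; StubSelectiveChecks, GithubEvent and read_branch_defaults are hypothetical stand-ins, not the actual airflow_breeze classes.

    # Illustrative sketch only: mirrors the logic added in this patch, not the real breeze code.
    import re
    from enum import Enum
    from functools import cached_property
    from pathlib import Path


    class GithubEvent(Enum):
        PULL_REQUEST = "pull_request"
        PUSH = "push"
        SCHEDULE = "schedule"


    class StubSelectiveChecks:
        def __init__(self, github_event: GithubEvent):
            self._github_event = github_event

        @cached_property
        def cache_directive(self) -> str:
            # Scheduled runs rebuild images without cache; all other events reuse the registry cache.
            return "disabled" if self._github_event == GithubEvent.SCHEDULE else "registry"


    def read_branch_defaults(path: str = "dev/breeze/src/airflow_breeze/branch_defaults.py") -> dict:
        # Same regex-over-file approach as the inline "python - <<EOF" step added to ci.yml above.
        content = Path(path).read_text()
        patterns = {
            "DEFAULT_BRANCH": r'^AIRFLOW_BRANCH = "(.*)"$',
            "DEFAULT_CONSTRAINTS_BRANCH": r'^DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH = "(.*)"$',
            "DEBIAN_VERSION": r'^DEBIAN_VERSION = "(.*)"$',
        }
        return {name: re.search(regex, content, re.MULTILINE).group(1) for name, regex in patterns.items()}


    if __name__ == "__main__":
        print(StubSelectiveChecks(GithubEvent.SCHEDULE).cache_directive)      # disabled
        print(StubSelectiveChecks(GithubEvent.PULL_REQUEST).cache_directive)  # registry

Run from a repository checkout, read_branch_defaults() would be expected to return the same three values that the inline workflow step writes into $GITHUB_ENV, and the two prints show the only two values cache_directive can take.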