diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 38de56acbc40e..883816f901e60 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -422,11 +422,11 @@ repos: - id: check-no-relative-imports language: pygrep name: No relative imports - description: Airflow style is to use absolute imports only + description: Airflow style is to use absolute imports only (except docs building) entry: "^\\s*from\\s+\\." pass_filenames: true files: \.py$ - exclude: ^tests/|^airflow/_vendor/ + exclude: ^tests/|^airflow/_vendor/|^docs/ - id: check-for-inclusive-language language: pygrep name: Check for language that we do not accept as community @@ -648,7 +648,7 @@ repos: entry: ./scripts/ci/pre_commit/pre_commit_check_system_tests.py language: python files: ^tests/system/.*/example_[^/]*.py$ - exclude: ^tests/system/providers/google/bigquery/example_bigquery_queries\.py$ + exclude: ^tests/system/providers/google/cloud/bigquery/example_bigquery_queries\.py$ pass_filenames: true additional_dependencies: ['rich>=12.4.4'] - id: lint-markdown @@ -786,6 +786,21 @@ repos: pass_filenames: true files: ^docs/.*index\.rst$|^docs/.*example-dags\.rst$ additional_dependencies: ['rich>=12.4.4', 'pyyaml'] + always_run: true + - id: check-system-tests-tocs + name: Check that system tests are properly added + entry: ./scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py + language: python + pass_filenames: true + files: ^docs/apache-airflow-providers-[^/]*/index\.rst$ + additional_dependencies: ['rich>=12.4.4', 'pyyaml'] + - id: create-missing-init-py-files-tests + name: Create missing init.py files in tests + entry: ./scripts/ci/pre_commit/pre_commit_check_init_in_tests.py + language: python + additional_dependencies: ['rich>=12.4.4'] + pass_filenames: false + files: ^tests/.*\.py$ ## ADD MOST PRE-COMMITS ABOVE THAT LINE # The below pre-commits are those requiring CI image to be built - id: run-mypy diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index 33cae19c5cea2..e4232bf1947d5 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -1249,7 +1249,7 @@ Logical date of a DAG run triggered from the web UI now have its sub-second comp Due to a change in how the logical date (``execution_date``) is generated for a manual DAG run, a manual DAG run’s logical date may not match its time-of-trigger, but have its sub-second part zero-ed out. For example, a DAG run triggered on ``2021-10-11T12:34:56.78901`` would have its logical date set to ``2021-10-11T12:34:56.00000``. -This may affect some logic that expects on this quirk to detect whether a run is triggered manually or not. Note that ``dag_run.run_type`` is a more authoritative value for this purpose. Also, if you need this distinction between automated and manually-triggered rus for “next execution date” calculation, please also consider using the new data interval variables instead, which provide a more consistent behavior between the two run types. +This may affect some logic that relies on this quirk to detect whether a run is triggered manually or not. Note that ``dag_run.run_type`` is a more authoritative value for this purpose. Also, if you need this distinction between automated and manually-triggered runs for “next execution date” calculation, please also consider using the new data interval variables instead, which provide a more consistent behavior between the two run types. 
New Features ^^^^^^^^^^^^ diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst index 7de5a4cf57edc..53b526fdfcd8d 100644 --- a/STATIC_CODE_CHECKS.rst +++ b/STATIC_CODE_CHECKS.rst @@ -209,10 +209,14 @@ require Breeze Docker image to be build locally. +--------------------------------------------------------+------------------------------------------------------------------+---------+ | check-system-tests-present | Check if system tests have required segments of code | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ +| check-system-tests-tocs | Check that system tests are properly added | | ++--------------------------------------------------------+------------------------------------------------------------------+---------+ | check-xml | Check XML files with xmllint | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ | codespell | Run codespell to check for common misspellings in files | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ +| create-missing-init-py-files-tests | Create missing init.py files in tests | | ++--------------------------------------------------------+------------------------------------------------------------------+---------+ | debug-statements | Detect accidentally committed debug statements | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ | detect-private-key | Detect if private key is added to the repository | | diff --git a/airflow/example_dags/example_branch_datetime_operator.py b/airflow/example_dags/example_branch_datetime_operator.py index 3c86e40402aef..e707514c868a0 100644 --- a/airflow/example_dags/example_branch_datetime_operator.py +++ b/airflow/example_dags/example_branch_datetime_operator.py @@ -26,7 +26,7 @@ from airflow.operators.datetime import BranchDateTimeOperator from airflow.operators.empty import EmptyOperator -dag = DAG( +dag1 = DAG( dag_id="example_branch_datetime_operator", start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), catchup=False, @@ -35,8 +35,8 @@ ) # [START howto_branch_datetime_operator] -empty_task_1 = EmptyOperator(task_id='date_in_range', dag=dag) -empty_task_2 = EmptyOperator(task_id='date_outside_range', dag=dag) +empty_task_11 = EmptyOperator(task_id='date_in_range', dag=dag1) +empty_task_21 = EmptyOperator(task_id='date_outside_range', dag=dag1) cond1 = BranchDateTimeOperator( task_id='datetime_branch', @@ -44,15 +44,15 @@ follow_task_ids_if_false=['date_outside_range'], target_upper=pendulum.datetime(2020, 10, 10, 15, 0, 0), target_lower=pendulum.datetime(2020, 10, 10, 14, 0, 0), - dag=dag, + dag=dag1, ) # Run empty_task_1 if cond1 executes between 2020-10-10 14:00:00 and 2020-10-10 15:00:00 -cond1 >> [empty_task_1, empty_task_2] +cond1 >> [empty_task_11, empty_task_21] # [END howto_branch_datetime_operator] -dag = DAG( +dag2 = DAG( dag_id="example_branch_datetime_operator_2", start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), catchup=False, @@ -60,8 +60,8 @@ schedule_interval="@daily", ) # [START howto_branch_datetime_operator_next_day] -empty_task_1 = EmptyOperator(task_id='date_in_range', dag=dag) -empty_task_2 = EmptyOperator(task_id='date_outside_range', dag=dag) +empty_task_12 = EmptyOperator(task_id='date_in_range', dag=dag2) +empty_task_22 = 
EmptyOperator(task_id='date_outside_range', dag=dag2) cond2 = BranchDateTimeOperator( task_id='datetime_branch', @@ -69,10 +69,10 @@ follow_task_ids_if_false=['date_outside_range'], target_upper=pendulum.time(0, 0, 0), target_lower=pendulum.time(15, 0, 0), - dag=dag, + dag=dag2, ) # Since target_lower happens after target_upper, target_upper will be moved to the following day # Run empty_task_1 if cond2 executes between 15:00:00, and 00:00:00 of the following day -cond2 >> [empty_task_1, empty_task_2] +cond2 >> [empty_task_12, empty_task_22] # [END howto_branch_datetime_operator_next_day] diff --git a/airflow/example_dags/example_external_task_marker_dag.py b/airflow/example_dags/example_external_task_marker_dag.py index 0c4479a0d66f0..733b732756633 100644 --- a/airflow/example_dags/example_external_task_marker_dag.py +++ b/airflow/example_dags/example_external_task_marker_dag.py @@ -18,23 +18,25 @@ """ Example DAG demonstrating setting up inter-DAG dependencies using ExternalTaskSensor and -ExternalTaskMarker +ExternalTaskMarker. In this example, child_task1 in example_external_task_marker_child depends on parent_task in -example_external_task_marker_parent. When parent_task is cleared with "Recursive" selected, -the presence of ExternalTaskMarker tells Airflow to clear child_task1 and its -downstream tasks. +example_external_task_marker_parent. When parent_task is cleared with 'Recursive' selected, +the presence of ExternalTaskMarker tells Airflow to clear child_task1 and its downstream tasks. ExternalTaskSensor will keep poking for the status of remote ExternalTaskMarker task at a regular interval till one of the following will happen: -1. ExternalTaskMarker reaches the states mentioned in the allowed_states list - In this case, ExternalTaskSensor will exit with a success status code -2. ExternalTaskMarker reaches the states mentioned in the failed_states list - In this case, ExternalTaskSensor will raise an AirflowException and user need to handle this - with multiple downstream tasks -3. ExternalTaskSensor times out - In this case, ExternalTaskSensor will raise AirflowSkipException or AirflowSensorTimeout - exception + +ExternalTaskMarker reaches the states mentioned in the allowed_states list. +In this case, ExternalTaskSensor will exit with a success status code. + +ExternalTaskMarker reaches the states mentioned in the failed_states list. +In this case, ExternalTaskSensor will raise an AirflowException and the user needs to handle this +with multiple downstream tasks. + +ExternalTaskSensor times out. In this case, ExternalTaskSensor will raise AirflowSkipException +or AirflowSensorTimeout exception. + """ import pendulum diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py index 682cc83919129..8909adbf41598 100644 --- a/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py +++ b/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py @@ -26,7 +26,8 @@ Further information: YOUTUBE_VIDEO_PUBLISHED_AFTER and YOUTUBE_VIDEO_PUBLISHED_BEFORE needs to be formatted -"YYYY-MM-DDThh:mm:ss.sZ". See https://developers.google.com/youtube/v3/docs/search/list for more information. +``YYYY-MM-DDThh:mm:ss.sZ``. +See https://developers.google.com/youtube/v3/docs/search/list for more information. YOUTUBE_VIDEO_PARTS depends on the fields you pass via YOUTUBE_VIDEO_FIELDS. 
See https://developers.google.com/youtube/v3/docs/videos/list#parameters for more information. YOUTUBE_CONN_ID is optional for public videos. It does only need to authenticate when there are private videos diff --git a/airflow/providers/amazon/aws/example_dags/example_s3.py b/airflow/providers/amazon/aws/example_dags/example_s3.py index 7e06575d4a58b..4be800a9933ea 100644 --- a/airflow/providers/amazon/aws/example_dags/example_s3.py +++ b/airflow/providers/amazon/aws/example_dags/example_s3.py @@ -65,12 +65,9 @@ # [START howto_sensor_s3_key_function_definition] def check_fn(files: List) -> bool: """ - Example of custom check: check if all files are bigger than 1kB + Example of custom check: check if all files are bigger than ``1kB`` :param files: List of S3 object attributes. - Format: [{ - 'Size': int - }] :return: true if the criteria is met :rtype: bool """ diff --git a/airflow/providers/arangodb/example_dags/example_arangodb.py b/airflow/providers/arangodb/example_dags/example_arangodb.py index f9da187cfb665..37a8250dfd16a 100644 --- a/airflow/providers/arangodb/example_dags/example_arangodb.py +++ b/airflow/providers/arangodb/example_dags/example_arangodb.py @@ -41,7 +41,7 @@ # [START howto_aql_sensor_template_file_arangodb] -sensor = AQLSensor( +sensor2 = AQLSensor( task_id="aql_sensor_template_file", query="search_judy.sql", timeout=60, @@ -65,7 +65,7 @@ # [START howto_aql_operator_template_file_arangodb] -operator = AQLOperator( +operator2 = AQLOperator( task_id='aql_operator_template_file', dag=dag, result_processor=lambda cursor: print([document["name"] for document in cursor]), diff --git a/airflow/providers/google/cloud/example_dags/example_automl_tables.py b/airflow/providers/google/cloud/example_dags/example_automl_tables.py index 9ba0314dae777..c13de99fa8512 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_tables.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_tables.py @@ -204,14 +204,14 @@ def get_target_column_spec(columns_specs: List[Dict], column_name: str) -> str: catchup=False, user_defined_macros={"extract_object_id": extract_object_id}, ) as example_dag: - create_dataset_task = AutoMLCreateDatasetOperator( + create_dataset_task2 = AutoMLCreateDatasetOperator( task_id="create_dataset_task", dataset=DATASET, location=GCP_AUTOML_LOCATION, project_id=GCP_PROJECT_ID, ) - dataset_id = create_dataset_task.output['dataset_id'] + dataset_id = create_dataset_task2.output['dataset_id'] import_dataset_task = AutoMLImportDataOperator( task_id="import_dataset_task", diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py index be858c4018753..bf73959d4ff72 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py @@ -17,17 +17,15 @@ # under the License. """ -Example Airflow DAG that demonstrates interactions with Google Cloud Transfer. +Example Airflow DAG that demonstrates interactions with Google Cloud Transfer. This DAG relies on +the following OS environment variables - -This DAG relies on the following OS environment variables +Note that you need to provide a large enough set of data so that operations do not execute too quickly. +Otherwise, DAG will fail. * GCP_PROJECT_ID - Google Cloud Project to use for the Google Cloud Transfer Service. 
* GCP_DESCRIPTION - Description of transfer job * GCP_TRANSFER_SOURCE_AWS_BUCKET - Amazon Web Services Storage bucket from which files are copied. - .. warning:: - You need to provide a large enough set of data so that operations do not execute too quickly. - Otherwise, DAG will fail. * GCP_TRANSFER_SECOND_TARGET_BUCKET - Google Cloud Storage bucket to which files are copied * WAIT_FOR_OPERATION_POKE_INTERVAL - interval of what to check the status of the operation A smaller value than the default value accelerates the system test and ensures its correct execution with diff --git a/airflow/providers/google/cloud/example_dags/example_pubsub.py b/airflow/providers/google/cloud/example_dags/example_pubsub.py index 8e3dd1fe8f01e..05ae16bd68f35 100644 --- a/airflow/providers/google/cloud/example_dags/example_pubsub.py +++ b/airflow/providers/google/cloud/example_dags/example_pubsub.py @@ -56,7 +56,7 @@ catchup=False, ) as example_sensor_dag: # [START howto_operator_gcp_pubsub_create_topic] - create_topic = PubSubCreateTopicOperator( + create_topic1 = PubSubCreateTopicOperator( task_id="create_topic", topic=TOPIC_FOR_SENSOR_DAG, project_id=GCP_PROJECT_ID, fail_if_exists=False ) # [END howto_operator_gcp_pubsub_create_topic] @@ -105,7 +105,7 @@ ) # [END howto_operator_gcp_pubsub_delete_topic] - create_topic >> subscribe_task >> publish_task + create_topic1 >> subscribe_task >> publish_task pull_messages >> pull_messages_result >> unsubscribe_task >> delete_topic # Task dependencies created via `XComArgs`: @@ -120,7 +120,7 @@ catchup=False, ) as example_operator_dag: # [START howto_operator_gcp_pubsub_create_topic] - create_topic = PubSubCreateTopicOperator( + create_topic2 = PubSubCreateTopicOperator( task_id="create_topic", topic=TOPIC_FOR_OPERATOR_DAG, project_id=GCP_PROJECT_ID ) # [END howto_operator_gcp_pubsub_create_topic] @@ -170,7 +170,7 @@ # [END howto_operator_gcp_pubsub_delete_topic] ( - create_topic + create_topic2 >> subscribe_task >> publish_task >> pull_messages_operator diff --git a/airflow/providers/google/cloud/example_dags/example_vertex_ai.py b/airflow/providers/google/cloud/example_dags/example_vertex_ai.py index cded48ae9b4de..a421c31a7ee2c 100644 --- a/airflow/providers/google/cloud/example_dags/example_vertex_ai.py +++ b/airflow/providers/google/cloud/example_dags/example_vertex_ai.py @@ -26,15 +26,16 @@ This DAG relies on the following OS environment variables: * GCP_VERTEX_AI_BUCKET - Google Cloud Storage bucket where the model will be saved -after training process was finished. + after training process was finished. * CUSTOM_CONTAINER_URI - path to container with model. * PYTHON_PACKAGE_GSC_URI - path to test model in archive. * LOCAL_TRAINING_SCRIPT_PATH - path to local training script. * DATASET_ID - ID of dataset which will be used in training process. * MODEL_ID - ID of model which will be used in predict process. * MODEL_ARTIFACT_URI - The artifact_uri should be the path to a GCS directory containing saved model -artifacts. + artifacts. """ + import os from datetime import datetime from uuid import uuid4 diff --git a/airflow/providers/mongo/hooks/mongo.py b/airflow/providers/mongo/hooks/mongo.py index 96a5ec800302a..c022ec4135ebd 100644 --- a/airflow/providers/mongo/hooks/mongo.py +++ b/airflow/providers/mongo/hooks/mongo.py @@ -266,7 +266,7 @@ def replace_many( :param mongo_collection: The name of the collection to update. :param docs: The new documents. :param filter_docs: A list of queries that match the documents to replace. 
- Can be omitted; then the _id fields from docs will be used. + Can be omitted; then the _id fields from ``docs`` will be used. :param mongo_db: The name of the database to use. Can be omitted; then the database from the connection string is used. :param upsert: If ``True``, perform an insert if no documents diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py b/dev/breeze/src/airflow_breeze/pre_commit_ids.py index 33ed3c6a6050c..541707045661f 100644 --- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py +++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py @@ -61,8 +61,10 @@ 'check-setup-order', 'check-start-date-not-used-in-defaults', 'check-system-tests-present', + 'check-system-tests-tocs', 'check-xml', 'codespell', + 'create-missing-init-py-files-tests', 'debug-statements', 'detect-private-key', 'doctoc', diff --git a/docs/__init__.py b/docs/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/docs/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/docs/apache-airflow-providers-alibaba/index.rst b/docs/apache-airflow-providers-alibaba/index.rst index f9d98fd7e8bdb..0a5398d5f0b5b 100644 --- a/docs/apache-airflow-providers-alibaba/index.rst +++ b/docs/apache-airflow-providers-alibaba/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/alibaba/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/alibaba/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-amazon/example-dags.rst b/docs/apache-airflow-providers-amazon/example-dags.rst index 243016810adc0..253ae79041437 100644 --- a/docs/apache-airflow-providers-amazon/example-dags.rst +++ b/docs/apache-airflow-providers-amazon/example-dags.rst @@ -21,3 +21,4 @@ Example DAGs You can learn how to use Amazon AWS integrations by analyzing the source code of the example DAGs: * `Amazon AWS `__ +* `Amazon AWS (legacy) `__ diff --git a/docs/apache-airflow-providers-amazon/index.rst b/docs/apache-airflow-providers-amazon/index.rst index 8d27c4e53fc23..013dbd4dccf0e 100644 --- a/docs/apache-airflow-providers-amazon/index.rst +++ b/docs/apache-airflow-providers-amazon/index.rst @@ -36,6 +36,12 @@ Content Python API <_api/airflow/providers/amazon/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/amazon/index> + .. 
toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-beam/index.rst b/docs/apache-airflow-providers-apache-beam/index.rst index 8c1b0a3e4c60e..8fcb19477b5bd 100644 --- a/docs/apache-airflow-providers-apache-beam/index.rst +++ b/docs/apache-airflow-providers-apache-beam/index.rst @@ -26,6 +26,17 @@ Content :caption: References Python API <_api/airflow/providers/apache/beam/index> + +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/beam/index> + +.. toctree:: + :maxdepth: 1 + :caption: Resources + PyPI Repository Example DAGs diff --git a/docs/apache-airflow-providers-apache-cassandra/index.rst b/docs/apache-airflow-providers-apache-cassandra/index.rst index 54f400b9d77b3..2808d827f368c 100644 --- a/docs/apache-airflow-providers-apache-cassandra/index.rst +++ b/docs/apache-airflow-providers-apache-cassandra/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/apache/cassandra/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/cassandra/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-drill/index.rst b/docs/apache-airflow-providers-apache-drill/index.rst index 024aa35fade6b..a67b73453bff3 100644 --- a/docs/apache-airflow-providers-apache-drill/index.rst +++ b/docs/apache-airflow-providers-apache-drill/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/apache/drill/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/drill/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-druid/index.rst b/docs/apache-airflow-providers-apache-druid/index.rst index de2649c5c57cd..70ab26ebaef95 100644 --- a/docs/apache-airflow-providers-apache-druid/index.rst +++ b/docs/apache-airflow-providers-apache-druid/index.rst @@ -32,6 +32,18 @@ Content :caption: References Python API <_api/airflow/providers/apache/druid/index> + +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/druid/index> + + +.. toctree:: + :maxdepth: 1 + :caption: Resources + PyPI Repository Installing from sources Example DAGs diff --git a/docs/apache-airflow-providers-apache-hive/index.rst b/docs/apache-airflow-providers-apache-hive/index.rst index 11ae61a0ce002..7ab19b0e6007f 100644 --- a/docs/apache-airflow-providers-apache-hive/index.rst +++ b/docs/apache-airflow-providers-apache-hive/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/apache/hive/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/hive/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-kylin/index.rst b/docs/apache-airflow-providers-apache-kylin/index.rst index 4b5c2280fcf5e..a872a9767d828 100644 --- a/docs/apache-airflow-providers-apache-kylin/index.rst +++ b/docs/apache-airflow-providers-apache-kylin/index.rst @@ -28,6 +28,12 @@ Content Python API <_api/airflow/providers/apache/kylin/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/kylin/index> + .. 
toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-livy/index.rst b/docs/apache-airflow-providers-apache-livy/index.rst index dccc0300e20b8..0320bee0a8fca 100644 --- a/docs/apache-airflow-providers-apache-livy/index.rst +++ b/docs/apache-airflow-providers-apache-livy/index.rst @@ -33,6 +33,12 @@ Content Python API <_api/airflow/providers/apache/livy/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/livy/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-pig/index.rst b/docs/apache-airflow-providers-apache-pig/index.rst index adb93b8f3f7a2..e2c1c8e41cd59 100644 --- a/docs/apache-airflow-providers-apache-pig/index.rst +++ b/docs/apache-airflow-providers-apache-pig/index.rst @@ -33,6 +33,12 @@ Content Python API <_api/airflow/providers/apache/pig/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/pig/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-apache-spark/index.rst b/docs/apache-airflow-providers-apache-spark/index.rst index d6dc9d175038b..b183c817aa71b 100644 --- a/docs/apache-airflow-providers-apache-spark/index.rst +++ b/docs/apache-airflow-providers-apache-spark/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/apache/spark/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/spark/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-asana/index.rst b/docs/apache-airflow-providers-asana/index.rst index a10dccd9c8dd9..5f5c604a7ad1f 100644 --- a/docs/apache-airflow-providers-asana/index.rst +++ b/docs/apache-airflow-providers-asana/index.rst @@ -36,15 +36,16 @@ Content Python API <_api/airflow/providers/asana/index> .. toctree:: - :maxdepth: 1 - :caption: Resources + :hidden: + :caption: System tests - Example DAGs + System Tests <_api/tests/system/providers/asana/index> .. toctree:: :maxdepth: 1 :caption: Resources + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-cncf-kubernetes/index.rst b/docs/apache-airflow-providers-cncf-kubernetes/index.rst index 5a9a6150cc713..7137a59829e6d 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/index.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/cncf/kubernetes/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/cncf/kubernetes/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-databricks/index.rst b/docs/apache-airflow-providers-databricks/index.rst index 600f176da0bd7..b31955a3cc9ee 100644 --- a/docs/apache-airflow-providers-databricks/index.rst +++ b/docs/apache-airflow-providers-databricks/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/databricks/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/databricks/index> + .. 
toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-dbt-cloud/index.rst b/docs/apache-airflow-providers-dbt-cloud/index.rst index 2cf6c40e8ad09..4d37b2eeecd97 100644 --- a/docs/apache-airflow-providers-dbt-cloud/index.rst +++ b/docs/apache-airflow-providers-dbt-cloud/index.rst @@ -39,6 +39,12 @@ Content Python API <_api/airflow/providers/dbt/cloud/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/dbt/cloud/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-dingding/index.rst b/docs/apache-airflow-providers-dingding/index.rst index 4484b46010102..8868f5a621e4d 100644 --- a/docs/apache-airflow-providers-dingding/index.rst +++ b/docs/apache-airflow-providers-dingding/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/dingding/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/dingding/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-docker/index.rst b/docs/apache-airflow-providers-docker/index.rst index 5756d402b4152..2588d67ea88d1 100644 --- a/docs/apache-airflow-providers-docker/index.rst +++ b/docs/apache-airflow-providers-docker/index.rst @@ -29,6 +29,12 @@ Content Connection types Python API <_api/airflow/providers/docker/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/docker/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-elasticsearch/index.rst b/docs/apache-airflow-providers-elasticsearch/index.rst index e7e3da15711f0..bab52f0dd1ff5 100644 --- a/docs/apache-airflow-providers-elasticsearch/index.rst +++ b/docs/apache-airflow-providers-elasticsearch/index.rst @@ -36,15 +36,16 @@ Content Python API <_api/airflow/providers/elasticsearch/index> .. toctree:: - :maxdepth: 1 - :caption: Resources + :hidden: + :caption: System tests - Example DAGs + System Tests <_api/tests/system/providers/elasticsearch/index> .. toctree:: :maxdepth: 1 :caption: Resources + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-github/index.rst b/docs/apache-airflow-providers-github/index.rst index 25ba45e492859..d1a6b9b2c5e49 100644 --- a/docs/apache-airflow-providers-github/index.rst +++ b/docs/apache-airflow-providers-github/index.rst @@ -36,21 +36,16 @@ Content Python API <_api/airflow/providers/github/index> .. toctree:: - :maxdepth: 1 - :caption: Resources - - Example DAGs - -.. toctree:: - :maxdepth: 1 - :caption: Resources + :hidden: + :caption: System tests - PyPI Repository + System Tests <_api/tests/system/providers/github/index> .. 
toctree:: :maxdepth: 1 :caption: Resources + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-google/example-dags.rst b/docs/apache-airflow-providers-google/example-dags.rst index c745389993c21..6c11c5faca079 100644 --- a/docs/apache-airflow-providers-google/example-dags.rst +++ b/docs/apache-airflow-providers-google/example-dags.rst @@ -21,7 +21,8 @@ Example DAGs You can learn how to use Google integrations by analyzing the source code of the example DAGs: * `Google Ads `__ -* `Google Cloud `__ +* `Google Cloud (legacy) `__ +* `Google Cloud `__ * `Google Firebase `__ * `Google Marketing Platform `__ * `Google Workplace `__ (formerly Google Suite) diff --git a/docs/apache-airflow-providers-google/index.rst b/docs/apache-airflow-providers-google/index.rst index 3f20ba48fc2eb..d3c1ccc74498b 100644 --- a/docs/apache-airflow-providers-google/index.rst +++ b/docs/apache-airflow-providers-google/index.rst @@ -38,6 +38,12 @@ Content Python API <_api/airflow/providers/google/index> Configuration +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/google/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst b/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst index 96038ce8c191a..57e4d87ff8c03 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst @@ -42,7 +42,7 @@ Create dataset To create an empty dataset in a BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_dataset] @@ -58,7 +58,7 @@ To get the details of an existing dataset you can use This operator returns a `Dataset Resource `__. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_dataset] @@ -72,7 +72,7 @@ List tables in dataset To retrieve the list of tables in a given dataset use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryGetDatasetTablesOperator`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_dataset_tables] @@ -89,7 +89,7 @@ To update a table in BigQuery you can use The update method replaces the entire Table resource, whereas the patch method only replaces fields that are provided in the submitted Table resource. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_table] @@ -106,7 +106,7 @@ To update a dataset in BigQuery you can use The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_dataset] @@ -120,7 +120,7 @@ Delete dataset To delete an existing dataset from a BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteDatasetOperator`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_dataset] @@ -143,7 +143,7 @@ ways. You may either directly pass the schema fields in, or you may point the operator to a Google Cloud Storage object name. The object in Google Cloud Storage must be a JSON file with the schema fields in it. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_table] @@ -151,7 +151,7 @@ Storage must be a JSON file with the schema fields in it. You can use this operator to create a view on top of an existing table. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_view] @@ -160,7 +160,7 @@ You can use this operator to create a view on top of an existing table. You can also use this operator to create a materialized view that periodically cache results of a query for increased performance and efficiency. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_materialized_view] @@ -179,7 +179,7 @@ Similarly to :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyTableOperator` you can directly pass the schema fields in. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_operations.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_external_table] @@ -187,7 +187,7 @@ you can directly pass the schema fields in. Or you may point the operator to a Google Cloud Storage object name where the schema is stored. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_table_schema_json] @@ -211,7 +211,7 @@ returned list will be equal to the number of rows fetched. Each element in the list will again be a list where elements would represent the column values for that row. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_get_data] @@ -228,7 +228,7 @@ To upsert a table you can use This operator either updates the existing table or creates a new, empty table in the given dataset. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_upsert_table] @@ -245,7 +245,7 @@ To update the schema of a table you can use This operator updates the schema field values supplied, while leaving the rest unchanged. This is useful for instance to set new field descriptions on an existing table schema. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_table_schema] @@ -259,7 +259,7 @@ Delete table To delete an existing table you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteTableOperator`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_table] @@ -267,7 +267,7 @@ To delete an existing table you can use You can also use this operator to delete a view. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_view] @@ -275,7 +275,7 @@ You can also use this operator to delete a view. You can also use this operator to delete a materialized view. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_materialized_view] @@ -288,7 +288,7 @@ Execute BigQuery jobs Let's say you would like to execute the following query. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 0 :start-after: [START howto_operator_bigquery_query] @@ -298,7 +298,7 @@ To execute the SQL query in a specific BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryInsertJobOperator` with proper query job configuration that can be Jinja templated. -.. 
exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_insert_job] @@ -310,7 +310,7 @@ For more information on types of BigQuery job please check If you want to include some files in your configuration you can use ``include`` clause of Jinja template language as follow: -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_select_job] @@ -339,7 +339,7 @@ This operator expects a sql query that will return a single row. Each value on that first row is evaluated using python ``bool`` casting. If any of the values return ``False`` the check is failed and errors out. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_check] @@ -357,7 +357,7 @@ This operator expects a sql query that will return a single row. Each value on that first row is evaluated against ``pass_value`` which can be either a string or numeric value. If numeric, you can also specify ``tolerance``. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_value_check] @@ -372,7 +372,7 @@ To check that the values of metrics given as SQL expressions are within a certai tolerance of the ones from ``days_back`` before you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryIntervalCheckOperator`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_interval_check] @@ -390,7 +390,7 @@ use the ``{{ ds_nodash }}`` macro as the table name suffix. :class:`~airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistenceSensor`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table] @@ -402,7 +402,7 @@ Check that a Table Partition exists To check that a table exists and has a partition you can use. :class:`~airflow.providers.google.cloud.sensors.bigquery.BigQueryTablePartitionExistenceSensor`. -.. exampleinclude:: /../../tests/system/providers/google/bigquery/example_bigquery_sensors.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table_partition] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst index 0e9a2a872c660..6855235d7a66c 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst @@ -42,7 +42,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_create] @@ -50,7 +50,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Example request body: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_db_create_body] :end-before: [END howto_operator_cloudsql_db_create_body] @@ -87,7 +87,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_delete] @@ -127,7 +127,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_patch] @@ -135,7 +135,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Example request body: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_db_patch_body] :end-before: [END howto_operator_cloudsql_db_patch_body] @@ -174,7 +174,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_delete] @@ -183,7 +183,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Note: If the instance has read or failover replicas you need to delete them before you delete the primary instance. 
Replicas are deleted the same way as primary instances: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_replicas_delete] @@ -224,7 +224,7 @@ Arguments Example body defining the export operation: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_export_body] :end-before: [END howto_operator_cloudsql_export_body] @@ -235,7 +235,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_export] @@ -269,7 +269,7 @@ To grant the service account with the appropriate WRITE permissions for the GCS you can use the :class:`~airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator`, as shown in the example: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_export_gcs_permissions] @@ -309,7 +309,7 @@ Arguments Example body defining the import operation: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_import_body] :end-before: [END howto_operator_cloudsql_import_body] @@ -320,7 +320,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_import] @@ -354,7 +354,7 @@ To grant the service account with the appropriate READ permissions for the GCS o you can use the :class:`~airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator`, as shown in the example: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_import_gcs_permissions] @@ -380,14 +380,14 @@ Arguments Example body defining the instance with failover replica: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_create_body] :end-before: [END howto_operator_cloudsql_create_body] Example body defining read replica for the instance above: -.. 
exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_create_replica] :end-before: [END howto_operator_cloudsql_create_replica] @@ -401,7 +401,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_create] @@ -441,7 +441,7 @@ Arguments Example body defining the instance: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_patch_body] :end-before: [END howto_operator_cloudsql_patch_body] @@ -452,7 +452,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_patch] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst b/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst index cd178e5f6eeea..4bcb3dc089eec 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst @@ -43,7 +43,7 @@ For more information about the available fields to pass when creating a cluster, A cluster configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_hive.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster] @@ -52,7 +52,7 @@ A cluster configuration can look as followed: With this configuration we can create the cluster: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator` -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_hive.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_operator] @@ -60,7 +60,7 @@ With this configuration we can create the cluster: For create Dataproc cluster in Google Kubernetes Engine you should use this cluster configuration: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_gke.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster_in_gke_config] @@ -69,7 +69,7 @@ For create Dataproc cluster in Google Kubernetes Engine you should use this clus With this configuration we can create the cluster: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator` -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_gke.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_operator_in_gke] @@ -82,7 +82,7 @@ this could be easily done using **make()** of :class:`~airflow.providers.google.cloud.operators.dataproc.ClusterGenerator` You can generate and use config as followed: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_cluster_generator.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster_generate_cluster_config] @@ -96,7 +96,7 @@ For more information on updateMask and other parameters take a look at `Dataproc An example of a new cluster config and the updateMask: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_update.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_update.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_updatemask_cluster_operator] @@ -105,7 +105,7 @@ An example of a new cluster config and the updateMask: To update a cluster you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocUpdateClusterOperator` -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_update.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_update.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_update_cluster_operator] @@ -118,7 +118,7 @@ To delete a cluster you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocDeleteClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_hive.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_delete_cluster_operator] @@ -137,7 +137,7 @@ file system. You can specify a file:/// path to refer to a local file on a clust The job configuration can be submitted by using: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocSubmitJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_pyspark.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_submit_job_to_cluster_operator] @@ -152,7 +152,7 @@ There are more arguments to provide in the jobs than the examples show. For the Example of the configuration for a PySpark Job: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_pyspark.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_pyspark_config] @@ -160,7 +160,7 @@ Example of the configuration for a PySpark Job: Example of the configuration for a SparkSQl Job: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_spark_sql.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_sparksql_config] @@ -168,7 +168,7 @@ Example of the configuration for a SparkSQl Job: Example of the configuration for a Spark Job: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_spark.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_spark_config] @@ -176,7 +176,7 @@ Example of the configuration for a Spark Job: Example of the configuration for a Hive Job: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_hive.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_hive_config] @@ -184,7 +184,7 @@ Example of the configuration for a Hive Job: Example of the configuration for a Hadoop Job: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_hadoop.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_hadoop_config] @@ -192,7 +192,7 @@ Example of the configuration for a Hadoop Job: Example of the configuration for a Pig Job: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_pig.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_pig_config] @@ -201,7 +201,7 @@ Example of the configuration for a Pig Job: Example of the configuration for a SparkR: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_sparkr.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_sparkr_config] @@ -215,7 +215,7 @@ Dataproc supports creating workflow templates that can be triggered later on. A workflow template can be created using: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocCreateWorkflowTemplateOperator`. -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_workflow.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_workflow_template] @@ -224,7 +224,7 @@ A workflow template can be created using: Once a workflow is created users can trigger it using :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocInstantiateWorkflowTemplateOperator`: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_workflow.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_trigger_workflow_template] @@ -233,7 +233,7 @@ Once a workflow is created users can trigger it using The inline operator is an alternative. It creates a workflow, run it, and delete it afterwards: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocInstantiateInlineWorkflowTemplateOperator`: -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_workflow.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_instantiate_inline_workflow_template] @@ -247,7 +247,7 @@ Dataproc supports creating a batch workload. A batch can be created using: :class: ``~airflow.providers.google.cloud.operators.dataproc.DataprocCreateBatchOperator``. -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_batch_operator] @@ -257,7 +257,7 @@ For creating a batch with Persistent History Server first you should create a Da with specific parameters. Documentation how create cluster you can find here: https://cloud.google.com/dataproc/docs/concepts/jobs/history-server#setting_up_a_persistent_history_server -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_batch_persistent.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_for_persistent_history_server] @@ -265,7 +265,7 @@ https://cloud.google.com/dataproc/docs/concepts/jobs/history-server#setting_up_a After Cluster was created you should add it to the Batch configuration. -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_batch_persistent.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_batch_operator_with_persistent_history_server] @@ -277,7 +277,7 @@ Get a Batch To get a batch you can use: :class: ``~airflow.providers.google.cloud.operators.dataproc.DataprocGetBatchOperator``. -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_get_batch_operator] @@ -289,7 +289,7 @@ List a Batch To get a list of exists batches you can use: :class: ``~airflow.providers.google.cloud.operators.dataproc.DataprocListBatchesOperator``. -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_list_batches_operator] @@ -301,7 +301,7 @@ Delete a Batch To delete a batch you can use: :class: ``~airflow.providers.google.cloud.operators.dataproc.DataprocDeleteBatchOperator``. -.. exampleinclude:: /../../tests/system/providers/google/dataproc/example_dataproc_batch.py +.. 
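As a hedged illustration of the batch workload docs touched here (not part of the diff), a ``DataprocCreateBatchOperator`` call typically takes a batch config dict and an id; all identifiers below are placeholders::

    from airflow.providers.google.cloud.operators.dataproc import DataprocCreateBatchOperator

    # Placeholder configuration for a Spark batch workload.
    BATCH_CONFIG = {
        "spark_batch": {
            "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
            "main_class": "org.apache.spark.examples.SparkPi",
        },
    }

    create_batch = DataprocCreateBatchOperator(
        task_id="create_batch",
        project_id="my-project",      # placeholder
        region="europe-west1",        # placeholder
        batch=BATCH_CONFIG,
        batch_id="example-batch-id",  # placeholder
    )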
exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_delete_batch_operator] diff --git a/docs/apache-airflow-providers-google/operators/cloud/datastore.rst b/docs/apache-airflow-providers-google/operators/cloud/datastore.rst index 4a8e623d6ee14..05da36a3f83a2 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/datastore.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datastore.rst @@ -38,7 +38,7 @@ Export Entities To export entities from Google Cloud Datastore to Cloud Storage use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreExportEntitiesOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_export_import.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_export_import.py :language: python :dedent: 4 :start-after: [START how_to_export_task] @@ -52,7 +52,7 @@ Import Entities To import entities from Cloud Storage to Google Cloud Datastore use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreImportEntitiesOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_export_import.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_export_import.py :language: python :dedent: 4 :start-after: [START how_to_import_task] @@ -66,7 +66,7 @@ Allocate Ids To allocate IDs for incomplete keys use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreAllocateIdsOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_allocate_ids] @@ -74,7 +74,7 @@ To allocate IDs for incomplete keys use An example of a partial keys required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_keys_def] @@ -88,7 +88,7 @@ Begin transaction To begin a new transaction use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreBeginTransactionOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_begin_transaction] @@ -96,7 +96,7 @@ To begin a new transaction use An example of a transaction options required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_transaction_def] @@ -110,7 +110,7 @@ Commit transaction To commit a transaction, optionally creating, deleting or modifying some entities use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreCommitOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_commit_task] @@ -118,7 +118,7 @@ use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreCo An example of a commit information required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_commit.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_commit_def] @@ -132,7 +132,7 @@ Run query To run a query for entities use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRunQueryOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_query.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_query.py :language: python :dedent: 4 :start-after: [START how_to_run_query] @@ -140,7 +140,7 @@ To run a query for entities use An example of a query required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_query.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_query.py :language: python :dedent: 0 :start-after: [START how_to_query_def] @@ -154,7 +154,7 @@ Roll back transaction To roll back a transaction use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRollbackOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_rollback.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_rollback.py :language: python :dedent: 4 :start-after: [START how_to_rollback_transaction] @@ -168,7 +168,7 @@ Get operation state To get the current state of a long-running operation use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreGetOperationOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_export_import.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_export_import.py :language: python :dedent: 4 :start-after: [START get_operation_state] @@ -182,7 +182,7 @@ Delete operation To delete an operation use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreDeleteOperationOperator` -.. exampleinclude:: /../../tests/system/providers/google/datastore/example_datastore_export_import.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_export_import.py :language: python :dedent: 4 :start-after: [START delete_operation] diff --git a/docs/apache-airflow-providers-google/operators/cloud/gcs.rst b/docs/apache-airflow-providers-google/operators/cloud/gcs.rst index 487d786aa2ed3..923e7f7396a75 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/gcs.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/gcs.rst @@ -41,7 +41,7 @@ Use the :class:`~airflow.providers.google.cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator` to execute a BigQuery load job. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_bigquery.py +.. 
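For orientation (not part of the diff), a minimal ``GCSToBigQueryOperator`` load job of the kind the relocated GCS docs describe might look like this, with placeholder bucket, object and table names::

    from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator

    load_csv = GCSToBigQueryOperator(
        task_id="gcs_to_bigquery",
        bucket="my-bucket",                                             # placeholder
        source_objects=["data/example.csv"],                            # placeholder
        destination_project_dataset_table="my_dataset.example_table",   # placeholder
        write_disposition="WRITE_TRUNCATE",
    )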
exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_bigquery] @@ -60,7 +60,7 @@ The time span is defined by the time span's start and end timestamps. If a DAG does not have a *next* DAG instance scheduled, the time span end infinite, meaning the operator processes all files older than ``data_interval_start``. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_transform_timespan.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_transform_timespan.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_timespan_file_transform_operator_Task] @@ -80,7 +80,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_acl.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_acl.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_bucket_create_acl_entry_task] @@ -114,7 +114,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_acl.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_acl.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_object_create_acl_entry_task] @@ -145,7 +145,7 @@ Deleting Bucket allows you to remove bucket object from the Google Cloud Storage It is performed through the :class:`~airflow.providers.google.cloud.operators.gcs.GCSDeleteBucketOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_upload_download.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_delete_bucket] @@ -174,7 +174,7 @@ GCSObjectExistenceSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor` to wait (poll) for the existence of a file in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_exists_task] @@ -187,7 +187,7 @@ GCSObjectsWithPrefixExistenceSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectsWithPrefixExistenceSensor` to wait (poll) for the existence of a file with a specified prefix in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_with_prefix_exists_task] @@ -200,7 +200,7 @@ GCSUploadSessionCompleteSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor` to check for a change in the number of files with a specified prefix in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_sensor.py +.. 
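As a rough sketch of the GCS sensors documented above (not part of the diff; bucket and object names are placeholders)::

    from airflow.providers.google.cloud.sensors.gcs import (
        GCSObjectExistenceSensor,
        GCSObjectsWithPrefixExistenceSensor,
    )

    # Wait for a specific object to appear in the bucket.
    wait_for_file = GCSObjectExistenceSensor(
        task_id="wait_for_file",
        bucket="my-bucket",          # placeholder
        object="data/example.csv",   # placeholder
    )

    # Wait for any object with the given prefix to appear.
    wait_for_prefix = GCSObjectsWithPrefixExistenceSensor(
        task_id="wait_for_prefix",
        bucket="my-bucket",  # placeholder
        prefix="data/",      # placeholder
    )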
exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_gcs_upload_session_complete_task] @@ -213,7 +213,7 @@ GCSObjectUpdateSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor` to check if an object is updated in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_update_exists_task] diff --git a/docs/apache-airflow-providers-google/operators/cloud/index.rst b/docs/apache-airflow-providers-google/operators/cloud/index.rst index 79c347e269c5b..22ded79012385 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/index.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/index.rst @@ -29,4 +29,4 @@ Google Cloud Operators .. note:: You can learn how to use Google Cloud integrations by analyzing the - `source code `_ of the particular example DAGs. + `source code `_ of the particular example DAGs. diff --git a/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst index 39da66207a965..23ed67d8aa9b1 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst @@ -43,7 +43,7 @@ Create GKE cluster Here is an example of a cluster definition: -.. exampleinclude:: /../../tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :start-after: [START howto_operator_gcp_gke_create_cluster_definition] :end-before: [END howto_operator_gcp_gke_create_cluster_definition] @@ -53,7 +53,7 @@ A dict object like this, or a definition, is required when creating a cluster with :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKECreateClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_create_cluster] @@ -68,7 +68,7 @@ To delete a cluster, use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEDeleteClusterOperator`. This would also delete all the nodes allocated to the cluster. -.. exampleinclude:: /../../tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_delete_cluster] @@ -117,7 +117,7 @@ is the path ``/airflow/xcom``. To provide values to the XCom, ensure your Pod wr ``return.json`` in the sidecar. The contents of this can then be used downstream in your DAG. Here is an example of it being used: -.. exampleinclude:: /../../tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py +.. 
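For context (an illustrative sketch with placeholder project, zone and cluster values, not part of the diff), creating and tearing down a GKE cluster with the operators referenced above typically looks like::

    from airflow.providers.google.cloud.operators.kubernetes_engine import (
        GKECreateClusterOperator,
        GKEDeleteClusterOperator,
    )

    # Minimal placeholder cluster definition.
    CLUSTER = {"name": "example-cluster", "initial_node_count": 1}

    create_cluster = GKECreateClusterOperator(
        task_id="create_cluster",
        project_id="my-project",    # placeholder
        location="europe-west1-b",  # placeholder
        body=CLUSTER,
    )

    delete_cluster = GKEDeleteClusterOperator(
        task_id="delete_cluster",
        project_id="my-project",    # placeholder
        location="europe-west1-b",  # placeholder
        name="example-cluster",
    )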
exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_start_pod_xcom] @@ -125,7 +125,7 @@ Here is an example of it being used: And then use it in other operators: -.. exampleinclude:: /../../tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_xcom_result] diff --git a/docs/apache-airflow-providers-google/operators/cloud/spanner.rst b/docs/apache-airflow-providers-google/operators/cloud/spanner.rst index ff4b0dda8e721..5ec51982f77cc 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/spanner.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/spanner.rst @@ -41,7 +41,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/spanner/example_spanner.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_deploy] @@ -80,7 +80,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/spanner/example_spanner.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_delete] @@ -120,7 +120,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/spanner/example_spanner.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_deploy] @@ -164,13 +164,13 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/spanner/example_spanner.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_update] :end-before: [END howto_operator_spanner_database_update] -.. exampleinclude:: /../../tests/system/providers/google/spanner/example_spanner.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_update_idempotent] @@ -207,7 +207,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/spanner/example_spanner.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_query] @@ -246,7 +246,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/spanner/example_spanner.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_delete] diff --git a/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst b/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst index 42854d036e757..80a77a1f5b0b5 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst @@ -42,14 +42,14 @@ google.cloud.speech_v1.types module for more information, see: https://googleapis.github.io/google-cloud-python/latest/speech/gapic/v1/api.html#google.cloud.speech_v1.SpeechClient.recognize -.. exampleinclude:: /../../tests/system/providers/google/speech_to_text/example_speech_to_text.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py :language: python :start-after: [START howto_operator_text_to_speech_api_arguments] :end-before: [END howto_operator_text_to_speech_api_arguments] filename is a simple string argument: -.. exampleinclude:: /../../tests/system/providers/google/speech_to_text/example_speech_to_text.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py :language: python :start-after: [START howto_operator_speech_to_text_api_arguments] :end-before: [END howto_operator_speech_to_text_api_arguments] @@ -57,7 +57,7 @@ filename is a simple string argument: Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/speech_to_text/example_speech_to_text.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py :language: python :dedent: 4 :start-after: [START howto_operator_speech_to_text_recognize] diff --git a/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst b/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst index e081c835dcd6d..463c2dbc84f00 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst @@ -40,7 +40,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_list_alert_policy] @@ -60,7 +60,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/stackdriver/example_stackdriver.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_enable_alert_policy] @@ -80,7 +80,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_alert_policy] @@ -101,7 +101,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_upsert_alert_policy] @@ -120,7 +120,7 @@ Using the operator The name of the alert to be deleted should be given in the format projects//alertPolicies/ -.. exampleinclude:: /../../tests/system/providers/google/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_delete_alert_policy] @@ -140,7 +140,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_list_notification_channel] @@ -160,7 +160,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_enable_notification_channel] @@ -180,7 +180,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_notification_channel] @@ -201,7 +201,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/stackdriver/example_stackdriver.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_notification_channel] @@ -220,7 +220,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_delete_notification_channel] diff --git a/docs/apache-airflow-providers-google/operators/cloud/tasks.rst b/docs/apache-airflow-providers-google/operators/cloud/tasks.rst index 875ae81920bbc..291bd44e46c5f 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/tasks.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/tasks.rst @@ -41,7 +41,7 @@ Create queue To create new Queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueCreateOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_queue.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START create_queue] @@ -55,7 +55,7 @@ Delete queue To delete Queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueDeleteOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_queue.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START delete_queue] @@ -70,7 +70,7 @@ Resume queue To resume Queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueResumeOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_queue.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START resume_queue] @@ -84,7 +84,7 @@ Pause queue To pause Queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueuePauseOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_queue.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START pause_queue] @@ -98,7 +98,7 @@ Purge queue To purge Queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueuePurgeOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_queue.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START purge_queue] @@ -112,7 +112,7 @@ Get queue To get Queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueGetOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_queue.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START get_queue] @@ -126,7 +126,7 @@ Update queue To update Queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueUpdateOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_queue.py +.. 
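As a hedged sketch of the Cloud Tasks queue operators documented above (not part of the diff; location and queue name are placeholders)::

    from google.cloud.tasks_v2.types import Queue

    from airflow.providers.google.cloud.operators.tasks import CloudTasksQueueCreateOperator

    create_queue = CloudTasksQueueCreateOperator(
        task_id="create_queue",
        location="europe-west1",     # placeholder
        queue_name="example-queue",  # placeholder
        task_queue=Queue(),          # empty Queue object; real configs add rate limits etc.
    )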
exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START update_queue] @@ -140,7 +140,7 @@ List queues To list all Queues use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueuesListOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_queue.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START list_queue] @@ -158,7 +158,7 @@ Create task To create new Task in a particular queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskCreateOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_tasks.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START create_task] @@ -172,7 +172,7 @@ Get task To get the Tasks in a particular queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskGetOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_tasks.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START tasks_get] @@ -186,7 +186,7 @@ Run task To run the Task in a particular queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskRunOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_tasks.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START run_task] @@ -200,7 +200,7 @@ List tasks To list all Tasks in a particular queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTasksListOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_tasks.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START list_tasks] @@ -214,7 +214,7 @@ Delete task To delete the Task from particular queue use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskDeleteOperator` -.. exampleinclude:: /../../tests/system/providers/google/tasks/example_tasks.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START create_task] diff --git a/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst b/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst index 97fb90fd8dddd..f79ca28336cd3 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst @@ -42,14 +42,14 @@ The ``input``, ``voice`` and ``audio_config`` arguments need to be dicts or obje for more information, see: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/api.html#google.cloud.texttospeech_v1.TextToSpeechClient.synthesize_speech -.. exampleinclude:: /../../tests/system/providers/google/text_to_speech/example_text_to_speech.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py :language: python :start-after: [START howto_operator_text_to_speech_api_arguments] :end-before: [END howto_operator_text_to_speech_api_arguments] The ``filename`` argument is a simple string argument: -.. 
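For orientation on the Cloud Tasks task operators covered above (a sketch with assumed placeholder values, not part of the diff)::

    from airflow.providers.google.cloud.operators.tasks import CloudTasksTaskCreateOperator

    # Placeholder task body; an App Engine handler at this relative URI is assumed to exist.
    TASK = {
        "app_engine_http_request": {
            "http_method": "POST",
            "relative_uri": "/example/handler",
        }
    }

    create_task = CloudTasksTaskCreateOperator(
        task_id="create_task",
        location="europe-west1",     # placeholder
        queue_name="example-queue",  # placeholder
        task=TASK,
    )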
exampleinclude:: /../../tests/system/providers/google/text_to_speech/example_text_to_speech.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py :language: python :start-after: [START howto_operator_text_to_speech_gcp_filename] :end-before: [END howto_operator_text_to_speech_gcp_filename] @@ -57,7 +57,7 @@ The ``filename`` argument is a simple string argument: Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/text_to_speech/example_text_to_speech.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py :language: python :dedent: 4 :start-after: [START howto_operator_text_to_speech_synthesize] diff --git a/docs/apache-airflow-providers-google/operators/cloud/workflows.rst b/docs/apache-airflow-providers-google/operators/cloud/workflows.rst index 0cea43f0f79bd..9eafcf35a22ee 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/workflows.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/workflows.rst @@ -39,7 +39,7 @@ Create workflow To create a workflow use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsCreateWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/workflows/example_workflows.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_create_workflow] @@ -47,7 +47,7 @@ To create a workflow use The workflow should be define in similar why to this example: -.. exampleinclude:: /../../tests/system/providers/google/workflows/example_workflows.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py :language: python :dedent: 0 :start-after: [START how_to_define_workflow] @@ -65,7 +65,7 @@ Update workflow To update a workflow use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsUpdateWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/workflows/example_workflows.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_update_workflow] @@ -79,7 +79,7 @@ Get workflow To get a workflow use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsGetWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/workflows/example_workflows.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_get_workflow] @@ -93,7 +93,7 @@ List workflows To list workflows use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsListWorkflowsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/workflows/example_workflows.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_list_workflows] @@ -107,7 +107,7 @@ Delete workflow To delete a workflow use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsDeleteWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/workflows/example_workflows.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_delete_workflow] @@ -122,7 +122,7 @@ To create an execution use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsCreateExecutionOperator`. This operator is not idempotent due to API limitation. -.. exampleinclude:: /../../tests/system/providers/google/workflows/example_workflows.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_create_execution] @@ -131,7 +131,7 @@ This operator is not idempotent due to API limitation. The create operator does not wait for execution to complete. To wait for execution result use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowExecutionSensor`. -.. exampleinclude:: /../../tests/system/providers/google/workflows/example_workflows.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_wait_for_execution] @@ -145,7 +145,7 @@ Get execution To get an execution use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsGetExecutionOperator`. -.. exampleinclude:: /../../tests/system/providers/google/workflows/example_workflows.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_get_execution] @@ -160,7 +160,7 @@ To list executions use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsListExecutionsOperator`. By default this operator will return only executions for last 60 minutes. -.. exampleinclude:: /../../tests/system/providers/google/workflows/example_workflows.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_list_executions] @@ -174,7 +174,7 @@ Cancel execution To cancel an execution use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsCancelExecutionOperator`. -.. exampleinclude:: /../../tests/system/providers/google/workflows/example_workflows.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_cancel_execution] diff --git a/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst index 1ff6965d68eeb..021dc41787dc5 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst @@ -38,7 +38,7 @@ All parameters are described in the reference documentation - :class:`~airflow.p An example operator call might look like this: -.. exampleinclude:: /../../tests/system/providers/google/azure/example_azure_fileshare_to_gcs.py +.. 
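And, sketched under the same assumptions (not part of the diff), triggering an execution and waiting for its result pairs the create operator with the execution sensor::

    from airflow.providers.google.cloud.operators.workflows import WorkflowsCreateExecutionOperator
    from airflow.providers.google.cloud.sensors.workflows import WorkflowExecutionSensor

    create_execution = WorkflowsCreateExecutionOperator(
        task_id="create_execution",
        execution={"argument": ""},
        workflow_id="example-workflow",  # placeholder
        location="europe-west1",         # placeholder
        project_id="my-project",         # placeholder
    )

    wait_for_execution = WorkflowExecutionSensor(
        task_id="wait_for_execution",
        execution_id="example-execution-id",  # placeholder; usually taken from the create task via XCom
        workflow_id="example-workflow",       # placeholder
        location="europe-west1",              # placeholder
        project_id="my-project",              # placeholder
    )

    create_execution >> wait_for_execution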
exampleinclude:: /../../tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_azure_fileshare_to_gcs_basic] diff --git a/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst index 42dae95bdf232..b31369137e4d2 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst @@ -37,7 +37,7 @@ Upload data from Google Calendar to GCS To upload data from Google Calendar to Google Cloud Storage you can use the :class:`~airflow.providers.google.cloud.transfers.calendar_to_gcs.GoogleCalendarToGCSOperator`. -.. exampleinclude:: /../../tests/system/providers/google/calendar/example_calendar_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py :language: python :dedent: 4 :start-after: [START upload_calendar_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst index 12ba4ce7607a9..b3fa2ac191157 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst @@ -94,7 +94,7 @@ Copy single file The following example would copy a single file, ``OBJECT_1`` from the ``BUCKET_1_SRC`` GCS bucket to the ``BUCKET_1_DST`` bucket. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_single_file] @@ -105,7 +105,7 @@ Copy multiple files There are several ways to copy multiple files, various examples of which are presented following. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_wildcard] @@ -115,7 +115,7 @@ The ``source_object`` value may contain one wild card, denoted as "*". All files be copied. In this example, all root level files ending with ``.txt`` in ``BUCKET_1_SRC`` will be copied to the ``data`` folder in ``BUCKET_1_DST``, with file names unchanged. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_delimiter] @@ -127,7 +127,7 @@ Then copy files from source_objects to destination_object and rename each source The following example would copy all the files in ``subdir/`` folder (i.e subdir/a.csv, subdir/b.csv, subdir/c.csv) from the ``BUCKET_1_SRC`` GCS bucket to the ``backup/`` folder in ``BUCKET_1_DST`` bucket. (i.e backup/a.csv, backup/b.csv, backup/c.csv) -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_without_wildcard] @@ -137,7 +137,7 @@ The delimiter filed may be specified to select any source files starting with `` value supplied to ``delimiter``. 
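For context on the copy and delimiter behaviour discussed here (an illustrative sketch, not part of the diff; the bucket and object names mirror the placeholders used in the prose)::

    from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator

    # Copy a single named object between buckets.
    copy_single_file = GCSToGCSOperator(
        task_id="copy_single_file",
        source_bucket="BUCKET_1_SRC",        # placeholder
        source_object="OBJECT_1",            # placeholder
        destination_bucket="BUCKET_1_DST",   # placeholder
        destination_object="backup_OBJECT_1",
    )

    # Copy every object under data/ ending with .txt, keeping file names unchanged.
    copy_files_with_delimiter = GCSToGCSOperator(
        task_id="copy_files_with_delimiter",
        source_bucket="BUCKET_1_SRC",        # placeholder
        source_object="data/",               # placeholder
        destination_bucket="BUCKET_1_DST",   # placeholder
        destination_object="backup/",
        delimiter=".txt",
    )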
This example uses the ``delimiter`` value to implement the same functionality as the prior example. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_list] @@ -153,7 +153,7 @@ Move single file Supplying ``True`` to the ``move`` argument causes the operator to delete ``source_object`` once the copy is complete. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_single_file_move] @@ -165,7 +165,7 @@ Move multiple files Multiple files may be moved by supplying ``True`` to the ``move`` argument. The same rules concerning wild cards and the ``delimiter`` argument apply to moves as well as copies. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_list_move] @@ -198,7 +198,7 @@ The following example will ensure all files in ``BUCKET_1_SRC``, including any i ``BUCKET_1_DST``. It will not overwrite identically named files in ``BUCKET_1_DST`` if they already exist. It will not delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_bucket] @@ -211,7 +211,7 @@ This example will ensure all files in ``BUCKET_1_SRC``, including any in subdire ``BUCKET_1_DST``. It will overwrite identically named files in ``BUCKET_1_DST`` if they already exist. It will delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_full_bucket] @@ -224,7 +224,7 @@ The following example will ensure all files in ``BUCKET_1_SRC``, including any i ``subdir`` folder in ``BUCKET_1_DST``. It will not overwrite identically named files in ``BUCKET_1_DST/subdir`` if they already exist and it will not delete any files in ``BUCKET_1_DST/subdir`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_to_subdir] @@ -237,7 +237,7 @@ This example will ensure all files in ``BUCKET_1_SRC/subdir``, including any in in ``BUCKET_1_DST``. It will not overwrite identically named files in ``BUCKET_1_DST`` if they already exist and it will not delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC/subdir``. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_to_gcs.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_sync_from_subdir] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst index a64a5dfa8ca1a..95fa3e0d7e81f 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst @@ -37,7 +37,7 @@ data from GCS to local filesystem. Below is an example of using this operator to download a file from GCS. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_upload_download.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py :language: python :dedent: 0 :start-after: [START howto_operator_gcs_download_file_task] diff --git a/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst index 429e7d93506b9..70692da9ec8e1 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst @@ -38,7 +38,7 @@ When you use this operator, you can optionally compress the data being uploaded. Below is an example of using this operator to upload a file to GCS. -.. exampleinclude:: /../../tests/system/providers/google/gcs/example_gcs_upload_download.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py :language: python :dedent: 0 :start-after: [START howto_operator_local_filesystem_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst index 3ad8c48180045..a83e118f89916 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst @@ -38,7 +38,7 @@ Upload data from Google Sheets to GCS To upload data from Google Spreadsheet to Google Cloud Storage you can use the :class:`~airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator`. -.. exampleinclude:: /../../tests/system/providers/google/sheets/example_sheets_to_gcs.py +.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py :language: python :dedent: 4 :start-after: [START upload_sheet_to_gcs] diff --git a/docs/apache-airflow-providers-http/index.rst b/docs/apache-airflow-providers-http/index.rst index d06887fd798cf..28903dd26fa75 100644 --- a/docs/apache-airflow-providers-http/index.rst +++ b/docs/apache-airflow-providers-http/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/http/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/http/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-influxdb/index.rst b/docs/apache-airflow-providers-influxdb/index.rst index f38ef58721991..b5f542ea5e4af 100644 --- a/docs/apache-airflow-providers-influxdb/index.rst +++ b/docs/apache-airflow-providers-influxdb/index.rst @@ -36,21 +36,16 @@ Content Python API <_api/airflow/providers/influxdb/index> .. toctree:: - :maxdepth: 1 - :caption: Resources - - Example DAGs - -.. 
toctree:: - :maxdepth: 1 - :caption: Resources + :hidden: + :caption: System tests - PyPI Repository + System Tests <_api/tests/system/providers/influxdb/index> .. toctree:: :maxdepth: 1 :caption: Resources + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-jdbc/index.rst b/docs/apache-airflow-providers-jdbc/index.rst index 4161462aa3fc8..e1549b6b2e90c 100644 --- a/docs/apache-airflow-providers-jdbc/index.rst +++ b/docs/apache-airflow-providers-jdbc/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/jdbc/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/jdbc/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-jenkins/index.rst b/docs/apache-airflow-providers-jenkins/index.rst index 2bc16e33133f7..2f7d699d403f5 100644 --- a/docs/apache-airflow-providers-jenkins/index.rst +++ b/docs/apache-airflow-providers-jenkins/index.rst @@ -33,6 +33,12 @@ Content Python API <_api/airflow/providers/jenkins/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/jenkins/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-microsoft-azure/index.rst b/docs/apache-airflow-providers-microsoft-azure/index.rst index 647ffe9acd96d..8985be1ba7165 100644 --- a/docs/apache-airflow-providers-microsoft-azure/index.rst +++ b/docs/apache-airflow-providers-microsoft-azure/index.rst @@ -37,6 +37,12 @@ Content Python API <_api/airflow/providers/microsoft/azure/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/microsoft/azure/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-microsoft-mssql/index.rst b/docs/apache-airflow-providers-microsoft-mssql/index.rst index a73a25e25391e..d95dd0ca1efb8 100644 --- a/docs/apache-airflow-providers-microsoft-mssql/index.rst +++ b/docs/apache-airflow-providers-microsoft-mssql/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/microsoft/mssql/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/microsoft/mssql/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-microsoft-winrm/index.rst b/docs/apache-airflow-providers-microsoft-winrm/index.rst index 51a6de87842a3..39b95a9f3b836 100644 --- a/docs/apache-airflow-providers-microsoft-winrm/index.rst +++ b/docs/apache-airflow-providers-microsoft-winrm/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/microsoft/winrm/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/microsoft/winrm/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-mysql/index.rst b/docs/apache-airflow-providers-mysql/index.rst index 6173a4776084f..0a60faf554f4d 100644 --- a/docs/apache-airflow-providers-mysql/index.rst +++ b/docs/apache-airflow-providers-mysql/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/mysql/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/mysql/index> + .. 
toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-neo4j/index.rst b/docs/apache-airflow-providers-neo4j/index.rst index f0db721140273..b41a97d0b9312 100644 --- a/docs/apache-airflow-providers-neo4j/index.rst +++ b/docs/apache-airflow-providers-neo4j/index.rst @@ -36,15 +36,16 @@ Content Python API <_api/airflow/providers/neo4j/index> .. toctree:: - :maxdepth: 1 - :caption: Resources + :hidden: + :caption: System tests - Example DAGs + System Tests <_api/tests/system/providers/neo4j/index> .. toctree:: :maxdepth: 1 :caption: Resources + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-opsgenie/index.rst b/docs/apache-airflow-providers-opsgenie/index.rst index e8ee514b43d81..5ccc858c2bccd 100644 --- a/docs/apache-airflow-providers-opsgenie/index.rst +++ b/docs/apache-airflow-providers-opsgenie/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/opsgenie/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/opsgenie/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-papermill/index.rst b/docs/apache-airflow-providers-papermill/index.rst index 557146e375db1..80192dda578de 100644 --- a/docs/apache-airflow-providers-papermill/index.rst +++ b/docs/apache-airflow-providers-papermill/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/papermill/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/papermill/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-plexus/index.rst b/docs/apache-airflow-providers-plexus/index.rst index 952c638f0a439..afc022b80930e 100644 --- a/docs/apache-airflow-providers-plexus/index.rst +++ b/docs/apache-airflow-providers-plexus/index.rst @@ -29,15 +29,16 @@ Content Python API <_api/airflow/providers/plexus/index> .. toctree:: - :maxdepth: 1 - :caption: Resources + :hidden: + :caption: System tests - Example DAGs + System Tests <_api/tests/system/providers/plexus/index> .. toctree:: :maxdepth: 1 :caption: Resources + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-postgres/index.rst b/docs/apache-airflow-providers-postgres/index.rst index 6ab9dde028b4a..b7a15c5913276 100644 --- a/docs/apache-airflow-providers-postgres/index.rst +++ b/docs/apache-airflow-providers-postgres/index.rst @@ -36,15 +36,16 @@ Content Python API <_api/airflow/providers/postgres/index> .. toctree:: - :maxdepth: 1 - :caption: Resources + :hidden: + :caption: System tests - Example DAGs + System Tests <_api/tests/system/providers/postgres/index> .. toctree:: :maxdepth: 1 :caption: Resources + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-presto/index.rst b/docs/apache-airflow-providers-presto/index.rst index 94ad584e1d489..6394a1a0efb73 100644 --- a/docs/apache-airflow-providers-presto/index.rst +++ b/docs/apache-airflow-providers-presto/index.rst @@ -40,6 +40,12 @@ Content Python API <_api/airflow/providers/presto/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/presto/index> + .. 
toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-qubole/index.rst b/docs/apache-airflow-providers-qubole/index.rst index a4901bed04be0..e3fd8ddd5afe0 100644 --- a/docs/apache-airflow-providers-qubole/index.rst +++ b/docs/apache-airflow-providers-qubole/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/qubole/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/qubole/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-salesforce/index.rst b/docs/apache-airflow-providers-salesforce/index.rst index eceeafa00b909..8d50dade333b3 100644 --- a/docs/apache-airflow-providers-salesforce/index.rst +++ b/docs/apache-airflow-providers-salesforce/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/salesforce/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/salesforce/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-singularity/index.rst b/docs/apache-airflow-providers-singularity/index.rst index cb6d18708c82e..7144a5ade2195 100644 --- a/docs/apache-airflow-providers-singularity/index.rst +++ b/docs/apache-airflow-providers-singularity/index.rst @@ -28,6 +28,12 @@ Content Python API <_api/airflow/providers/singularity/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/singularity/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-snowflake/index.rst b/docs/apache-airflow-providers-snowflake/index.rst index 64c6abfbaa74f..0127024d68ec1 100644 --- a/docs/apache-airflow-providers-snowflake/index.rst +++ b/docs/apache-airflow-providers-snowflake/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/snowflake/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/snowflake/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-sqlite/index.rst b/docs/apache-airflow-providers-sqlite/index.rst index 48b274abf95a7..ff4a763594ed5 100644 --- a/docs/apache-airflow-providers-sqlite/index.rst +++ b/docs/apache-airflow-providers-sqlite/index.rst @@ -36,15 +36,16 @@ Content Python API <_api/airflow/providers/sqlite/index> .. toctree:: - :maxdepth: 1 - :caption: Resources + :hidden: + :caption: System tests - Example DAGs + System Tests <_api/tests/system/providers/sqlite/index> .. toctree:: :maxdepth: 1 :caption: Resources + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-tableau/index.rst b/docs/apache-airflow-providers-tableau/index.rst index 755088d333529..1b851d64e78de 100644 --- a/docs/apache-airflow-providers-tableau/index.rst +++ b/docs/apache-airflow-providers-tableau/index.rst @@ -30,6 +30,12 @@ Content Operators Python API <_api/airflow/providers/tableau/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/tableau/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-telegram/index.rst b/docs/apache-airflow-providers-telegram/index.rst index c376f366c8e2f..39a5898b37521 100644 --- a/docs/apache-airflow-providers-telegram/index.rst +++ b/docs/apache-airflow-providers-telegram/index.rst @@ -34,6 +34,12 @@ Content Python API <_api/airflow/providers/telegram/index> +.. 
toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/telegram/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-trino/index.rst b/docs/apache-airflow-providers-trino/index.rst index a3bad87196f6a..5c89cc08976fe 100644 --- a/docs/apache-airflow-providers-trino/index.rst +++ b/docs/apache-airflow-providers-trino/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/trino/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/trino/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-yandex/index.rst b/docs/apache-airflow-providers-yandex/index.rst index 32dfa81900a7b..093ba90c275ac 100644 --- a/docs/apache-airflow-providers-yandex/index.rst +++ b/docs/apache-airflow-providers-yandex/index.rst @@ -35,6 +35,12 @@ Content Python API <_api/airflow/providers/yandex/index> +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/yandex/index> + .. toctree:: :maxdepth: 1 :caption: Resources diff --git a/docs/apache-airflow-providers-zendesk/index.rst b/docs/apache-airflow-providers-zendesk/index.rst index b8b6b2faf31c6..084abdf406575 100644 --- a/docs/apache-airflow-providers-zendesk/index.rst +++ b/docs/apache-airflow-providers-zendesk/index.rst @@ -27,6 +27,17 @@ Content :caption: References Python API <_api/airflow/providers/zendesk/index> + +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/zendesk/index> + +.. toctree:: + :maxdepth: 1 + :caption: Resources + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow/python-api-ref.rst b/docs/apache-airflow/python-api-ref.rst index e8c4dff3c499c..28a4545859db6 100644 --- a/docs/apache-airflow/python-api-ref.rst +++ b/docs/apache-airflow/python-api-ref.rst @@ -154,3 +154,12 @@ schedule DAG runs in ways not possible with built-in schedule expressions. :maxdepth: 1 _api/airflow/timetables/index + +Example DAGs +------------ + +.. toctree:: + :includehidden: + :maxdepth: 1 + + _api/airflow/example_dags/index diff --git a/docs/build_docs.py b/docs/build_docs.py index d7edad1cad97e..9f4217bd106c4 100755 --- a/docs/build_docs.py +++ b/docs/build_docs.py @@ -15,17 +15,19 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+ + import argparse import multiprocessing import os import sys from collections import defaultdict -from typing import Dict, List, NamedTuple, Optional, Tuple +from itertools import filterfalse, tee +from typing import Callable, Dict, Iterable, List, NamedTuple, Optional, Tuple, TypeVar from rich.console import Console from tabulate import tabulate -from airflow.utils.helpers import partition from docs.exts.docs_build import dev_index_generator, lint_checks from docs.exts.docs_build.code_utils import CONSOLE_WIDTH, PROVIDER_INIT_FILE from docs.exts.docs_build.docs_builder import DOCS_DIR, AirflowDocsBuilder, get_available_packages @@ -62,6 +64,14 @@ console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH) +T = TypeVar('T') + + +def partition(pred: Callable[[T], bool], iterable: Iterable[T]) -> Tuple[Iterable[T], Iterable[T]]: + """Use a predicate to partition entries into false entries and true entries""" + iter_1, iter_2 = tee(iterable) + return filterfalse(pred, iter_1), filter(pred, iter_2) + def _promote_new_flags(): console.print() diff --git a/docs/conf.py b/docs/conf.py index b1742f0c00202..47334381fc8e4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -35,20 +35,17 @@ import os import sys from collections import defaultdict +from pathlib import Path from typing import Any, Dict, List, Optional, Tuple import yaml -try: - from yaml import CSafeLoader as SafeLoader -except ImportError: - from yaml import SafeLoader # type: ignore[misc] - import airflow from airflow.configuration import AirflowConfigParser, default_config_yaml -from docs.exts.docs_build.third_party_inventories import THIRD_PARTY_INDEXES -sys.path.append(os.path.join(os.path.dirname(__file__), 'exts')) +sys.path.append(str(Path(__file__).parent / 'exts')) + +from docs_build.third_party_inventories import THIRD_PARTY_INDEXES # noqa: E402 CONF_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__))) INVENTORY_CACHE_DIR = os.path.join(CONF_DIR, '_inventory_cache') @@ -61,6 +58,7 @@ if PACKAGE_NAME == 'apache-airflow': PACKAGE_DIR = os.path.join(ROOT_DIR, 'airflow') PACKAGE_VERSION = airflow.__version__ + SYSTEM_TESTS_DIR = None elif PACKAGE_NAME.startswith('apache-airflow-providers-'): from provider_yaml_utils import load_package_data @@ -75,23 +73,27 @@ raise Exception(f"Could not find provider.yaml file for package: {PACKAGE_NAME}") PACKAGE_DIR = CURRENT_PROVIDER['package-dir'] PACKAGE_VERSION = CURRENT_PROVIDER['versions'][0] + SYSTEM_TESTS_DIR = CURRENT_PROVIDER['system-tests-dir'] elif PACKAGE_NAME == 'apache-airflow-providers': from provider_yaml_utils import load_package_data PACKAGE_DIR = os.path.join(ROOT_DIR, 'airflow', 'providers') PACKAGE_VERSION = 'devel' ALL_PROVIDER_YAMLS = load_package_data() + SYSTEM_TESTS_DIR = None elif PACKAGE_NAME == 'helm-chart': PACKAGE_DIR = os.path.join(ROOT_DIR, 'chart') CHART_YAML_FILE = os.path.join(PACKAGE_DIR, 'Chart.yaml') with open(CHART_YAML_FILE) as chart_file: - chart_yaml_contents = yaml.load(chart_file, SafeLoader) + chart_yaml_contents = yaml.safe_load(chart_file) PACKAGE_VERSION = chart_yaml_contents['version'] + SYSTEM_TESTS_DIR = None else: PACKAGE_DIR = None PACKAGE_VERSION = 'devel' + SYSTEM_TESTS_DIR = None # Adds to environment variables for easy access from other plugins like airflow_intersphinx. 
os.environ['AIRFLOW_PACKAGE_NAME'] = PACKAGE_NAME if PACKAGE_DIR: @@ -220,6 +222,7 @@ def _get_rst_filepath_from_path(filepath: str): exclude_patterns.append(f"_api/airflow/{name.rpartition('.')[0]}") browsable_packages = [ "hooks", + "example_dags", "executors", "models", "operators", @@ -318,9 +321,12 @@ def _get_rst_filepath_from_path(filepath: str): html_show_copyright = False # Theme configuration -html_theme_options: Dict[str, Any] = { - 'hide_website_buttons': True, -} +if PACKAGE_NAME.startswith('apache-airflow-providers-'): + # Only hide hidden items for providers. For Chart and Airflow we are using the approach where + # TOC is hidden but sidebar still shows the content (but we are not doing it for providers). + html_theme_options: Dict[str, Any] = {'hide_website_buttons': True, 'sidebar_includehidden': False} +else: + html_theme_options = {'hide_website_buttons': True, 'sidebar_includehidden': True} if FOR_PRODUCTION: html_theme_options['navbar_links'] = [ {'href': '/community/', 'text': 'Community'}, @@ -415,7 +421,7 @@ def _load_config(): return {} with open(file_path) as f: - return yaml.load(f, SafeLoader) + return yaml.safe_load(f) config = _load_config() jinja_contexts = { @@ -668,6 +674,9 @@ def _get_params(root_schema: dict, prefix: str = "", default_section: str = "") PACKAGE_DIR, ] +if SYSTEM_TESTS_DIR and os.path.exists(SYSTEM_TESTS_DIR): + autoapi_dirs.append(SYSTEM_TESTS_DIR) + # A directory that has user-defined templates to override our default templates. if PACKAGE_NAME == 'apache-airflow': autoapi_template_dir = 'autoapi_templates' @@ -675,11 +684,13 @@ def _get_params(root_schema: dict, prefix: str = "", default_section: str = "") # A list of patterns to ignore when finding files autoapi_ignore = [ '*/airflow/_vendor/*', - '*/example_dags/*', '*/_internal*', '*/node_modules/*', '*/migrations/*', '*/contrib/*', + '**/example_sla_dag.py', + '**/example_taskflow_api_etl_docker_virtualenv.py', + '**/example_dag_decorator.py', ] if PACKAGE_NAME == 'apache-airflow': autoapi_ignore.append('*/airflow/providers/*') diff --git a/docs/exts/docs_build/docs_builder.py b/docs/exts/docs_build/docs_builder.py index cc65f3f1700fc..b8fcf6fd64ecf 100644 --- a/docs/exts/docs_build/docs_builder.py +++ b/docs/exts/docs_build/docs_builder.py @@ -24,7 +24,7 @@ from rich.console import Console -from docs.exts.docs_build.code_utils import ( +from .code_utils import ( AIRFLOW_SITE_DIR, ALL_PROVIDER_YAMLS, CONSOLE_WIDTH, @@ -32,9 +32,9 @@ PROCESS_TIMEOUT, pretty_format_path, ) -from docs.exts.docs_build.errors import DocBuildError, parse_sphinx_warnings -from docs.exts.docs_build.helm_chart_utils import chart_version -from docs.exts.docs_build.spelling_checks import SpellingError, parse_spelling_warnings +from .errors import DocBuildError, parse_sphinx_warnings +from .helm_chart_utils import chart_version +from .spelling_checks import SpellingError, parse_spelling_warnings console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH) diff --git a/docs/exts/provider_yaml_utils.py b/docs/exts/provider_yaml_utils.py index a6d1ee297d560..085cd480c722f 100644 --- a/docs/exts/provider_yaml_utils.py +++ b/docs/exts/provider_yaml_utils.py @@ -18,19 +18,14 @@ import json import os from glob import glob +from pathlib import Path from typing import Any, Dict, List import jsonschema import yaml -try: - from yaml import CSafeLoader as SafeLoader -except ImportError: - from yaml import SafeLoader # type: ignore[misc] - - -ROOT_DIR = 
os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) -PROVIDER_DATA_SCHEMA_PATH = os.path.join(ROOT_DIR, "airflow", "provider.yaml.schema.json") +ROOT_DIR = Path(__file__).parents[2].resolve() +PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR / "airflow" / "provider.yaml.schema.json" def _load_schema() -> Dict[str, Any]: @@ -40,8 +35,17 @@ def _load_schema() -> Dict[str, Any]: def _filepath_to_module(filepath: str): - filepath = os.path.relpath(os.path.abspath(filepath), ROOT_DIR) - return filepath.replace("/", ".") + return str(Path(filepath).relative_to(ROOT_DIR)).replace("/", ".") + + +def _filepath_to_system_tests(filepath: str): + return str( + ROOT_DIR + / "tests" + / "system" + / "providers" + / Path(filepath).relative_to(ROOT_DIR / "airflow" / "providers") + ) def get_provider_yaml_paths(): @@ -59,12 +63,14 @@ def load_package_data() -> List[Dict[str, Any]]: result = [] for provider_yaml_path in get_provider_yaml_paths(): with open(provider_yaml_path) as yaml_file: - provider = yaml.load(yaml_file, SafeLoader) + provider = yaml.safe_load(yaml_file) try: jsonschema.validate(provider, schema=schema) except jsonschema.ValidationError: raise Exception(f"Unable to parse: {provider_yaml_path}.") - provider['python-module'] = _filepath_to_module(os.path.dirname(provider_yaml_path)) - provider['package-dir'] = os.path.dirname(provider_yaml_path) + provider_yaml_dir = os.path.dirname(provider_yaml_path) + provider['python-module'] = _filepath_to_module(provider_yaml_dir) + provider['package-dir'] = provider_yaml_dir + provider['system-tests-dir'] = _filepath_to_system_tests(provider_yaml_dir) result.append(provider) return result diff --git a/docs/publish_docs.py b/docs/publish_docs.py index 60c89d10e424c..7451033f16c20 100755 --- a/docs/publish_docs.py +++ b/docs/publish_docs.py @@ -17,6 +17,7 @@ # specific language governing permissions and limitations # under the License. + import argparse import os @@ -26,6 +27,7 @@ AIRFLOW_SITE_DIR = os.environ.get('AIRFLOW_SITE_DIRECTORY') + if __name__ != "__main__": raise SystemExit( "This file is intended to be executed as an executable program. You cannot use it as a module." 
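For illustration, the `system-tests-dir` value that `load_package_data` now attaches to every provider (computed by the new `_filepath_to_system_tests` helper above) simply mirrors the provider package directory under `tests/system/providers`; `docs/conf.py` then appends that directory to `autoapi_dirs` when it exists, which is what the hidden "System Tests" toctree entries point at. A minimal, self-contained sketch of the mapping (the repository root and the `postgres` provider path are assumed purely for the example):

```python
from pathlib import Path

# Assumed repository root, for illustration only.
ROOT_DIR = Path("/opt/airflow")


def filepath_to_system_tests(filepath: str) -> str:
    """Mirror airflow/providers/<pkg> onto tests/system/providers/<pkg>."""
    return str(
        ROOT_DIR
        / "tests"
        / "system"
        / "providers"
        / Path(filepath).relative_to(ROOT_DIR / "airflow" / "providers")
    )


# The postgres provider package directory ...
print(filepath_to_system_tests("/opt/airflow/airflow/providers/postgres"))
# ... maps to /opt/airflow/tests/system/providers/postgres
```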
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index eb8f6d7795c27..1e3142b654306 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -193,6 +193,7 @@ Hou Http HttpError HttpRequest +IGM IdP ImageAnnotatorClient Imap @@ -281,6 +282,7 @@ OSS Oauth Oauthlib Okta +OnFailure Oozie Opsgenie Optimise @@ -795,6 +797,7 @@ evals eventlet evo exasol +executables execvp exitcode explicit @@ -820,12 +823,14 @@ filehandle fileloc filelocs filepath +fileshare filesize filesystem filesystems filetype finalizers findall +firestore firstname fluentd fmt @@ -874,6 +879,7 @@ greenlet groupId grpc gz +gzipped hadoop hadoopcmd hardcoded @@ -1189,6 +1195,7 @@ preloading prepend prepended preprocess +preprocessing presign presigned prestocmd @@ -1345,6 +1352,7 @@ sourceRepository sourceUploadUrl sparkApplication sparkcmd +sparkr sparksql spegno spotahome @@ -1475,6 +1483,7 @@ tsql tsv ttl tunables +twitterHandle txt typeahead tz diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index 053c0707ca0b4..9fcea53a4b9f8 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -1 +1 @@ -bd98d98848beee4dc1b17f16589165a1 +7f2019004f86eeab48332eb0ea11114d diff --git a/images/breeze/output-static-checks.svg b/images/breeze/output-static-checks.svg index a76ace38326ee..695266c06fd3b 100644 --- a/images/breeze/output-static-checks.svg +++ b/images/breeze/output-static-checks.svg
[SVG diff not reproduced here: it regenerates the rendered `breeze static-checks --help` screenshot, whose `--type` list now also includes the new check-system-tests-tocs and create-missing-init-py-files-tests checks.]
diff --git a/scripts/ci/pre_commit/pre_commit_check_init_in_tests.py b/scripts/ci/pre_commit/pre_commit_check_init_in_tests.py new file mode 100755 index 0000000000000..947f284e1661d --- /dev/null +++ b/scripts/ci/pre_commit/pre_commit_check_init_in_tests.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import os +import pathlib +import sys +from pathlib import Path +from typing import List + +from rich.console import Console + +if __name__ not in ("__main__", "__mp_main__"): + raise SystemExit( + "This file is intended to be executed as an executable program. You cannot use it as a module." + f"To execute this script, run ./{__file__} [FILE] ..." + ) + +ROOT_DIR = pathlib.Path(__file__).resolve().parents[3] + + +console = Console(color_system="standard", width=200) + +errors: List[str] = [] + +added = False + +if __name__ == '__main__': + for dir, sub_dirs, files in os.walk(str(ROOT_DIR / "tests")): + for sub_dir in sub_dirs: + dir_to_check = dir + os.sep + sub_dir + init_py_path = Path(dir_to_check) / "__init__.py" + if not init_py_path.exists() and "/test_logs/" not in str(init_py_path): + init_py_path.touch() + console.print(f"[yellow] Created {init_py_path}[/]") + added = True + sys.exit(1 if added else 0) diff --git a/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py b/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py index 6097ac22512b5..28d2bbc7c9e5b 100755 --- a/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py +++ b/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py @@ -59,7 +59,7 @@ def get_extras_from_setup() -> Set[str]: def get_extras_from_docs() -> Set[str]: """ - Returns a list of extras from docs. + Returns a list of extras from airflow.docs.
""" docs_content = get_file_content(DOCS_FILE) extras_section_regex = re.compile( @@ -86,7 +86,7 @@ def get_preinstalled_providers_from_docs() -> List[str]: def get_deprecated_extras_from_docs() -> Dict[str, str]: """ - Returns dict of deprecated extras from docs (alias -> target extra) + Returns dict of deprecated extras from airflow.docs (alias -> target extra) """ deprecated_extras = {} docs_content = get_file_content(DOCS_FILE) diff --git a/scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py b/scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py new file mode 100755 index 0000000000000..72f8b4fe78cc0 --- /dev/null +++ b/scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import sys +from pathlib import Path + +from rich.console import Console + +if __name__ not in ("__main__", "__mp_main__"): + raise SystemExit( + "This file is intended to be executed as an executable program. You cannot use it as a module." + f"To run this script, run the ./{__file__} command [FILE] ..." + ) + + +console = Console(color_system="standard", width=200) + +AIRFLOW_SOURCES_ROOT = Path(__file__).parents[3].resolve() +DOCS_ROOT = AIRFLOW_SOURCES_ROOT / "docs" + +PREFIX = "apache-airflow-providers-" + + +errors = [] + + +def check_system_test_entry_hidden(provider_index: Path): + console.print(f"[bright_blue]Checking {provider_index}") + provider_folder = provider_index.parent.name + if not provider_folder.startswith(PREFIX): + console.print(f"[red]Bad provider index passed: {provider_index}") + errors.append(provider_index) + provider_path = provider_folder[len(PREFIX) :].replace("-", "/") + expected_text = f""" +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/{provider_path}/index> +""" + index_text = provider_index.read_text() + system_tests_path = AIRFLOW_SOURCES_ROOT / "tests" / "system" / "providers" / provider_path + if system_tests_path.exists(): + if expected_text not in index_text: + console.print(f"[red]The {provider_index} does not contain System Tests TOC.\n") + console.print(f"[yellow]Make sure to add those lines to {provider_index}:\n") + console.print(expected_text, markup=False) + errors.append(provider_index) + else: + console.print(f"[green]All ok. The {provider_index} contains hidden index.\n") + else: + console.print(f"[yellow]All ok. 
The {provider_index} does not contain system tests.\n") + + +if __name__ == '__main__': + for file in sys.argv[1:]: + check_system_test_entry_hidden(Path(file)) + sys.exit(0 if len(errors) == 0 else 1) diff --git a/scripts/ci/pre_commit/pre_commit_update_example_dags_paths.py b/scripts/ci/pre_commit/pre_commit_update_example_dags_paths.py index fb72d5f14da6e..b910d3f408ba7 100755 --- a/scripts/ci/pre_commit/pre_commit_update_example_dags_paths.py +++ b/scripts/ci/pre_commit/pre_commit_update_example_dags_paths.py @@ -18,7 +18,7 @@ import re import sys from pathlib import Path -from typing import Tuple +from typing import Optional, Tuple import yaml from rich.console import Console @@ -39,10 +39,6 @@ r"^(.*)(https://github.com/apache/airflow/tree/(.*)/airflow/providers/(.*)/example_dags)(/?>.*)$" ) -SYSTEM_TESTS_URL_MATCHER = re.compile( - r"^(.*)(https://github.com/apache/airflow/tree/(.*)/tests/system/providers/(.*))(/?>.*)$" -) - def get_provider_and_version(url_path: str) -> Tuple[str, str]: candidate_folders = url_path.split("/") @@ -67,45 +63,39 @@ def get_provider_and_version(url_path: str) -> Tuple[str, str]: sys.exit(1) -def replace_match(file: str, line: str) -> str: - for matcher in [EXAMPLE_DAGS_URL_MATCHER, SYSTEM_TESTS_URL_MATCHER]: - match = matcher.match(line) - if match: - new_line = line - url_path_to_dir = match.group(4) - folders = url_path_to_dir.split("/") - example_dags_folder = (AIRFLOW_SOURCES_ROOT / "airflow" / "providers").joinpath( - *folders - ) / "example_dags" - system_tests_folder = (AIRFLOW_SOURCES_ROOT / "tests" / "system" / "providers").joinpath(*folders) - provider, version = get_provider_and_version(url_path_to_dir) - if system_tests_folder.exists(): - proper_system_tests_url = ( - f"https://github.com/apache/airflow/tree/providers-{provider}/{version}" - f"/tests/system/providers/{url_path_to_dir}" - ) - new_line = re.sub(matcher, r"\1" + proper_system_tests_url + r"\5", line) - elif example_dags_folder.exists(): - proper_example_dags_url = ( - f"https://github.com/apache/airflow/tree/providers-{provider}/{version}" - f"/airflow/providers/{url_path_to_dir}/example_dags" - ) - new_line = re.sub(matcher, r"\1" + proper_example_dags_url + r"\5", line) +def replace_match(file: Path, line: str) -> Optional[str]: + match = EXAMPLE_DAGS_URL_MATCHER.match(line) + if match: + url_path_to_dir = match.group(4) + folders = url_path_to_dir.split("/") + example_dags_folder = (AIRFLOW_SOURCES_ROOT / "airflow" / "providers").joinpath( + *folders + ) / "example_dags" + provider, version = get_provider_and_version(url_path_to_dir) + proper_system_tests_url = ( + f"https://github.com/apache/airflow/tree/providers-{provider}/{version}" + f"/tests/system/providers/{url_path_to_dir}" + ) + if not example_dags_folder.exists(): + if proper_system_tests_url in file.read_text(): + console.print(f'[yellow] Removing from {file}[/]\n{line.strip()}') + return None else: - console.print( - f"[red] Error - neither example dags nor system tests folder exists for {provider}[/]" - ) - if line != new_line: - console.print(f'[yellow] Replacing in {file}[/]\n{line.strip()}\n{new_line.strip()}') + new_line = re.sub(EXAMPLE_DAGS_URL_MATCHER, r"\1" + proper_system_tests_url + r"\5", line) + if new_line != line: + console.print(f'[yellow] Replacing in {file}[/]\n{line.strip()}\n{new_line.strip()}') return new_line return line def find_matches(_file: Path): + new_lines = [] lines = _file.read_text().splitlines(keepends=True) for index, line in enumerate(lines): - lines[index] = 
replace_match(str(_file), line) - _file.write_text("".join(lines)) + new_line = replace_match(_file, line) + if new_line is not None: + new_lines.append(new_line) + _file.write_text("".join(new_lines)) if __name__ == '__main__': diff --git a/scripts/in_container/run_docs_build.sh b/scripts/in_container/run_docs_build.sh index b96be023048c9..85e7c85dafae3 100755 --- a/scripts/in_container/run_docs_build.sh +++ b/scripts/in_container/run_docs_build.sh @@ -18,7 +18,8 @@ # shellcheck source=scripts/in_container/_in_container_script_init.sh . "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh" -sudo -E "${AIRFLOW_SOURCES}/docs/build_docs.py" "${@}" +cd "${AIRFLOW_SOURCES}" || exit 1 +python -m docs.build_docs "${@}" if [[ ( ${CI:="false"} == "true" || ${CI} == "True" ) && -d "${AIRFLOW_SOURCES}/docs/_build/docs/" ]]; then diff --git a/setup.cfg b/setup.cfg index bd7310499674b..e0976f9ba3b9b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -226,7 +226,7 @@ no_implicit_optional = False line_length=110 combine_as_imports = true default_section = THIRDPARTY -known_first_party=airflow,airflow_breeze,tests +known_first_party=airflow,airflow_breeze,tests,docs # Need to be consistent with the exclude config defined in pre-commit-config.yaml skip=build,.tox,venv profile = black diff --git a/tests/config_templates/__init__.py b/tests/config_templates/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/config_templates/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/core/__init__.py b/tests/core/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/core/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags/__init__.py b/tests/dags/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags/subdir1/__init__.py b/tests/dags/subdir1/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags/subdir1/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags/subdir2/__init__.py b/tests/dags/subdir2/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags/subdir2/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags/subdir2/subdir3/__init__.py b/tests/dags/subdir2/subdir3/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags/subdir2/subdir3/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags_corrupted/__init__.py b/tests/dags_corrupted/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags_corrupted/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/dags_with_system_exit/__init__.py b/tests/dags_with_system_exit/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/dags_with_system_exit/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/executors/kubernetes_executor_template_files/__init__.py b/tests/executors/kubernetes_executor_template_files/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/executors/kubernetes_executor_template_files/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index 5837623bdacf2..6e0135ade78c5 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -2665,6 +2665,7 @@ def test_list_py_file_paths(self): 'test_ignore_this.py', 'test_invalid_param.py', 'test_nested_dag.py', + '__init__.py', } for root, _, files in os.walk(TEST_DAG_FOLDER): for file_name in files: diff --git a/tests/secrets/__init__.py b/tests/secrets/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/secrets/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/README.md b/tests/system/README.md index 912bfc248ce0d..c1452bfe6e5df 100644 --- a/tests/system/README.md +++ b/tests/system/README.md @@ -67,7 +67,7 @@ example of command: ```commandline # pytest --system [provider_name] [path_to_test(s)] -pytest --system google tests/system/providers/google/bigquery/example_bigquery_queries.py +pytest --system google tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py ``` You can specify several `--system` flags if you want to execute tests for several providers: diff --git a/tests/system/providers/alibaba/__init__.py b/tests/system/providers/alibaba/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/alibaba/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/amazon/__init__.py b/tests/system/providers/amazon/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/amazon/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/amazon/aws/__init__.py b/tests/system/providers/amazon/aws/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/amazon/aws/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/apache/cassandra/__init__.py b/tests/system/providers/apache/cassandra/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/apache/cassandra/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/apache/drill/__init__.py b/tests/system/providers/apache/drill/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/apache/drill/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/apache/druid/__init__.py b/tests/system/providers/apache/druid/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/apache/druid/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/apache/kylin/__init__.py b/tests/system/providers/apache/kylin/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/apache/kylin/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/apache/livy/__init__.py b/tests/system/providers/apache/livy/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/apache/livy/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/system/providers/apache/pig/__init__.py b/tests/system/providers/apache/pig/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/apache/pig/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/apache/spark/__init__.py b/tests/system/providers/apache/spark/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/apache/spark/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/asana/__init__.py b/tests/system/providers/asana/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/asana/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/cncf/__init__.py b/tests/system/providers/cncf/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/cncf/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/cncf/kubernetes/__init__.py b/tests/system/providers/cncf/kubernetes/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/cncf/kubernetes/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/databricks/__init__.py b/tests/system/providers/databricks/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/databricks/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/dingding/__init__.py b/tests/system/providers/dingding/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/dingding/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/docker/__init__.py b/tests/system/providers/docker/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/docker/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/docker/example_docker_copy_data.py b/tests/system/providers/docker/example_docker_copy_data.py index d709bf14d546e..56a3e20d28611 100644 --- a/tests/system/providers/docker/example_docker_copy_data.py +++ b/tests/system/providers/docker/example_docker_copy_data.py @@ -22,7 +22,7 @@ The following operators are being used: DockerOperator, BashOperator & ShortCircuitOperator. TODO: Review the workflow, change it accordingly to - your environment & enable the code. +your environment & enable the code. """ import os from datetime import datetime diff --git a/tests/system/providers/elasticsearch/__init__.py b/tests/system/providers/elasticsearch/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/elasticsearch/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/github/__init__.py b/tests/system/providers/github/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/github/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/README.md b/tests/system/providers/google/README.md index 35d07c8e1e72d..da8709cb901e1 100644 --- a/tests/system/providers/google/README.md +++ b/tests/system/providers/google/README.md @@ -23,7 +23,7 @@ All Google-related system tests are located inside this subdirectory of system tests which is `tests/system/providers/google/`. They are grouped in directories by the related service name, e.g. all BigQuery -tests are stored inside `tests/system/providers/google/bigquery/` directory. In each directory you will find test files +tests are stored inside `tests/system/providers/google/cloud/bigquery/` directory. In each directory you will find test files as self-contained DAGs (one DAG per file). Each test may require some additional resources which should be placed in `resources` directory found on the same level as tests. Each test file should start with prefix `example_*`. If there is anything more needed for the test to be executed, it should be documented in the docstrings. diff --git a/tests/system/providers/google/cloud/__init__.py b/tests/system/providers/google/cloud/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/cloud/azure/__init__.py b/tests/system/providers/google/cloud/azure/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/azure/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/azure/example_azure_fileshare_to_gcs.py b/tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py similarity index 100% rename from tests/system/providers/google/azure/example_azure_fileshare_to_gcs.py rename to tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py diff --git a/tests/system/providers/google/cloud/bigquery/__init__.py b/tests/system/providers/google/cloud/bigquery/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/bigquery/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
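The README hunk above describes the layout these renames enforce: one self-contained DAG per `example_*.py` file, grouped by service directory, with extra files in a sibling `resources` directory. As a rough, hypothetical sketch of such a file (the DAG id, schedule and the `get_test_run` helper path are assumptions for illustration, not part of this change), it generally looks like:

```python
"""Minimal sketch of a self-contained system test DAG (illustrative only)."""
import os
from datetime import datetime

from airflow import DAG
from airflow.operators.bash import BashOperator

# Hypothetical identifiers -- real tests derive these from the service under test.
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "example_service_operation"

with DAG(
    dag_id=DAG_ID,
    schedule_interval="@once",  # system tests are meant to run once per invocation
    start_date=datetime(2021, 1, 1),
    catchup=False,
    tags=["example"],
) as dag:
    # The operators actually being exercised by the test would go here.
    run_example = BashOperator(task_id="run_example", bash_command="echo 'hello'")

# Trailing boilerplate of the kind the check-system-tests-present hook looks for;
# the helper module path below is an assumption.
from tests.system.utils import get_test_run  # noqa: E402

test_run = get_test_run(dag)
```

Anything beyond this skeleton that the test needs in order to run is expected to be documented in the file's docstring, as the README notes.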
diff --git a/tests/system/providers/google/bigquery/example_bigquery_dataset.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_dataset.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_operations.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_operations.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_operations_location.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_operations_location.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_operations_location.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_operations_location.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_queries.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_queries.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_sensors.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_sensors.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_tables.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_tables.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_to_bigquery.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_to_bigquery.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py diff --git a/tests/system/providers/google/bigquery/example_bigquery_to_gcs.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py similarity index 100% rename from tests/system/providers/google/bigquery/example_bigquery_to_gcs.py rename to tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py diff --git a/tests/system/providers/google/cloud/bigquery/resources/__init__.py b/tests/system/providers/google/cloud/bigquery/resources/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/bigquery/resources/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/bigquery/resources/example_bigquery_query.sql b/tests/system/providers/google/cloud/bigquery/resources/example_bigquery_query.sql similarity index 100% rename from tests/system/providers/google/bigquery/resources/example_bigquery_query.sql rename to tests/system/providers/google/cloud/bigquery/resources/example_bigquery_query.sql diff --git a/tests/system/providers/google/bigquery/resources/update_table_schema.json b/tests/system/providers/google/cloud/bigquery/resources/update_table_schema.json similarity index 100% rename from tests/system/providers/google/bigquery/resources/update_table_schema.json rename to tests/system/providers/google/cloud/bigquery/resources/update_table_schema.json diff --git a/tests/system/providers/google/bigquery/resources/us-states.csv b/tests/system/providers/google/cloud/bigquery/resources/us-states.csv similarity index 100% rename from tests/system/providers/google/bigquery/resources/us-states.csv rename to tests/system/providers/google/cloud/bigquery/resources/us-states.csv diff --git a/tests/system/providers/google/cloud/cloud_sql/__init__.py b/tests/system/providers/google/cloud/cloud_sql/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/cloud_sql/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/cloud_sql/example_cloud_sql.py b/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py similarity index 100% rename from tests/system/providers/google/cloud_sql/example_cloud_sql.py rename to tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py diff --git a/tests/system/providers/google/cloud/dataproc/__init__.py b/tests/system/providers/google/cloud/dataproc/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/dataproc/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/dataproc/example_dataproc_batch.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_batch.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_batch_persistent.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_batch_persistent.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_cluster_generator.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_cluster_generator.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_gke.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_gke.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_hadoop.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_hadoop.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_hive.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_hive.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_pig.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_pig.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_pyspark.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_pyspark.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_spark.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_spark.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py diff --git 
a/tests/system/providers/google/dataproc/example_dataproc_spark_async.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_spark_async.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_spark_sql.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_spark_sql.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_sparkr.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_sparkr.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_update.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_update.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_update.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_update.py diff --git a/tests/system/providers/google/dataproc/example_dataproc_workflow.py b/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py similarity index 100% rename from tests/system/providers/google/dataproc/example_dataproc_workflow.py rename to tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py diff --git a/tests/system/providers/google/cloud/dataproc/resources/__init__.py b/tests/system/providers/google/cloud/dataproc/resources/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/dataproc/resources/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/system/providers/google/dataproc/resources/hello_world.R b/tests/system/providers/google/cloud/dataproc/resources/hello_world.R similarity index 100% rename from tests/system/providers/google/dataproc/resources/hello_world.R rename to tests/system/providers/google/cloud/dataproc/resources/hello_world.R diff --git a/tests/system/providers/google/dataproc/resources/hello_world.py b/tests/system/providers/google/cloud/dataproc/resources/hello_world.py similarity index 100% rename from tests/system/providers/google/dataproc/resources/hello_world.py rename to tests/system/providers/google/cloud/dataproc/resources/hello_world.py diff --git a/tests/system/providers/google/dataproc/resources/hive.sql b/tests/system/providers/google/cloud/dataproc/resources/hive.sql similarity index 100% rename from tests/system/providers/google/dataproc/resources/hive.sql rename to tests/system/providers/google/cloud/dataproc/resources/hive.sql diff --git a/tests/system/providers/google/dataproc/resources/pip-install.sh b/tests/system/providers/google/cloud/dataproc/resources/pip-install.sh similarity index 100% rename from tests/system/providers/google/dataproc/resources/pip-install.sh rename to tests/system/providers/google/cloud/dataproc/resources/pip-install.sh diff --git a/tests/system/providers/google/cloud/datastore/__init__.py b/tests/system/providers/google/cloud/datastore/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/datastore/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/system/providers/google/datastore/example_datastore_commit.py b/tests/system/providers/google/cloud/datastore/example_datastore_commit.py similarity index 100% rename from tests/system/providers/google/datastore/example_datastore_commit.py rename to tests/system/providers/google/cloud/datastore/example_datastore_commit.py diff --git a/tests/system/providers/google/datastore/example_datastore_export_import.py b/tests/system/providers/google/cloud/datastore/example_datastore_export_import.py similarity index 100% rename from tests/system/providers/google/datastore/example_datastore_export_import.py rename to tests/system/providers/google/cloud/datastore/example_datastore_export_import.py diff --git a/tests/system/providers/google/datastore/example_datastore_query.py b/tests/system/providers/google/cloud/datastore/example_datastore_query.py similarity index 100% rename from tests/system/providers/google/datastore/example_datastore_query.py rename to tests/system/providers/google/cloud/datastore/example_datastore_query.py diff --git a/tests/system/providers/google/datastore/example_datastore_rollback.py b/tests/system/providers/google/cloud/datastore/example_datastore_rollback.py similarity index 100% rename from tests/system/providers/google/datastore/example_datastore_rollback.py rename to tests/system/providers/google/cloud/datastore/example_datastore_rollback.py diff --git a/tests/system/providers/google/cloud/gcs/__init__.py b/tests/system/providers/google/cloud/gcs/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/gcs/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/system/providers/google/calendar/example_calendar_to_gcs.py b/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py similarity index 100% rename from tests/system/providers/google/calendar/example_calendar_to_gcs.py rename to tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py diff --git a/tests/system/providers/google/gcs/example_gcs_acl.py b/tests/system/providers/google/cloud/gcs/example_gcs_acl.py similarity index 100% rename from tests/system/providers/google/gcs/example_gcs_acl.py rename to tests/system/providers/google/cloud/gcs/example_gcs_acl.py diff --git a/tests/system/providers/google/gcs/example_gcs_copy_delete.py b/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py similarity index 100% rename from tests/system/providers/google/gcs/example_gcs_copy_delete.py rename to tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py diff --git a/tests/system/providers/google/gcs/example_gcs_sensor.py b/tests/system/providers/google/cloud/gcs/example_gcs_sensor.py similarity index 100% rename from tests/system/providers/google/gcs/example_gcs_sensor.py rename to tests/system/providers/google/cloud/gcs/example_gcs_sensor.py diff --git a/tests/system/providers/google/gcs/example_gcs_to_bigquery.py b/tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py similarity index 100% rename from tests/system/providers/google/gcs/example_gcs_to_bigquery.py rename to tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py diff --git a/tests/system/providers/google/gcs/example_gcs_to_gcs.py b/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py similarity index 100% rename from tests/system/providers/google/gcs/example_gcs_to_gcs.py rename to tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py diff --git a/tests/system/providers/google/gcs/example_gcs_transform.py b/tests/system/providers/google/cloud/gcs/example_gcs_transform.py similarity index 100% rename from tests/system/providers/google/gcs/example_gcs_transform.py rename to tests/system/providers/google/cloud/gcs/example_gcs_transform.py diff --git a/tests/system/providers/google/gcs/example_gcs_transform_timespan.py b/tests/system/providers/google/cloud/gcs/example_gcs_transform_timespan.py similarity index 100% rename from tests/system/providers/google/gcs/example_gcs_transform_timespan.py rename to tests/system/providers/google/cloud/gcs/example_gcs_transform_timespan.py diff --git a/tests/system/providers/google/gcs/example_gcs_upload_download.py b/tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py similarity index 100% rename from tests/system/providers/google/gcs/example_gcs_upload_download.py rename to tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py diff --git a/tests/system/providers/google/sheets/example_sheets_to_gcs.py b/tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py similarity index 100% rename from tests/system/providers/google/sheets/example_sheets_to_gcs.py rename to tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py diff --git a/tests/system/providers/google/cloud/gcs/resources/__init__.py b/tests/system/providers/google/cloud/gcs/resources/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/gcs/resources/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/gcs/resources/example_upload.txt b/tests/system/providers/google/cloud/gcs/resources/example_upload.txt similarity index 100% rename from tests/system/providers/google/gcs/resources/example_upload.txt rename to tests/system/providers/google/cloud/gcs/resources/example_upload.txt diff --git a/tests/system/providers/google/gcs/resources/transform_script.py b/tests/system/providers/google/cloud/gcs/resources/transform_script.py similarity index 100% rename from tests/system/providers/google/gcs/resources/transform_script.py rename to tests/system/providers/google/cloud/gcs/resources/transform_script.py diff --git a/tests/system/providers/google/gcs/resources/transform_timespan.py b/tests/system/providers/google/cloud/gcs/resources/transform_timespan.py similarity index 100% rename from tests/system/providers/google/gcs/resources/transform_timespan.py rename to tests/system/providers/google/cloud/gcs/resources/transform_timespan.py diff --git a/tests/system/providers/google/cloud/kubernetes_engine/__init__.py b/tests/system/providers/google/cloud/kubernetes_engine/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/kubernetes_engine/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py b/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py similarity index 100% rename from tests/system/providers/google/kubernetes_engine/example_kubernetes_engine.py rename to tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py diff --git a/tests/system/providers/google/cloud/spanner/__init__.py b/tests/system/providers/google/cloud/spanner/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/spanner/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/spanner/example_spanner.py b/tests/system/providers/google/cloud/spanner/example_spanner.py similarity index 100% rename from tests/system/providers/google/spanner/example_spanner.py rename to tests/system/providers/google/cloud/spanner/example_spanner.py diff --git a/tests/system/providers/google/cloud/speech_to_text/__init__.py b/tests/system/providers/google/cloud/speech_to_text/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/speech_to_text/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/speech_to_text/example_speech_to_text.py b/tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py similarity index 100% rename from tests/system/providers/google/speech_to_text/example_speech_to_text.py rename to tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py diff --git a/tests/system/providers/google/cloud/stackdriver/__init__.py b/tests/system/providers/google/cloud/stackdriver/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/stackdriver/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/system/providers/google/stackdriver/example_stackdriver.py b/tests/system/providers/google/cloud/stackdriver/example_stackdriver.py similarity index 100% rename from tests/system/providers/google/stackdriver/example_stackdriver.py rename to tests/system/providers/google/cloud/stackdriver/example_stackdriver.py diff --git a/tests/system/providers/google/cloud/tasks/__init__.py b/tests/system/providers/google/cloud/tasks/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/tasks/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/tasks/example_queue.py b/tests/system/providers/google/cloud/tasks/example_queue.py similarity index 100% rename from tests/system/providers/google/tasks/example_queue.py rename to tests/system/providers/google/cloud/tasks/example_queue.py diff --git a/tests/system/providers/google/tasks/example_tasks.py b/tests/system/providers/google/cloud/tasks/example_tasks.py similarity index 98% rename from tests/system/providers/google/tasks/example_tasks.py rename to tests/system/providers/google/cloud/tasks/example_tasks.py index 8ef2ac454b5d7..3b2cf46c1e188 100644 --- a/tests/system/providers/google/tasks/example_tasks.py +++ b/tests/system/providers/google/cloud/tasks/example_tasks.py @@ -17,7 +17,7 @@ # under the License. """ -Example Airflow DAG that createsand deletes Queues and creates, gets, lists, +Example Airflow DAG that creates and deletes Queues and creates, gets, lists, runs and deletes Tasks in the Google Cloud Tasks service in the Google Cloud. """ import os diff --git a/tests/system/providers/google/cloud/text_to_speech/__init__.py b/tests/system/providers/google/cloud/text_to_speech/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/text_to_speech/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/system/providers/google/text_to_speech/example_text_to_speech.py b/tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py similarity index 100% rename from tests/system/providers/google/text_to_speech/example_text_to_speech.py rename to tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py diff --git a/tests/system/providers/google/cloud/workflows/__init__.py b/tests/system/providers/google/cloud/workflows/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/cloud/workflows/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/workflows/example_workflows.py b/tests/system/providers/google/cloud/workflows/example_workflows.py similarity index 100% rename from tests/system/providers/google/workflows/example_workflows.py rename to tests/system/providers/google/cloud/workflows/example_workflows.py diff --git a/tests/system/providers/google/workplace/__init__.py b/tests/system/providers/google/workplace/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/workplace/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/http/__init__.py b/tests/system/providers/http/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/http/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/influxdb/__init__.py b/tests/system/providers/influxdb/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/influxdb/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/jdbc/__init__.py b/tests/system/providers/jdbc/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/jdbc/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/jenkins/__init__.py b/tests/system/providers/jenkins/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/jenkins/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/system/providers/microsoft/mssql/__init__.py b/tests/system/providers/microsoft/mssql/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/microsoft/mssql/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/microsoft/winrm/__init__.py b/tests/system/providers/microsoft/winrm/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/microsoft/winrm/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/mysql/__init__.py b/tests/system/providers/mysql/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/mysql/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/neo4j/__init__.py b/tests/system/providers/neo4j/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/neo4j/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/opsgenie/__init__.py b/tests/system/providers/opsgenie/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/opsgenie/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/plexus/__init__.py b/tests/system/providers/plexus/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/plexus/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/postgres/__init__.py b/tests/system/providers/postgres/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/postgres/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/presto/__init__.py b/tests/system/providers/presto/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/presto/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/qubole/__init__.py b/tests/system/providers/qubole/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/qubole/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/salesforce/__init__.py b/tests/system/providers/salesforce/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/salesforce/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/singularity/__init__.py b/tests/system/providers/singularity/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/singularity/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/snowflake/__init__.py b/tests/system/providers/snowflake/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/snowflake/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/sqlite/__init__.py b/tests/system/providers/sqlite/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/sqlite/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/tableau/__init__.py b/tests/system/providers/tableau/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/tableau/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/telegram/__init__.py b/tests/system/providers/telegram/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/telegram/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/trino/__init__.py b/tests/system/providers/trino/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/trino/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/yandex/__init__.py b/tests/system/providers/yandex/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/yandex/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/system/providers/zendesk/__init__.py b/tests/system/providers/zendesk/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/system/providers/zendesk/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/test_utils/operators/__init__.py b/tests/test_utils/operators/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/test_utils/operators/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/test_utils/perf/__init__.py b/tests/test_utils/perf/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/test_utils/perf/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/test_utils/perf/dags/__init__.py b/tests/test_utils/perf/dags/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/test_utils/perf/dags/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/testconfig/__init__.py b/tests/testconfig/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/testconfig/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/testconfig/conf/__init__.py b/tests/testconfig/conf/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/testconfig/conf/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/triggers/__init__.py b/tests/triggers/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/triggers/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/www/test_logs/__init__.py b/tests/www/test_logs/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/www/test_logs/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/www/views/__init__.py b/tests/www/views/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/www/views/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.