diff --git a/tests/dags/test_raise_executor_error.py b/tests/dags/test_raise_executor_error.py
deleted file mode 100644
index c0138edd064d7..0000000000000
--- a/tests/dags/test_raise_executor_error.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-"""
-DAG designed to test what happens when running a DAG fails before
-a task runs -- prior to a fix, this could actually cause an Executor to report
-SUCCESS. Since the task never reports any status, this can lead to an infinite
-rescheduling loop.
-"""
-from datetime import datetime
-
-from airflow.models import DAG
-from airflow.operators import SubDagOperator
-from airflow.example_dags.subdags.subdag import subdag
-
-args = {
-    'owner': 'airflow',
-    'start_date': datetime(2016, 1, 1),
-}
-
-dag = DAG(
-    dag_id='test_raise_executor_error',
-    default_args=args,
-    schedule_interval="@daily",
-)
-
-section_1 = SubDagOperator(
-    task_id='subdag_op',
-    subdag=subdag('test_raise_executor_error', 'subdag_op', args),
-    default_args=args,
-    dag=dag,
-)
-
-# change the subdag name -- this creates an error because the subdag
-# won't be found, but it'll do it in a way that causes the executor to report
-# success
-section_1.subdag.dag_id = 'bad_id'
diff --git a/tests/jobs.py b/tests/jobs.py
index be72da60e7e18..ec26c70600984 100644
--- a/tests/jobs.py
+++ b/tests/jobs.py
@@ -71,41 +71,6 @@ def test_backfill_examples(self):
                 ignore_first_depends_on_past=True)
             job.run()
 
-    def test_trap_executor_error(self):
-        """
-        Test that errors setting up tasks (before tasks run) are caught.
-
-        Executors run tasks with the `airflow run` command. If a task runs,
-        its state (success, failure, or other) is stored in the database and
-        `airflow run` exits without error. However, if an error is raised
-        before the task runs, then the task won't be able to update its status.
-        This can put the executor into an infinite loop of trying to run the
-        task.
-
-        To counteract that, the executor traps errors coming from
-        `airflow run` (essentially looking for returncode != 0).
-        This unit test creates such an error by trying
-        to run a subdag whose dag_id has changed and therefore can't be
-        found. If the trap is working properly, the error will be caught
-        and the Backfill will report failures. If the trap is not working,
-        the job will run infinitely (the unit test uses a timeout to protect
-        against that case).
-
-        Test for https://github.com/airbnb/airflow/pull/1220
-        """
-        dag = self.dagbag.get_dag('test_raise_executor_error')
-        dag.clear()
-        job = BackfillJob(
-            dag=dag,
-            start_date=DEFAULT_DATE,
-            end_date=DEFAULT_DATE)
-        # run with timeout because this creates an infinite loop if not
-        # caught
-        def run_with_timeout():
-            with timeout(seconds=30):
-                job.run()
-        self.assertRaises(AirflowException, run_with_timeout)
-
     def test_backfill_pooled_tasks(self):
         """
         Test that queued tasks are executed by BackfillJob
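
Note on the removed test: its docstring documents the mechanism it exercised. The executor launches tasks via the `airflow run` command; a task that actually runs records its own state in the database, but a failure before the task starts leaves no state behind, so the executor has to treat a non-zero return code from the command as a failure rather than rescheduling the task forever. A minimal, hypothetical sketch of that return-code trap (the helper name run_task_command is illustrative and not Airflow's actual executor code):

    import subprocess

    def run_task_command(args):
        # args would be something like ["airflow", "run", dag_id, task_id, date_str].
        proc = subprocess.Popen(args)
        proc.wait()
        # A non-zero exit means the task never got far enough to record any
        # state of its own, so surface the failure here instead of letting the
        # caller re-queue the task indefinitely.
        if proc.returncode != 0:
            raise RuntimeError(
                "command %s failed with return code %s" % (args, proc.returncode))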