From 931babdb7e36e1151eb5b2397efdce5c5defa57e Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Fri, 21 Jan 2022 16:51:18 +0100
Subject: [PATCH] Fix last remaining MyPy errors

Closes: #19891
---
 .github/workflows/ci.yml                             | 11 +++--------
 airflow/api/common/experimental/get_dag_runs.py      |  3 ++-
 airflow/jobs/base_job.py                             |  4 ++--
 airflow/models/base.py                               |  3 ++-
 airflow/models/dagrun.py                             |  8 ++++++--
 airflow/providers/microsoft/winrm/hooks/winrm.py     |  2 +-
 airflow/providers/ssh/hooks/ssh.py                   |  2 +-
 tests/providers/amazon/aws/hooks/test_eks.py         |  4 +++-
 tests/providers/google/cloud/hooks/test_gcs.py       |  4 +++-
 tests/www/api/experimental/test_dag_runs_endpoint.py |  8 ++------
 10 files changed, 25 insertions(+), 24 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6fa77f8af4dd8..b143054b6adff 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -488,17 +488,12 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           path: 'airflow/ui/node_modules'
           key: ${{ runner.os }}-ui-node-modules-${{ hashFiles('airflow/ui/**/yarn.lock') }}
-      - name: "Static checks (no mypy)"
+      - name: "Static checks"
         run: ./scripts/ci/static_checks/run_static_checks.sh
         env:
           VERBOSE: false
-          SKIP: "identity,mypy"
-      - name: "MyPy static checks"
-        run: ./scripts/ci/static_checks/run_static_checks.sh mypy
-        env:
-          VERBOSE: false
-          DO_NOT_FAIL_ON_ERROR: "true"
-          COLUMNS: 300
+          SKIP: "identity"
+          COLUMNS: 250
 
   # Those checks are run if no image needs to be built for checks. This is for simple changes that
   # Do not touch any of the python code or any of the important files that might require building
diff --git a/airflow/api/common/experimental/get_dag_runs.py b/airflow/api/common/experimental/get_dag_runs.py
index cd939676bb3ac..2064d6eb51267 100644
--- a/airflow/api/common/experimental/get_dag_runs.py
+++ b/airflow/api/common/experimental/get_dag_runs.py
@@ -22,6 +22,7 @@
 
 from airflow.api.common.experimental import check_and_get_dag
 from airflow.models import DagRun
+from airflow.utils.state import DagRunState
 
 
 def get_dag_runs(dag_id: str, state: Optional[str] = None) -> List[Dict[str, Any]]:
@@ -36,7 +37,7 @@ def get_dag_runs(dag_id: str, state: Optional[str] = None) -> List[Dict[str, Any
     check_and_get_dag(dag_id=dag_id)
 
     dag_runs = []
-    state = state.lower() if state else None
+    state = DagRunState(state.lower()) if state else None
     for run in DagRun.find(dag_id=dag_id, state=state):
         dag_runs.append(
             {
diff --git a/airflow/jobs/base_job.py b/airflow/jobs/base_job.py
index 2a3fe0f0fefff..7befccf956c41 100644
--- a/airflow/jobs/base_job.py
+++ b/airflow/jobs/base_job.py
@@ -76,13 +76,13 @@ class BaseJob(Base, LoggingMixin):
 
     task_instances_enqueued = relationship(
         TaskInstance,
-        primaryjoin=id == foreign(TaskInstance.queued_by_job_id),  # type: ignore
+        primaryjoin=id == foreign(TaskInstance.queued_by_job_id),  # type: ignore[has-type]
         backref=backref('queued_by_job', uselist=False),
     )
 
     dag_runs = relationship(
         DagRun,
-        primaryjoin=id == foreign(DagRun.creating_job_id),
+        primaryjoin=id == foreign(DagRun.creating_job_id),  # type: ignore[has-type]
         backref=backref('creating_job'),
     )
 
diff --git a/airflow/models/base.py b/airflow/models/base.py
index 6b24e57a7fe93..f44a2834a96ab 100644
--- a/airflow/models/base.py
+++ b/airflow/models/base.py
@@ -55,6 +55,7 @@ class Operator:
     template_fields: Collection[str]
     # Defines which files extensions to look for in the templated fields.
     template_ext: Collection[str]
+    owner: str
 
     def get_dag(self) -> "Optional[DAG]":
         raise NotImplementedError()
@@ -64,7 +65,7 @@ def dag_id(self) -> str:
         """Returns dag id if it has one or an adhoc + owner"""
         dag = self.get_dag()
         if dag:
-            return self.dag.dag_id
+            return dag.dag_id
         return f"adhoc_{self.owner}"
 
     def get_template_env(self) -> jinja2.Environment:
diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py
index 81d26bc490d5e..9f47d38c3bed0 100644
--- a/airflow/models/dagrun.py
+++ b/airflow/models/dagrun.py
@@ -511,8 +511,12 @@ def update_state(
         finished_tasks = info.finished_tasks
         unfinished_tasks = info.unfinished_tasks
 
-        none_depends_on_past = all(not t.task.depends_on_past for t in unfinished_tasks)
-        none_task_concurrency = all(t.task.max_active_tis_per_dag is None for t in unfinished_tasks)
+        none_depends_on_past = all(
+            not t.task.depends_on_past for t in unfinished_tasks  # type: ignore[has-type]
+        )
+        none_task_concurrency = all(
+            t.task.max_active_tis_per_dag is None for t in unfinished_tasks  # type: ignore[has-type]
+        )
         none_deferred = all(t.state != State.DEFERRED for t in unfinished_tasks)
 
         if unfinished_tasks and none_depends_on_past and none_task_concurrency and none_deferred:
diff --git a/airflow/providers/microsoft/winrm/hooks/winrm.py b/airflow/providers/microsoft/winrm/hooks/winrm.py
index 0f8158279df4d..ee11dbac9c9d2 100644
--- a/airflow/providers/microsoft/winrm/hooks/winrm.py
+++ b/airflow/providers/microsoft/winrm/hooks/winrm.py
@@ -27,7 +27,7 @@
 try:
     from airflow.utils.platform import getuser
 except ImportError:
-    from getpass import getuser
+    from getpass import getuser  # type: ignore[misc]
 
 
 # TODO: Fixme please - I have too complex implementation
diff --git a/airflow/providers/ssh/hooks/ssh.py b/airflow/providers/ssh/hooks/ssh.py
index 419125cf7b709..db98aad42f519 100644
--- a/airflow/providers/ssh/hooks/ssh.py
+++ b/airflow/providers/ssh/hooks/ssh.py
@@ -38,7 +38,7 @@
 try:
     from airflow.utils.platform import getuser
 except ImportError:
-    from getpass import getuser
+    from getpass import getuser  # type: ignore[misc]
 
 
 TIMEOUT_DEFAULT = 10
diff --git a/tests/providers/amazon/aws/hooks/test_eks.py b/tests/providers/amazon/aws/hooks/test_eks.py
index a2e7232022a95..9a23406d2e116 100644
--- a/tests/providers/amazon/aws/hooks/test_eks.py
+++ b/tests/providers/amazon/aws/hooks/test_eks.py
@@ -583,7 +583,9 @@ def test_create_nodegroup_with_ownership_tag_uses_provided_value(self, cluster_b
             **dict(deepcopy(NodegroupInputs.REQUIRED)),
         )[ResponseAttributes.NODEGROUP]
 
-        assert created_nodegroup.get(NodegroupAttributes.TAGS).get(ownership_tag_key) == provided_tag_value
+        tags = created_nodegroup.get(NodegroupAttributes.TAGS)
+        assert tags is not None
+        assert tags.get(ownership_tag_key) == provided_tag_value
 
     def test_describe_nodegroup_throws_exception_when_cluster_not_found(self, nodegroup_builder) -> None:
         eks_hook, generated_test_data = nodegroup_builder()
diff --git a/tests/providers/google/cloud/hooks/test_gcs.py b/tests/providers/google/cloud/hooks/test_gcs.py
index d364282093c45..708eb02a7f1c9 100644
--- a/tests/providers/google/cloud/hooks/test_gcs.py
+++ b/tests/providers/google/cloud/hooks/test_gcs.py
@@ -27,7 +27,9 @@
 
 import dateutil
 import pytest
-from google.cloud import exceptions, storage
+
+# dynamic storage type in google.cloud needs to be type-ignored
+from google.cloud import exceptions, storage  # type: ignore[attr-defined]
 
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks import gcs
diff --git a/tests/www/api/experimental/test_dag_runs_endpoint.py b/tests/www/api/experimental/test_dag_runs_endpoint.py
index 90a34f5547c4c..654583146818d 100644
--- a/tests/www/api/experimental/test_dag_runs_endpoint.py
+++ b/tests/www/api/experimental/test_dag_runs_endpoint.py
@@ -97,12 +97,8 @@ def test_get_dag_runs_success_with_state_no_result(self):
         # Create DagRun
         trigger_dag(dag_id=dag_id, run_id='test_get_dag_runs_success')
 
-        response = self.app.get(url_template.format(dag_id))
-        assert 200 == response.status_code
-        data = json.loads(response.data.decode('utf-8'))
-
-        assert isinstance(data, list)
-        assert len(data) == 0
+        with pytest.raises(ValueError):
+            self.app.get(url_template.format(dag_id))
 
     def test_get_dag_runs_invalid_dag_id(self):
         url_template = '/api/experimental/dags/{}/dag_runs'
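
Illustrative note, not part of the patch: the get_dag_runs.py and test_dag_runs_endpoint.py hunks
hang together because DagRunState (from airflow.utils.state) is a str-backed enum, so
DagRunState(state.lower()) both narrows the type for MyPy and validates the query value, raising
ValueError for anything that is not a recognized dag-run state. A minimal sketch of that behavior,
assuming a recent Airflow installation; the "not-a-state" value below is a hypothetical example,
not the literal filter used by the test:

    from airflow.utils.state import DagRunState

    # A valid lowercase value coerces to the corresponding enum member.
    assert DagRunState("running") is DagRunState.RUNNING

    # An unrecognized value is rejected at construction time, so an invalid
    # state filter now surfaces as ValueError instead of silently matching nothing,
    # which is what the updated endpoint test expects via pytest.raises(ValueError).
    try:
        DagRunState("not-a-state")  # hypothetical invalid value, for illustration only
    except ValueError:
        print("invalid state rejected")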