diff --git a/airflow/configuration.py b/airflow/configuration.py
index a33b3f3998e78..729e780f74a6a 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -391,7 +391,7 @@ def _validate_enums(self):
                 if value not in enum_options:
                     raise AirflowConfigException(
                         f"`[{section_key}] {option_key}` should not be "
-                        + f"{value!r}. Possible values: {', '.join(enum_options)}."
+                        f"{value!r}. Possible values: {', '.join(enum_options)}."
                     )

     def _validate_config_dependencies(self):
diff --git a/airflow/providers/amazon/aws/hooks/eks.py b/airflow/providers/amazon/aws/hooks/eks.py
index 1475d3d1f5354..d2a795e498dfd 100644
--- a/airflow/providers/amazon/aws/hooks/eks.py
+++ b/airflow/providers/amazon/aws/hooks/eks.py
@@ -638,7 +638,7 @@ class EKSHook(EksHook):

     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use `airflow.providers.amazon.aws.hooks.eks.EksHook`.",
+            "This hook is deprecated. Please use `airflow.providers.amazon.aws.hooks.eks.EksHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
diff --git a/airflow/providers/amazon/aws/hooks/ses.py b/airflow/providers/amazon/aws/hooks/ses.py
index 21efa14107071..92dcce7ecb617 100644
--- a/airflow/providers/amazon/aws/hooks/ses.py
+++ b/airflow/providers/amazon/aws/hooks/ses.py
@@ -106,7 +106,7 @@ class SESHook(SesHook):

     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use :class:`airflow.providers.amazon.aws.hooks.ses.SesHook`.",
+            "This hook is deprecated. Please use :class:`airflow.providers.amazon.aws.hooks.ses.SesHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
diff --git a/airflow/providers/amazon/aws/hooks/sns.py b/airflow/providers/amazon/aws/hooks/sns.py
index 94c83d8d31300..fc009d9f9bdae 100644
--- a/airflow/providers/amazon/aws/hooks/sns.py
+++ b/airflow/providers/amazon/aws/hooks/sns.py
@@ -100,7 +100,7 @@ class AwsSnsHook(SnsHook):

     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use :class:`airflow.providers.amazon.aws.hooks.sns.SnsHook`.",
+            "This hook is deprecated. Please use :class:`airflow.providers.amazon.aws.hooks.sns.SnsHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
diff --git a/airflow/providers/amazon/aws/hooks/sqs.py b/airflow/providers/amazon/aws/hooks/sqs.py
index c6de9f904879a..b94756f63aa26 100644
--- a/airflow/providers/amazon/aws/hooks/sqs.py
+++ b/airflow/providers/amazon/aws/hooks/sqs.py
@@ -88,7 +88,7 @@ class SQSHook(SqsHook):

     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use :class:`airflow.providers.amazon.aws.hooks.sqs.SqsHook`.",
+            "This hook is deprecated. Please use :class:`airflow.providers.amazon.aws.hooks.sqs.SqsHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
diff --git a/airflow/providers/databricks/operators/databricks_repos.py b/airflow/providers/databricks/operators/databricks_repos.py
index 982adcfee2177..97b46b8e244d6 100644
--- a/airflow/providers/databricks/operators/databricks_repos.py
+++ b/airflow/providers/databricks/operators/databricks_repos.py
@@ -90,7 +90,7 @@ def __init__(
             self.git_provider = self.__detect_repo_provider__(git_url)
             if self.git_provider is None:
                 raise AirflowException(
-                    "git_provider isn't specified and couldn't be guessed for URL {git_url}"
+                    f"git_provider isn't specified and couldn't be guessed for URL {git_url}"
                 )
         else:
             self.git_provider = git_provider
diff --git a/airflow/providers/google/cloud/hooks/kubernetes_engine.py b/airflow/providers/google/cloud/hooks/kubernetes_engine.py
index 4cfb4fe6fbcfb..31c6c6c1fd142 100644
--- a/airflow/providers/google/cloud/hooks/kubernetes_engine.py
+++ b/airflow/providers/google/cloud/hooks/kubernetes_engine.py
@@ -123,8 +123,10 @@ def get_operation(self, operation_name: str, project_id: Optional[str] = None) -
         :return: The new, updated operation from Google Cloud
         """
         return self.get_cluster_manager_client().get_operation(
-            name=f'projects/{project_id or self.project_id}'
-            + f'/locations/{self.location}/operations/{operation_name}'
+            name=(
+                f'projects/{project_id or self.project_id}'
+                f'/locations/{self.location}/operations/{operation_name}'
+            )
         )

     @staticmethod
diff --git a/airflow/providers/google/cloud/utils/credentials_provider.py b/airflow/providers/google/cloud/utils/credentials_provider.py
index ac1cfb42a31ab..0a8143ceae782 100644
--- a/airflow/providers/google/cloud/utils/credentials_provider.py
+++ b/airflow/providers/google/cloud/utils/credentials_provider.py
@@ -201,7 +201,7 @@ def __init__(
         key_options = [key_path, key_secret_name, keyfile_dict]
         if len([x for x in key_options if x]) > 1:
             raise AirflowException(
-                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields"
+                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields "
                 "are all mutually exclusive. Please provide only one value."
             )
         self.key_path = key_path
diff --git a/airflow/providers/microsoft/psrp/operators/psrp.py b/airflow/providers/microsoft/psrp/operators/psrp.py
index 5ec70f292f536..ea07ee9115567 100644
--- a/airflow/providers/microsoft/psrp/operators/psrp.py
+++ b/airflow/providers/microsoft/psrp/operators/psrp.py
@@ -164,7 +164,7 @@ def get_template_env(self):
         def securestring(value: str):
             if not native:
                 raise AirflowException(
-                    "Filter 'securestring' not applicable to non-native " "templating environment"
+                    "Filter 'securestring' not applicable to non-native templating environment"
                 )
             return TaggedValue("SS", value)

diff --git a/airflow/utils/email.py b/airflow/utils/email.py
index ec0095e983056..868574379cfd9 100644
--- a/airflow/utils/email.py
+++ b/airflow/utils/email.py
@@ -99,7 +99,7 @@ def send_email_smtp(
     else:
         if from_email is None:
             raise Exception(
-                "You should set from email - either by smtp/smtp_mail_from config or " "`from_email parameter"
+                "You should set from email - either by smtp/smtp_mail_from config or `from_email` parameter"
             )
         mail_from = from_email

diff --git a/airflow/utils/file.py b/airflow/utils/file.py
index 5a3db7fd48f9b..db786a5d88fa6 100644
--- a/airflow/utils/file.py
+++ b/airflow/utils/file.py
@@ -241,7 +241,7 @@ def _find_path_from_directory(
         if dirpath in patterns_by_dir:
             raise RuntimeError(
                 "Detected recursive loop when walking DAG directory "
-                + f"{base_dir_path}: {dirpath} has appeared more than once."
+                f"{base_dir_path}: {dirpath} has appeared more than once."
             )
         patterns_by_dir.update({dirpath: patterns.copy()})

diff --git a/airflow/www/fab_security/manager.py b/airflow/www/fab_security/manager.py
index e34a3f736a8ad..8381f7b08cdc7 100644
--- a/airflow/www/fab_security/manager.py
+++ b/airflow/www/fab_security/manager.py
@@ -221,9 +221,7 @@ def __init__(self, appbuilder):
         # LDAP Config
         if self.auth_type == AUTH_LDAP:
             if "AUTH_LDAP_SERVER" not in app.config:
-                raise Exception(
-                    "No AUTH_LDAP_SERVER defined on config" " with AUTH_LDAP authentication type."
-                )
+                raise Exception("No AUTH_LDAP_SERVER defined on config with AUTH_LDAP authentication type.")
             app.config.setdefault("AUTH_LDAP_SEARCH", "")
             app.config.setdefault("AUTH_LDAP_SEARCH_FILTER", "")
             app.config.setdefault("AUTH_LDAP_APPEND_DOMAIN", "")
@@ -970,9 +968,7 @@ def _ldap_bind_indirect(self, ldap, con) -> None:
             con.simple_bind_s(self.auth_ldap_bind_user, self.auth_ldap_bind_password)
             log.debug(f"LDAP bind indirect SUCCESS with username: '{self.auth_ldap_bind_user}'")
         except ldap.INVALID_CREDENTIALS as ex:
-            log.error(
-                "AUTH_LDAP_BIND_USER and AUTH_LDAP_BIND_PASSWORD are" " not valid LDAP bind credentials"
-            )
+            log.error("AUTH_LDAP_BIND_USER and AUTH_LDAP_BIND_PASSWORD are not valid LDAP bind credentials")
             raise ex

     @staticmethod
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 79927a30242f7..e9a52611fcd65 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -1143,7 +1143,7 @@ def code(self, dag_id, session=None):
         except Exception as e:
             all_errors += (
                 "Exception encountered during "
-                + f"dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{e}\n"
+                f"dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{e}\n"
             )
             html_code = Markup('<p>Failed to load DAG file Code.</p><p>Details: {}</p>').format(
                 escape(all_errors)
diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
index 784290294b9ce..b066d3827da1c 100644
--- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
@@ -370,7 +370,7 @@ def prepare_docker_build_cache_command(
     )
     final_command.extend(["--platform", image_params.platform])
     final_command.extend(
-        [f"--cache-to=type=registry,ref={image_params.get_cache(image_params.platform)}," f"mode=max"]
+        [f"--cache-to=type=registry,ref={image_params.get_cache(image_params.platform)},mode=max"]
     )
     cmd = ['docker', 'buildx', 'inspect', 'airflow_cache']
     buildx_command_result = run_command(cmd, verbose=verbose, dry_run=dry_run, text=True)
diff --git a/dev/breeze/src/airflow_breeze/utils/image.py b/dev/breeze/src/airflow_breeze/utils/image.py
index 4131b1c0bb078..1962951f949ea 100644
--- a/dev/breeze/src/airflow_breeze/utils/image.py
+++ b/dev/breeze/src/airflow_breeze/utils/image.py
@@ -159,7 +159,7 @@ def run_pull_image(
 def tag_image_as_latest(image_params: _CommonBuildParams, dry_run: bool, verbose: bool) -> RunCommandResult:
     if image_params.airflow_image_name_with_tag == image_params.airflow_image_name:
         get_console().print(
-            f"[info]Skip tagging {image_params.airflow_image_name} " "as latest as it is already 'latest'[/]"
+            f"[info]Skip tagging {image_params.airflow_image_name} as latest as it is already 'latest'[/]"
         )
         return subprocess.CompletedProcess(returncode=0, args=[])
     return run_command(
@@ -251,7 +251,7 @@ def find_available_ci_image(github_repository: str, dry_run: bool, verbose: bool
     )
     if inspect_command_result.returncode == 0:
         get_console().print(
-            "[info]Running fix_ownership " f"with {shell_params.airflow_image_name_with_tag}.[/]"
+            f"[info]Running fix_ownership with {shell_params.airflow_image_name_with_tag}.[/]"
         )
         return shell_params
     shell_params, _ = just_pull_ci_image(
diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py
index 241407a1af548..b811980f722bd 100644
--- a/dev/breeze/src/airflow_breeze/utils/run_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py
@@ -174,7 +174,7 @@ def assert_pre_commit_installed(verbose: bool):
             sys.exit(1)
         else:
             get_console().print(
-                "\n[warning]Could not determine version of pre-commit. " "You might need to update it![/]\n"
+                "\n[warning]Could not determine version of pre-commit. You might need to update it![/]\n"
             )
     else:
         get_console().print("\n[error]Error checking for pre-commit-installation:[/]\n")
diff --git a/dev/system_tests/update_issue_status.py b/dev/system_tests/update_issue_status.py
index bc109fe1e56e3..d1bd9af566685 100755
--- a/dev/system_tests/update_issue_status.py
+++ b/dev/system_tests/update_issue_status.py
@@ -176,7 +176,7 @@ def update_issue_status(
     console.print(f" Re-added file number: {total_re_added}")
     console.print(f" Completed file number: {total_completed}")
     console.print(
-        f" Done {total_count_done}/{total_count_all} = " f"{(total_count_done * 100/ total_count_all):.2f}%"
+        f" Done {total_count_done}/{total_count_all} = {(total_count_done * 100/ total_count_all):.2f}%"
     )
     console.print()
diff --git a/setup.py b/setup.py
index 677dead0502bb..b3d4245e9e696 100644
--- a/setup.py
+++ b/setup.py
@@ -621,6 +621,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version
     'filelock',
     'flake8>=3.6.0',
     'flake8-colors',
+    'flake8-implicit-str-concat',
     'flaky',
     'freezegun',
     # Github3 version 3.1.2 requires PyJWT>=2.3.0 which clashes with Flask App Builder where PyJWT is <2.0.0
diff --git a/tests/always/test_connection.py b/tests/always/test_connection.py
index 8db252b746782..eefd762e49748 100644
--- a/tests/always/test_connection.py
+++ b/tests/always/test_connection.py
@@ -143,7 +143,7 @@ def test_connection_extra_with_encryption_rotate_fernet_key(self):
         description='with extras',
     ),
     UriTestCaseConfig(
-        test_conn_uri='scheme://user:password@host%2Flocation:1234/schema?' '__extra__=single+value',
+        test_conn_uri='scheme://user:password@host%2Flocation:1234/schema?__extra__=single+value',
         test_conn_attributes=dict(
             conn_type='scheme',
             host='host/location',
diff --git a/tests/api_connexion/endpoints/test_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_task_instance_endpoint.py
index a7ffd6e80358b..b4fd87ae2e0a6 100644
--- a/tests/api_connexion/endpoints/test_task_instance_endpoint.py
+++ b/tests/api_connexion/endpoints/test_task_instance_endpoint.py
@@ -314,7 +314,7 @@ def test_should_respond_200_mapped_task_instance_with_rtif(self, session):
         for map_index in (1, 2):
             response = self.client.get(
                 "/api/v1/dags/example_python_operator/dagRuns/TEST_DAG_RUN_ID/taskInstances"
-                + f"/print_the_context/{map_index}",
+                f"/print_the_context/{map_index}",
                 environ_overrides={"REMOTE_USER": "test"},
             )
             assert response.status_code == 200
@@ -480,7 +480,7 @@ class TestGetTaskInstances(TestTaskInstanceEndpoint):
                 {"state": State.NONE},
             ],
             False,
-            ("/api/v1/dags/example_python_operator/dagRuns/" "TEST_DAG_RUN_ID/taskInstances"),
+            ("/api/v1/dags/example_python_operator/dagRuns/TEST_DAG_RUN_ID/taskInstances"),
             4,
         ),
         (
diff --git a/tests/core/test_providers_manager.py b/tests/core/test_providers_manager.py
index 4d97eb2fa04dd..a98dc9534e823 100644
--- a/tests/core/test_providers_manager.py
+++ b/tests/core/test_providers_manager.py
@@ -238,5 +238,5 @@ def test_optional_feature_debug(self, mock_importlib_import_string):
             hook_class_name=None, provider_info=None, package_name=None, connection_type="test_connection"
         )
         assert [
-            "Optional provider feature disabled when importing 'HookClass' from " "'test_package' package"
+            "Optional provider feature disabled when importing 'HookClass' from 'test_package' package"
         ] == self._caplog.messages
diff --git a/tests/providers/amazon/aws/hooks/test_glacier.py b/tests/providers/amazon/aws/hooks/test_glacier.py
index c22620f3c0d84..864a29a554b73 100644
--- a/tests/providers/amazon/aws/hooks/test_glacier.py
+++ b/tests/providers/amazon/aws/hooks/test_glacier.py
@@ -58,11 +58,11 @@ def test_retrieve_inventory_should_log_mgs(self, mock_conn):
                 log.output,
                 [
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Retrieving inventory for vault: {VAULT_NAME}",
+                    f"Retrieving inventory for vault: {VAULT_NAME}",
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Initiated inventory-retrieval job for: {VAULT_NAME}",
+                    f"Initiated inventory-retrieval job for: {VAULT_NAME}",
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Retrieval Job ID: {job_id.get('jobId')}",
+                    f"Retrieval Job ID: {job_id.get('jobId')}",
                 ],
             )
@@ -86,7 +86,7 @@ def test_retrieve_inventory_results_should_log_mgs(self, mock_conn):
                 log.output,
                 [
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Retrieving the job results for vault: {VAULT_NAME}...",
+                    f"Retrieving the job results for vault: {VAULT_NAME}...",
                 ],
             )
@@ -110,8 +110,8 @@ def test_describe_job_should_log_mgs(self, mock_conn):
                 log.output,
                 [
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}",
+                    f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}",
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}",
+                    f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}",
                 ],
             )
diff --git a/tests/providers/databricks/operators/test_databricks.py b/tests/providers/databricks/operators/test_databricks.py
index 34e41a673236c..08ece4a455296 100644
--- a/tests/providers/databricks/operators/test_databricks.py
+++ b/tests/providers/databricks/operators/test_databricks.py
@@ -183,7 +183,7 @@ def test_init_with_bad_type(self):
         # Looks a bit weird since we have to escape regex reserved symbols.
         exception_message = (
             r'Type \<(type|class) \'datetime.datetime\'\> used '
-            + r'for parameter json\[test\] is not a number or a string'
+            r'for parameter json\[test\] is not a number or a string'
         )
         with pytest.raises(AirflowException, match=exception_message):
             DatabricksSubmitRunOperator(task_id=TASK_ID, json=json)
@@ -498,7 +498,7 @@ def test_init_with_bad_type(self):
         # Looks a bit weird since we have to escape regex reserved symbols.
         exception_message = (
             r'Type \<(type|class) \'datetime.datetime\'\> used '
-            + r'for parameter json\[test\] is not a number or a string'
+            r'for parameter json\[test\] is not a number or a string'
         )
         with pytest.raises(AirflowException, match=exception_message):
             DatabricksRunNowOperator(task_id=TASK_ID, job_id=JOB_ID, json=json)
diff --git a/tests/providers/google/cloud/hooks/test_datacatalog.py b/tests/providers/google/cloud/hooks/test_datacatalog.py
index bcc2078a8ff12..f2d7e1db64b41 100644
--- a/tests/providers/google/cloud/hooks/test_datacatalog.py
+++ b/tests/providers/google/cloud/hooks/test_datacatalog.py
@@ -70,11 +70,11 @@
 TEST_TAG_TEMPLATE_PATH: str = f"projects/{{}}/locations/{TEST_LOCATION}/tagTemplates/{TEST_TAG_TEMPLATE_ID}"
 TEST_TAG_TEMPLATE_FIELD_PATH: str = (
     f"projects/{{}}/locations/{TEST_LOCATION}/tagTemplates/"
-    + f"{TEST_TAG_TEMPLATE_ID}/fields/{TEST_TAG_TEMPLATE_FIELD_ID}"
+    f"{TEST_TAG_TEMPLATE_ID}/fields/{TEST_TAG_TEMPLATE_FIELD_ID}"
 )
 TEST_TAG_PATH: str = (
     f"projects/{{}}/locations/{TEST_LOCATION}/entryGroups/{TEST_ENTRY_GROUP_ID}"
-    + f"/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
+    f"/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
 )
 TEST_PROJECT_ID_1 = "example-project-1"
 TEST_PROJECT_ID_2 = "example-project-2"
diff --git a/tests/providers/google/cloud/operators/test_datacatalog.py b/tests/providers/google/cloud/operators/test_datacatalog.py
index 24c5cc99b84d3..ff6f14a10f01a 100644
--- a/tests/providers/google/cloud/operators/test_datacatalog.py
+++ b/tests/providers/google/cloud/operators/test_datacatalog.py
@@ -76,7 +76,7 @@
 TEST_UPDATE_MASK: Dict = {"fields": ["name"]}
 TEST_ENTRY_PATH: str = (
     f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}"
-    + f"/entryGroups/{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}"
+    f"/entryGroups/{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}"
 )
 TEST_ENTRY_GROUP_PATH: str = (
     f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}/entryGroups/{TEST_ENTRY_GROUP_ID}"
@@ -86,7 +86,7 @@
 )
 TEST_TAG_PATH: str = (
     f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}/entryGroups/"
-    + f"{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
+    f"{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
 )

 TEST_ENTRY: Entry = Entry(name=TEST_ENTRY_PATH)
diff --git a/tests/providers/google/cloud/operators/test_mlengine.py b/tests/providers/google/cloud/operators/test_mlengine.py
index af7a487f42af0..36cd5818f9ee8 100644
--- a/tests/providers/google/cloud/operators/test_mlengine.py
+++ b/tests/providers/google/cloud/operators/test_mlengine.py
@@ -230,7 +230,7 @@ def test_invalid_model_origin(self):
         task_args['model_name'] = 'fake_model'
         with pytest.raises(AirflowException) as ctx:
             MLEngineStartBatchPredictionJobOperator(**task_args).execute(None)
-        assert 'Ambiguous model origin: Both uri and ' 'model/version name are provided.' == str(ctx.value)
+        assert 'Ambiguous model origin: Both uri and model/version name are provided.' == str(ctx.value)

         # Test that both uri and model/version is given
         task_args = self.BATCH_PREDICTION_DEFAULT_ARGS.copy()
@@ -239,7 +239,7 @@ def test_invalid_model_origin(self):
         task_args['version_name'] = 'fake_version'
         with pytest.raises(AirflowException) as ctx:
             MLEngineStartBatchPredictionJobOperator(**task_args).execute(None)
-        assert 'Ambiguous model origin: Both uri and ' 'model/version name are provided.' == str(ctx.value)
+        assert 'Ambiguous model origin: Both uri and model/version name are provided.' == str(ctx.value)

         # Test that a version is given without a model
         task_args = self.BATCH_PREDICTION_DEFAULT_ARGS.copy()
diff --git a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
index d6ddd4ddd21c9..c006c230d388f 100644
--- a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
@@ -47,12 +47,14 @@
     b'{"some_num": 44, "some_str": "mock_row_content_3"}\n',
 ]
 CSV_LINES = [
-    b'some_str,some_num\r\n' b'mock_row_content_1,42\r\n',
+    b'some_str,some_num\r\n',
+    b'mock_row_content_1,42\r\n',
     b'mock_row_content_2,43\r\n',
     b'mock_row_content_3,44\r\n',
 ]
 CSV_LINES_PIPE_DELIMITED = [
-    b'some_str|some_num\r\n' b'mock_row_content_1|42\r\n',
+    b'some_str|some_num\r\n',
+    b'mock_row_content_1|42\r\n',
     b'mock_row_content_2|43\r\n',
     b'mock_row_content_3|44\r\n',
 ]
diff --git a/tests/providers/google/cloud/utils/test_credentials_provider.py b/tests/providers/google/cloud/utils/test_credentials_provider.py
index d392b170934e8..c1fdd7268c4fc 100644
--- a/tests/providers/google/cloud/utils/test_credentials_provider.py
+++ b/tests/providers/google/cloud/utils/test_credentials_provider.py
@@ -167,7 +167,7 @@ def test_get_credentials_and_project_id_with_default_auth_and_scopes(self, scope
         assert mock_auth_default.return_value == result

     @mock.patch(
-        'airflow.providers.google.cloud.utils.credentials_provider.' 'impersonated_credentials.Credentials'
+        'airflow.providers.google.cloud.utils.credentials_provider.impersonated_credentials.Credentials'
     )
     @mock.patch('google.auth.default')
     def test_get_credentials_and_project_id_with_default_auth_and_target_principal(
@@ -189,7 +189,7 @@ def test_get_credentials_and_project_id_with_default_auth_and_target_principal(
         assert (mock_impersonated_credentials.return_value, ANOTHER_PROJECT_ID) == result

     @mock.patch(
-        'airflow.providers.google.cloud.utils.credentials_provider.' 'impersonated_credentials.Credentials'
+        'airflow.providers.google.cloud.utils.credentials_provider.impersonated_credentials.Credentials'
     )
     @mock.patch('google.auth.default')
     def test_get_credentials_and_project_id_with_default_auth_and_scopes_and_target_principal(
@@ -212,7 +212,7 @@ def test_get_credentials_and_project_id_with_default_auth_and_scopes_and_target_
         assert (mock_impersonated_credentials.return_value, self.test_project_id) == result

     @mock.patch(
-        'airflow.providers.google.cloud.utils.credentials_provider.' 'impersonated_credentials.Credentials'
+        'airflow.providers.google.cloud.utils.credentials_provider.impersonated_credentials.Credentials'
     )
     @mock.patch('google.auth.default')
     def test_get_credentials_and_project_id_with_default_auth_and_target_principal_and_delegates(
@@ -312,7 +312,7 @@ def test_get_credentials_and_project_id_with_mutually_exclusive_configuration(
         with pytest.raises(
             AirflowException,
             match=re.escape(
-                'The `keyfile_dict`, `key_path`, and `key_secret_name` fieldsare all mutually exclusive.'
+                'The `keyfile_dict`, `key_path`, and `key_secret_name` fields are all mutually exclusive.'
             ),
         ):
             get_credentials_and_project_id(key_path='KEY.json', keyfile_dict={'private_key': 'PRIVATE_KEY'})
diff --git a/tests/providers/google/common/hooks/test_base_google.py b/tests/providers/google/common/hooks/test_base_google.py
index bfb422965f40c..f833469412bee 100644
--- a/tests/providers/google/common/hooks/test_base_google.py
+++ b/tests/providers/google/common/hooks/test_base_google.py
@@ -448,7 +448,7 @@ def test_get_credentials_and_project_id_with_mutually_exclusive_configuration(
         with pytest.raises(
             AirflowException,
             match=re.escape(
-                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields" "are all mutually exclusive. "
+                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields are all mutually exclusive. "
             ),
         ):
             self.instance._get_credentials_and_project_id()
diff --git a/tests/providers/ssh/hooks/test_ssh.py b/tests/providers/ssh/hooks/test_ssh.py
index b17e3170a8007..c248ebf45d4bf 100644
--- a/tests/providers/ssh/hooks/test_ssh.py
+++ b/tests/providers/ssh/hooks/test_ssh.py
@@ -742,7 +742,7 @@ def test_openssh_private_key(self):

     def test_oneline_key(self):
         with pytest.raises(Exception):
-            TEST_ONELINE_KEY = "-----BEGIN OPENSSH" + "PRIVATE KEY-----asdfg-----END OPENSSH PRIVATE KEY-----"
+            TEST_ONELINE_KEY = "-----BEGIN OPENSSHPRIVATE KEY-----asdfg-----END OPENSSHPRIVATE KEY-----"
             session = settings.Session()
             try:
                 conn = Connection(
diff --git a/tests/system/providers/google/bigquery/example_bigquery_sensors.py b/tests/system/providers/google/bigquery/example_bigquery_sensors.py
index 21d9f530c0e61..edd9db51ba1b0 100644
--- a/tests/system/providers/google/bigquery/example_bigquery_sensors.py
+++ b/tests/system/providers/google/bigquery/example_bigquery_sensors.py
@@ -48,7 +48,7 @@

 PARTITION_NAME = "{{ ds_nodash }}"

-INSERT_ROWS_QUERY = f"INSERT {DATASET_NAME}.{TABLE_NAME} VALUES " "(42, '{{ ds }}')"
+INSERT_ROWS_QUERY = f"INSERT {DATASET_NAME}.{TABLE_NAME} VALUES (42, '{{ ds }}')"

 SCHEMA = [
     {"name": "value", "type": "INTEGER", "mode": "REQUIRED"},
diff --git a/tests/utils/test_file.py b/tests/utils/test_file.py
index 99f7e90a7d033..3d94f36228b33 100644
--- a/tests/utils/test_file.py
+++ b/tests/utils/test_file.py
@@ -159,8 +159,7 @@ def test_find_path_from_directory_fails_on_recursive_link(self, test_dir):
             list(find_path_from_directory(test_dir, ignore_list_file, ignore_file_syntax="glob"))
             assert False, "Walking a self-recursive tree should fail"
         except RuntimeError as err:
-            assert (
-                str(err)
-                == f"Detected recursive loop when walking DAG directory {test_dir}: "
-                + f"{Path(recursing_tgt).resolve()} has appeared more than once."
+            assert str(err) == (
+                f"Detected recursive loop when walking DAG directory {test_dir}: "
+                f"{Path(recursing_tgt).resolve()} has appeared more than once."
             )
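
Background for reviewers (not part of the patch): Python's parser joins adjacent string literals into one, which is the bug class this change removes and the reason flake8-implicit-str-concat is added to the lint dependencies in setup.py above. A minimal, self-contained sketch; the constant names mirror the test_mysql_to_gcs.py hunk, and the values are illustrative only:

# Bug class: a missing comma between adjacent literals silently merges two
# list items into one (cf. the CSV_LINES fix in test_mysql_to_gcs.py above).
csv_lines_buggy = [
    b'some_str,some_num\r\n'  # <- no comma: glued onto the next literal
    b'mock_row_content_1,42\r\n',
    b'mock_row_content_2,43\r\n',
]
assert len(csv_lines_buggy) == 2  # one element short

csv_lines_fixed = [
    b'some_str,some_num\r\n',
    b'mock_row_content_1,42\r\n',
    b'mock_row_content_2,43\r\n',
]
assert len(csv_lines_fixed) == 3

# Deliberate concatenation stays readable when split across lines inside
# parentheses, the style the patch rewrites '...' + f'...' chains into:
operation_name = "op-123"  # hypothetical value, for illustration only
name = (
    "projects/example-project"  # hypothetical project path
    f"/operations/{operation_name}"
)
assert name == "projects/example-project/operations/op-123"

As I understand the plugin's defaults, it flags implicit concatenation on a single line (where a stray pair of quotes or a lost comma is almost always a mistake) while leaving the multi-line parenthesized form alone, which is why the hunks above rewrite explicit `+` chains into that form rather than collapsing everything onto one line.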