Introduce flake8-implicit-str-concat plugin to static checks (#23873)
(cherry picked from commit 92ddcf4)
josh-fell authored and ephraimbuddy committed May 28, 2022
1 parent 48efec1 commit 54cad99
Showing 32 changed files with 55 additions and 55 deletions.
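For background: Python joins adjacent string literals at compile time ("implicit string concatenation"). That is handy for wrapping long messages, but it also means a missing comma between two list items silently fuses them into one string instead of raising an error. The flake8-implicit-str-concat plugin flags these patterns — implicit concatenation on one line or across lines, and explicit `+` between literals that could simply be merged — which is why most hunks below collapse split literals into a single string. A minimal sketch of the failure mode (hypothetical names, not taken from this diff):

    # Adjacent literals concatenate at parse time, so a missing comma between
    # list items silently merges two elements instead of raising an error.
    tags = [
        "etl",
        "nightly"  # <- comma forgotten here
        "backfill",
    ]
    assert tags == ["etl", "nightlybackfill"]  # two intended items became one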
2 changes: 1 addition & 1 deletion airflow/configuration.py
@@ -391,7 +391,7 @@ def _validate_enums(self):
                 if value not in enum_options:
                     raise AirflowConfigException(
                         f"`[{section_key}] {option_key}` should not be "
-                        + f"{value!r}. Possible values: {', '.join(enum_options)}."
+                        f"{value!r}. Possible values: {', '.join(enum_options)}."
                     )

     def _validate_config_dependencies(self):
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/eks.py
@@ -638,7 +638,7 @@ class EKSHook(EksHook):

     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use `airflow.providers.amazon.aws.hooks.eks.EksHook`.",
+            "This hook is deprecated. Please use `airflow.providers.amazon.aws.hooks.eks.EksHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/ses.py
@@ -106,7 +106,7 @@ class SESHook(SesHook):

     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use :class:`airflow.providers.amazon.aws.hooks.ses.SesHook`.",
+            "This hook is deprecated. Please use :class:`airflow.providers.amazon.aws.hooks.ses.SesHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/sns.py
@@ -100,7 +100,7 @@ class AwsSnsHook(SnsHook):

     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use :class:`airflow.providers.amazon.aws.hooks.sns.SnsHook`.",
+            "This hook is deprecated. Please use :class:`airflow.providers.amazon.aws.hooks.sns.SnsHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/sqs.py
@@ -88,7 +88,7 @@ class SQSHook(SqsHook):

     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use :class:`airflow.providers.amazon.aws.hooks.sqs.SqsHook`.",
+            "This hook is deprecated. Please use :class:`airflow.providers.amazon.aws.hooks.sqs.SqsHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
2 changes: 1 addition & 1 deletion airflow/providers/databricks/operators/databricks_repos.py
@@ -90,7 +90,7 @@ def __init__(
             self.git_provider = self.__detect_repo_provider__(git_url)
             if self.git_provider is None:
                 raise AirflowException(
-                    "git_provider isn't specified and couldn't be guessed for URL {git_url}"
+                    f"git_provider isn't specified and couldn't be guessed for URL {git_url}"
                 )
         else:
             self.git_provider = git_provider
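Beyond cleaning up literals, the hunk above also adds a missing f-prefix: without it, the {git_url} placeholder would be emitted verbatim in the exception text rather than interpolated. A small sketch of the difference (hypothetical URL):

    git_url = "https://example.com/org/repo.git"
    old = "couldn't be guessed for URL {git_url}"    # literal braces, no interpolation
    new = f"couldn't be guessed for URL {git_url}"   # interpolates the actual URL
    assert "{git_url}" in old and git_url in new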
6 changes: 4 additions & 2 deletions airflow/providers/google/cloud/hooks/kubernetes_engine.py
@@ -123,8 +123,10 @@ def get_operation(self, operation_name: str, project_id: Optional[str] = None) -
         :return: The new, updated operation from Google Cloud
         """
         return self.get_cluster_manager_client().get_operation(
-            name=f'projects/{project_id or self.project_id}'
-            + f'/locations/{self.location}/operations/{operation_name}'
+            name=(
+                f'projects/{project_id or self.project_id}'
+                f'/locations/{self.location}/operations/{operation_name}'
+            )
         )

     @staticmethod
@@ -201,7 +201,7 @@ def __init__(
         key_options = [key_path, key_secret_name, keyfile_dict]
         if len([x for x in key_options if x]) > 1:
             raise AirflowException(
-                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields"
+                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields "
                 "are all mutually exclusive. Please provide only one value."
             )
         self.key_path = key_path
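The hunk above fixes a real rendering bug that implicit concatenation had masked: the first fragment lacked a trailing space, so the runtime message read "...fieldsare all mutually exclusive...". The matching test expectations are corrected later in this diff. A sketch of the joined result before the fix:

    old_msg = (
        "The `keyfile_dict`, `key_path`, and `key_secret_name` fields"
        "are all mutually exclusive. Please provide only one value."
    )
    assert "fieldsare" in old_msg  # fragments fuse without a separating space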
2 changes: 1 addition & 1 deletion airflow/providers/microsoft/psrp/operators/psrp.py
@@ -164,7 +164,7 @@ def get_template_env(self):
         def securestring(value: str):
             if not native:
                 raise AirflowException(
-                    "Filter 'securestring' not applicable to non-native " "templating environment"
+                    "Filter 'securestring' not applicable to non-native templating environment"
                 )
             return TaggedValue("SS", value)
2 changes: 1 addition & 1 deletion airflow/utils/email.py
@@ -99,7 +99,7 @@ def send_email_smtp(
     else:
         if from_email is None:
             raise Exception(
-                "You should set from email - either by smtp/smtp_mail_from config or " "`from_email parameter"
+                "You should set from email - either by smtp/smtp_mail_from config or `from_email` parameter"
             )
         mail_from = from_email
2 changes: 1 addition & 1 deletion airflow/utils/file.py
@@ -241,7 +241,7 @@ def _find_path_from_directory(
         if dirpath in patterns_by_dir:
             raise RuntimeError(
                 "Detected recursive loop when walking DAG directory "
-                + f"{base_dir_path}: {dirpath} has appeared more than once."
+                f"{base_dir_path}: {dirpath} has appeared more than once."
             )
         patterns_by_dir.update({dirpath: patterns.copy()})
8 changes: 2 additions & 6 deletions airflow/www/fab_security/manager.py
@@ -221,9 +221,7 @@ def __init__(self, appbuilder):
         # LDAP Config
         if self.auth_type == AUTH_LDAP:
             if "AUTH_LDAP_SERVER" not in app.config:
-                raise Exception(
-                    "No AUTH_LDAP_SERVER defined on config" " with AUTH_LDAP authentication type."
-                )
+                raise Exception("No AUTH_LDAP_SERVER defined on config with AUTH_LDAP authentication type.")
             app.config.setdefault("AUTH_LDAP_SEARCH", "")
             app.config.setdefault("AUTH_LDAP_SEARCH_FILTER", "")
             app.config.setdefault("AUTH_LDAP_APPEND_DOMAIN", "")

@@ -970,9 +968,7 @@ def _ldap_bind_indirect(self, ldap, con) -> None:
             con.simple_bind_s(self.auth_ldap_bind_user, self.auth_ldap_bind_password)
             log.debug(f"LDAP bind indirect SUCCESS with username: '{self.auth_ldap_bind_user}'")
         except ldap.INVALID_CREDENTIALS as ex:
-            log.error(
-                "AUTH_LDAP_BIND_USER and AUTH_LDAP_BIND_PASSWORD are" " not valid LDAP bind credentials"
-            )
+            log.error("AUTH_LDAP_BIND_USER and AUTH_LDAP_BIND_PASSWORD are not valid LDAP bind credentials")
             raise ex

     @staticmethod
2 changes: 1 addition & 1 deletion airflow/www/views.py
@@ -1143,7 +1143,7 @@ def code(self, dag_id, session=None):
         except Exception as e:
             all_errors += (
                 "Exception encountered during "
-                + f"dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{e}\n"
+                f"dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{e}\n"
             )
             html_code = Markup('<p>Failed to load DAG file Code.</p><p>Details: {}</p>').format(
                 escape(all_errors)
@@ -370,7 +370,7 @@ def prepare_docker_build_cache_command(
     )
     final_command.extend(["--platform", image_params.platform])
     final_command.extend(
-        [f"--cache-to=type=registry,ref={image_params.get_cache(image_params.platform)}," f"mode=max"]
+        [f"--cache-to=type=registry,ref={image_params.get_cache(image_params.platform)},mode=max"]
     )
     cmd = ['docker', 'buildx', 'inspect', 'airflow_cache']
     buildx_command_result = run_command(cmd, verbose=verbose, dry_run=dry_run, text=True)
4 changes: 2 additions & 2 deletions dev/breeze/src/airflow_breeze/utils/image.py
@@ -159,7 +159,7 @@ def run_pull_image(
 def tag_image_as_latest(image_params: _CommonBuildParams, dry_run: bool, verbose: bool) -> RunCommandResult:
     if image_params.airflow_image_name_with_tag == image_params.airflow_image_name:
         get_console().print(
-            f"[info]Skip tagging {image_params.airflow_image_name} " "as latest as it is already 'latest'[/]"
+            f"[info]Skip tagging {image_params.airflow_image_name} as latest as it is already 'latest'[/]"
         )
         return subprocess.CompletedProcess(returncode=0, args=[])
     return run_command(

@@ -251,7 +251,7 @@ def find_available_ci_image(github_repository: str, dry_run: bool, verbose: bool
     )
     if inspect_command_result.returncode == 0:
         get_console().print(
-            "[info]Running fix_ownership " f"with {shell_params.airflow_image_name_with_tag}.[/]"
+            f"[info]Running fix_ownership with {shell_params.airflow_image_name_with_tag}.[/]"
         )
         return shell_params
     shell_params, _ = just_pull_ci_image(
2 changes: 1 addition & 1 deletion dev/breeze/src/airflow_breeze/utils/run_utils.py
@@ -174,7 +174,7 @@ def assert_pre_commit_installed(verbose: bool):
             sys.exit(1)
         else:
             get_console().print(
-                "\n[warning]Could not determine version of pre-commit. " "You might need to update it![/]\n"
+                "\n[warning]Could not determine version of pre-commit. You might need to update it![/]\n"
             )
     else:
         get_console().print("\n[error]Error checking for pre-commit-installation:[/]\n")
2 changes: 1 addition & 1 deletion dev/system_tests/update_issue_status.py
@@ -176,7 +176,7 @@ def update_issue_status(
     console.print(f"  Re-added file number: {total_re_added}")
     console.print(f"  Completed file number: {total_completed}")
     console.print(
-        f"  Done {total_count_done}/{total_count_all} = " f"{(total_count_done * 100/ total_count_all):.2f}%"
+        f"  Done {total_count_done}/{total_count_all} = {(total_count_done * 100/ total_count_all):.2f}%"
     )
     console.print()
1 change: 1 addition & 0 deletions setup.py
@@ -621,6 +621,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version
     'filelock',
     'flake8>=3.6.0',
     'flake8-colors',
+    'flake8-implicit-str-concat',
     'flaky',
     'freezegun',
     # Github3 version 3.1.2 requires PyJWT>=2.3.0 which clashes with Flask App Builder where PyJWT is <2.0.0
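Once installed via the devel extras above, flake8 should pick the plugin up automatically through its entry point and report ISC-coded violations alongside the usual checks; no extra configuration is strictly required unless a select/ignore list filters out the codes (an assumption about this repo's flake8 setup, not stated in the diff). For example, a line like the following hypothetical snippet would be flagged as implicit concatenation on one line (ISC001):

    greeting = "Hello, " "world"  # adjacent literals; likely a forgotten comma or operator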
2 changes: 1 addition & 1 deletion tests/always/test_connection.py
@@ -143,7 +143,7 @@ def test_connection_extra_with_encryption_rotate_fernet_key(self):
             description='with extras',
         ),
         UriTestCaseConfig(
-            test_conn_uri='scheme://user:password@host%2Flocation:1234/schema?' '__extra__=single+value',
+            test_conn_uri='scheme://user:password@host%2Flocation:1234/schema?__extra__=single+value',
             test_conn_attributes=dict(
                 conn_type='scheme',
                 host='host/location',
4 changes: 2 additions & 2 deletions tests/api_connexion/endpoints/test_task_instance_endpoint.py
@@ -314,7 +314,7 @@ def test_should_respond_200_mapped_task_instance_with_rtif(self, session):
         for map_index in (1, 2):
             response = self.client.get(
                 "/api/v1/dags/example_python_operator/dagRuns/TEST_DAG_RUN_ID/taskInstances"
-                + f"/print_the_context/{map_index}",
+                f"/print_the_context/{map_index}",
                 environ_overrides={"REMOTE_USER": "test"},
             )
             assert response.status_code == 200

@@ -480,7 +480,7 @@ class TestGetTaskInstances(TestTaskInstanceEndpoint):
                 {"state": State.NONE},
             ],
             False,
-            ("/api/v1/dags/example_python_operator/dagRuns/" "TEST_DAG_RUN_ID/taskInstances"),
+            ("/api/v1/dags/example_python_operator/dagRuns/TEST_DAG_RUN_ID/taskInstances"),
             4,
         ),
         (
2 changes: 1 addition & 1 deletion tests/core/test_providers_manager.py
@@ -238,5 +238,5 @@ def test_optional_feature_debug(self, mock_importlib_import_string):
             hook_class_name=None, provider_info=None, package_name=None, connection_type="test_connection"
         )
         assert [
-            "Optional provider feature disabled when importing 'HookClass' from " "'test_package' package"
+            "Optional provider feature disabled when importing 'HookClass' from 'test_package' package"
         ] == self._caplog.messages
12 changes: 6 additions & 6 deletions tests/providers/amazon/aws/hooks/test_glacier.py
@@ -58,11 +58,11 @@ def test_retrieve_inventory_should_log_mgs(self, mock_conn):
             log.output,
             [
                 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                + f"Retrieving inventory for vault: {VAULT_NAME}",
+                f"Retrieving inventory for vault: {VAULT_NAME}",
                 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                + f"Initiated inventory-retrieval job for: {VAULT_NAME}",
+                f"Initiated inventory-retrieval job for: {VAULT_NAME}",
                 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                + f"Retrieval Job ID: {job_id.get('jobId')}",
+                f"Retrieval Job ID: {job_id.get('jobId')}",
             ],
         )

@@ -86,7 +86,7 @@ def test_retrieve_inventory_results_should_log_mgs(self, mock_conn):
             log.output,
             [
                 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                + f"Retrieving the job results for vault: {VAULT_NAME}...",
+                f"Retrieving the job results for vault: {VAULT_NAME}...",
             ],
         )

@@ -110,8 +110,8 @@ def test_describe_job_should_log_mgs(self, mock_conn):
             log.output,
             [
                 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                + f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}",
+                f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}",
                 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                + f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}",
+                f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}",
             ],
         )
4 changes: 2 additions & 2 deletions tests/providers/databricks/operators/test_databricks.py
@@ -183,7 +183,7 @@ def test_init_with_bad_type(self):
         # Looks a bit weird since we have to escape regex reserved symbols.
         exception_message = (
             r'Type \<(type|class) \'datetime.datetime\'\> used '
-            + r'for parameter json\[test\] is not a number or a string'
+            r'for parameter json\[test\] is not a number or a string'
         )
         with pytest.raises(AirflowException, match=exception_message):
             DatabricksSubmitRunOperator(task_id=TASK_ID, json=json)

@@ -498,7 +498,7 @@ def test_init_with_bad_type(self):
         # Looks a bit weird since we have to escape regex reserved symbols.
         exception_message = (
             r'Type \<(type|class) \'datetime.datetime\'\> used '
-            + r'for parameter json\[test\] is not a number or a string'
+            r'for parameter json\[test\] is not a number or a string'
         )
         with pytest.raises(AirflowException, match=exception_message):
             DatabricksRunNowOperator(task_id=TASK_ID, job_id=JOB_ID, json=json)
4 changes: 2 additions & 2 deletions tests/providers/google/cloud/hooks/test_datacatalog.py
@@ -70,11 +70,11 @@
 TEST_TAG_TEMPLATE_PATH: str = f"projects/{{}}/locations/{TEST_LOCATION}/tagTemplates/{TEST_TAG_TEMPLATE_ID}"
 TEST_TAG_TEMPLATE_FIELD_PATH: str = (
     f"projects/{{}}/locations/{TEST_LOCATION}/tagTemplates/"
-    + f"{TEST_TAG_TEMPLATE_ID}/fields/{TEST_TAG_TEMPLATE_FIELD_ID}"
+    f"{TEST_TAG_TEMPLATE_ID}/fields/{TEST_TAG_TEMPLATE_FIELD_ID}"
 )
 TEST_TAG_PATH: str = (
     f"projects/{{}}/locations/{TEST_LOCATION}/entryGroups/{TEST_ENTRY_GROUP_ID}"
-    + f"/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
+    f"/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
 )
 TEST_PROJECT_ID_1 = "example-project-1"
 TEST_PROJECT_ID_2 = "example-project-2"
4 changes: 2 additions & 2 deletions tests/providers/google/cloud/operators/test_datacatalog.py
@@ -76,7 +76,7 @@
 TEST_UPDATE_MASK: Dict = {"fields": ["name"]}
 TEST_ENTRY_PATH: str = (
     f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}"
-    + f"/entryGroups/{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}"
+    f"/entryGroups/{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}"
 )
 TEST_ENTRY_GROUP_PATH: str = (
     f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}/entryGroups/{TEST_ENTRY_GROUP_ID}"
@@ -86,7 +86,7 @@
 )
 TEST_TAG_PATH: str = (
     f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}/entryGroups/"
-    + f"{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
+    f"{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
 )

 TEST_ENTRY: Entry = Entry(name=TEST_ENTRY_PATH)
4 changes: 2 additions & 2 deletions tests/providers/google/cloud/operators/test_mlengine.py
@@ -230,7 +230,7 @@ def test_invalid_model_origin(self):
         task_args['model_name'] = 'fake_model'
         with pytest.raises(AirflowException) as ctx:
             MLEngineStartBatchPredictionJobOperator(**task_args).execute(None)
-        assert 'Ambiguous model origin: Both uri and ' 'model/version name are provided.' == str(ctx.value)
+        assert 'Ambiguous model origin: Both uri and model/version name are provided.' == str(ctx.value)

         # Test that both uri and model/version is given
         task_args = self.BATCH_PREDICTION_DEFAULT_ARGS.copy()
@@ -239,7 +239,7 @@ def test_invalid_model_origin(self):
         task_args['version_name'] = 'fake_version'
         with pytest.raises(AirflowException) as ctx:
             MLEngineStartBatchPredictionJobOperator(**task_args).execute(None)
-        assert 'Ambiguous model origin: Both uri and ' 'model/version name are provided.' == str(ctx.value)
+        assert 'Ambiguous model origin: Both uri and model/version name are provided.' == str(ctx.value)

         # Test that a version is given without a model
         task_args = self.BATCH_PREDICTION_DEFAULT_ARGS.copy()
6 changes: 4 additions & 2 deletions tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
@@ -47,12 +47,14 @@
     b'{"some_num": 44, "some_str": "mock_row_content_3"}\n',
 ]
 CSV_LINES = [
-    b'some_str,some_num\r\n' b'mock_row_content_1,42\r\n',
+    b'some_str,some_num\r\n',
+    b'mock_row_content_1,42\r\n',
     b'mock_row_content_2,43\r\n',
     b'mock_row_content_3,44\r\n',
 ]
 CSV_LINES_PIPE_DELIMITED = [
-    b'some_str|some_num\r\n' b'mock_row_content_1|42\r\n',
+    b'some_str|some_num\r\n',
+    b'mock_row_content_1|42\r\n',
     b'mock_row_content_2|43\r\n',
     b'mock_row_content_3|44\r\n',
 ]
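This is the clearest case in the diff of the lint catching a data bug rather than style: the old fixtures fused the CSV header and the first data row into a single list element, so each list held three elements where four were intended. A sketch of the old behavior:

    old_first = b'some_str,some_num\r\n' b'mock_row_content_1,42\r\n'
    # One bytes object, not two list items:
    assert old_first == b'some_str,some_num\r\nmock_row_content_1,42\r\n'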
@@ -167,7 +167,7 @@ def test_get_credentials_and_project_id_with_default_auth_and_scopes(self, scope
         assert mock_auth_default.return_value == result

     @mock.patch(
-        'airflow.providers.google.cloud.utils.credentials_provider.' 'impersonated_credentials.Credentials'
+        'airflow.providers.google.cloud.utils.credentials_provider.impersonated_credentials.Credentials'
     )
     @mock.patch('google.auth.default')
     def test_get_credentials_and_project_id_with_default_auth_and_target_principal(
@@ -189,7 +189,7 @@ def test_get_credentials_and_project_id_with_default_auth_and_target_principal(
         assert (mock_impersonated_credentials.return_value, ANOTHER_PROJECT_ID) == result

     @mock.patch(
-        'airflow.providers.google.cloud.utils.credentials_provider.' 'impersonated_credentials.Credentials'
+        'airflow.providers.google.cloud.utils.credentials_provider.impersonated_credentials.Credentials'
     )
     @mock.patch('google.auth.default')
     def test_get_credentials_and_project_id_with_default_auth_and_scopes_and_target_principal(
@@ -212,7 +212,7 @@ def test_get_credentials_and_project_id_with_default_auth_and_scopes_and_target_
         assert (mock_impersonated_credentials.return_value, self.test_project_id) == result

     @mock.patch(
-        'airflow.providers.google.cloud.utils.credentials_provider.' 'impersonated_credentials.Credentials'
+        'airflow.providers.google.cloud.utils.credentials_provider.impersonated_credentials.Credentials'
     )
     @mock.patch('google.auth.default')
     def test_get_credentials_and_project_id_with_default_auth_and_target_principal_and_delegates(
@@ -312,7 +312,7 @@ def test_get_credentials_and_project_id_with_mutually_exclusive_configuration(
         with pytest.raises(
             AirflowException,
             match=re.escape(
-                'The `keyfile_dict`, `key_path`, and `key_secret_name` fieldsare all mutually exclusive.'
+                'The `keyfile_dict`, `key_path`, and `key_secret_name` fields are all mutually exclusive.'
             ),
         ):
             get_credentials_and_project_id(key_path='KEY.json', keyfile_dict={'private_key': 'PRIVATE_KEY'})
2 changes: 1 addition & 1 deletion tests/providers/google/common/hooks/test_base_google.py
@@ -448,7 +448,7 @@ def test_get_credentials_and_project_id_with_mutually_exclusive_configuration(
         with pytest.raises(
             AirflowException,
             match=re.escape(
-                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields" "are all mutually exclusive. "
+                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields are all mutually exclusive. "
             ),
         ):
             self.instance._get_credentials_and_project_id()