Clean up in-line f-string concatenation #23591

Merged (1 commit) · May 9, 2022
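
Background for the change: Python implicitly concatenates adjacent string literals, so line wrapping (manual or by Black) sometimes leaves two f-strings sitting side by side on a single line, where the split adds nothing. The sketch below is a minimal illustration of the before/after shape of this cleanup; the names and values are made up and not taken from the Airflow codebase.

    host = "localhost"
    port = 8080

    # Before: two adjacent f-string literals, glued together by implicit concatenation.
    url_before = f"http://{host}:" f"{port}/health"

    # After: one literal, same result, easier to read and grep.
    url_after = f"http://{host}:{port}/health"

    assert url_before == url_after == "http://localhost:8080/health"
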
airflow/providers/amazon/aws/utils/redshift.py (1 addition, 1 deletion)

@@ -43,7 +43,7 @@ def build_credentials_block(credentials: ReadOnlyCredentials) -> str:

     else:
         credentials_line = (
-            f"aws_access_key_id={credentials.access_key};" f"aws_secret_access_key={credentials.secret_key}"
+            f"aws_access_key_id={credentials.access_key};aws_secret_access_key={credentials.secret_key}"
         )

     return credentials_line
airflow/providers/apache/drill/hooks/drill.py (1 addition, 1 deletion)

@@ -72,7 +72,7 @@ def get_uri(self) -> str:
         conn_type = 'drill' if not conn_md.conn_type else conn_md.conn_type
         dialect_driver = conn_md.extra_dejson.get('dialect_driver', 'drill+sadrill')
         storage_plugin = conn_md.extra_dejson.get('storage_plugin', 'dfs')
-        return f'{conn_type}://{host}/{storage_plugin}' f'?dialect_driver={dialect_driver}'
+        return f'{conn_type}://{host}/{storage_plugin}?dialect_driver={dialect_driver}'

     def set_autocommit(self, conn: Connection, autocommit: bool) -> NotImplementedError:
         raise NotImplementedError("There are no transactions in Drill.")
airflow/providers/databricks/operators/databricks_repos.py (1 addition, 1 deletion)

@@ -90,7 +90,7 @@ def __init__(
             self.git_provider = self.__detect_repo_provider__(git_url)
             if self.git_provider is None:
                 raise AirflowException(
-                    "git_provider isn't specified and couldn't be guessed" f" for URL {git_url}"
+                    f"git_provider isn't specified and couldn't be guessed for URL {git_url}"
                 )
         else:
             self.git_provider = git_provider
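
The databricks_repos.py hunk above also shows the one pitfall of this kind of merge: the surviving literal must keep its f prefix, otherwise the placeholders are no longer interpolated. A minimal sketch with made-up values (only git_url is borrowed from the hunk; nothing here is PR code):

    git_url = "https://github.com/example/repo.git"

    # A plain literal next to an f-string still concatenates; only the f-string part interpolates.
    mixed = "couldn't guess provider" f" for URL {git_url}"

    # If the merge drops the f prefix, the braces are kept verbatim -- a bug.
    broken = "couldn't guess provider for URL {git_url}"

    # A correct merge keeps the prefix.
    merged = f"couldn't guess provider for URL {git_url}"

    assert mixed == merged
    assert "{git_url}" in broken
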
airflow/providers/google/cloud/hooks/datafusion.py (1 addition, 1 deletion)

@@ -115,7 +115,7 @@ def wait_for_pipeline_state(
                 return
             if current_state in failure_states:
                 raise AirflowException(
-                    f"Pipeline {pipeline_name} state {current_state} is not " f"one of {success_states}"
+                    f"Pipeline {pipeline_name} state {current_state} is not one of {success_states}"
                 )
             sleep(30)

(file path not shown)

@@ -369,5 +369,5 @@ def _get_project_id_from_service_account_email(service_account_email: str) -> st
         return service_account_email.split('@')[1].split('.')[0]
     except IndexError:
         raise AirflowException(
-            f"Could not extract project_id from service account's email: " f"{service_account_email}."
+            f"Could not extract project_id from service account's email: {service_account_email}."
         )
(file path not shown)

@@ -123,7 +123,7 @@ def __init__(
             )
         if auth_type not in VALID_AUTH_TYPES:
             raise VaultError(
-                f"The auth_type is not supported: {auth_type}. " f"It should be one of {VALID_AUTH_TYPES}"
+                f"The auth_type is not supported: {auth_type}. It should be one of {VALID_AUTH_TYPES}"
             )
         if auth_type == "token" and not token and not token_path:
             raise VaultError("The 'token' authentication type requires 'token' or 'token_path'")
airflow/utils/cli.py (2 additions, 2 deletions)

@@ -49,7 +49,7 @@ def _check_cli_args(args):
         raise ValueError("Args should be set")
     if not isinstance(args[0], Namespace):
         raise ValueError(
-            "1st positional argument should be argparse.Namespace instance," f"but is {type(args[0])}"
+            f"1st positional argument should be argparse.Namespace instance, but is {type(args[0])}"
         )


@@ -148,7 +148,7 @@ def _build_metrics(func_name, namespace):

     if not isinstance(namespace, Namespace):
         raise ValueError(
-            "namespace argument should be argparse.Namespace instance," f"but is {type(namespace)}"
+            f"namespace argument should be argparse.Namespace instance, but is {type(namespace)}"
         )
     tmp_dic = vars(namespace)
     metrics['dag_id'] = tmp_dic.get('dag_id')
dev/assign_cherry_picked_prs_with_milestone.py (1 addition, 1 deletion)

@@ -294,7 +294,7 @@ def assign_prs(
             continue
         console.print('-' * 80)
         console.print(
-            f"\n >>>> Retrieving PR#{pr_number}: " f"https://github.com/apache/airflow/pull/{pr_number}"
+            f"\n >>>> Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
         )
         pr: PullRequest
         try:
(file path not shown)

@@ -305,10 +305,10 @@ def version(verbose: bool, python: str):
         f"{get_installation_sources_config_metadata_hash()}[/]"
     )
     get_console().print(
-        f"[info]Used sources config hash : " f"{get_used_sources_setup_metadata_hash()}[/]"
+        f"[info]Used sources config hash : {get_used_sources_setup_metadata_hash()}[/]"
     )
     get_console().print(
-        f"[info]Package config hash : " f"{(get_package_setup_metadata_hash())}[/]\n"
+        f"[info]Package config hash : {(get_package_setup_metadata_hash())}[/]\n"
     )


@@ -497,7 +497,7 @@ def write_to_shell(command_to_execute: str, dry_run: bool, script_path: str, for
     else:
         get_console().print(f"[info]The autocomplete script would be added to {script_path}[/]")
     get_console().print(
-        f"\n[warning]Please exit and re-enter your shell or run:[/]" f"\n\n source {script_path}\n"
+        f"\n[warning]Please exit and re-enter your shell or run:[/]\n\n source {script_path}\n"
     )
     return True

dev/breeze/src/airflow_breeze/utils/custom_param_types.py (1 addition, 3 deletions)

@@ -89,9 +89,7 @@ def convert(self, value, param, ctx):
         if isinstance(value, CacheableDefault):
             is_cached, new_value = read_and_validate_value_from_cache(param_name, value.value)
             if not is_cached:
-                get_console().print(
-                    f"\n[info]Default value of {param.name} " f"parameter {new_value} used.[/]\n"
-                )
+                get_console().print(f"\n[info]Default value of {param.name} parameter {new_value} used.[/]\n")
         else:
             allowed, allowed_values = check_if_values_allowed(param_name, value)
             if allowed:
dev/breeze/src/airflow_breeze/utils/run_utils.py (1 addition, 1 deletion)

@@ -199,7 +199,7 @@ def get_filesystem_type(filepath):

 def instruct_build_image(python: str):
     """Print instructions to the user that they should build the image"""
-    get_console().print(f'[warning]\nThe CI image for ' f'python version {python} may be outdated[/]\n')
+    get_console().print(f'[warning]\nThe CI image for Python version {python} may be outdated[/]\n')
     get_console().print(
         f"\n[info]Please run at the earliest convenience:[/]\n\nbreeze build-image --python {python}\n\n"
     )
dev/prepare_release_issue.py (1 addition, 1 deletion)

@@ -274,7 +274,7 @@ def generate_issue_content(
         for i in range(count_prs):
             pr_number = prs[i]
             progress.console.print(
-                f"Retrieving PR#{pr_number}: " f"https://github.com/apache/airflow/pull/{pr_number}"
+                f"Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
             )

             pr: PullRequestOrIssue
dev/provider_packages/prepare_provider_packages.py (2 additions, 2 deletions)

@@ -1773,7 +1773,7 @@ def generate_new_changelog(package_id, provider_details, changelog_path, changes
         )
     else:
         console.print(
-            f"[green]Appending the provider {package_id} changelog for" f"`{latest_version}` version.[/]"
+            f"[green]Appending the provider {package_id} changelog for `{latest_version}` version.[/]"
         )
     with open(changelog_path, "wt") as changelog:
         changelog.write("\n".join(new_changelog_lines))
@@ -1914,7 +1914,7 @@ def generate_issue_content(
         for i in range(len(pr_list)):
             pr_number = pr_list[i]
             progress.console.print(
-                f"Retrieving PR#{pr_number}: " f"https://github.com/apache/airflow/pull/{pr_number}"
+                f"Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
             )
             try:
                 pull_requests[pr_number] = repo.get_pull(pr_number)
docs/apache-airflow/security/webserver.rst (1 addition, 3 deletions)

@@ -234,9 +234,7 @@ webserver_config.py itself if you wish.
             team_data = remote_app.get("user/teams")
             teams = team_parser(team_data.json())
             roles = map_roles(teams)
-            log.debug(
-                f"User info from Github: {user_data}\n" f"Team info from Github: {teams}"
-            )
+            log.debug(f"User info from Github: {user_data}\nTeam info from Github: {teams}")
             return {"username": "github_" + user_data.get("login"), "role_keys": roles}


(file path not shown)

@@ -64,7 +64,7 @@ def get_errors_and_hooks(content: Any, max_length: int) -> Tuple[List[str], Dict
         name = hook['name']
         if len(name) > max_length:
             errors.append(
-                f"Name is too long for hook {hook_id} in {PRE_COMMIT_YAML_FILE}. " f"Please shorten it!"
+                f"Name is too long for hook {hook_id} in {PRE_COMMIT_YAML_FILE}. Please shorten it!"
             )
             continue
         hooks[hook_id].append(name)
tests/cli/test_cli_parser.py (3 additions, 3 deletions)

@@ -85,9 +85,9 @@ def test_subcommand_arg_name_conflict(self):
         for group, command in subcommand.items():
             for com in command:
                 conflict_arg = [arg for arg, count in Counter(com.args).items() if count > 1]
-                assert [] == conflict_arg, (
-                    f"Command group {group} function {com.name} have " f"conflict args name {conflict_arg}"
-                )
+                assert (
+                    [] == conflict_arg
+                ), f"Command group {group} function {com.name} have conflict args name {conflict_arg}"

     def test_subcommand_arg_flag_conflict(self):
         """
(file path not shown)

@@ -101,7 +101,7 @@
         dataset_id=DATASET_NAME,
         table_id="test_materialized_view",
         materialized_view={
-            "query": f"SELECT SUM(salary) AS sum_salary " f"FROM `{PROJECT_ID}.{DATASET_NAME}.test_table`",
+            "query": f"SELECT SUM(salary) AS sum_salary FROM `{PROJECT_ID}.{DATASET_NAME}.test_table`",
             "enableRefresh": True,
             "refreshIntervalMs": 2000000,
         },