chore: cleanup strings with implicit concatenation
Teqed committed Aug 9, 2023
1 parent 3628054 commit 6fd2fc9
Showing 11 changed files with 103 additions and 89 deletions.
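For context, the pattern this commit removes is a backslash-newline continuation inside a string literal: the continuation has to start at column 0, otherwise its leading spaces become part of the message, so the surrounding code loses its indentation. The replacement relies on Python joining adjacent string literals at compile time. A minimal illustrative sketch, not taken from the diff (the before/after names are hypothetical):

# Before: the literal is continued with a backslash, so the second line must
# sit at column 0 or its leading whitespace leaks into the logged text.
before = "Provide a url that will be pinged when processing has completed. \
You can use this for 'dead man switch' monitoring of your task"

# After: adjacent string literals inside one pair of parentheses are
# concatenated by the parser, so the code keeps normal indentation and the
# resulting text is unchanged.
after = (
    "Provide a url that will be pinged when processing has completed. "
    "You can use this for 'dead man switch' monitoring of your task"
)

assert before == after  # same runtime value, cleaner source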
18 changes: 7 additions & 11 deletions argparser.py
@@ -18,8 +18,7 @@ def parse_arguments() -> argparse.Namespace:
     argparser.add_argument(
         "--server",
         required=False,
-        help="Required: The name of \
-your server (e.g. `mstdn.thms.uk`)",
+        help="Required: The name of your server (e.g. `mstdn.thms.uk`)",
     )
     argparser.add_argument(
         "--access-token",
@@ -148,25 +147,22 @@ def parse_arguments() -> argparse.Namespace:
         "--on-done",
         required=False,
         default=None,
-        help="Provide \
-a url that will be pinged when processing has completed. You can use this for "
-        "'dead man switch' monitoring of your task",
+        help="Provide a url that will be pinged when processing has completed. You can "
+        "use this for 'dead man switch' monitoring of your task",
     )
     argparser.add_argument(
         "--on-start",
         required=False,
         default=None,
-        help="Provide \
-a url that will be pinged when processing is starting. You can use this for "
-        "'dead man switch' monitoring of your task",
+        help="Provide a url that will be pinged when processing is starting. You can "
+        "use this for 'dead man switch' monitoring of your task",
     )
     argparser.add_argument(
         "--on-fail",
         required=False,
         default=None,
-        help="Provide \
-a url that will be pinged when processing has failed. You can use this for "
-        "'dead man switch' monitoring of your task",
+        help="Provide a url that will be pinged when processing has failed. You can "
+        "use this for 'dead man switch' monitoring of your task",
     )
     argparser.add_argument(
         "--log-level",
4 changes: 2 additions & 2 deletions fedifetcher/api/client.py
@@ -150,8 +150,8 @@ def handle_response_lists(
         if isinstance(body, list):
             body = {"list": body}
         if not isinstance(body, dict):
-            msg = f"Error with API on server {self.api_base_url}. \
-The server returned an unexpected response: {body}"
+            msg = (f"Error with API on server {self.api_base_url}. "
+                f"The server returned an unexpected response: {body}")
             raise TypeError(
                 msg,
             )
90 changes: 54 additions & 36 deletions fedifetcher/api/lemmy/api_lemmy.py
@@ -9,7 +9,7 @@
 
 
 def get_user_posts_from_url(
-    parsed_url : tuple[str, str],
+    parsed_url: tuple[str, str],
 ) -> list[dict[str, str]] | None:
     """Get a list of posts from a user."""
     logging.info(f"Getting posts from {parsed_url[0]} for user {parsed_url[1]}")
@@ -26,21 +26,23 @@ def get_user_posts_from_url(
             return all_posts
     except requests.HTTPError:
         logging.error(
-            f"Error getting user posts for user {parsed_url[1]}: {response.text}")
+            f"Error getting user posts for user {parsed_url[1]}: {response.text}",
+        )
     except requests.JSONDecodeError:
         logging.error(
-            f"Error decoding JSON for user {parsed_url[1]}. \
-{parsed_url[0]} may not be a Lemmy instance.")
+            f"Error decoding JSON for user {parsed_url[1]}. "
+            f"{parsed_url[0]} may not be a Lemmy instance.",
+        )
     except Exception:
-        logging.exception(
-            f"Error getting user posts from {url}")
+        logging.exception(f"Error getting user posts from {url}")
     return None
 
 
 def get_community_posts_from_url(
-        parsed_url : tuple[str, str]) -> list[dict[str, str]] | None:
+    parsed_url: tuple[str, str],
+) -> list[dict[str, str]] | None:
     """Get a list of posts from a community."""
-    logging.info(
-        f"Getting posts from {parsed_url[0]} for community {parsed_url[1]}")
+    logging.info(f"Getting posts from {parsed_url[0]} for community {parsed_url[1]}")
     try:
         url = f"https://{parsed_url[0]}/api/v3/post/list?community_name={parsed_url[1]}&sort=New&limit=50"
         response = helpers.get(url)
@@ -50,16 +52,16 @@ def get_community_posts_from_url(
             post["url"] = post["ap_id"]
         return posts
     except requests.exceptions.Timeout:
-        logging.error(
-            f"Timeout getting posts for community {parsed_url[1]}")
+        logging.error(f"Timeout getting posts for community {parsed_url[1]}")
     except requests.exceptions.RequestException:
-        logging.exception(
-            f"Error getting posts for community {parsed_url[1]}")
+        logging.exception(f"Error getting posts for community {parsed_url[1]}")
     else:
         logging.error(
-            f"Error getting posts for community {parsed_url[1]}: {response.text}")
+            f"Error getting posts for community {parsed_url[1]}: {response.text}",
+        )
     return None
 
 
 def get_comment_context(
     server: str,
     toot_id: str,
@@ -83,7 +85,8 @@ def get_comment_context(
         resp = helpers.get(comment)
     except Exception as ex:
         logging.error(
-            f"Error getting comment {toot_id} from {toot_url}. Exception: {ex}")
+            f"Error getting comment {toot_id} from {toot_url}. Exception: {ex}",
+        )
         return []
     if resp.status_code == helpers.Response.OK:
         try:
@@ -92,24 +95,29 @@ def get_comment_context(
             return get_comments_urls(server, post_id, toot_url)
         except Exception as ex:
             logging.error(
-                f"Error parsing context for comment {toot_url}. Exception: {ex}")
+                f"Error parsing context for comment {toot_url}. Exception: {ex}",
+            )
             return []
     if resp.status_code == helpers.Response.TOO_MANY_REQUESTS:
-        reset = datetime.strptime(resp.headers["x-ratelimit-reset"],
-            "%Y-%m-%dT%H:%M:%S.%fZ").astimezone(UTC)
+        reset = datetime.strptime(
+            resp.headers["x-ratelimit-reset"],
+            "%Y-%m-%dT%H:%M:%S.%fZ",
+        ).astimezone(UTC)
         logging.warning(
-            f"Rate Limit hit when getting context for {toot_url}. \
-Waiting to retry at {resp.headers['x-ratelimit-reset']}")
+            f"Rate Limit hit when getting context for {toot_url}. Waiting to retry at "
+            f"{resp.headers['x-ratelimit-reset']}",
+        )
         time.sleep((reset - datetime.now(UTC)).total_seconds() + 1)
         return get_comment_context(server, toot_id, toot_url)
 
     return []
 
 
 def get_comments_urls(  # noqa: PLR0912
-        server : str,
-        post_id : str,
-        toot_url : str,
-        ) -> list[str]:
+    server: str,
+    post_id: str,
+    toot_url: str,
+) -> list[str]:
     """Get the URLs of the comments of the given post.
 
     Args:
@@ -138,37 +146,47 @@ def get_comments_urls( # noqa: PLR0912
                 return []
             urls.append(res["post_view"]["post"]["ap_id"])
         except Exception as ex:
-            logging.error(f"Error parsing post {post_id} from {toot_url}. \
-Exception: {ex}")
+            logging.error(
+                f"Error parsing post {post_id} from {toot_url}. Exception: {ex}",
+            )
 
     url = f"https://{server}/api/v3/comment/list?post_id={post_id}&sort=New&limit=50"
     try:
         resp = helpers.get(url)
     except Exception as ex:
-        logging.error(f"Error getting comments for post {post_id} from {toot_url}. \
-Exception: {ex}")
+        logging.error(
+            f"Error getting comments for post {post_id} from {toot_url}. Exception: "
+            f"{ex}",
+        )
         return []
 
     if resp.status_code == helpers.Response.OK:
         try:
             res = resp.json()
-            list_of_urls = \
-                [comment_info["comment"]["ap_id"] for comment_info in res["comments"]]
+            list_of_urls = [
+                comment_info["comment"]["ap_id"] for comment_info in res["comments"]
+            ]
             logging.info(f"Got {len(list_of_urls)} comments for post {toot_url}")
             urls.extend(list_of_urls)
         except Exception as ex:
             logging.error(
-                f"Error parsing comments for post {toot_url}. Exception: {ex}")
+                f"Error parsing comments for post {toot_url}. Exception: {ex}",
+            )
         else:
             return urls
     elif resp.status_code == helpers.Response.TOO_MANY_REQUESTS:
-        reset = datetime.strptime(resp.headers["x-ratelimit-reset"],
-            "%Y-%m-%dT%H:%M:%S.%fZ").astimezone(UTC)
-        logging.info(f"Rate Limit hit when getting comments for {toot_url}. Waiting to \
-retry at {resp.headers['x-ratelimit-reset']}")
+        reset = datetime.strptime(
+            resp.headers["x-ratelimit-reset"],
+            "%Y-%m-%dT%H:%M:%S.%fZ",
+        ).astimezone(UTC)
+        logging.info(
+            f"Rate Limit hit when getting comments for {toot_url}. Waiting to retry at "
+            f"{resp.headers['x-ratelimit-reset']}",
+        )
         time.sleep((reset - datetime.now(UTC)).total_seconds() + 1)
         return get_comments_urls(server, post_id, toot_url)
 
     logging.error(
-        f"Error getting comments for post {toot_url}. Status code: {resp.status_code}")
+        f"Error getting comments for post {toot_url}. Status code: {resp.status_code}",
+    )
     return []
4 changes: 2 additions & 2 deletions fedifetcher/api/mastodon/api_mastodon.py
@@ -630,8 +630,8 @@ async def _get_trending_posts(
             )
             return []
         logging.info(
-            f"Got {len(got_trending_posts)} trending posts for \
-{self.client.api_base_url}",
+            f"Got {len(got_trending_posts)} trending posts for "
+            f"{self.client.api_base_url}",
         )
         trending_posts: list[dict[str, str]] = []
         trending_posts.extend(got_trending_posts)
4 changes: 2 additions & 2 deletions fedifetcher/api/postgresql/postgresql.py
@@ -284,8 +284,8 @@ def get_from_cache(self, url: str) -> Status | None:
             columns = [column[0] for column in cursor.description]
             result = dict(zip(columns, result, strict=False))
             logging.info(
-                f"Got status from cache: {url} \
-Original: {result.get('original')}, ID: {result.get('status_id')}",
+                f"Got status from cache: {url} Original: "
+                f"{result.get('original')}, ID: {result.get('status_id')}",
             )
             status = Status(
                 id=result.get("status_id"),
4 changes: 2 additions & 2 deletions fedifetcher/find_context.py
@@ -137,6 +137,6 @@ async def add_context_urls_wrapper(
         logging.warning(f"Failed {result}")
 
     logging.info(
-        f"\033[1mAdded {count} new statuses (with {failed} failures, \
-{already_added} already seen)\033[0m",
+        f"\033[1mAdded {count} new statuses (with {failed} failures, {already_added} "
+        f"already seen)\033[0m",
     )
29 changes: 15 additions & 14 deletions fedifetcher/find_trending_posts.py
@@ -57,28 +57,28 @@ def add_post_to_dict(
     )
     if original:
         logging.info(
-            f"Reblogs: {trending_post['reblogs_count']} \
-Favourites: {trending_post['favourites_count']} \
-From: {t_post_url}",
+            f"Reblogs: {trending_post['reblogs_count']} "
+            f"Favourites: {trending_post['favourites_count']} "
+            f"From: {t_post_url}",
         )
         trending_posts_dict[t_post_url] = trending_post
         trending_posts_dict[t_post_url]["original"] = "Yes"
         return True
     if t_post_url in trending_posts_dict:
         if "original" not in trending_posts_dict[t_post_url]:
             logging.debug(
-                f"\033[3;33mReblogs: {trending_post['reblogs_count']} \
-Favourites: {trending_post['favourites_count']} \
-Copy: {t_post_url}\033[0m",
+                f"\033[3;33mReblogs: {trending_post['reblogs_count']} "
+                f"Favourites: {trending_post['favourites_count']} "
+                f"Copy: {t_post_url}\033[0m",
             )
             increment_count(t_post_url, trending_post, trending_posts_dict)
             return False
         logging.debug(f"Already seen {t_post_url} from origin")
         return True  # We already have the original
     logging.debug(
-        f"\033[3;33mReblogs: {trending_post['reblogs_count']} \
-Favourites: {trending_post['favourites_count']} \
-Copy: {t_post_url}\033[0m",
+        f"\033[3;33mReblogs: {trending_post['reblogs_count']} "
+        f"Favourites: {trending_post['favourites_count']} "
+        f"Copy: {t_post_url}\033[0m",
     )
     trending_posts_dict[t_post_url] = trending_post
     return False
@@ -217,8 +217,8 @@ async def find_trending_posts( # noqa: C901
            remember_to_find_me.pop(fetch_domain)
 
     for fetch_domain in remember_to_find_me:
-        msg = f"Fetching {len(remember_to_find_me[fetch_domain])} \
-less popular posts from {fetch_domain}"
+        msg = (f"Fetching {len(remember_to_find_me[fetch_domain])} "
+            f"less popular posts from {fetch_domain}")
         logging.info(f"\033[1;34m{msg}\033[0m")
         max_concurrent_requests = 10
         semaphore = asyncio.Semaphore(max_concurrent_requests)
@@ -383,8 +383,8 @@ def queue_aux_fetch(
         logging.debug(f"Adding {post_url} to aux_fetches[{parsed_url[0]}]")
         self.aux_fetches[parsed_url[0]].append((parsed_url, post_url))
         logging.debug(
-            f"aux_fetches[{parsed_url[0]}] is now \
-{self.aux_fetches[parsed_url[0]]}",
+            f"aux_fetches[{parsed_url[0]}] is now "
+            f"{self.aux_fetches[parsed_url[0]]}",
         )
 
     async def do_aux_fetches(
@@ -404,7 +404,8 @@ async def fetching_domain(
         if semaphore is None:
             semaphore = asyncio.Semaphore(1)
         async with semaphore:
-            msg = f"Fetching {len(self.aux_fetches[fetch_domain])} popular posts from {fetch_domain}"
+            msg = (f"Fetching {len(self.aux_fetches[fetch_domain])} popular posts "
+                f"from {fetch_domain}")
             logging.info(f"\033[1;34m{msg}\033[0m")
             list_of_posts = []
             list_of_parsed_urls = []
8 changes: 4 additions & 4 deletions fedifetcher/getter_wrappers.py
@@ -64,8 +64,8 @@ async def get_notification_users(
     new_notification_users = filter_known_users(notification_users, known_users)
 
     logging.info(
-        f"Found {len(notification_users)} users in notifications, \
-{len(new_notification_users)} of which are new",
+        f"Found {len(notification_users)} users in notifications, "
+        f"{len(new_notification_users)} of which are new",
     )
 
     # return [user.get("account") for user in filter_known_users(
@@ -109,8 +109,8 @@ async def get_new_follow_requests(
     new_follow_requests = filter_known_users(follow_requests, known_followings)
 
     logging.info(
-        f"Got {len(follow_requests)} follow_requests, \
-{len(new_follow_requests)} of which are new",
+        f"Got {len(follow_requests)} follow_requests, "
+        f"{len(new_follow_requests)} of which are new",
     )
 
     return new_follow_requests
10 changes: 5 additions & 5 deletions fedifetcher/helpers/helpers.py
@@ -14,9 +14,9 @@ def setup_logging(log_level: str) -> None:
     logger = logging.getLogger()
     stdout = colorlog.StreamHandler(stream=sys.stdout)
     fmt = colorlog.ColoredFormatter(
-        "%(white)s%(asctime)s%(reset)s | %(log_color)s%(levelname)s%(reset)s | \
-%(threadName)s:%(name)s | %(blue)s%(filename)s:%(lineno)s%(reset)s | %(funcName)s >>> \
-%(log_color)s%(message)s%(reset)s",
+        "%(white)s%(asctime)s%(reset)s | %(log_color)s%(levelname)s%(reset)s | "
+        "%(threadName)s:%(name)s | %(blue)s%(filename)s:%(lineno)s%(reset)s | "
+        "%(funcName)s >>> %(log_color)s%(message)s%(reset)s",
     )
     stdout.setFormatter(fmt)
     logger.addHandler(stdout)
@@ -73,8 +73,8 @@ def get(
         now = datetime.now(datetime.now(UTC).astimezone().tzinfo)
         wait = (reset - now).total_seconds() + 1
         logging.warning(
-            f"Rate Limit hit requesting {url} \
-Waiting {wait} sec to retry at {response.headers['x-ratelimit-reset']}",
+            f"Rate Limit hit requesting {url} Waiting {wait} sec to retry at "
+            f"{response.headers['x-ratelimit-reset']}",
         )
         time.sleep(wait)
         return get(url, headers, timeout, max_tries - 1)