From bbd3a1407e823aa3ae05aa1ab9a53cd316ffee0e Mon Sep 17 00:00:00 2001
From: Timothy Quilling
Date: Thu, 3 Aug 2023 06:06:16 -0400
Subject: [PATCH] feat: begin implementing testing

---
 .gitignore                               |   2 +
 .ruff.toml                               |   2 +-
 .vscode/settings.json                    |  35 +-
 argparser.py                             |  78 +++++
 fedifetcher/__init__.py                  |   3 +-
 fedifetcher/api/firefish/__init__.py     |   1 +
 fedifetcher/api/firefish/api_firefish.py |   7 +-
 fedifetcher/api/mastodon/api_mastodon.py |  12 +-
 fedifetcher/find_context.py              |   4 +-
 fedifetcher/find_user_posts.py           |   4 +-
 fedifetcher/helpers/argparser.py         |  77 -----
 fedifetcher/helpers/helpers.py           |  12 +-
 fedifetcher/main.py                      |  86 ++---
 fedifetcher/mode/active_users.py         |  22 +-
 fedifetcher/mode/token_posts.py          | 105 +++---
 fedifetcher/mode/trending_posts.py       |  10 +-
 find_posts.py                            |   4 +-
 setup.py                                 |   8 +
 tests/__init__.py                        |   8 +
 tests/test_api_firefish.py               | 419 +++++++++++++++++++++++
 tests/test_parsers.py                    |  88 +++++
 21 files changed, 774 insertions(+), 213 deletions(-)
 create mode 100644 argparser.py
 delete mode 100644 fedifetcher/helpers/argparser.py
 create mode 100644 setup.py
 create mode 100644 tests/__init__.py
 create mode 100644 tests/test_api_firefish.py
 create mode 100644 tests/test_parsers.py

diff --git a/.gitignore b/.gitignore
index 8cde2a7a..1c49f640 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,5 @@
 artifacts/*
 **\__pycache__
 src/mastodon-py/
+.idea/
+fedifetcher.egg-info/
diff --git a/.ruff.toml b/.ruff.toml
index ebe6a1a4..3975c7c5 100644
--- a/.ruff.toml
+++ b/.ruff.toml
@@ -1,6 +1,6 @@
 # .ruff.toml
 select = ['ALL']
-ignore = ['TRY400','G004','BLE001', 'TRY002', 'ANN101', 'FIX002', 'TD003', 'TD002']
+ignore = ['TRY400','G004','BLE001', 'TRY002', 'ANN101', 'FIX002', 'TD003', 'TD002', 'S101']
 fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"]
 exclude = [
     ".bzr",
diff --git a/.vscode/settings.json b/.vscode/settings.json
index a141493e..7fa55334 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,6 +1,35 @@
 {
-    "python.analysis.extraPaths": [
-        "./src/mastodon-py"
+    "python.analysis.include": [
+        "./fedifetcher",
     ],
-    "python.analysis.typeCheckingMode": "basic"
+    "python.analysis.typeCheckingMode": "basic",
+    "python.analysis.diagnosticMode": "workspace",
+    "python.analysis.inlayHints.variableTypes": true,
+    "python.analysis.inlayHints.functionReturnTypes": true,
+    "python.analysis.inlayHints.pytestParameters": true,
+    "editor.semanticTokenColorCustomizations": {
+        "enabled": true, // enable for all themes
+        "rules": {
+            "*.static": {
+                "foreground": "#ff0000",
+                "fontStyle": "bold",
+            },
+            "type": {
+                "foreground": "#00aa00",
+            },
+            "variable": {
+                // "foreground": "#0000aa"
+                "fontStyle": "italic",
+            },
+        }
+    },
+    "python.testing.unittestArgs": [
+        "-v",
+        "-s",
+        "./tests",
+        "-p",
+        "test_*.py",
+    ],
+    "python.testing.pytestEnabled": false,
+    "python.testing.unittestEnabled": true,
 }
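The unittest settings above pin discovery to ./tests with verbose output; outside VS Code, the equivalent run from the checkout root (assuming nothing beyond what this patch sets up) would be:

    python -m unittest discover -v -s ./tests -p "test_*.py"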
diff --git a/argparser.py b/argparser.py
new file mode 100644
index 00000000..8407635d
--- /dev/null
+++ b/argparser.py
@@ -0,0 +1,78 @@
+"""argparser.py - Parses command line arguments."""
+import argparse
+import json
+
+def parse_arguments() -> argparse.Namespace:
+    argparser=argparse.ArgumentParser()
+
+    argparser.add_argument("-c","--config", required=False, type=str, help="Optionally \
+        provide a path to a JSON file containing configuration options. If not provided, \
+        options must be supplied using command line flags.")
+    argparser.add_argument("--server", required=False, help="Required: The name of \
+        your server (e.g. `mstdn.thms.uk`)")
+    argparser.add_argument("--access-token", action="append", required=False,
+        help="Required: The access token can be generated at \
+        https:///settings/applications, and must have read:search, read:statuses \
+        and admin:read:accounts scopes. You can supply this multiple times, if you want to \
+        run it for multiple users.")
+    argparser.add_argument("--reply-interval-in-hours", required = False, type=int,
+        default=0, help="Fetch remote replies to posts that have received replies from \
+        users on your own instance in this period")
+    argparser.add_argument("--home-timeline-length", required = False, type=int,
+        default=0, help="Look for replies to posts in the API-Key owner's home \
+        timeline, up to this many posts")
+    argparser.add_argument("--max-followings", required = False, type=int, default=0,
+        help="Backfill posts for new accounts followed by --user. We'll backfill at \
+        most this many followings' posts")
+    argparser.add_argument("--max-followers", required = False, type=int, default=0,
+        help="Backfill posts for new accounts following --user. We'll backfill at most \
+        this many followers' posts")
+    argparser.add_argument("--max-follow-requests", required = False, type=int, \
+        default=0, help="Backfill posts of the API key owner's pending follow requests. \
+        We'll backfill at most this many requesters' posts")
+    argparser.add_argument("--max-bookmarks", required = False, type=int, default=0,
+        help="Fetch remote replies to the API key owner's Bookmarks. We'll fetch \
+        replies to at most this many bookmarks")
+    argparser.add_argument("--max-favourites", required = False, type=int, default=0,
+        help="Fetch remote replies to the API key owner's Favourites. We'll fetch \
+        replies to at most this many favourites")
+    argparser.add_argument("--from-notifications", required = False, type=int,
+        default=0, help="Backfill accounts of anyone appearing in your notifications \
+        within this many past hours")
+    argparser.add_argument("--remember-users-for-hours", required=False, type=int,
+        default=24*7, help="How long to remember users that you aren't following for, \
+        before trying to backfill them again.")
+    argparser.add_argument("--http-timeout", required = False, type=int, default=5,
+        help="The timeout for any HTTP requests to your own, or other instances.")
+    argparser.add_argument("--backfill-with-context", required = False, type=int,
+        default=1, help="If enabled, we'll fetch remote replies when backfilling \
+        profiles. Set to `0` to disable.")
+    argparser.add_argument("--backfill-mentioned-users", required = False, type=int,
+        default=1, help="If enabled, we'll backfill any mentioned users when fetching \
+        remote replies to timeline posts. Set to `0` to disable.")
+    argparser.add_argument("--lock-hours", required = False, type=int, default=24,
+        help="The lock timeout in hours.")
+    argparser.add_argument("--lock-file", required = False, default=None,
+        help="Location of the lock file")
+    argparser.add_argument("--state-dir", required = False, default="artifacts",
+        help="Directory to store persistent files and possibly lock file")
+    argparser.add_argument("--on-done", required = False, default=None, help="Provide \
+        a url that will be pinged when processing has completed. 
You can use this for \ + 'dead man switch' monitoring of your task") + argparser.add_argument("--on-start", required = False, default=None, help="Provide \ + a url that will be pinged when processing is starting. You can use this for \ + 'dead man switch' monitoring of your task") + argparser.add_argument("--on-fail", required = False, default=None, help="Provide \ + a url that will be pinged when processing has failed. You can use this for \ + 'dead man switch' monitoring of your task") + argparser.add_argument("--log-level", required = False, type=int, default=20, + help="Set the log level. 10=DEBUG, 20=INFO, 30=WARNING, 40=ERROR, 50=CRITICAL") + argparser.add_argument("--external-tokens", required = False, type=json.loads, + default=None, help="Provide a JSON-formatted dictionary of external tokens, \ + keyed by server.") + argparser.add_argument("--pgpassword", required = False, type=str, default=None, + help="Provide the password for the postgres user") + argparser.add_argument("--external-feeds", required = False, type=str, default=None, + help="Provide a comma-separated list of external feeds to fetch from.") + + return argparser.parse_args() diff --git a/fedifetcher/__init__.py b/fedifetcher/__init__.py index 6a8b301d..2a60b1b5 100644 --- a/fedifetcher/__init__.py +++ b/fedifetcher/__init__.py @@ -18,7 +18,7 @@ from .api.lemmy import api_lemmy from .api.mastodon import api_mastodon, api_mastodon_types from .get import post_context -from .helpers import argparser, cache_manager, helpers, ordered_set +from .helpers import cache_manager, helpers, ordered_set from .main import main from .mode import token_posts @@ -30,7 +30,6 @@ "api_lemmy", "api_mastodon", "api_mastodon_types", - "argparser", "cache_manager", "token_posts", "find_trending_posts", diff --git a/fedifetcher/api/firefish/__init__.py b/fedifetcher/api/firefish/__init__.py index 2d59000d..aa996f1a 100644 --- a/fedifetcher/api/firefish/__init__.py +++ b/fedifetcher/api/firefish/__init__.py @@ -7,6 +7,7 @@ structures """ + from .api_firefish import Firefish from .api_firefish_types import Note, UserDetailedNotMe diff --git a/fedifetcher/api/firefish/api_firefish.py b/fedifetcher/api/firefish/api_firefish.py index 5e5975b6..6bf88a9b 100644 --- a/fedifetcher/api/firefish/api_firefish.py +++ b/fedifetcher/api/firefish/api_firefish.py @@ -1,4 +1,5 @@ -"""Mastodon API functions.""" +"""Firefish API functions.""" +from argparse import Namespace import asyncio import logging from typing import Any, ClassVar, Literal @@ -10,7 +11,7 @@ from fedifetcher.api.mastodon import api_mastodon from fedifetcher.api.mastodon.api_mastodon_types import Status from fedifetcher.api.postgresql.postgresql import PostgreSQLUpdater -from fedifetcher.helpers.helpers import Response, arguments +from fedifetcher.helpers.helpers import Response class FirefishClient: @@ -174,7 +175,7 @@ def __init__(self, server: str, token: str | None = None, Firefish.clients[server] = FirefishClient( access_token=token if token else None, - api_base_url=server if server else arguments.server, + api_base_url=server, client=client, ) self.client = Firefish.clients[server] diff --git a/fedifetcher/api/mastodon/api_mastodon.py b/fedifetcher/api/mastodon/api_mastodon.py index 1ba59b9b..cc47bb26 100644 --- a/fedifetcher/api/mastodon/api_mastodon.py +++ b/fedifetcher/api/mastodon/api_mastodon.py @@ -10,7 +10,6 @@ from fedifetcher.api.mastodon.api_mastodon_types import Status from fedifetcher.api.postgresql import PostgreSQLUpdater -from fedifetcher.helpers import helpers from 
fedifetcher.helpers.helpers import Response @@ -167,7 +166,7 @@ def __init__(self, }) Mastodon.clients[server] = MastodonClient( - api_base_url=server if server else helpers.arguments.server, + api_base_url=server, client_session=client, token=token, pgupdater=pgupdater, @@ -193,13 +192,6 @@ async def get_user_id( ------- str | None: The user id if found, or None if the user is not found. """ - if self.client.api_base_url == helpers.arguments.server or \ - not self.client.api_base_url: - account = await self.account_lookup( - acct = f"{user}", - ) - if not isinstance(account, bool): - return account["id"] account_search = await self.account_lookup( acct = f"{user}", ) @@ -852,7 +844,7 @@ def get_status_by_id( Args: ---- status_id (str): The ID of the toot to get the status of. - semaphoe (asyncio.Semaphore): The semaphore to use for the request. + semaphore (asyncio.Semaphore): The semaphore to use for the request. Returns: ------- diff --git a/fedifetcher/find_context.py b/fedifetcher/find_context.py index 9b9d9c87..70536ddf 100644 --- a/fedifetcher/find_context.py +++ b/fedifetcher/find_context.py @@ -1,4 +1,5 @@ """Add context toots to the server.""" +from argparse import Namespace import asyncio import logging from collections.abc import Iterable @@ -20,6 +21,7 @@ async def add_post_with_context( access_token : str, external_tokens : dict[str, str], pgupdater : PostgreSQLUpdater, + arguments : Namespace, ) -> bool: """Add the given post to the server. @@ -40,7 +42,7 @@ async def add_post_with_context( home_server, access_token, pgupdater).add_context_url(post["url"]) if added is not False: if ("replies_count" in post or "in_reply_to_id" in post) and getattr( - helpers.arguments, "backfill_with_context", 0) > 0: + arguments, "backfill_with_context", 0) > 0: parsed_urls : dict[str, tuple[str | None, str | None]] = {} parsed = parsers.post(post["url"], parsed_urls) if parsed is not None and parsed[0] is not None: diff --git a/fedifetcher/find_user_posts.py b/fedifetcher/find_user_posts.py index 528d0708..b99f6537 100644 --- a/fedifetcher/find_user_posts.py +++ b/fedifetcher/find_user_posts.py @@ -1,4 +1,5 @@ """Find user posts to the server.""" +from argparse import Namespace import logging from fedifetcher import get @@ -16,6 +17,7 @@ async def add_user_posts( # noqa: PLR0913 all_known_users: OrderedSet, external_tokens: dict[str, str], pgupdater: PostgreSQLUpdater, + arguments: Namespace, ) -> None: """Add the given user's posts to the server. @@ -54,7 +56,7 @@ async def add_user_posts( # noqa: PLR0913 if post.get("reblog") is None: added = await add_post_with_context( post, home_server, access_token, - external_tokens, pgupdater) + external_tokens, pgupdater, arguments) if added is True: status = Status( id=post.get("id"), diff --git a/fedifetcher/helpers/argparser.py b/fedifetcher/helpers/argparser.py deleted file mode 100644 index 579250bf..00000000 --- a/fedifetcher/helpers/argparser.py +++ /dev/null @@ -1,77 +0,0 @@ -"""argparser.py - Parses command line arguments.""" -import argparse -import json - -argparser=argparse.ArgumentParser() - -argparser.add_argument("-c","--config", required=False, type=str, help="Optionally \ - provide a path to a JSON file containing configuration options. If not provided, \ - options must be supplied using command line flags.") -argparser.add_argument("--server", required=False, help="Required: The name of \ - your server (e.g. 
`mstdn.thms.uk`)") -argparser.add_argument("--access-token", action="append", required=False, - help="Required: The access token can be generated at \ - https:///settings/applications, and must have read:search, read:statuses \ - and admin:read:accounts scopes. You can supply this multiple times, if you want to \ - run it for multiple users.") -argparser.add_argument("--reply-interval-in-hours", required = False, type=int, - default=0, help="Fetch remote replies to posts that have received replies from \ - users on your own instance in this period") -argparser.add_argument("--home-timeline-length", required = False, type=int, - default=0, help="Look for replies to posts in the API-Key owner's home \ - timeline, up to this many posts") -argparser.add_argument("--max-followings", required = False, type=int, default=0, - help="Backfill posts for new accounts followed by --user. We'll backfill at \ - most this many followings' posts") -argparser.add_argument("--max-followers", required = False, type=int, default=0, - help="Backfill posts for new accounts following --user. We'll backfill at most \ - this many followers' posts") -argparser.add_argument("--max-follow-requests", required = False, type=int, \ - default=0, help="Backfill posts of the API key owners pending follow requests. \ - We'll backfill at most this many requester's posts") -argparser.add_argument("--max-bookmarks", required = False, type=int, default=0, - help="Fetch remote replies to the API key owners Bookmarks. We'll fetch \ - replies to at most this many bookmarks") -argparser.add_argument("--max-favourites", required = False, type=int, default=0, - help="Fetch remote replies to the API key owners Favourites. We'll fetch \ - replies to at most this many favourites") -argparser.add_argument("--from-notifications", required = False, type=int, - default=0, help="Backfill accounts of anyone appearing in your notifications, \ - during the last hours") -argparser.add_argument("--remember-users-for-hours", required=False, type=int, - default=24*7, help="How long to remember users that you aren't following for, \ - before trying to backfill them again.") -argparser.add_argument("--http-timeout", required = False, type=int, default=5, - help="The timeout for any HTTP requests to your own, or other instances.") -argparser.add_argument("--backfill-with-context", required = False, type=int, - default=1, help="If enabled, we'll fetch remote replies when backfilling \ - profiles. Set to `0` to disable.") -argparser.add_argument("--backfill-mentioned-users", required = False, type=int, - default=1, help="If enabled, we'll backfill any mentioned users when fetching \ - remote replies to timeline posts. Set to `0` to disable.") -argparser.add_argument("--lock-hours", required = False, type=int, default=24, - help="The lock timeout in hours.") -argparser.add_argument("--lock-file", required = False, default=None, - help="Location of the lock file") -argparser.add_argument("--state-dir", required = False, default="artifacts", - help="Directory to store persistent files and possibly lock file") -argparser.add_argument("--on-done", required = False, default=None, help="Provide \ - a url that will be pinged when processing has completed. You can use this for \ - 'dead man switch' monitoring of your task") -argparser.add_argument("--on-start", required = False, default=None, help="Provide \ - a url that will be pinged when processing is starting. 
You can use this for \
-    'dead man switch' monitoring of your task")
-argparser.add_argument("--on-fail", required = False, default=None, help="Provide \
-    a url that will be pinged when processing has failed. You can use this for \
-    'dead man switch' monitoring of your task")
-argparser.add_argument("--log-level", required = False, type=int, default=20,
-    help="Set the log level. 10=DEBUG, 20=INFO, 30=WARNING, 40=ERROR, 50=CRITICAL")
-argparser.add_argument("--external-tokens", required = False, type=json.loads,
-    default=None, help="Provide a JSON-formatted dictionary of external tokens, \
-    keyed by server.")
-argparser.add_argument("--pgpassword", required = False, type=str, default=None,
-    help="Provide the password for the postgres user")
-argparser.add_argument("--external-feeds", required = False, type=str, default=None,
-    help="Provide a comma-separated list of external feeds to fetch from.")
-
-arguments = argparser.parse_args()
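The deleted module above parsed the command line at import time, as a side effect of importing fedifetcher.helpers; the new top-level argparser.py keeps the same flags but defers parsing to whoever calls parse_arguments(). A minimal sketch of the new wiring, mirroring the find_posts.py change later in this patch (run.py is a hypothetical name, not a file this patch adds):

    # run.py - hypothetical entry point showing the explicit wiring
    import asyncio

    from argparser import parse_arguments
    from fedifetcher import main

    if __name__ == "__main__":
        arguments = parse_arguments()  # parsed once, at run time
        asyncio.run(main(arguments))   # passed explicitly to main()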
diff --git a/fedifetcher/helpers/helpers.py b/fedifetcher/helpers/helpers.py
index 658ef591..2fa962bc 100644
--- a/fedifetcher/helpers/helpers.py
+++ b/fedifetcher/helpers/helpers.py
@@ -8,10 +8,7 @@
 import requests
 from dateutil import parser
 
-from .argparser import arguments
-
-
-def setup_logging() -> None:
+def setup_logging(log_level: int) -> None:
     """Set logging."""
     logger = logging.getLogger()
     stdout = colorlog.StreamHandler(stream=sys.stdout)
@@ -21,7 +18,7 @@
 %(log_color)s%(message)s%(reset)s")
     stdout.setFormatter(fmt)
     logger.addHandler(stdout)
-    logger.setLevel(arguments.log_level)
+    logger.setLevel(log_level)
 
 class Response:
     """HTTP response codes."""
@@ -42,7 +39,7 @@
 def get(
     url : str,
     headers : dict | None = None,
-    timeout : int = 0,
+    timeout : int | None = None,
     max_tries : int = 2,
 ) -> requests.Response:
     """Get a URL.
@@ -57,9 +54,6 @@ def get(
     if "User-Agent" not in h:
         h["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 +https://github.com/Teqed Meowstodon/1.0.0"  # noqa: E501
 
-    if timeout == 0:
-        timeout = arguments.http_timeout
-
     try:
         response = requests.get(url, headers=h, timeout=timeout)
     except (requests.exceptions.ReadTimeout, requests.exceptions.ConnectTimeout):
diff --git a/fedifetcher/main.py b/fedifetcher/main.py
index f4e8d56e..ff5e140a 100644
--- a/fedifetcher/main.py
+++ b/fedifetcher/main.py
@@ -1,5 +1,6 @@
 """FediFetcher - a tool to fetch posts from the fediverse."""
 
+from argparse import Namespace
 import json
 import logging
 import re
@@ -17,45 +18,45 @@
 from fedifetcher.mode import active_users, token_posts, trending_posts
 
 
-async def main() -> None:  # noqa: PLR0912, C901, PLR0915
+async def main(arguments: Namespace) -> None:  # noqa: PLR0912, C901, PLR0915
     """Run FediFetcher."""
     start = datetime.now(UTC)
 
-    if(helpers.arguments.config):
-        if Path(helpers.arguments.config).exists():
-            with Path(helpers.arguments.config).open(encoding="utf-8") as file:
+    if(arguments.config):
+        if Path(arguments.config).exists():
+            with Path(arguments.config).open(encoding="utf-8") as file:
                 config = json.load(file)
 
             for key in config:
-                setattr(helpers.arguments, key.lower().replace("-","_"), config[key])
+                setattr(arguments, key.lower().replace("-","_"), config[key])
         else:
-            logging.critical(f"Config file {helpers.arguments.config} doesn't exist")
+            logging.critical(f"Config file {arguments.config} doesn't exist")
             sys.exit(1)
 
-    if(helpers.arguments.server is None or helpers.arguments.access_token is None):
+    if(arguments.server is None or arguments.access_token is None):
         logging.critical("You must supply at least a server name and an access token")
         sys.exit(1)
 
     # in case someone provided the server name as url instead,
-    helpers.arguments.server = re.sub(
-        "^(https://)?([^/]*)/?$", "\\2", helpers.arguments.server)
+    arguments.server = re.sub(
+        "^(https://)?([^/]*)/?$", "\\2", arguments.server)
 
-    helpers.setup_logging()
+    helpers.setup_logging(arguments.log_level)
 
     logging.info("Starting FediFetcher")
 
     run_id = uuid.uuid4()
 
-    if(helpers.arguments.on_start):
+    if(arguments.on_start):
         try:
-            helpers.get(f"{helpers.arguments.on_start}?rid={run_id}")
+            helpers.get(f"{arguments.on_start}?rid={run_id}")
         except Exception as ex:
             logging.error(f"Error getting callback url: {ex}")
 
-    if helpers.arguments.lock_file is None:
-        helpers.arguments.lock_file = Path(helpers.arguments.state_dir) / "lock.lock"
-    lock_file = helpers.arguments.lock_file
+    if arguments.lock_file is None:
+        arguments.lock_file = Path(arguments.state_dir) / "lock.lock"
+    lock_file = arguments.lock_file
 
     if( Path(lock_file).exists()):
         logging.info(f"Lock file exists at {lock_file}")
@@ -65,24 +66,24 @@
         try:
             with Path(lock_file).open(encoding="utf-8") as file:
                 lock_time = parser.parse(file.read())
 
             if (datetime.now(UTC) - lock_time).total_seconds() >= \
-                    helpers.arguments.lock_hours * 60 * 60:
+                    arguments.lock_hours * 60 * 60:
                 Path.unlink(lock_file)
                 logging.info("Lock file has expired. 
Removed lock file.") else: logging.info(f"Lock file age is {datetime.now(UTC) - lock_time} - \ -below --lock-hours={helpers.arguments.lock_hours} provided.") - if(helpers.arguments.on_fail): +below --lock-hours={arguments.lock_hours} provided.") + if(arguments.on_fail): try: - helpers.get(f"{helpers.arguments.on_fail}?rid={run_id}") + helpers.get(f"{arguments.on_fail}?rid={run_id}") except Exception as ex: logging.error(f"Error getting callback url: {ex}") sys.exit(1) except Exception: logging.warning("Cannot read logfile age - aborting.") - if(helpers.arguments.on_fail): + if(arguments.on_fail): try: - helpers.get(f"{helpers.arguments.on_fail}?rid={run_id}") + helpers.get(f"{arguments.on_fail}?rid={run_id}") except Exception as ex: logging.error(f"Error getting callback url: {ex}") sys.exit(1) @@ -95,7 +97,7 @@ async def main() -> None: # noqa: PLR0912, C901, PLR0915 recently_checked_users = OrderedSet({}) try: logging.debug("Loading seen files") - cache = cache_manager.SeenFilesManager(helpers.arguments.state_dir) + cache = cache_manager.SeenFilesManager(arguments.state_dir) replied_toot_server_ids, known_followings, recently_checked_users \ = cache.get_seen_data() @@ -105,7 +107,7 @@ async def main() -> None: # noqa: PLR0912, C901, PLR0915 last_check = recently_checked_users.get_time(user) user_age = datetime.now(last_check.tzinfo) - last_check if(user_age.total_seconds( - ) > helpers.arguments.remember_users_for_hours * 60 * 60): + ) > arguments.remember_users_for_hours * 60 * 60): logging.debug(f"Removing user {user} from recently checked users") recently_checked_users.remove(user) @@ -114,13 +116,13 @@ async def main() -> None: # noqa: PLR0912, C901, PLR0915 all_known_users = OrderedSet( list(known_followings) + list(recently_checked_users)) - if(isinstance(helpers.arguments.access_token, str)): - helpers.arguments.access_token = [helpers.arguments.access_token] + if(isinstance(arguments.access_token, str)): + arguments.access_token = [arguments.access_token] - admin_token = helpers.arguments.access_token[0] - external_tokens = helpers.arguments.external_tokens \ - if helpers.arguments.external_tokens else {} - logging.debug(f"Found {len(helpers.arguments.access_token)} access tokens") + admin_token = arguments.access_token[0] + external_tokens = arguments.external_tokens \ + if arguments.external_tokens else {} + logging.debug(f"Found {len(arguments.access_token)} access tokens") if external_tokens: logging.debug(f"Found {len(external_tokens)} external tokens") else: @@ -134,23 +136,23 @@ async def main() -> None: # noqa: PLR0912, C901, PLR0915 user="teq", \ # TODO: Make this configurable password= \ - helpers.arguments.pgpassword if helpers.arguments.pgpassword else None, + arguments.pgpassword if arguments.pgpassword else None, ) conn.set_session(autocommit=True) pgupdater = PostgreSQLUpdater(conn) try: logging.info("Getting active user IDs") await active_users(replied_toot_server_ids, parsed_urls, - admin_token, external_tokens, pgupdater) + admin_token, external_tokens, pgupdater, arguments) except Exception: logging.warning("Error getting active user IDs. This optional feature \ requires the admin:read:accounts scope to be enabled on the first access token \ provided. 
Continuing without active user IDs.") - for _token in helpers.arguments.access_token: - index = helpers.arguments.access_token.index(_token) + for _token in arguments.access_token: + index = arguments.access_token.index(_token) logging.info(f"Getting posts for token {index + 1} of \ -{len(helpers.arguments.access_token)}") +{len(arguments.access_token)}") await token_posts( _token, parsed_urls, @@ -160,12 +162,14 @@ async def main() -> None: # noqa: PLR0912, C901, PLR0915 known_followings, external_tokens, pgupdater, + arguments, ) - if external_tokens and helpers.arguments.external_feeds: + if external_tokens and arguments.external_feeds: # external_feeds is a comma-separated list of external feeds to fetch # from, e.g. "example1.com,example2.com" - await trending_posts(parsed_urls, admin_token, external_tokens, pgupdater) + await trending_posts( + parsed_urls, admin_token, external_tokens, pgupdater, arguments) logging.info("Writing seen files") cache.write_seen_files( @@ -176,9 +180,9 @@ async def main() -> None: # noqa: PLR0912, C901, PLR0915 Path.unlink(lock_file) - if(helpers.arguments.on_done): + if(arguments.on_done): try: - helpers.get(f"{helpers.arguments.on_done}?rid={run_id}") + helpers.get(f"{arguments.on_done}?rid={run_id}") except Exception as ex: logging.error(f"Error getting callback url: {ex}") @@ -190,7 +194,7 @@ async def main() -> None: # noqa: PLR0912, C901, PLR0915 logging.exception("Error running FediFetcher") try: # Try to clean up parachute_cache = cache_manager.SeenFilesManager( - helpers.arguments.state_dir) + arguments.state_dir) parachute_cache.write_seen_files( replied_toot_server_ids, known_followings, @@ -201,9 +205,9 @@ async def main() -> None: # noqa: PLR0912, C901, PLR0915 logging.error(f"Error writing seen files: {ex}") Path.unlink(lock_file) logging.warning(f"Job failed after {datetime.now(UTC) - start}.") - if(helpers.arguments.on_fail): + if(arguments.on_fail): try: - helpers.get(f"{helpers.arguments.on_fail}?rid={run_id}") + helpers.get(f"{arguments.on_fail}?rid={run_id}") except Exception as ex: logging.error(f"Error getting callback url: {ex}") sys.exit(1) diff --git a/fedifetcher/mode/active_users.py b/fedifetcher/mode/active_users.py index 1e521c16..2a9121ee 100644 --- a/fedifetcher/mode/active_users.py +++ b/fedifetcher/mode/active_users.py @@ -5,26 +5,28 @@ from fedifetcher.api.mastodon.api_mastodon import Mastodon -async def active_users(replied_toot_server_ids, parsed_urls, admin_token, external_tokens, pgupdater) -> None: +async def active_users( + replied_toot_server_ids, parsed_urls, admin_token, + external_tokens, pgupdater, arguments) -> None: """Get posts of users which have active IDs on the local server.""" - user_ids = list(await Mastodon(helpers.arguments.server, + user_ids = list(await Mastodon(arguments.server, admin_token, pgupdater).get_active_user_ids( - helpers.arguments.reply_interval_in_hours)) + arguments.reply_interval_in_hours)) logging.debug(f"Found user IDs: {user_ids}") """pull the context toots of toots user replied to, from their original server, and add them to the local server.""" logging.info("Pulling context toots for replies") logging.debug("Found user ID, getting reply toots") reply_toots = await getter_wrappers.get_all_reply_toots( - helpers.arguments.server, + arguments.server, user_ids, admin_token, pgupdater, - helpers.arguments.reply_interval_in_hours, + arguments.reply_interval_in_hours, ) logging.debug("Found reply toots, getting known context URLs") await getter_wrappers.get_all_known_context_urls( - 
helpers.arguments.server, + arguments.server, reply_toots, parsed_urls, external_tokens, @@ -33,23 +35,23 @@ async def active_users(replied_toot_server_ids, parsed_urls, admin_token, extern ) logging.debug("Found known context URLs, getting replied toot IDs") replied_toot_ids = getter_wrappers.get_all_replied_toot_server_ids( - helpers.arguments.server, + arguments.server, reply_toots, replied_toot_server_ids, parsed_urls, ) logging.debug("Found replied toot IDs, getting context URLs") context_urls = await getter_wrappers.get_all_context_urls( - helpers.arguments.server, + arguments.server, replied_toot_ids, external_tokens, pgupdater, - helpers.arguments.server, + arguments.server, admin_token, ) logging.debug("Found context URLs, adding context URLs") await find_context.add_context_urls_wrapper( - helpers.arguments.server, + arguments.server, admin_token, context_urls, pgupdater, diff --git a/fedifetcher/mode/token_posts.py b/fedifetcher/mode/token_posts.py index 09bcfa1d..48db63d2 100644 --- a/fedifetcher/mode/token_posts.py +++ b/fedifetcher/mode/token_posts.py @@ -1,5 +1,6 @@ """Pull posts from a Mastodon server, using a token.""" +from argparse import Namespace import logging from datetime import UTC, datetime, timedelta from typing import cast @@ -21,17 +22,18 @@ async def token_posts( # pylint: disable=too-many-arguments # pylint: disable=to known_followings : OrderedSet, external_tokens: dict[str, str], pgupdater: PostgreSQLUpdater, + arguments : Namespace, ) -> None: """Pull posts from a Mastodon server, using a token.""" logging.info("Finding posts for provided token") - if helpers.arguments.home_timeline_length > 0: + if arguments.home_timeline_length > 0: """Do the same with any toots on the key owner's home timeline """ logging.info("Pulling context toots for home timeline") - timeline_toots = await api_mastodon.Mastodon(helpers.arguments.server, - token, pgupdater).get_home_timeline(helpers.arguments.home_timeline_length) + timeline_toots = await api_mastodon.Mastodon(arguments.server, + token, pgupdater).get_home_timeline(arguments.home_timeline_length) logging.debug("Found home timeline toots, getting context URLs") known_context_urls = await getter_wrappers.get_all_known_context_urls( - helpers.arguments.server, + arguments.server, timeline_toots, parsed_urls, external_tokens, @@ -40,16 +42,16 @@ async def token_posts( # pylint: disable=too-many-arguments # pylint: disable=to ) logging.debug("Found known context URLs, getting context URLs") await find_context.add_context_urls_wrapper( - helpers.arguments.server, + arguments.server, token, known_context_urls, pgupdater, ) logging.debug("Added context URLs") # Backfill any post authors, and any mentioned users - if helpers.arguments.backfill_mentioned_users > 0: + if arguments.backfill_mentioned_users > 0: logging.info( -f"Backfilling posts from last {helpers.arguments.backfill_mentioned_users} \ +f"Backfilling posts from last {arguments.backfill_mentioned_users} \ mentioned users") mentioned_users = [] cut_off = datetime.now( @@ -89,7 +91,7 @@ async def token_posts( # pylint: disable=too-many-arguments # pylint: disable=to mentioned_users.append(user) logging.debug(f"Mentioned users: {len(mentioned_users)}") await add_user_posts( - helpers.arguments.server, + arguments.server, token, getter_wrappers.filter_known_users( mentioned_users, @@ -99,27 +101,28 @@ async def token_posts( # pylint: disable=too-many-arguments # pylint: disable=to all_known_users, external_tokens, pgupdater, + arguments, ) token_user_id = await 
api_mastodon.Mastodon( - helpers.arguments.server, token, pgupdater).get_me() + arguments.server, token, pgupdater).get_me() if not token_user_id: logging.debug("Could not get User ID, skipping replies/followings/followers") else: logging.debug(f"Got User ID: {token_user_id}") - if helpers.arguments.reply_interval_in_hours > 0: + if arguments.reply_interval_in_hours > 0: """pull the context toots of toots user replied to, from their original server, and add them to the local server.""" logging.info("Pulling context toots for replies") reply_toots = await getter_wrappers.get_all_reply_toots( - helpers.arguments.server, + arguments.server, [token_user_id], token, pgupdater, - helpers.arguments.reply_interval_in_hours, + arguments.reply_interval_in_hours, ) logging.debug("Found reply toots, getting context URLs") await getter_wrappers.get_all_known_context_urls( - helpers.arguments.server, + arguments.server, reply_toots, parsed_urls, external_tokens, @@ -128,118 +131,122 @@ async def token_posts( # pylint: disable=too-many-arguments # pylint: disable=to ) logging.debug("Found known context URLs, getting context URLs") replied_toot_ids = getter_wrappers.get_all_replied_toot_server_ids( - helpers.arguments.server, + arguments.server, reply_toots, replied_toot_server_ids, parsed_urls, ) logging.debug("Found replied toot IDs, getting context URLs") context_urls = await getter_wrappers.get_all_context_urls( - helpers.arguments.server, + arguments.server, replied_toot_ids, external_tokens, pgupdater, - helpers.arguments.server, + arguments.server, token, ) logging.debug("Found context URLs, getting context URLs") await find_context.add_context_urls_wrapper( - helpers.arguments.server, + arguments.server, token, context_urls, pgupdater, ) logging.debug("Added context URLs") - if helpers.arguments.max_followings > 0: + if arguments.max_followings > 0: logging.info( - f"Getting posts from last {helpers.arguments.max_followings} followings") + f"Getting posts from last {arguments.max_followings} followings") followings = await getter_wrappers.get_new_followings( - helpers.arguments.server, + arguments.server, token, token_user_id, - helpers.arguments.max_followings, + arguments.max_followings, all_known_users, ) logging.debug("Got followings, getting context URLs") await add_user_posts( - helpers.arguments.server, + arguments.server, token, followings, known_followings, all_known_users, external_tokens, pgupdater, + arguments, ) logging.debug("Added context URLs") - if helpers.arguments.max_followers > 0: + if arguments.max_followers > 0: logging.info( - f"Getting posts from last {helpers.arguments.max_followers} followers") + f"Getting posts from last {arguments.max_followers} followers") followers = await getter_wrappers.get_new_followers( - helpers.arguments.server, + arguments.server, token, token_user_id, - helpers.arguments.max_followers, + arguments.max_followers, all_known_users, ) logging.debug("Got followers, getting context URLs") await add_user_posts( - helpers.arguments.server, + arguments.server, token, followers, recently_checked_users, all_known_users, external_tokens, pgupdater, + arguments, ) logging.debug("Added context URLs") - if helpers.arguments.max_follow_requests > 0: + if arguments.max_follow_requests > 0: logging.info( - f"Getting posts from last {helpers.arguments.max_follow_requests} follow requests") + f"Getting posts from last {arguments.max_follow_requests} follow requests") follow_requests = await getter_wrappers.get_new_follow_requests( - helpers.arguments.server, + 
arguments.server, token, - helpers.arguments.max_follow_requests, + arguments.max_follow_requests, all_known_users, ) logging.debug("Got follow requests, getting context URLs") await add_user_posts( - helpers.arguments.server, + arguments.server, token, follow_requests, recently_checked_users, all_known_users, external_tokens, pgupdater, + arguments, ) logging.debug("Added context URLs") - if helpers.arguments.from_notifications > 0: + if arguments.from_notifications > 0: logging.info( - f"Getting notifications for last {helpers.arguments.from_notifications} hours") + f"Getting notifications for last {arguments.from_notifications} hours") notification_users = await getter_wrappers.get_notification_users( - helpers.arguments.server, + arguments.server, token, all_known_users, - helpers.arguments.from_notifications, + arguments.from_notifications, ) logging.debug("Got notification users, getting context URLs") await add_user_posts( - helpers.arguments.server, + arguments.server, token, notification_users, recently_checked_users, all_known_users, external_tokens, pgupdater, + arguments, ) logging.debug("Added context URLs") - if helpers.arguments.max_bookmarks > 0: + if arguments.max_bookmarks > 0: logging.info( - f"Pulling replies to the last {helpers.arguments.max_bookmarks} bookmarks") + f"Pulling replies to the last {arguments.max_bookmarks} bookmarks") bookmarks = await api_mastodon.Mastodon( - helpers.arguments.server, token, pgupdater).get_bookmarks( - helpers.arguments.max_bookmarks) + arguments.server, token, pgupdater).get_bookmarks( + arguments.max_bookmarks) logging.debug("Got bookmarks, getting context URLs") known_context_urls = await getter_wrappers.get_all_known_context_urls( - helpers.arguments.server, + arguments.server, list(bookmarks), parsed_urls, external_tokens, @@ -248,22 +255,22 @@ async def token_posts( # pylint: disable=too-many-arguments # pylint: disable=to ) logging.debug("Got known context URLs, getting context URLs") await find_context.add_context_urls_wrapper( - helpers.arguments.server, + arguments.server, token, known_context_urls, pgupdater, ) logging.debug("Added context URLs") - if helpers.arguments.max_favourites > 0: + if arguments.max_favourites > 0: logging.info( - f"Pulling replies to the last {helpers.arguments.max_favourites} favourites") + f"Pulling replies to the last {arguments.max_favourites} favourites") favourites = await api_mastodon.Mastodon( - helpers.arguments.server, token, pgupdater).get_favourites( - helpers.arguments.max_favourites, + arguments.server, token, pgupdater).get_favourites( + arguments.max_favourites, ) logging.debug("Got favourites, getting context URLs") known_context_urls = await getter_wrappers.get_all_known_context_urls( - helpers.arguments.server, + arguments.server, list(favourites), parsed_urls, external_tokens, @@ -272,7 +279,7 @@ async def token_posts( # pylint: disable=too-many-arguments # pylint: disable=to ) logging.debug("Got known context URLs, getting context URLs") await find_context.add_context_urls_wrapper( - helpers.arguments.server, + arguments.server, token, known_context_urls, pgupdater, diff --git a/fedifetcher/mode/trending_posts.py b/fedifetcher/mode/trending_posts.py index 71e666df..50c400b2 100644 --- a/fedifetcher/mode/trending_posts.py +++ b/fedifetcher/mode/trending_posts.py @@ -6,12 +6,12 @@ from fedifetcher.find_trending_posts import find_trending_posts -async def trending_posts(parsed_urls, admin_token, external_tokens, pgupdater) -> None: +async def trending_posts(parsed_urls, 
admin_token, external_tokens, pgupdater, arguments) -> None:
     """Get trending posts from supplied servers."""
-    external_feeds = helpers.arguments.external_feeds.split(",")
+    external_feeds = arguments.external_feeds.split(",")
     logging.info("Getting trending posts")
     trending_posts = await find_trending_posts(
-        helpers.arguments.server,
+        arguments.server,
         admin_token,
         external_feeds,
         external_tokens,
@@ -89,7 +89,7 @@
 f"Found {len(trending_posts_changed)} trending posts with new replies, getting known \
 context URLs")
     known_context_urls = await getter_wrappers.get_all_known_context_urls(
-        helpers.arguments.server,
+        arguments.server,
         trending_posts_changed,
         parsed_urls,
         external_tokens,
@@ -100,7 +100,7 @@
     logging.debug(
 f"Found {len(known_context_urls)} known context URLs, getting context URLs")
     await find_context.add_context_urls_wrapper(
-        helpers.arguments.server,
+        arguments.server,
         admin_token,
         known_context_urls,
         pgupdater,
diff --git a/find_posts.py b/find_posts.py
index 80476607..037c290c 100644
--- a/find_posts.py
+++ b/find_posts.py
@@ -4,5 +4,7 @@
 import asyncio
 
 from fedifetcher import main
+from argparser import parse_arguments
 
-asyncio.run(main())
+if __name__ == "__main__":
+    asyncio.run(main(parse_arguments()))
diff --git a/setup.py b/setup.py
new file mode 100644
index 00000000..a99ce137
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,8 @@
+"""Setup file for fedifetcher."""
+from setuptools import find_packages, setup
+
+setup(
+    name="fedifetcher",
+    version="0.1",
+    packages=find_packages(),
+)
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 00000000..00e51734
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,8 @@
+"""__init__.py for tests."""
+
+from . 
import test_parsers, test_api_firefish + +__all__ = [ + "test_parsers", + "test_api_firefish", +] \ No newline at end of file diff --git a/tests/test_api_firefish.py b/tests/test_api_firefish.py new file mode 100644 index 00000000..21eaa0b8 --- /dev/null +++ b/tests/test_api_firefish.py @@ -0,0 +1,419 @@ +"""Test the Firefish class.""" +import asyncio +import re +import unittest +from unittest import IsolatedAsyncioTestCase +from unittest.mock import ANY, AsyncMock, MagicMock, patch + +from fedifetcher.api.firefish.api_firefish import Firefish, FirefishClient +from fedifetcher.api.firefish.api_firefish_types import Note, UserDetailedNotMe, UserLite + + +class FirefishClientTest(IsolatedAsyncioTestCase): + """Test the FirefishClient class.""" + + async def test_post(self) -> None: + """Test the post method.""" + client = FirefishClient( + access_token="token", + api_base_url="example.com", + client=MagicMock(), + ) + client.client.post = MagicMock() + client.client.post.return_value.__aenter__.return_value = MagicMock( + status=200, + json=AsyncMock(return_value={"key": "value"}), + ) + endpoint = "/endpoint" + json = {"key": "value"} + expected_result = {"key": "value"} + result = await client.post(endpoint, json) + assert result == expected_result + + # Test a failed post + client.client.post.return_value.__aenter__.return_value = MagicMock( + status=400, + json=AsyncMock(return_value={"error": "error"}), + ) + expected_result = False + result = await client.post(endpoint, json) + assert result == expected_result + + async def test_handle_response_errors(self) -> None: + """Test the handle_response_errors method.""" + # Test a 200 response with a body (success) + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + response = MagicMock( + status=200, + json=AsyncMock(return_value={"key": "value"}), + ) + expected_result = {"key": "value"} + result = await client.handle_response_errors(response) + assert result == expected_result + + # Test a 200 response without a body (success, {}) + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + response = MagicMock( + status=200, + json=AsyncMock(return_value={}), + ) + expected_result = True + result = await client.handle_response_errors(response) + assert result == expected_result + + # Test a 400 response (failure) + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + response = MagicMock( + status=400, + json=AsyncMock(return_value={"error": "error"}), + ) + expected_result = False + result = await client.handle_response_errors(response) + assert result == expected_result + + # Test a 401 response (failure) + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + response = MagicMock( + status=401, + json=AsyncMock(return_value={"error": "error"}), + ) + expected_result = False + result = await client.handle_response_errors(response) + assert result == expected_result + + # Test a 403 response (failure) + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + response = MagicMock( + status=403, + json=AsyncMock(return_value={"error": "error"}), + ) + expected_result = False + result = await client.handle_response_errors(response) + assert result == expected_result + + # Test a 418 response (failure) + client = 
FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + response = MagicMock( + status=418, + json=AsyncMock(return_value={"error": "error"}), + ) + expected_result = False + result = await client.handle_response_errors(response) + assert result == expected_result + + # Test a 429 response (failure) + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + response = MagicMock( + status=429, + json=AsyncMock(return_value={"error": "error"}), + ) + expected_result = False + result = await client.handle_response_errors(response) + assert result == expected_result + + # Test a 500 response (failure) + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + response = MagicMock( + status=500, + json=AsyncMock(return_value={"error": "error"}), + ) + expected_result = False + result = await client.handle_response_errors(response) + assert result == expected_result + + # Test an unknown response (failure) + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + response = MagicMock( + json=AsyncMock(return_value={"error": "error"}), + ) + expected_result = False + result = await client.handle_response_errors(response) + assert result == expected_result + + + async def test_ap_get(self) -> None: + """Test the ap_get method.""" + # Test a successful ap_get + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + client.post = AsyncMock(return_value={"key": "value"}) + uri = "https://example.com/@username/123456" + expected_result = True + result = await client.ap_get(uri) + assert result == expected_result + + # Test a failed ap_get + client.post = AsyncMock(return_value=False) + expected_result = False + result = await client.ap_get(uri) + assert result == expected_result + + + async def test_ap_show(self) -> None: + """Test the ap_show method.""" + # Test a successful ap_show (Note) + + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + userlite = UserLite( + id="123456", + username="username", + name="name", + avatarUrl="https://example.com/avatar.png", + avatarBlurhash="blurhash", + emojis=[], + isAdmin=False, + host="example.com", + avatarColor="#000000", + ) + note = Note( + id="123456", + text="text", + createdAt="2021-01-01T00:00:00.000Z", + cw="text", + userId="123456", + user=userlite, + replyId="123456", + renoteId="123456", + renoteCount=1, + repliesCount=1, + uri="https://example.com/@username/123456", + ) + client.post = AsyncMock( + return_value={ + "type": "Note", + "object": { + "id": "123456", + "text": "text", + "createdAt": "2021-01-01T00:00:00.000Z", + "cw": "text", + "userId": "123456", + "user": {"key": "value"}, + "replyId": "123456", + "renoteId": "123456", + "renoteCount": 1, + "repliesCount": 1, + "uri": "https://example.com/@username/123456", + }, + }, + ) + uri = "https://example.com/@username/123456" + expected_result: tuple[str, Note] | bool = ( + "Note", + note, + ) + result_1 = await client.ap_show(uri) + # assert result_1 == expected_result + # Instead of asserting the result, we'll assert the type of the result + assert isinstance(result_1, tuple) + first, second = result_1 + assert isinstance(result_1[0], str) + assert isinstance(result_1[1], Note) + assert first == "Note" + # assert 
second == note + + # Test a successful ap_show (User) + # client = FirefishClient( + # access_token="token", # noqa: S106 + # api_base_url="example.com", + # client=MagicMock(), + # ) + # client.post = AsyncMock( + # return_value={ + # "type": "User", + # "object": {"key": "value"}, + # }, + # ) + # uri = "https://example.com/@username/123456" + # expected_result = ("User", {"key": "value"}) + # result_2 = await client.ap_show(uri) + # assert result_2 == expected_result + + # # Test a failed ap_show + # client = FirefishClient( + # access_token="token", # noqa: S106 + # api_base_url="example.com", + # client=MagicMock(), + # ) + # client.post = AsyncMock(return_value=False) + # uri = "https://example.com/@username/123456" + # expected_result = False + # result_3 = await client.ap_show(uri) + # assert result_3 == expected_result + + async def test_notes_show(self) -> None: + """Test the notes_show method.""" + # Test a successful notes_show + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + client.post = AsyncMock(return_value={ + "id": "123456", + "text": "text", + "createdAt": "2021-01-01T00:00:00.000Z", + "cw": "text", + "userId": "123456", + "user": {"key": "value"}, + "replyId": "123456", + "renoteId": "123456", + "renoteCount": 1, + "repliesCount": 1, + "uri": "https://example.com/@username/123456", + }) + note_id = "123456" + expected_result = { + "id": "123456", + "text": "text", + "createdAt": "2021-01-01T00:00:00.000Z", + "cw": "text", + "userId": "123456", + "user": {"key": "value"}, + "replyId": "123456", + "renoteId": "123456", + "renoteCount": 1, + "repliesCount": 1, + "uri": "https://example.com/@username/123456", + } + result_1 = await client.notes_show(note_id) + assert result_1 == expected_result + + # Test a failed notes_show + client = FirefishClient( + access_token="token", # noqa: S106 + api_base_url="example.com", + client=MagicMock(), + ) + client.post = AsyncMock(return_value=False) + note_id = "123456" + expected_result = False + result_2 = await client.notes_show(note_id) + assert result_2 == expected_result + + +class FirefishTest(IsolatedAsyncioTestCase): + """Test the Firefish class.""" + + async def test_init(self) -> None: + """Test the __init__ method.""" + # Test a successful __init__ + client = MagicMock() + expected_result = client + result = Firefish("example.com", "token", client) + assert result == expected_result + + async def test_add_context_url(self)-> None: + """Test the add_context_url method.""" + # Test a successful add_context_url + client = MagicMock() + client.ap_show = MagicMock(return_value=("Note", {"key": "value"})) + expected_result = {"key": "value"} + result = await Firefish("example.com", "token", client).add_context_url("url") + assert result == expected_result + + # Test a failed add_context_url + client = MagicMock() + client.ap_show = MagicMock(return_value=False) + expected_result = False + result = await Firefish("example.com", "token", client).add_context_url("url") + assert result == expected_result + + async def test_get_home_status_id_from_url(self) -> None: + """Test the get_home_status_id_from_url method.""" + # Test a successful get_home_status_id_from_url + client = MagicMock() + client.get_from_cache = MagicMock(return_value={"id": "123456"}) + client.add_context_url = MagicMock(return_value={"id": "123456"}) + expected_result = "123456" + result = await Firefish( + "example.com", "token", client).get_home_status_id_from_url("url") + assert result 
== expected_result + + # Test a failed get_home_status_id_from_url + client = MagicMock() + client.get_from_cache = MagicMock(return_value=None) + client.add_context_url = MagicMock(return_value=False) + expected_result = None + result = await Firefish( + "example.com", "token", client).get_home_status_id_from_url("url") + assert result == expected_result + + async def test_get_home_status_id_from_url_list(self) -> None: + """Test the get_home_status_id_from_url_list method.""" + # Test a successful get_home_status_id_from_url_list + client = MagicMock() + client.get_from_cache = MagicMock(return_value={"id": "123456"}) + client.add_context_url = MagicMock(return_value={"id": "123456"}) + urls = ["url1", "url2"] + expected_result = {"url1": "123456", "url2": "123456"} + result = await Firefish( + "example.com", "token", client).get_home_status_id_from_url_list(urls) + assert result == expected_result + + async def test_get_toot_context(self) -> None: + """Test the get_toot_context method.""" + # Test a successful get_toot_context + client = MagicMock() + client.get_home_status_id_from_url_list = MagicMock( + return_value={"url1": "123456", "url2": "123456"}) + expected_result = ["url1", "url2"] + result = await Firefish( + "example.com", "token", client).get_toot_context( + "server", "toot_id", "token") + assert result == expected_result + + # Test a failed get_toot_context + client = MagicMock() + client.get_home_status_id_from_url_list = MagicMock(return_value={}) + expected_result = [] + result = await Firefish( + "example.com", "token", client).get_toot_context( + "server", "toot_id", "token") + assert result == expected_result + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_parsers.py b/tests/test_parsers.py new file mode 100644 index 00000000..01572b51 --- /dev/null +++ b/tests/test_parsers.py @@ -0,0 +1,88 @@ +"""Test the parsers.""" +import unittest + +from fedifetcher.parsers import post, user + + +class ParsersTest(unittest.TestCase): + """Test the parsers.""" + + def test_user(self) -> None: + """Test the user function.""" + # Test a Mastodon user URL + url = "https://mastodon.social/@username" + expected_result = ("mastodon.social", "username") + result = user(url) + assert result == expected_result + + # Test a Pleroma user URL + url = "https://pleroma.site/users/username" + expected_result = ("pleroma.site", "username") + result = user(url) + assert result == expected_result + + # Test a Lemmy user URL + url = "https://lemmy.ml/u/username" + expected_result = ("lemmy.ml", "username") + result = user(url) + assert result == expected_result + + # Test a Pixelfed user URL + url = "https://pixelfed.net/username" + expected_result = ("pixelfed.net", "username") + result = user(url) + assert result == expected_result + + # Test an invalid user URL + url = "https://example.com" + expected_result = None + result = user(url) + assert result == expected_result + + def test_post(self) -> None: + """Test the post function.""" + # Test a Mastodon post URL + url = "https://mastodon.social/@username/123456" + expected_result = ("mastodon.social", "123456") + result = post(url) + assert result == expected_result + + # Test a Mastodon URI post URL + url = "https://mastodon.social/users/username/statuses/123456" + expected_result = ("mastodon.social", "123456") + result = post(url) + assert result == expected_result + + # Test a Firefish post URL + url = "https://example.com/notes/123456" + expected_result = ("example.com", "123456") + result = post(url) + assert 
result == expected_result + + # Test a Pixelfed post URL + url = "https://pixelfed.net/p/username/123456" + expected_result = ("pixelfed.net", "123456") + result = post(url) + assert result == expected_result + + # Test a Pleroma post URL + url = "https://pleroma.site/objects/123456" + expected_result = ("pleroma.site", "123456") + result = post(url) + assert result == expected_result + + # Test a Lemmy post URL + url = "https://lemmy.ml/comment/123456" + expected_result = ("lemmy.ml", "123456") + result = post(url) + assert result == expected_result + + # Test an invalid post URL + url = "https://example.com" + expected_result = (None, None) + result = post(url) + assert result == expected_result + + +if __name__ == "__main__": + unittest.main()
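A closing note on running the suite: setup.py and the new fedifetcher.egg-info/ ignore entry suggest the tests import fedifetcher via an editable install. Assuming that, a plausible workflow from the checkout root is:

    pip install -e .
    python -m unittest discover -v -s ./tests -p "test_*.py"

The -v/-s/-p values match the unittest arguments configured in .vscode/settings.json above.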