feat: begin implement testing
Teqed committed Aug 3, 2023
1 parent 350ae72 commit bbd3a14
Showing 21 changed files with 774 additions and 213 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -2,3 +2,5 @@
 artifacts/*
 **\__pycache__
 src/mastodon-py/
+.idea/
+fedifetcher.egg-info/
2 changes: 1 addition & 1 deletion .ruff.toml
@@ -1,6 +1,6 @@
 # .ruff.toml
 select = ['ALL']
-ignore = ['TRY400','G004','BLE001', 'TRY002', 'ANN101', 'FIX002', 'TD003', 'TD002']
+ignore = ['TRY400','G004','BLE001', 'TRY002', 'ANN101', 'FIX002', 'TD003', 'TD002', 'S101']
 fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"]
 exclude = [
 ".bzr",
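For context (an inference, not stated in the commit): ruff rule S101 flags bare assert statements, which unittest- and pytest-style tests rely on, so adding it to the ignore list avoids lint noise as tests are introduced. A minimal illustration of the kind of line S101 would otherwise report:

# hypothetical test snippet, not part of this commit
def test_addition() -> None:
    assert 1 + 1 == 2  # flagged as S101 (use of assert) unless the rule is ignored
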
35 changes: 32 additions & 3 deletions .vscode/settings.json
@@ -1,6 +1,35 @@
 {
-    "python.analysis.extraPaths": [
-        "./src/mastodon-py"
+    "python.analysis.include": [
+        "./fedifetcher",
     ],
-    "python.analysis.typeCheckingMode": "basic"
+    "python.analysis.typeCheckingMode": "basic",
+    "python.analysis.diagnosticMode": "workspace",
+    "python.analysis.inlayHints.variableTypes": true,
+    "python.analysis.inlayHints.functionReturnTypes": true,
+    "python.analysis.inlayHints.pytestParameters": true,
+    "editor.semanticTokenColorCustomizations": {
+        "enabled": true, // enable for all themes
+        "rules": {
+            "*.static": {
+                "foreground": "#ff0000",
+                "fontStyle": "bold",
+            },
+            "type": {
+                "foreground": "#00aa00",
+            },
+            "variable": {
+                // "foreground": "#0000aa"
+                "fontStyle": "italic",
+            },
+        }
+    },
+    "python.testing.unittestArgs": [
+        "-v",
+        "-s",
+        "./tests",
+        "-p",
+        "test_*.py",
+    ],
+    "python.testing.pytestEnabled": false,
+    "python.testing.unittestEnabled": true,
 }
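The testing settings above switch the workspace to unittest, discovering a ./tests directory with the test_*.py pattern while pytest stays disabled. As a minimal sketch (file name and contents are assumptions for illustration, not part of this commit), a module that this discovery would pick up:

# tests/test_smoke.py - hypothetical placeholder, not part of this commit
"""Smoke test confirming that unittest discovery is wired up."""
import unittest


class TestSmoke(unittest.TestCase):
    """Trivial test case matched by the test_*.py pattern."""

    def test_discovery_runs(self) -> None:
        """Discovery should collect and run this test."""
        self.assertTrue(True)


if __name__ == "__main__":
    unittest.main()

The same discovery can be run outside the editor with python -m unittest discover -v -s ./tests -p "test_*.py" from the repository root.
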
78 changes: 78 additions & 0 deletions argparser.py
@@ -0,0 +1,78 @@
"""argparser.py - Parses command line arguments."""
import argparse
import json

def parse_arguments():
argparser=argparse.ArgumentParser()

argparser.add_argument("-c","--config", required=False, type=str, help="Optionally \
provide a path to a JSON file containing configuration options. If not provided, \
options must be supplied using command line flags.")
argparser.add_argument("--server", required=False, help="Required: The name of \
your server (e.g. `mstdn.thms.uk`)")
argparser.add_argument("--access-token", action="append", required=False,
help="Required: The access token can be generated at \
https://<server>/settings/applications, and must have read:search, read:statuses \
and admin:read:accounts scopes. You can supply this multiple times, if you want to \
run it for multiple users.")
argparser.add_argument("--reply-interval-in-hours", required = False, type=int,
default=0, help="Fetch remote replies to posts that have received replies from \
users on your own instance in this period")
argparser.add_argument("--home-timeline-length", required = False, type=int,
default=0, help="Look for replies to posts in the API-Key owner's home \
timeline, up to this many posts")
argparser.add_argument("--max-followings", required = False, type=int, default=0,
help="Backfill posts for new accounts followed by --user. We'll backfill at \
most this many followings' posts")
argparser.add_argument("--max-followers", required = False, type=int, default=0,
help="Backfill posts for new accounts following --user. We'll backfill at most \
this many followers' posts")
argparser.add_argument("--max-follow-requests", required = False, type=int, \
default=0, help="Backfill posts of the API key owners pending follow requests. \
We'll backfill at most this many requester's posts")
argparser.add_argument("--max-bookmarks", required = False, type=int, default=0,
help="Fetch remote replies to the API key owners Bookmarks. We'll fetch \
replies to at most this many bookmarks")
argparser.add_argument("--max-favourites", required = False, type=int, default=0,
help="Fetch remote replies to the API key owners Favourites. We'll fetch \
replies to at most this many favourites")
argparser.add_argument("--from-notifications", required = False, type=int,
default=0, help="Backfill accounts of anyone appearing in your notifications, \
during the last hours")
argparser.add_argument("--remember-users-for-hours", required=False, type=int,
default=24*7, help="How long to remember users that you aren't following for, \
before trying to backfill them again.")
argparser.add_argument("--http-timeout", required = False, type=int, default=5,
help="The timeout for any HTTP requests to your own, or other instances.")
argparser.add_argument("--backfill-with-context", required = False, type=int,
default=1, help="If enabled, we'll fetch remote replies when backfilling \
profiles. Set to `0` to disable.")
argparser.add_argument("--backfill-mentioned-users", required = False, type=int,
default=1, help="If enabled, we'll backfill any mentioned users when fetching \
remote replies to timeline posts. Set to `0` to disable.")
argparser.add_argument("--lock-hours", required = False, type=int, default=24,
help="The lock timeout in hours.")
argparser.add_argument("--lock-file", required = False, default=None,
help="Location of the lock file")
argparser.add_argument("--state-dir", required = False, default="artifacts",
help="Directory to store persistent files and possibly lock file")
argparser.add_argument("--on-done", required = False, default=None, help="Provide \
a url that will be pinged when processing has completed. You can use this for \
'dead man switch' monitoring of your task")
argparser.add_argument("--on-start", required = False, default=None, help="Provide \
a url that will be pinged when processing is starting. You can use this for \
'dead man switch' monitoring of your task")
argparser.add_argument("--on-fail", required = False, default=None, help="Provide \
a url that will be pinged when processing has failed. You can use this for \
'dead man switch' monitoring of your task")
argparser.add_argument("--log-level", required = False, type=int, default=20,
help="Set the log level. 10=DEBUG, 20=INFO, 30=WARNING, 40=ERROR, 50=CRITICAL")
argparser.add_argument("--external-tokens", required = False, type=json.loads,
default=None, help="Provide a JSON-formatted dictionary of external tokens, \
keyed by server.")
argparser.add_argument("--pgpassword", required = False, type=str, default=None,
help="Provide the password for the postgres user")
argparser.add_argument("--external-feeds", required = False, type=str, default=None,
help="Provide a comma-separated list of external feeds to fetch from.")

return argparser.parse_args()
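With the parser moved into a standalone top-level module, its defaults become easy to unit-test. A sketch of such a test (the file name, the sys.argv patching, and the import path are assumptions, not part of this commit):

# tests/test_argparser.py - hypothetical example, not part of this commit
"""Check that parse_arguments() falls back to its documented defaults."""
import unittest
from unittest import mock

from argparser import parse_arguments  # assumes the repository root is on sys.path


class TestParseArguments(unittest.TestCase):
    """Exercise the command-line parser with no flags supplied."""

    def test_defaults(self) -> None:
        """An empty command line should yield the documented defaults."""
        with mock.patch("sys.argv", ["fedifetcher"]):
            args = parse_arguments()
        self.assertEqual(args.http_timeout, 5)
        self.assertEqual(args.lock_hours, 24)
        self.assertEqual(args.remember_users_for_hours, 24 * 7)
        self.assertEqual(args.backfill_with_context, 1)


if __name__ == "__main__":
    unittest.main()
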
3 changes: 1 addition & 2 deletions fedifetcher/__init__.py
@@ -18,7 +18,7 @@
 from .api.lemmy import api_lemmy
 from .api.mastodon import api_mastodon, api_mastodon_types
 from .get import post_context
-from .helpers import argparser, cache_manager, helpers, ordered_set
+from .helpers import cache_manager, helpers, ordered_set
 from .main import main
 from .mode import token_posts

@@ -30,7 +30,6 @@
 "api_lemmy",
 "api_mastodon",
 "api_mastodon_types",
-"argparser",
 "cache_manager",
 "token_posts",
 "find_trending_posts",
1 change: 1 addition & 0 deletions fedifetcher/api/firefish/__init__.py
@@ -7,6 +7,7 @@
 structures
 """

+
 from .api_firefish import Firefish
 from .api_firefish_types import Note, UserDetailedNotMe

7 changes: 4 additions & 3 deletions fedifetcher/api/firefish/api_firefish.py
@@ -1,4 +1,5 @@
-"""Mastodon API functions."""
+"""Firefish API functions."""
+from argparse import Namespace
 import asyncio
 import logging
 from typing import Any, ClassVar, Literal
@@ -10,7 +11,7 @@
 from fedifetcher.api.mastodon import api_mastodon
 from fedifetcher.api.mastodon.api_mastodon_types import Status
 from fedifetcher.api.postgresql.postgresql import PostgreSQLUpdater
-from fedifetcher.helpers.helpers import Response, arguments
+from fedifetcher.helpers.helpers import Response


 class FirefishClient:
@@ -174,7 +175,7 @@ def __init__(self, server: str, token: str | None = None,

 Firefish.clients[server] = FirefishClient(
 access_token=token if token else None,
-api_base_url=server if server else arguments.server,
+api_base_url=server,
 client=client,
 )
 self.client = Firefish.clients[server]
12 changes: 2 additions & 10 deletions fedifetcher/api/mastodon/api_mastodon.py
@@ -10,7 +10,6 @@

 from fedifetcher.api.mastodon.api_mastodon_types import Status
 from fedifetcher.api.postgresql import PostgreSQLUpdater
-from fedifetcher.helpers import helpers
 from fedifetcher.helpers.helpers import Response


@@ -167,7 +166,7 @@ def __init__(self,
 })

 Mastodon.clients[server] = MastodonClient(
-api_base_url=server if server else helpers.arguments.server,
+api_base_url=server,
 client_session=client,
 token=token,
 pgupdater=pgupdater,
@@ -193,13 +192,6 @@ async def get_user_id(
 -------
 str | None: The user id if found, or None if the user is not found.
 """
-if self.client.api_base_url == helpers.arguments.server or \
-not self.client.api_base_url:
-account = await self.account_lookup(
-acct = f"{user}",
-)
-if not isinstance(account, bool):
-return account["id"]
 account_search = await self.account_lookup(
 acct = f"{user}",
 )
@@ -852,7 +844,7 @@ def get_status_by_id(
 Args:
 ----
 status_id (str): The ID of the toot to get the status of.
-semaphoe (asyncio.Semaphore): The semaphore to use for the request.
+semaphore (asyncio.Semaphore): The semaphore to use for the request.

 Returns:
 -------
4 changes: 3 additions & 1 deletion fedifetcher/find_context.py
@@ -1,4 +1,5 @@
 """Add context toots to the server."""
+from argparse import Namespace
 import asyncio
 import logging
 from collections.abc import Iterable
@@ -20,6 +21,7 @@ async def add_post_with_context(
 access_token : str,
 external_tokens : dict[str, str],
 pgupdater : PostgreSQLUpdater,
+arguments : Namespace,
 ) -> bool:
 """Add the given post to the server.

@@ -40,7 +42,7 @@
 home_server, access_token, pgupdater).add_context_url(post["url"])
 if added is not False:
 if ("replies_count" in post or "in_reply_to_id" in post) and getattr(
-helpers.arguments, "backfill_with_context", 0) > 0:
+arguments, "backfill_with_context", 0) > 0:
 parsed_urls : dict[str, tuple[str | None, str | None]] = {}
 parsed = parsers.post(post["url"], parsed_urls)
 if parsed is not None and parsed[0] is not None:
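A side effect of threading arguments explicitly (here and in add_user_posts below) instead of reading the old helpers.arguments global is that callers and tests can pass a plain Namespace. A sketch with invented values, mirroring the getattr check above:

# hypothetical call-site / test fixture, not part of this commit
from argparse import Namespace

arguments = Namespace(backfill_with_context=1, backfill_mentioned_users=1)
if getattr(arguments, "backfill_with_context", 0) > 0:
    ...  # fetch remote replies, as add_post_with_context does above
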
4 changes: 3 additions & 1 deletion fedifetcher/find_user_posts.py
@@ -1,4 +1,5 @@
 """Find user posts to the server."""
+from argparse import Namespace
 import logging

 from fedifetcher import get
@@ -16,6 +17,7 @@ async def add_user_posts( # noqa: PLR0913
 all_known_users: OrderedSet,
 external_tokens: dict[str, str],
 pgupdater: PostgreSQLUpdater,
+arguments: Namespace,
 ) -> None:
 """Add the given user's posts to the server.

@@ -54,7 +56,7 @@ async def add_user_posts( # noqa: PLR0913
 if post.get("reblog") is None:
 added = await add_post_with_context(
 post, home_server, access_token,
-external_tokens, pgupdater)
+external_tokens, pgupdater, arguments)
 if added is True:
 status = Status(
 id=post.get("id"),
77 changes: 0 additions & 77 deletions fedifetcher/helpers/argparser.py

This file was deleted.

12 changes: 3 additions & 9 deletions fedifetcher/helpers/helpers.py
@@ -8,10 +8,7 @@
 import requests
 from dateutil import parser

-from .argparser import arguments
-
-
-def setup_logging() -> None:
+def setup_logging(log_level: str) -> None:
 """Set logging."""
 logger = logging.getLogger()
 stdout = colorlog.StreamHandler(stream=sys.stdout)
@@ -21,7 +18,7 @@ def setup_logging() -> None:
 %(log_color)s%(message)s%(reset)s")
 stdout.setFormatter(fmt)
 logger.addHandler(stdout)
-logger.setLevel(arguments.log_level)
+logger.setLevel(log_level)

 class Response:
 """HTTP response codes."""
@@ -42,7 +39,7 @@ class Response:
 def get(
 url : str,
 headers : dict | None = None,
-timeout : int = 0,
+timeout : int | None = None,
 max_tries : int = 2,
 ) -> requests.Response:
 """Get a URL.
@@ -57,9 +54,6 @@
 if "User-Agent" not in h:
 h["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 +https://github.com/Teqed Meowstodon/1.0.0" # noqa: E501

-if timeout == 0:
-timeout = arguments.http_timeout
-
 try:
 response = requests.get(url, headers=h, timeout=timeout)
 except (requests.exceptions.ReadTimeout, requests.exceptions.ConnectTimeout):
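With the arguments global removed from helpers, setup_logging() now takes the log level directly and get() no longer substitutes arguments.http_timeout for a missing timeout; callers pass one themselves (note that the new default of None lets requests wait indefinitely if they do not). A usage sketch with placeholder values, not taken from this commit:

# hypothetical caller, not part of this commit
from fedifetcher.helpers import helpers

helpers.setup_logging("INFO")  # setLevel() accepts level names as well as numbers
response = helpers.get(
    "https://example.com/api/v1/instance",  # placeholder URL
    timeout=5,  # previously filled in from arguments.http_timeout
)
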
[Diffs for the remaining changed files are not shown.]
