diff --git a/.flake8 b/.flake8 index 3e220828..b9f32f37 100644 --- a/.flake8 +++ b/.flake8 @@ -2,16 +2,34 @@ color=always max-line-length=120 ignore= - W503, ; Linebreak before binary operator - Y015, ; Allow default value other than "..." - E402, ; Allow imports at the bottom of file + ; Linebreak before binary operator + W503, + ; Closing bracket may not match multi-line method invocation style (enforced by add-trailing-comma) + E124, + ; Allow imports at the bottom of file + E402, + ; contextlib.suppress is roughly 3x slower than try/except + SIM105, + ; False positives for attribute docstrings + CCE001, + ; Let's use datetime.utcnow() + DTZ003, + ; datetime.strptime + DTZ007 per-file-ignores= ; Quotes + ; Allow ... on same line as class ; Allow ... on same line as def ; Line too long - ; Naming stuff, we don't have control over external libraries' naming conventions - typings/**: Q000,E704,E501,N8 + ; Naming conventions can't be controlled for external libraries + ; Variable names can't be controlled for external libraries + ; Argument names can't be controlled for external libraries + ; Attribute names can't be controlled for external libraries + ; False positive Class level expression with elipsis + ; Type re-exports + ; mypy 3.7 Union issue + *.pyi: Q000,E701,E704,E501,N8,A001,A002,A003,CCE002,F401,Y037 ; McCabe max-complexity is also taken care of by Pylint and doesn't fail the build there ; So this is the hard limit max-complexity=32 -inline-quotes=" +inline-quotes=double diff --git a/.github/workflows/lint-python.yml b/.github/workflows/lint-python.yml index ab727516..0dacd4c5 100644 --- a/.github/workflows/lint-python.yml +++ b/.github/workflows/lint-python.yml @@ -14,95 +14,111 @@ on: paths: - "**.py" - "**.pyi" + +env: + python-version: "3.10" + jobs: - add-trailing-comma: - runs-on: windows-latest + isort: + runs-on: ubuntu-latest steps: - name: Checkout ${{ github.repository }}/${{ github.ref }} uses: actions/checkout@v3 - - name: Set up Python 3.9 + - name: Set up Python ${{ env.python-version }} uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: ${{ env.python-version }} cache: "pip" + cache-dependency-path: "scripts/requirements*.txt" - name: Install dependencies run: | - python -m pip install --upgrade pip pip install wheel pip install -r "scripts/requirements.txt" - - name: Analysing the code with add-trailing-comma + - name: Analysing the code with ${{ job.name }} + run: isort backend/ typings/ --check-only + add-trailing-comma: + runs-on: ubuntu-latest + steps: + - name: Checkout ${{ github.repository }}/${{ github.ref }} + uses: actions/checkout@v3 + - name: Set up Python ${{ env.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ env.python-version }} + cache: "pip" + cache-dependency-path: "scripts/requirements*.txt" + - run: pip install add-trailing-comma + - name: Analysing the code with ${{ job.name }} run: add-trailing-comma $(git ls-files '**.py*') --py36-plus - Pyright: + Bandit: runs-on: ubuntu-latest steps: - name: Checkout ${{ github.repository }}/${{ github.ref }} uses: actions/checkout@v3 - - name: Set up Node - uses: actions/setup-node@v3 - - name: Set up Python 3.9 + - name: Set up Python ${{ env.python-version }} uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: ${{ env.python-version }} cache: "pip" - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install wheel - pip install -r "scripts/requirements.txt" + cache-dependency-path: 
"scripts/requirements*.txt" + - run: pip install bandit - run: mv backend/configs.template.py backend/configs.py - name: Analysing the code with ${{ job.name }} - run: pyright --warnings backend - Pylint: + run: bandit -n 1 --severity-level medium --recursive backend + Pyright: runs-on: ubuntu-latest steps: - name: Checkout ${{ github.repository }}/${{ github.ref }} uses: actions/checkout@v3 - - name: Set up Python 3.9 + - name: Set up Python ${{ env.python-version }} uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: ${{ env.python-version }} cache: "pip" + cache-dependency-path: "scripts/requirements*.txt" - name: Install dependencies run: | - python -m pip install --upgrade pip pip install wheel pip install -r "scripts/requirements.txt" - run: mv backend/configs.template.py backend/configs.py - name: Analysing the code with ${{ job.name }} - run: pylint --reports=y --output-format=colorized $(git ls-files 'backend/*.py') - Flake8: + uses: jakebailey/pyright-action@v1 + with: + working-directory: backend/ + extra-args: --warnings + Pylint: runs-on: ubuntu-latest steps: - name: Checkout ${{ github.repository }}/${{ github.ref }} uses: actions/checkout@v3 - - name: Set up Python 3.9 + - name: Set up Python ${{ env.python-version }} uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: ${{ env.python-version }} cache: "pip" + cache-dependency-path: "scripts/requirements*.txt" - name: Install dependencies run: | - python -m pip install --upgrade pip pip install wheel pip install -r "scripts/requirements.txt" - run: mv backend/configs.template.py backend/configs.py - name: Analysing the code with ${{ job.name }} - run: flake8 backend - Bandit: + run: pylint backend/ --reports=y --output-format=colorized + Flake8: runs-on: ubuntu-latest steps: - name: Checkout ${{ github.repository }}/${{ github.ref }} uses: actions/checkout@v3 - - name: Set up Python 3.9 + - name: Set up Python ${{ env.python-version }} uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: ${{ env.python-version }} cache: "pip" + cache-dependency-path: "scripts/requirements*.txt" - name: Install dependencies run: | - python -m pip install --upgrade pip pip install wheel pip install -r "scripts/requirements.txt" - run: mv backend/configs.template.py backend/configs.py - name: Analysing the code with ${{ job.name }} - run: bandit -n 1 --severity-level medium --recursive backend + run: flake8 backend/ typings/ diff --git a/.sonarcloud.properties b/.sonarcloud.properties new file mode 100644 index 00000000..0cb5a64d --- /dev/null +++ b/.sonarcloud.properties @@ -0,0 +1 @@ +sonar.python.version=3.10 diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 2d9bd42e..287e4f00 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -4,13 +4,17 @@ "bungcip.better-toml", "davidanson.vscode-markdownlint", "dbaeumer.vscode-eslint", + "dotenv.dotenv-vscode", "eamodio.gitlens", + "emeraldwalk.runonsave", "meganrogge.template-string-converter", - "mikestead.dotenv", + "ms-python.autopep8", "ms-python.flake8", - "ms-vscode.powershell", + "ms-python.isort", + "ms-python.pylint", "ms-python.python", "ms-python.vscode-pylance", + "ms-vscode.powershell", "ms-vscode.vscode-typescript-next", "pkief.material-icon-theme", "redhat.vscode-yaml", @@ -27,6 +31,8 @@ // Replaced by ESLint "eg2.tslint", "ms-vscode.vscode-typescript-tslint-plugin", + // Replaced by + "mikestead.dotenv", // Obsoleted by Pylance "ms-pyright.pyright", // Not configurable per 
workspace, tends to conflict with other linters diff --git a/.vscode/settings.json b/.vscode/settings.json index 782a230d..81288b11 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,4 +1,5 @@ { + "typescript.tsdk": "./tournament-scheduler/node_modules/typescript/lib", "editor.rulers": [ 80, 120 @@ -8,6 +9,11 @@ 72 ] }, + "files.insertFinalNewline": true, + "files.trimFinalNewlines": true, + "files.trimTrailingWhitespace": true, + "editor.comments.insertSpace": true, + "editor.insertSpaces": true, "editor.detectIndentation": false, "editor.tabSize": 2, "editor.formatOnSave": true, @@ -17,32 +23,33 @@ "source.fixAll.convertImportFormat": true, "source.organizeImports": false, }, - // Set the default formatter to help avoid Prettier - "[json]": { - "editor.defaultFormatter": "vscode.json-language-features", - }, - "[jsonc]": { - "editor.defaultFormatter": "vscode.json-language-features", + "trailing-spaces.includeEmptyLines": true, + "trailing-spaces.trimOnSave": true, + "trailing-spaces.syntaxIgnore": [ + "markdown" + ], + "emeraldwalk.runonsave": { + "commands": [ + { + "match": "\\.pyi?", + "cmd": "unify ${file} --in-place --quote=\"\\\"\"" + }, + { + "match": "\\.pyi?", + "cmd": "add-trailing-comma ${file} --py36-plus" + }, + ] }, - "[html]": { + // Set the default formatter to help avoid Prettier + "[json][jsonc][html]": { "editor.defaultFormatter": "vscode.html-language-features", }, - "[javascript]": { - "editor.defaultFormatter": "vscode.typescript-language-features", - }, - "[javascriptreact]": { - "editor.defaultFormatter": "vscode.typescript-language-features", - }, - "[typescript]": { - "editor.defaultFormatter": "vscode.typescript-language-features", - }, - "[typescriptreact]": { + "[javascript][javascriptreact][typescript][typescriptreact]": { "editor.defaultFormatter": "vscode.typescript-language-features", }, "javascript.preferences.quoteStyle": "single", "typescript.preferences.quoteStyle": "single", "html.format.wrapAttributes": "force-expand-multiline", - "typescript.tsdk": "./tournament-scheduler/node_modules/typescript/lib", "javascript.format.semicolons": "remove", "typescript.format.semicolons": "remove", "eslint.validate": [ @@ -60,18 +67,10 @@ "severity": "downgrade" } ], - "files.insertFinalNewline": true, - "trailing-spaces.includeEmptyLines": true, - "trailing-spaces.trimOnSave": true, - "trailing-spaces.syntaxIgnore": [ - "markdown" - ], "javascript.preferences.importModuleSpecifier": "non-relative", "typescript.preferences.importModuleSpecifier": "non-relative", "files.associations": { - "*.json": "json", - "extensions.json": "jsonc", - "settings.json": "jsonc", + ".flake8": "properties", ".eslintrc*.json": "jsonc", "tsconfig*.json": "jsonc", // "*.html": "jinja-html" @@ -85,7 +84,7 @@ "**/.DS_Store": true, "**/Thumbs.db": true, "build": true, - ".mypy_cache": true, + "**/.mypy_cache": true, "**/__pycache__/": true, "**/node_modules": false, "*.zip": true, @@ -99,27 +98,42 @@ "**/*.code-search": true, "*.lock": true, "package-lock.json": true, - "typings": true, }, "[python]": { + // Cannot use autopep8 until https://github.com/microsoft/vscode-autopep8/issues/32 is fixed + "editor.defaultFormatter": "ms-python.python", "editor.tabSize": 4, "editor.rulers": [ 72, // PEP8-17 docstrings // 79, // PEP8-17 default max // 88, // Black default - 99, // PEP8-17 acceptable max + // 99, // PEP8-17 acceptable max 120, // Our hard rule ], "editor.codeActionsOnSave": { "source.organizeImports": true, }, }, + // Important to follow the config in 
pyrightconfig.json + "python.analysis.useLibraryCodeForTypes": false, + "python.analysis.diagnosticMode": "workspace", + "python.formatting.provider": "autopep8", + "isort.check": true, + "isort.importStrategy": "fromEnvironment", "python.linting.enabled": true, - "python.linting.pylintEnabled": true, - "python.linting.pylintCategorySeverity.convention": "Warning", - "python.linting.pylintCategorySeverity.refactor": "Warning", + // Use the new Pylint extension instead + "python.linting.pylintEnabled": false, + "pylint.severity": { + "convention": "Warning", + "error": "Error", + "fatal": "Error", + "refactor": "Warning", + "warning": "Warning", + "info": "Information" + }, // Use the new Flake8 extension instead "python.linting.flake8Enabled": false, + // Partial codes don't work yet: https://github.com/microsoft/vscode-flake8/issues/7 "flake8.severity": { "convention": "Warning", "error": "Error", @@ -164,4 +178,5 @@ "powershell.codeFormatting.useCorrectCasing": true, "powershell.codeFormatting.whitespaceBetweenParameters": true, "powershell.integratedConsole.showOnStartup": false, + "terminal.integrated.defaultProfile.windows": "PowerShell", } diff --git a/backend/api/api_wrappers.py b/backend/api/api_wrappers.py index 908e1a04..5fb2161d 100644 --- a/backend/api/api_wrappers.py +++ b/backend/api/api_wrappers.py @@ -15,11 +15,11 @@ def _verify(*args, **kwargs): invalid_msg = { "message": "Invalid token. Authentification and / or authentication required", - "authenticated": False + "authenticated": False, } expired_msg = { "message": "Expired token. Reauthentication required.", - "authenticated": False + "authenticated": False, } if len(auth_headers) != 2: @@ -45,11 +45,14 @@ def _verify(*args, **kwargs): if not isinstance(response, tuple) or not isinstance(response[0], str): return response - extended_token: Union[bytes, str] = jwt.encode({ - "sub": data["sub"], - "iat": data["iat"], - "exp": datetime.utcnow() + timedelta(days=1)}, - current_app.config["SECRET_KEY"]) + extended_token: Union[bytes, str] = jwt.encode( + { + "sub": data["sub"], + "iat": data["iat"], + "exp": datetime.utcnow() + timedelta(days=1), + }, + current_app.config["SECRET_KEY"], + ) if isinstance(extended_token, bytes): extended_token = extended_token.decode() try: @@ -67,6 +70,7 @@ def _verify(*args, **kwargs): **response_content, "token": extended_token, }), - response[1]) + response[1], + ) return _verify diff --git a/backend/api/core_api.py b/backend/api/core_api.py index adedca54..7b652297 100644 --- a/backend/api/core_api.py +++ b/backend/api/core_api.py @@ -2,8 +2,9 @@ Provides the core API endpoints for consuming and producing REST requests and responses. Like login and user management. 
""" +from __future__ import annotations + from datetime import datetime, timedelta -from typing import Optional import configs import jwt @@ -17,7 +18,7 @@ @api.route("/login", methods=("POST",)) def login(): - data: Optional[JSONObjectType] = request.get_json() + data: JSONObjectType | None = request.get_json() try: api_key = data["speedruncomApiKey"] if data else "" if not isinstance(api_key, str): @@ -29,11 +30,14 @@ def login(): if not player: return jsonify({"message": error_message, "authenticated": False}), 401 - token: str = jwt.encode({ - "sub": player.user_id, - "iat": datetime.utcnow(), - "exp": datetime.utcnow() + timedelta(days=1)}, - current_app.config["SECRET_KEY"]) + token: str = jwt.encode( + { + "sub": player.user_id, + "iat": datetime.utcnow(), + "exp": datetime.utcnow() + timedelta(days=1), + }, + current_app.config["SECRET_KEY"], + ) # Note: https://github.com/jpadilla/pyjwt/issues/529 if isinstance(token, bytes): token = token.decode("UTF-8") @@ -42,7 +46,8 @@ def login(): "user": { "userId": player.user_id, "name": player.name, - }}) + }, + }) @api.route("/configs", methods=("GET",)) @@ -60,4 +65,5 @@ def get_user_current(current_user: Player): "user": { "userId": current_user.user_id, "name": current_user.name, - }}) + }, + }) diff --git a/backend/api/global_scoreboard_api.py b/backend/api/global_scoreboard_api.py index 79c80886..d97a29ed 100644 --- a/backend/api/global_scoreboard_api.py +++ b/backend/api/global_scoreboard_api.py @@ -5,14 +5,14 @@ from __future__ import annotations from datetime import datetime -from typing import Optional, cast +from typing import cast import configs from api.api_wrappers import authentication_required from flask import Blueprint, jsonify, request from markupsafe import escape from models.core_models import Player -from models.exceptions import UnderALotOfPressure, UnhandledThreadException, UserUpdaterError +from models.exceptions import UnderALotOfPressureError, UnhandledThreadException, UserUpdaterError from services.user_updater import get_updated_user from services.utils import map_to_dto from sqlalchemy import exc @@ -24,7 +24,7 @@ @api.route("/players", methods=("GET",)) def get_all_players(): - country_code_str: Optional[str] = request.args.get("region") + country_code_str: str | None = request.args.get("region") if country_code_str is None: return jsonify(map_to_dto(Player.get_all())) country_codes = list(set(country_code_str.split(","))) @@ -54,10 +54,10 @@ def update_player(name_or_id: str): return error_message, 500 -def __do_update_player_bypass_restrictions(name_or_id: str, current_user: Optional[Player] = None): +def __do_update_player_bypass_restrictions(name_or_id: str, current_user: Player | None = None): try: result = get_updated_user(name_or_id) - except UnderALotOfPressure: + except UnderALotOfPressureError: # Meme code for meme error return "", 418 finally: @@ -74,7 +74,7 @@ def __do_update_player_bypass_restrictions(name_or_id: str, current_user: Option @authentication_required def __do_update_player(current_user: Player, name_or_id: str): - now = datetime.now() + now = datetime.utcnow() minutes_5 = 5 * 60 # Check if the current user is already updating someone @@ -96,7 +96,7 @@ def __do_update_player(current_user: Player, name_or_id: str): @api.route("/players/current/friends", methods=("GET",)) @authentication_required def get_friends_current(current_user: Player): - field: Optional[str] = request.args.get("field") + field: str | None = request.args.get("field") friends_dto = 
map_to_dto(current_user.get_friends()) if field is None: diff --git a/backend/api/tournament_scheduler_api.py b/backend/api/tournament_scheduler_api.py index 21c4ecb6..8721cf00 100644 --- a/backend/api/tournament_scheduler_api.py +++ b/backend/api/tournament_scheduler_api.py @@ -2,7 +2,7 @@ Provides the API endpoints for consuming and producing REST requests and responses within the Tournament Scheduler context """ -from typing import Optional, Union +from __future__ import annotations from api.api_wrappers import authentication_required from flask import Blueprint, jsonify, request @@ -22,13 +22,13 @@ def get_all_schedules(current_user: Player): @api.route("/schedules/<schedule_id>", methods=("GET",)) -def get_schedule(schedule_id: Union[str, int]): +def get_schedule(schedule_id: str | int): try: schedule_id = int(schedule_id) except ValueError: return jsonify({"message": "/schedule_id is not a valid number", "authenticated": True}), 400 - registration_key: Optional[str] = request.args.get("registrationKey") + registration_key: str | None = request.args.get("registrationKey") schedule = Schedule.get(schedule_id) \ if registration_key is None \ @@ -43,7 +43,7 @@ @api.route("/schedules", methods=("POST",)) @authentication_required def post_schedule(current_user: Player): - data: Optional[JSONObjectType] = request.get_json() + data: JSONObjectType | None = request.get_json() error_message, name, is_active, deadline, time_slots, order = __validate_create_schedule(data) if error_message is not None: return jsonify({"message": error_message, "authenticated": True}), 400 @@ -53,13 +53,13 @@ @api.route("/schedules/<schedule_id>", methods=("PUT",)) @authentication_required -def put_schedule(current_user: Player, schedule_id: Union[str, int]): +def put_schedule(current_user: Player, schedule_id: str | int): try: schedule_id = int(schedule_id) except ValueError: return jsonify({"message": "/schedule_id is not a valid number", "authenticated": True}), 400 - data: Optional[JSONObjectType] = request.get_json() + data: JSONObjectType | None = request.get_json() error_message, name, is_active, deadline, time_slots, _ = __validate_create_schedule(data) if error_message is not None: return jsonify({"message": error_message, "authenticated": True}), 400 @@ -70,7 +70,7 @@ @api.route("/schedules/<schedule_id>", methods=("DELETE",)) @authentication_required -def delete_schedule(current_user: Player, schedule_id: Union[str, int]): +def delete_schedule(current_user: Player, schedule_id: str | int): try: schedule_id = int(schedule_id) except ValueError: @@ -83,7 +83,7 @@ @api.route("/schedules/order", methods=("PUT",)) @authentication_required def put_schedule_order(current_user: Player): - data: Optional[list[ScheduleOrderDict]] = request.get_json() + data: list[ScheduleOrderDict] | None = request.get_json() if not data: return "missing data", 400 @@ -97,7 +97,7 @@ @api.route("/schedules/<schedule_id>/group_id/<group_id>", methods=("PUT",)) @authentication_required -def put_schedule_group_id(current_user: Player, schedule_id: Union[str, int], group_id: Optional[Union[str, int]]): +def put_schedule_group_id(current_user: Player, schedule_id: str | int, group_id: str | int | None): try: schedule_id = int(schedule_id) except ValueError: @@ -120,7 +120,7 @@ def get_all_schedule_groups(current_user: 
Player): @api.route("/schedule_groups/<group_id>", methods=("GET",)) -def get_schedule_group(group_id: Union[str, int]): +def get_schedule_group(group_id: str | int): try: group_id = int(group_id) except ValueError: @@ -135,7 +135,7 @@ @api.route("/schedule_groups/<group_id>/schedules", methods=("GET",)) -def get_schedules_from_group(group_id: Union[str, int]): +def get_schedules_from_group(group_id: str | int): try: group_id = int(group_id) except ValueError: @@ -146,7 +146,7 @@ @api.route("/schedule_groups", methods=("POST",)) @authentication_required def post_schedule_group(current_user: Player): - data: Optional[JSONObjectType] = request.get_json() + data: JSONObjectType | None = request.get_json() error_message, name, order = __validate_create_schedule_group(data) if error_message is not None: return jsonify({"message": error_message, "authenticated": True}), 400 @@ -156,12 +156,12 @@ @api.route("/schedule_groups/<group_id>", methods=("PUT",)) @authentication_required -def put_schedule_group(current_user: Player, group_id: Union[str, int]): +def put_schedule_group(current_user: Player, group_id: str | int): try: group_id = int(group_id) except ValueError: return jsonify({"message": "/group_id is not a valid number", "authenticated": True}), 400 - data: Optional[JSONObjectType] = request.get_json() + data: JSONObjectType | None = request.get_json() error_message, name, order = __validate_create_schedule_group(data) if error_message is not None: return jsonify({"message": error_message, "authenticated": True}), 400 @@ -172,7 +172,7 @@ @api.route("/schedule_groups/<group_id>", methods=("DELETE",)) @authentication_required -def delete_schedule_group(current_user: Player, group_id: Union[str, int]): +def delete_schedule_group(current_user: Player, group_id: str | int): try: group_id = int(group_id) except ValueError: @@ -187,13 +187,13 @@ @api.route("/time-slots/<time_slot_id>/registrations", methods=("POST",)) -def post_registration(time_slot_id: Union[str, int]): +def post_registration(time_slot_id: str | int): try: registration_id = int(time_slot_id) except ValueError: return jsonify({"message": "/time_slot_id is not a valid number", "authenticated": True}), 400 - data: Optional[JSONObjectType] = request.get_json() + data: JSONObjectType | None = request.get_json() error_message, registration_key, participants = __validate_create_registration(data) if error_message is not None: return jsonify({"message": error_message, "authenticated": True}), 400 @@ -210,13 +210,13 @@ @api.route("/registrations/<registration_id>", methods=("PUT",)) @authentication_required -def put_registration(current_user: Player, registration_id: Union[str, int]): +def put_registration(current_user: Player, registration_id: str | int): try: registration_id = int(registration_id) except ValueError: return jsonify({"message": "/registration_id is not a valid number", "authenticated": True}), 400 - data: Optional[JSONObjectType] = request.get_json() + data: JSONObjectType | None = request.get_json() error_message, _, participants = __validate_create_registration(data, False) if error_message is not None: return jsonify({"message": error_message, "authenticated": True}), 400 @@ -227,7 +227,7 @@ def put_registration(current_user: Player, 
registration_id: Union[str, int]): @api.route("/registrations/", methods=("DELETE",)) @authentication_required -def delete_registration(current_user: Player, registration_id: Union[str, int]): +def delete_registration(current_user: Player, registration_id: str | int): try: registration_id = int(registration_id) except ValueError: @@ -241,7 +241,7 @@ def delete_registration(current_user: Player, registration_id: Union[str, int]): # region Validation -def __validate_create_registration(data: Optional[JSONObjectType], with_registration_key=True): +def __validate_create_registration(data: JSONObjectType | None, with_registration_key=True): registration_key = "" participants = [] if not data: @@ -262,7 +262,7 @@ def __validate_create_registration(data: Optional[JSONObjectType], with_registra return None, registration_key, participants -def __validate_create_schedule_group(data: Optional[JSONObjectType]): +def __validate_create_schedule_group(data: JSONObjectType | None): error_message = "" name = "" order = None @@ -279,10 +279,10 @@ def __validate_create_schedule_group(data: Optional[JSONObjectType]): error_message += "order has to be defined" except ValueError: error_message += "order has to be a number" - return None if not error_message else error_message, name, order + return error_message if error_message else None, name, order -def __validate_create_schedule(data: Optional[JSONObjectType]): +def __validate_create_schedule(data: JSONObjectType | None): error_message = "" name = "" is_active = False @@ -336,6 +336,6 @@ def __validate_create_schedule(data: Optional[JSONObjectType]): except KeyError: error_message += "timeSlots.participantsPerEntry has to be defined" - return None if not error_message else error_message, name, is_active, deadline, time_slots, order + return error_message if error_message else None, name, is_active, deadline, time_slots, order # endregion diff --git a/backend/flask_app.py b/backend/flask_app.py index 006e1ccd..c77f1902 100644 --- a/backend/flask_app.py +++ b/backend/flask_app.py @@ -44,18 +44,19 @@ app.register_blueprint(tournament_scheduler_api, url_prefix="/api") # Setup the dal (SQLAlchemy) -SQLALCHEMY_DATABASE_URI = "mysql+{connector}://{username}:{password}@{hostname}/{database_name}".format( +SQLALCHEMY_DATABASE_URI = "mysql+{connector}://{username}:{password}@{hostname}/{database_name}".format( # pylint: disable=C0209 # noqa: E501 connector=configs.sql_connector, username=configs.sql_username, password=configs.sql_password, hostname=configs.sql_hostname, - database_name=configs.sql_database_name) + database_name=configs.sql_database_name, +) app.config["SQLALCHEMY_DATABASE_URI"] = SQLALCHEMY_DATABASE_URI app.config["SQLALCHEMY_POOL_RECYCLE"] = 299 app.config["SQLALCHEMY_POOL_SIZE"] = 3 # PythonAnywhere allows 3 connections per webworker app.config["SQLALCHEMY_MAX_OVERFLOW"] = 0 app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = configs.sql_track_modifications -db.app = app # type: ignore # TODO: Raise issue upstream +db.app = app db.init_app(app) diff --git a/backend/models/core_models.py b/backend/models/core_models.py index bb248d04..36887370 100644 --- a/backend/models/core_models.py +++ b/backend/models/core_models.py @@ -4,7 +4,7 @@ import traceback import uuid from datetime import datetime -from typing import TYPE_CHECKING, Any, Optional, TypedDict, Union, cast, overload +from typing import TYPE_CHECKING, Any, TypedDict, Union, cast from flask_sqlalchemy import SQLAlchemy from models.exceptions import SpeedrunComError, UserUpdaterError @@ -32,23 
+32,25 @@ db.Column( "user_id", db.String(8), - db.ForeignKey("player.user_id")), + db.ForeignKey("player.user_id"), + ), db.Column( "friend_id", db.String(8), - db.ForeignKey("player.user_id")) + db.ForeignKey("player.user_id"), + ), ) class __TimeSlotsDict(TypedDict): - id: int + id: int # noqa: A003 dateTime: str # noqa: N815 maximumEntries: int # noqa: N815 participantsPerEntry: int # noqa: N815 class ScheduleOrderDict(TypedDict): - id: int + id: int # noqa: A003 isGroup: bool # noqa: N815 order: int @@ -63,37 +65,36 @@ class Player(BaseModel): score = db.Column(db.Integer, nullable=False) score_details = db.Column(db.String()) last_update = db.Column(db.DateTime()) - rank: Optional[int] = None + rank: int | None = None schedules = db.relationship("Schedule", back_populates="owner") - if TYPE_CHECKING: - @overload - def __init__( # type: ignore # pylint: disable=too-many-arguments + if TYPE_CHECKING: # noqa: CCE002 + def __init__( # pylint: disable=too-many-arguments self, user_id: str | Column[String], name: str | Column[String], - country_code: Optional[str | Column[String]], + country_code: str | Column[String] | None, score: int | float | Column[Integer], - last_update: Optional[str | Column[DateTime]], - score_details: Optional[str | Column[String]] = ..., - rank: Optional[int] = ... + last_update: str | Column[DateTime] | None, + score_details: str | Column[String] | None = ..., + rank: int | None = ..., ): ... @staticmethod - def authenticate(api_key: str) -> tuple[Optional[Player], Optional[str]]: + def authenticate(api_key: str) -> tuple[Player | None, str | None]: try: # Get user from speedrun.com using the API key src_profile: SrcProfileDto = get_file( "https://www.speedrun.com/api/v1/profile", - headers={"X-API-Key": api_key} + headers={"X-API-Key": api_key}, )["data"] except UserUpdaterError as exception: if isinstance(exception, SpeedrunComError) and exception.args[0]["error"].startswith("403"): return None, "Invalid SR.C API key" return None, f"Error: {exception.args[0]['error']}\n{exception.args[0]['details']}" except Exception: # pylint: disable=broad-except # Do catch unknown errors - print("\nError: Unknown\n{}".format(traceback.format_exc())) + print(f"\nError: Unknown\n{traceback.format_exc()}") return None, traceback.format_exc() user_id = src_profile["id"] @@ -125,14 +126,18 @@ def get_all(): + " FROM player, (SELECT @cur_rank := 1, @_sequence := 1, @_last_score := NULL) r " + " WHERE score > 0 " + " ORDER BY score DESC " - + ") ranked;") - return [Player( - user_id=player[0], - name=player[1], - country_code=player[2], - score=player[3], - last_update=player[4], - rank=player[5]) for player in db.engine.execute(sql).fetchall()] + + ") ranked;", + ) + return [ + Player( + user_id=player[0], + name=player[1], + country_code=player[2], + score=player[3], + last_update=player[4], + rank=player[5], + ) for player in db.engine.execute(sql).fetchall() + ] @staticmethod def get_by_country_code(country_codes: list[str]): @@ -144,37 +149,45 @@ def to_filtered_dto(player: Player): "lastUpdate": player.last_update, } - country_code_queries = [y for x in [( - Player.country_code == country_code, - # https://github.com/PyCQA/pylint/issues/3334#issuecomment-1036944735 - Player.country_code.like(f"{country_code}/%") # pylint: disable=no-member - ) for country_code in country_codes] - for y in x] - - return [to_filtered_dto(cast(Player, player)) - for player in Player.query.filter(or_(*country_code_queries)).all()] + country_code_queries = [ + y for x in [ + ( + 
Player.country_code == country_code, + # https://github.com/PyCQA/pylint/issues/3334#issuecomment-1036944735 + Player.country_code.like(f"{country_code}/%"), # pylint: disable=no-member + ) for country_code in country_codes + ] + for y in x + ] + + return [ + to_filtered_dto(cast(Player, player)) + for player in Player.query.filter(or_(*country_code_queries)).all() + ] @staticmethod def create( user_id: str, name: str, - country_code: Optional[str] = None, + country_code: str | None = None, score: int | float = 0, - score_details: Optional[str] = None, - last_update: Optional[str] = None) -> Player: + score_details: str | None = None, + last_update: str | None = None, + ) -> Player: player = Player( user_id=user_id, name=name, country_code=country_code, score=score, score_details=score_details, - last_update=last_update) + last_update=last_update, + ) db.session.add(player) db.session.commit() return player - def update(self, **kwargs: Union[Optional[str], float, datetime]): + def update(self, **kwargs: str | float | datetime | None): # noqa: CCE001 """ kwargs: - name: str @@ -198,41 +211,48 @@ def get_friends(self) -> list[Player]: sql = text( "SELECT f.friend_id, p.name, p.country_code, p.score, p.last_update FROM friend f " # nosec B608 + "JOIN player p ON p.user_id = f.friend_id " - + "WHERE f.user_id = :user_id;") - return [Player( - user_id=friend[0], - name=friend[1], - country_code=friend[2], - score=friend[3], - last_update=friend[4]) - for friend in db.engine.execute(sql, user_id=self.user_id).fetchall()] + + "WHERE f.user_id = :user_id;", + ) + return [ + Player( + user_id=friend[0], + name=friend[1], + country_code=friend[2], + score=friend[3], + last_update=friend[4], + ) + for friend in db.engine.execute(sql, user_id=self.user_id).fetchall() + ] def befriend(self, friend_id: str) -> bool: if self.user_id == friend_id: return False - sql = text("INSERT INTO friend (user_id, friend_id) " - + "VALUES (:user_id, :friend_id);") + sql = text( + "INSERT INTO friend (user_id, friend_id) " + + "VALUES (:user_id, :friend_id);", + ) return db.engine.execute(sql, user_id=self.user_id, friend_id=friend_id).rowcount > 0 def unfriend(self, friend_id: str) -> bool: sql = text( "DELETE FROM friend " # nosec B608 - + "WHERE user_id = :user_id AND friend_id = :friend_id") + + "WHERE user_id = :user_id AND friend_id = :friend_id", + ) return db.engine.execute(sql, user_id=self.user_id, friend_id=friend_id).rowcount > 0 def get_schedules(self): return cast( list[Schedule], - Schedule.query.filter(Schedule.owner_id == self.user_id).all() + Schedule.query.filter(Schedule.owner_id == self.user_id).all(), ) def create_schedule( self, name: str, is_active: bool, - deadline: Optional[str], + deadline: str | None, time_slots: list[__TimeSlotsDict], - order: Optional[int] + order: int | None, ): new_schedule = Schedule( name=name, @@ -240,16 +260,20 @@ def create_schedule( registration_key=str(uuid.uuid4()), is_active=is_active, deadline=None if deadline is None else datetime.strptime(deadline, DATETIME_FORMAT), - order=order) + order=order, + ) db.session.add(new_schedule) db.session.flush() - new_time_slots = [TimeSlot( - schedule_id=new_schedule.schedule_id, - date_time=datetime.strptime(time_slot["dateTime"], DATETIME_FORMAT), - maximum_entries=time_slot["maximumEntries"], - participants_per_entry=time_slot["participantsPerEntry"]) - for time_slot in time_slots] + new_time_slots = [ + TimeSlot( + schedule_id=new_schedule.schedule_id, + date_time=datetime.strptime(time_slot["dateTime"], 
DATETIME_FORMAT), + maximum_entries=time_slot["maximumEntries"], + participants_per_entry=time_slot["participantsPerEntry"], + ) + for time_slot in time_slots + ] db.session.bulk_save_objects(new_time_slots) db.session.commit() @@ -260,8 +284,8 @@ def update_schedule( schedule_id: int, name: str, is_active: bool, - deadline: Optional[str], - time_slots: list[__TimeSlotsDict] + deadline: str | None, + time_slots: list[__TimeSlotsDict], ) -> bool: try: schedule_to_update = cast( @@ -270,7 +294,7 @@ def update_schedule( .query .filter(Schedule.schedule_id == schedule_id) .filter(Schedule.owner_id == self.user_id) - .one() + .one(), ) except orm.exc.NoResultFound: return False @@ -309,7 +333,7 @@ def update_schedule( def update_schedule_group_id( self, schedule_id: int, - group_id: Optional[int] + group_id: int | None, ) -> bool: try: if group_id is not None: @@ -324,7 +348,7 @@ def update_schedule_group_id( .query .filter(Schedule.schedule_id == schedule_id) .filter(Schedule.owner_id == self.user_id) - .one() + .one(), ) except orm.exc.NoResultFound: return False @@ -357,7 +381,7 @@ def update_schedule_order(self, schedule_orders: list[ScheduleOrderDict]) -> boo .query .filter(table.owner_id == self.user_id) .filter(id_filter == schedule_order["id"]) - .one() + .one(), ) to_update.order = schedule_order["order"] except orm.exc.NoResultFound: @@ -369,10 +393,10 @@ def update_schedule_order(self, schedule_orders: list[ScheduleOrderDict]) -> boo def get_schedule_groups(self): return cast( list[ScheduleGroup], - ScheduleGroup.query.filter(ScheduleGroup.owner_id == self.user_id).all() + ScheduleGroup.query.filter(ScheduleGroup.owner_id == self.user_id).all(), ) - def create_schedule_group(self, name: str, order: Optional[int]): + def create_schedule_group(self, name: str, order: int | None): new_schedule_group = ScheduleGroup(name=name, order=order, owner_id=self.user_id) db.session.add(new_schedule_group) @@ -383,7 +407,7 @@ def update_schedule_group( self, group_id: int, name: str, - order: Optional[int], + order: int | None, ) -> bool: try: schedule_group_to_update = cast( @@ -392,7 +416,7 @@ def update_schedule_group( .query .filter(ScheduleGroup.group_id == group_id) .filter(ScheduleGroup.owner_id == self.user_id) - .one() + .one(), ) except orm.exc.NoResultFound: return False @@ -422,7 +446,7 @@ def update_registration(self, registration_id: int, participant_names: list[str] Registration .query .filter(Registration.registration_id == registration_id) - .one() + .one(), ) Schedule \ @@ -465,7 +489,7 @@ def delete_registration(self, registration_id: int) -> bool: Registration .query .filter(Registration.registration_id == registration_id) - .one() + .one(), ) Schedule \ diff --git a/backend/models/exceptions.py b/backend/models/exceptions.py index 167239ea..8c300479 100644 --- a/backend/models/exceptions.py +++ b/backend/models/exceptions.py @@ -2,18 +2,33 @@ class UserUpdaterError(Exception): - """ Usage: raise UserUpdaterError({"error":"On Status Label", "details":"Details of error"}) """ + """ + Usage: raise UserUpdaterError({ + "error":"On Status Label", + "details":"Details of error", + }) + """ args: tuple[dict[Literal["error", "details"], str], ...] 
class SpeedrunComError(UserUpdaterError): - """ Usage: raise NotFoundError({"error":"`HTTP_STATUS_CODE` (speedrun.com)", "details":"Details of error"}) """ + """ + Usage: raise SpeedrunComError({ + "error":"On Status Label", + "details":"Details of error", + }) + """ -class UnderALotOfPressure(SpeedrunComError): - """ Usage: raise NotFoundError({"error":"`HTTP_STATUS_CODE` (speedrun.com)", "details":"Details of error"}) """ +class UnderALotOfPressureError(SpeedrunComError): + """ + Usage: raise UnderALotOfPressureError({ + "error":"On Status Label", + "details":"Details of error", + }) + """ -class UnhandledThreadException(Exception): +class UnhandledThreadException(Exception): # noqa: N818 pass diff --git a/backend/models/game_search_models.py b/backend/models/game_search_models.py index c0110108..789646d8 100644 --- a/backend/models/game_search_models.py +++ b/backend/models/game_search_models.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional, cast, overload +from typing import TYPE_CHECKING, cast from models.core_models import BaseModel, db from sqlalchemy import Column, Integer, String, orm @@ -18,15 +18,14 @@ class GameValues(BaseModel): wr_points = db.Column(db.Integer, nullable=False) mean_time = db.Column(db.Integer, nullable=False) - if TYPE_CHECKING: - @overload - def __init__( # type: ignore # pylint: disable=too-many-arguments + if TYPE_CHECKING: # noqa: CCE002 + def __init__( # pylint: disable=too-many-arguments self, game_id: str | Column[String], category_id: str | Column[String], run_id: str | Column[String], - platform_id: Optional[str | Column[String]], - alternate_platforms: Optional[str | Column[String]], + platform_id: str | Column[String] | None, + alternate_platforms: str | Column[String] | None, wr_time: int | Column[Integer], wr_points: int | Column[Integer], mean_time: int | Column[Integer], @@ -37,12 +36,13 @@ def __init__( # type: ignore # pylint: disable=too-many-arguments def create_or_update( game_id: str, category_id: str, - platform_id: Optional[str], - alternate_platforms: Optional[str], + platform_id: str | None, + alternate_platforms: str | None, wr_time: int, wr_points: int, mean_time: int, - run_id: str): + run_id: str, + ): existing_game_values = GameValues.get(game_id, category_id) if existing_game_values is None: return GameValues.create( @@ -52,7 +52,8 @@ def create_or_update( alternate_platforms, wr_time, wr_points, - mean_time, run_id) + mean_time, run_id, + ) existing_game_values.platform_id = platform_id existing_game_values.alternate_platforms = alternate_platforms existing_game_values.wr_time = wr_time @@ -66,12 +67,13 @@ def create_or_update( def create( game_id: str, category_id: str, - platform_id: Optional[str], - alternate_platforms: Optional[str], + platform_id: str | None, + alternate_platforms: str | None, wr_time: int, wr_points: int, mean_time: int, - run_id: str) -> GameValues: + run_id: str, + ) -> GameValues: game_values = GameValues( game_id=game_id, category_id=category_id, @@ -80,7 +82,8 @@ def create( wr_time=wr_time, wr_points=wr_points, mean_time=mean_time, - run_id=run_id) + run_id=run_id, + ) db.session.add(game_values) db.session.commit() @@ -95,7 +98,7 @@ def get(game_id: str, category_id: str): .query .filter(GameValues.game_id == game_id) .filter(GameValues.category_id == category_id) - .one() + .one(), ) except orm.exc.NoResultFound: return None diff --git a/backend/models/global_scoreboard_models.py b/backend/models/global_scoreboard_models.py index 02b43b2e..1c238a46 
100644 --- a/backend/models/global_scoreboard_models.py +++ b/backend/models/global_scoreboard_models.py @@ -1,7 +1,7 @@ from __future__ import annotations from math import ceil -from typing import Optional, Union +from typing import Union from models.src_dto import SrcGameDto, SrcLevelDto, SrcProfileDto, SrcRunDto from services.utils import get_file, map_to_dto @@ -18,7 +18,7 @@ class Run: category: str category_name: str = "" variables = {} - level: Optional[SrcLevelDto] + level: SrcLevelDto | None level_fraction = 1.0 _points = 0.0 diminished_points = 0.0 @@ -29,9 +29,9 @@ class Run: def __init__( self, run_dto: SrcRunDto, - variables: Optional[dict[str, str]] = None, - level: Optional[SrcLevelDto] = None, - level_count: int = 0 + variables: dict[str, str] | None = None, + level: SrcLevelDto | None = None, + level_count: int = 0, ): self.id_ = run_dto["id"] self.primary_t = run_dto["times"]["primary_t"] @@ -45,17 +45,23 @@ def __init__( self.base_game_key = ROBLOX_SERIES_ID else: self.base_game_key = next( - (link["uri"].rstrip("/") - for link - in self.game["links"] - if link["rel"] == "base-game"), - "").split("/")[-1] + ( + link["uri"].rstrip("/") + for link + in self.game["links"] + if link["rel"] == "base-game" + ), + "", + ).split("/")[-1] series_id = next( - (link["uri"].rstrip("/") - for link - in self.game["links"] - if link["rel"] == "series"), - "").split("/")[-1] + ( + link["uri"].rstrip("/") + for link + in self.game["links"] + if link["rel"] == "series" + ), + "", + ).split("/")[-1] # If it is a derived-game/romhack, but base-game is not specified, use the series id instead # uri: "https://www.speedrun.com/api/v1/games/" if self.base_game_key == "games" and series_id and series_id != NO_SERIES_ID: @@ -63,6 +69,16 @@ def __init__( else: self.base_game_key = self.game["id"] + def to_dto(self) -> dict[str, str | float]: + return { + "gameName": self.game["names"]["international"], + "categoryName": self.category_name, + "levelName": self.level["name"] if self.level else "", + "points": self._points, + "diminishedPoints": self.diminished_points, + "levelFraction": self.level_fraction, + } + def __str__(self) -> str: level_str = f"Level/{str(self.level_fraction)[:4]}: {self.level['id']}, " if self.level else "" return f"Run: bool: """ :type other: Run """ - return not self == other + return self != other def __hash__(self): return hash((self.category, self.level and self.level["id"])) - def to_dto(self) -> dict[str, Union[str, float]]: - return { - "gameName": self.game["names"]["international"], - "categoryName": self.category_name, - "levelName": self.level["name"] if self.level else "", - "points": self._points, - "diminishedPoints": self.diminished_points, - "levelFraction": self.level_fraction - } - PointsDistributionDto = list[list[dict[str, Union[str, int, bool]]]] @@ -103,7 +109,7 @@ class User: _points: float = 0 _name: str = "" _id: str = "" - _country_code: Optional[str] = None + _country_code: str | None = None _banned: bool = False _points_distribution: list[list[Run]] = [[], []] @@ -117,7 +123,7 @@ def __str__(self) -> str: def get_points_distribution_dto(self) -> PointsDistributionDto: return [ map_to_dto(self._points_distribution[0]), - map_to_dto(self._points_distribution[1]) + map_to_dto(self._points_distribution[1]), ] def fetch_and_set_user_code_and_name(self) -> None: diff --git a/backend/models/src_dto.py b/backend/models/src_dto.py index da2e90d4..c123301d 100644 --- a/backend/models/src_dto.py +++ b/backend/models/src_dto.py @@ -1,6 +1,6 @@ from 
__future__ import annotations -from typing import Any, Literal, Optional, TypedDict +from typing import Any, Literal, TypedDict SrcDataResultDto = dict[Literal["data"], Any] SrcPaginatedDataResultDto = dict[Literal["data"], list] @@ -13,30 +13,30 @@ class SrcPaginationResultDto(TypedDict): class __PaginationData(TypedDict): offset: int - max: int + max: int # noqa: A003 size: int links: list[__RelUriData] class SrcProfileDto(TypedDict): - id: str + id: str # noqa: A003 names: __NamesData pronouns: str weblink: str role: str signup: str location: __LocationData - twitch: Optional[__UriData] - hitbox: Optional[__UriData] - youtube: Optional[__UriData] - twitter: Optional[__UriData] - speedrunslive: Optional[__UriData] + twitch: __UriData | None + hitbox: __UriData | None + youtube: __UriData | None + twitter: __UriData | None + speedrunslive: __UriData | None assets: _AssetsData links: list[__RelUriData] class SrcRunDto(TypedDict): - id: str + id: str # noqa: A003 weblink: str game: dict[Literal["data"], SrcGameDto] # game: str # when not embedding @@ -47,7 +47,7 @@ class SrcRunDto(TypedDict): # # To simplify typings we assume truthyness # SrcLevelDto | None # ] - level: Optional[str] + level: str | None category: str videos: dict[Literal["links"], list[__UriData]] comment: str @@ -62,7 +62,7 @@ class SrcRunDto(TypedDict): class SrcGameDto(TypedDict): - id: str + id: str # noqa: A003 names: __NamesData abbreviation: str weblink: str @@ -81,17 +81,17 @@ class SrcGameDto(TypedDict): created: str assets: dict[str, __UriData] links: list[__RelUriData] - variables: dict[Literal["data"], list[dict[str, Optional[str]]]] + variables: dict[Literal["data"], list[dict[str, str | None]]] class SrcLeaderboardDto(TypedDict): weblink: str game: str category: str - level: Optional[str] - platform: Optional[str] - region: Optional[str] - emulators: Optional[str] + level: str | None + platform: str | None + region: str | None + emulators: str | None timing: str value: dict runs: list[__LeaderboardRunData] @@ -100,7 +100,7 @@ class SrcLeaderboardDto(TypedDict): class SrcLevelDto(TypedDict): - id: str + id: str # noqa: A003 name: str weblink: str rules: str @@ -119,7 +119,7 @@ class __LeaderboardRunData(TypedDict): class __SystemData(TypedDict): - platform: Optional[str] + platform: str | None emulated: bool region: str @@ -129,15 +129,15 @@ class __TimesData(TypedDict): primary_t: float realtime: str realtime_t: float - realtime_noloads: Optional[str] + realtime_noloads: str | None realtime_noloads_t: float - ingame: Optional[str] + ingame: str | None ingame_t: float class __StatusData(TypedDict): status: str - examiner: Optional[str] + examiner: str | None class __LocationData(TypedDict): @@ -167,4 +167,4 @@ class __RelUriData(__UriData): class __PlayersData(__RelUriData): - id: str + id: str # noqa: A003 diff --git a/backend/models/tournament_scheduler_models.py b/backend/models/tournament_scheduler_models.py index 9539fa31..5df82291 100644 --- a/backend/models/tournament_scheduler_models.py +++ b/backend/models/tournament_scheduler_models.py @@ -1,7 +1,7 @@ from __future__ import annotations from datetime import datetime -from typing import TYPE_CHECKING, Optional, cast, overload +from typing import TYPE_CHECKING, Optional, cast from models.core_models import BaseModel, Player, db from services.utils import map_to_dto @@ -24,33 +24,33 @@ class Schedule(BaseModel): time_slots: list[TimeSlot] = db.relationship( "TimeSlot", cascade=CASCADE, - back_populates="schedule") + back_populates="schedule", + ) - 
group_id: Optional[int | Column[Integer]] = db.Column(db.Integer, nullable=True) - order: Optional[int | Column[Integer]] = db.Column(db.Integer, nullable=False, default=-1) + group_id: int | Column[Integer] | None = db.Column(db.Integer, nullable=True) + order: int | Column[Integer] | None = db.Column(db.Integer, nullable=False, default=-1) - if TYPE_CHECKING: - @overload - def __init__( # type: ignore # pylint: disable=too-many-arguments + if TYPE_CHECKING: # noqa: CCE002 + def __init__( # pylint: disable=too-many-arguments self, registration_key: str | Column[String], schedule_id: int | Column[Integer] = ..., owner_id: str | Column[String] = ..., owner: Player = ..., time_slots: list[TimeSlot] = ..., - group_id: Optional[int | Column[Integer]] = ..., - name: Optional[str | Column[String]] = ..., + group_id: int | Column[Integer] | None = ..., + name: str | Column[String] | None = ..., is_active: Optional[bool | Column[Boolean]] = ..., deadline: Optional[datetime | Column[DateTime]] = ..., - order: Optional[int | Column[Integer]] = ..., + order: int | Column[Integer] | None = ..., ): ... - @staticmethod + @ staticmethod def get(schedule_id: int): return cast(Optional[Schedule], Schedule.query.get(schedule_id)) - @staticmethod + @ staticmethod def get_with_key(schedule_id: int, registration_key: str): try: return cast( @@ -59,7 +59,7 @@ def get_with_key(schedule_id: int, registration_key: str): .query .filter(Schedule.schedule_id == schedule_id) .filter(Schedule.registration_key == registration_key) - .one() + .one(), ) except orm.exc.NoResultFound: return None @@ -83,24 +83,23 @@ class ScheduleGroup(BaseModel): group_id = db.Column(db.Integer, primary_key=True) name: str | Column[String] = db.Column(db.String(128), nullable=False, default="") owner_id = db.Column(db.String(8), db.ForeignKey("player.user_id"), nullable=False) - order: Optional[int | Column[Integer]] = db.Column(db.Integer, nullable=False, default=-1) + order: int | Column[Integer] | None = db.Column(db.Integer, nullable=False, default=-1) - if TYPE_CHECKING: - @overload - def __init__( # type: ignore # pylint: disable=too-many-arguments + if TYPE_CHECKING: # noqa: CCE002 + def __init__( # pylint: disable=too-many-arguments self, group_id: int | Column[Integer] = ..., name: str | Column[String] = ..., - order: Optional[int | Column[Integer]] = ..., + order: int | Column[Integer] | None = ..., owner_id: str | Column[String] = ..., ): ... - @staticmethod + @ staticmethod def get(group_id: int): return cast(Optional[ScheduleGroup], ScheduleGroup.query.get(group_id)) - @staticmethod + @ staticmethod def get_schedules(group_id: int): try: return cast(list[Schedule], Schedule.query.filter(Schedule.group_id == group_id).all()) @@ -128,11 +127,11 @@ class TimeSlot(BaseModel): registrations: list[Registration] = db.relationship( "Registration", cascade=CASCADE, - back_populates="timeslot") + back_populates="timeslot", + ) - if TYPE_CHECKING: - @overload - def __init__( # type: ignore # pylint: disable=too-many-arguments + if TYPE_CHECKING: # noqa: CCE002 + def __init__( # pylint: disable=too-many-arguments self, date_time: datetime | Column[DateTime], maximum_entries: int | Column[Integer], @@ -144,7 +143,7 @@ def __init__( # type: ignore # pylint: disable=too-many-arguments ): ... 
- @staticmethod + @ staticmethod def get_with_key(time_slot_id: int, registration_key: str) -> Optional[TimeSlot]: try: parent_schedule = cast( @@ -152,7 +151,7 @@ def get_with_key(time_slot_id: int, registration_key: str) -> Optional[TimeSlot] Schedule .query .filter(Schedule.registration_key == registration_key) - .one() + .one(), ) return cast( TimeSlot, @@ -160,7 +159,7 @@ def get_with_key(time_slot_id: int, registration_key: str) -> Optional[TimeSlot] .query .filter(TimeSlot.time_slot_id == time_slot_id) .filter(TimeSlot.schedule_id == parent_schedule.schedule_id) - .one() + .one(), ) except orm.exc.NoResultFound: return None @@ -170,10 +169,13 @@ def register_participant(self, participant_names: list[str]): db.session.add(new_registration) db.session.flush() - new_participants = [Participant( - registration_id=new_registration.registration_id, - name=participant_name) - for participant_name in participant_names] + new_participants = [ + Participant( + registration_id=new_registration.registration_id, + name=participant_name, + ) + for participant_name in participant_names + ] db.session.bulk_save_objects(new_participants) @@ -186,7 +188,7 @@ def to_dto(self): "dateTime": self.date_time, "maximumEntries": self.maximum_entries, "participantsPerEntry": self.participants_per_entry, - "registrations": map_to_dto(self.registrations) + "registrations": map_to_dto(self.registrations), } @@ -200,11 +202,11 @@ class Registration(BaseModel): participants: list[Participant] = db.relationship( "Participant", cascade=CASCADE, - back_populates="registration") + back_populates="registration", + ) - if TYPE_CHECKING: - @overload - def __init__( # type: ignore # pylint: disable=too-many-arguments + if TYPE_CHECKING: # noqa: CCE002 + def __init__( # pylint: disable=too-many-arguments self, time_slot_id: int | Column[Integer], registration_id: int | Column[Integer] = ..., @@ -216,7 +218,7 @@ def __init__( # type: ignore # pylint: disable=too-many-arguments def to_dto(self): return { "id": self.registration_id, - "participants": [participant.name for participant in self.participants] + "participants": [participant.name for participant in self.participants], } @@ -224,14 +226,14 @@ class Participant(BaseModel): __tablename__ = "participant" registration_id: int | Column[Integer] = db.Column( - db.Integer, db.ForeignKey("registration.registration_id"), primary_key=True) + db.Integer, db.ForeignKey("registration.registration_id"), primary_key=True, + ) name: str | Column[String] = db.Column(db.String(128), primary_key=True) registration: Registration = db.relationship("Registration", back_populates="participants") - if TYPE_CHECKING: - @overload - def __init__( # type: ignore # pylint: disable=too-many-arguments + if TYPE_CHECKING: # noqa: CCE002 + def __init__( # pylint: disable=too-many-arguments self, registration_id: int | Column[Integer] = ..., name: str | Column[String] = ..., diff --git a/backend/services/cached_requests.py b/backend/services/cached_requests.py index 8548d680..8fffb7ce 100644 --- a/backend/services/cached_requests.py +++ b/backend/services/cached_requests.py @@ -1,3 +1,4 @@ +import sys from datetime import timedelta from typing import Literal, Union, cast @@ -51,7 +52,8 @@ def __make_cache_session(user_id: str = "http_cache"): # SQLite specific parameters fast_save=True, wal=True, - use_temp=True) + use_temp=True, + ) session.mount("https://", __adapter) return session @@ -70,7 +72,7 @@ def use_session(user_id: Union[str, Literal[False]] = "http_cache"): __REDIS = None -if 
configs.cached_session_backend == "redis": +if sys.platform != "win32" and configs.cached_session_backend == "redis": import redislite # pyright: ignore # pylint: disable=import-error __REDIS = redislite.Redis( dbfilename="/tmp/redis-requests-cache.db", # nosec B108 diff --git a/backend/services/user_updater.py b/backend/services/user_updater.py index f279d101..4af8e848 100644 --- a/backend/services/user_updater.py +++ b/backend/services/user_updater.py @@ -15,11 +15,11 @@ from models.global_scoreboard_models import PointsDistributionDto, Run, User from models.src_dto import SrcLeaderboardDto, SrcLevelDto, SrcRunDto from services.cached_requests import clear_cache_for_user -from services.user_updater_helpers import (MIN_LEADERBOARD_SIZE, extract_sorted_valid_runs_from_leaderboard, - extract_top_runs_and_score, extract_valid_personal_bests, - get_probability_terms, get_subcategory_variables, - keep_runs_before_soft_cutoff, set_diminishing_returns, - update_runner_in_database) +from services.user_updater_helpers import ( + MIN_LEADERBOARD_SIZE, extract_sorted_valid_runs_from_leaderboard, extract_top_runs_and_score, + extract_valid_personal_bests, get_probability_terms, get_subcategory_variables, keep_runs_before_soft_cutoff, + set_diminishing_returns, update_runner_in_database, +) from services.utils import MAXIMUM_RESULTS_PER_PAGE, get_file, get_paginated_response, start_and_wait_for_threads TIME_BONUS_DIVISOR = 3600 * 12 # 12h (1/2 day) for +100% @@ -42,16 +42,20 @@ def get_updated_user(user_id: str) -> dict[str, Union[str, None, float, int, Poi # ID doesn't exists on speedrun.com but it does in the database, remove it player = Player.get(user._name) if player: - text_output = (f"User ID '{user._id}' not found on speedrun.com. " - "\nRemoved them from the database.") + text_output = ( + f"User ID '{user._id}' not found on speedrun.com. " + "\nRemoved them from the database." + ) result_state = "warning" db.session.delete(player) db.session.commit() else: - text_output = (f"User '{user._id}' not found. " - "\nMake sure the name or ID is typed properly. " - "It's possible the user you're looking for changed their name. " - "In case of doubt, use their ID.") + text_output = ( + f"User '{user._id}' not found. " + "\nMake sure the name or ID is typed properly. " + "It's possible the user you're looking for changed their name. " + "In case of doubt, use their ID." 
+ ) result_state = "warning" else: # Setup a few checks @@ -61,7 +65,7 @@ def get_updated_user(user_id: str) -> dict[str, Union[str, None, float, int, Poi if ( not player or not player.last_update - or (datetime.now() - player.last_update).days >= configs.last_updated_days[0] + or (datetime.utcnow() - player.last_update).days >= configs.last_updated_days[0] or configs.bypass_update_restrictions ): __set_user_points(user) @@ -88,20 +92,27 @@ def get_updated_user(user_id: str) -> dict[str, Union[str, None, float, int, Poi } except ServerNotFoundError as exception: - raise UserUpdaterError({ - "error": "Server not found", - "details": f"{exception}\nPlease make sure you have an active internet connection"} + raise UserUpdaterError( + { + "error": "Server not found", + "details": f"{exception}\nPlease make sure you have an active internet connection", + }, ) from exception except (requests.exceptions.ChunkedEncodingError, ConnectionAbortedError) as exception: - raise UserUpdaterError({ - "error": "Connexion interrupted", - "details": exception} + raise UserUpdaterError( + { + "error": "Connexion interrupted", + "details": exception, + }, ) from exception except OperationalError as exception: - raise UserUpdaterError({ - "error": f"{type(exception).__name__}: {exception}", - "details": "There was an issue clearing the requests cache. It is probably stuck (known issue currently). " - + "Your request still completed, but a site administrator needs to go restart the cache."} + raise UserUpdaterError( + { + "error": f"{type(exception).__name__}: {exception}", + "details": "There was an issue clearing the requests cache. " + + "It is probably stuck (known issue currently). " + + "Your request still completed, but a site administrator needs to go restart the cache.", + }, ) from exception @@ -114,7 +125,9 @@ def set_points_thread(pb: SrcRunDto) -> None: level = None if pb["level"]: - url = "https://www.speedrun.com/api/v1/games/{game}/levels".format(game=pb["game"]["data"]["id"]) + url = "https://www.speedrun.com/api/v1/games/{game}/levels".format( # pylint: disable=C0209 + game=pb["game"]["data"]["id"], + ) levels: list[SrcLevelDto] = get_file(url, {"max": str(MAXIMUM_RESULTS_PER_PAGE)}, "http_cache")["data"] level = next(level for level in levels if level["id"] == pb["level"]) level_count = len(levels) @@ -178,17 +191,22 @@ def set_points_thread(pb: SrcRunDto) -> None: def __set_run_points_and_category_name(run: Run) -> None: - url = "https://www.speedrun.com/api/v1/leaderboards/{game}/{lvl_cat_str}{category}".format( + url = "https://www.speedrun.com/api/v1/leaderboards/{game}/{lvl_cat_str}{category}".format( # pylint: disable=C0209 game=run.game["id"], # If the run is an Individual Level, adapt the request url - lvl_cat_str="level/{level}/".format(level=run.level["id"]) if run.level else "category/", - category=run.category) + lvl_cat_str="level/{level}/".format( # pylint: disable=C0209 + level=run.level["id"], + ) if run.level else "category/", # pylint: disable=C0209 + category=run.category, + ) params = { "video-only": True, "embed": "players", - **{f"var-{var_id}": var_value - for var_id, var_value - in run.variables.items()}, + **{ + f"var-{var_id}": var_value + for var_id, var_value + in run.variables.items() + }, } try: leaderboard: SrcLeaderboardDto = get_file(url, params, "http_cache")["data"] @@ -256,7 +274,7 @@ def __set_run_points_and_category_name(run: Run) -> None: unquote(leaderboard["weblink"].split("#")[1]) .replace("_", " ") .title() - .replace("Ng1", "Ng+") + .replace("Ng1", 
"Ng+"), ) # Set game search data diff --git a/backend/services/user_updater_helpers.py b/backend/services/user_updater_helpers.py index dd907804..02709f13 100644 --- a/backend/services/user_updater_helpers.py +++ b/backend/services/user_updater_helpers.py @@ -49,7 +49,8 @@ def keep_if_pb(run: SrcRunDto): def extract_sorted_valid_runs_from_leaderboard( leaderboard: SrcLeaderboardDto, - level_fraction: float) -> list[BasicJSONType]: + level_fraction: float, +) -> list[BasicJSONType]: """ Check if the run is valid: - none of the players are banned @@ -74,8 +75,10 @@ def extract_sorted_valid_runs_from_leaderboard( is_board_known_speedrun = False # Get a list of all banned players in this leaderboard - banned_players = [p["id"] for p in leaderboard["players"]["data"] - if p.get("role") == "banned"] + banned_players = [ + p["id"] for p in leaderboard["players"]["data"] + if p.get("role") == "banned" + ] valid_runs = [] for run in leaderboard["runs"]: @@ -259,11 +262,13 @@ def update_runner_in_database(player: Player, user: User): else: text_output = f"{user} found. Updated their entry." result_state = "success" - player.update(name=user._name, - country_code=user._country_code, - score=floor(user._points), - score_details=json.dumps(user.get_points_distribution_dto()), - last_update=timestamp) + player.update( + name=user._name, + country_code=user._country_code, + score=floor(user._points), + score_details=json.dumps(user.get_points_distribution_dto()), + last_update=timestamp, + ) # User is banned: remove the database entry else: result_state = "warning" @@ -273,12 +278,14 @@ def update_runner_in_database(player: Player, user: User): elif user._points >= 1: text_output = f"{user} not found. Added a new row." result_state = "success" - Player.create(user._id, - name=user._name, - country_code=user._country_code, - score=user._points, - score_details=json.dumps(user.get_points_distribution_dto()), - last_update=timestamp) + Player.create( + user._id, + name=user._name, + country_code=user._country_code, + score=user._points, + score_details=json.dumps(user.get_points_distribution_dto()), + last_update=timestamp, + ) else: text_output = f"Not inserting new data as {user} " + \ f"{'is banned' if user._banned else 'has a score lower than 1'}." diff --git a/backend/services/utils.py b/backend/services/utils.py index e00d3925..30ffe461 100644 --- a/backend/services/utils.py +++ b/backend/services/utils.py @@ -13,7 +13,7 @@ from urllib.parse import parse_qs, urlparse import configs -from models.exceptions import SpeedrunComError, UnderALotOfPressure, UnhandledThreadException, UserUpdaterError +from models.exceptions import SpeedrunComError, UnderALotOfPressureError, UnhandledThreadException, UserUpdaterError from models.src_dto import SrcDataResultDto, SrcErrorResultDto, SrcPaginatedDataResultDto, SrcPaginationResultDto from ratelimiter import RateLimiter from requests import Response @@ -48,20 +48,20 @@ def __handle_json_error(response: Response, json_exception: ValueError): sleep(HTTP_ERROR_RETRY_DELAY_MIN) # No break or raise as we want to retry elif response.status_code == 503: - raise UnderALotOfPressure({ + raise UnderALotOfPressureError({ "error": f"{response.status_code} (speedrun.com)", - "details": exception.args[0] + "details": exception.args[0], }) from exception else: raise UserUpdaterError({ "error": f"HTTPError {response.status_code}", - "details": exception.args[0] + "details": exception.args[0], }) from exception else: # ... 
we don't know why (elevate the exception) print(f"ERROR/WARNING: response=({type(response)})'{response}'\n") raise UserUpdaterError({ "error": "JSONDecodeError", - "details": f"{json_exception.args[0]} in:\n{response}" + "details": f"{json_exception.args[0]} in:\n{response}", }) from json_exception @@ -76,8 +76,10 @@ def __handle_json_data(json_data: SrcErrorResultDto, response_status_code: int) retry_delay = randint(HTTP_ERROR_RETRY_DELAY_MIN, HTTP_ERROR_RETRY_DELAY_MAX) # nosec B311 if status == 420: if "too busy" in message: - raise UnderALotOfPressure({"error": f"{response_status_code} (speedrun.com)", - "details": message}) + raise UnderALotOfPressureError({ + "error": f"{response_status_code} (speedrun.com)", + "details": message, + }) print(f"Rate limit value: {len(rate_limiter.calls)}/{RATE_LIMIT}") print(f"WARNING: {status}. {message} Retrying in {retry_delay} seconds.") sleep(retry_delay) @@ -91,7 +93,7 @@ def __get_request_cache_bust_if_disk_quota_exceeded( url: str, params: Optional[_Params], cached: Union[str, Literal[False]], - headers: Optional[dict[str, Any]] + headers: Optional[dict[str, Any]], ): try: response = use_session(cached).get(url, params=params, headers=headers) @@ -125,7 +127,7 @@ def get_file( url: str, params: Optional[_Params] = None, cached: Union[str, Literal[False]] = False, - headers: Optional[dict[str, Any]] = None + headers: Optional[dict[str, Any]] = None, ) -> SrcDataResultDto: """ Returns the content of "url" parsed as JSON dict. @@ -196,8 +198,10 @@ def update_next_params(new_results_per_page: Optional[int] = None, take_next: bo # If it succeeds, slowly raise back up the page size if results_per_page < initial_results_per_page: increased_results_per_page = min(initial_results_per_page, ceil(results_per_page * 1.5)) - print("Reduced request successfull. Increasing the max results per page " - + f"from {results_per_page} back to {increased_results_per_page}") + print( + "Reduced request successful. Increasing the max results per page " + + f"from {results_per_page} back to {increased_results_per_page}", + ) update_next_params(increased_results_per_page) else: update_next_params() @@ -212,8 +216,10 @@ def update_next_params(new_results_per_page: Optional[int] = None, take_next: bo if exception.args[0]["error"] != "HTTPError 500" or results_per_page <= MINIMUM_RESULTS_PER_PAGE: raise reduced_results_per_page = max(floor(results_per_page / 2.5), MINIMUM_RESULTS_PER_PAGE) - print("SR.C returned 500 for a paginated request. Reducing the max results per page " - + f"from {results_per_page} to {reduced_results_per_page}") + print( + "SR.C returned 500 for a paginated request. Reducing the max results per page " + + f"from {results_per_page} to {reduced_results_per_page}", + ) update_next_params(reduced_results_per_page, False) # ... 
and combine it with previous ones @@ -222,11 +228,11 @@ def update_next_params(new_results_per_page: Optional[int] = None, take_next: bo return summed_results -def parse_str_to_bool(string_to_parse: Optional[str]) -> bool: +def parse_str_to_bool(string_to_parse: str | None) -> bool: return string_to_parse is not None and string_to_parse.lower() == "true" -def parse_str_to_nullable_bool(string_to_parse: Optional[str]) -> Optional[bool]: +def parse_str_to_nullable_bool(string_to_parse: str | None) -> Optional[bool]: return None if string_to_parse is None else string_to_parse.lower() == "true" @@ -248,24 +254,25 @@ def start_and_wait_for_threads(fn: Callable, items: list): print( f"RuntimeError: Can't start {len(executor._threads) + 1}th thread. " + f"Waiting {HTTP_ERROR_RETRY_DELAY_MAX}s before trying again. " - + "Consider reducing MAX_THREADS_PER_WORKER") + + "Consider reducing MAX_THREADS_PER_WORKER", + ) sleep(HTTP_ERROR_RETRY_DELAY_MAX) try: for future in futures: future.result() - except UnderALotOfPressure: + except UnderALotOfPressureError: raise except UserUpdaterError as exception: raise UnhandledThreadException( exception.args[0]["error"] + UNHANDLED_THREAD_EXCEPTION_MESSAGE - + str(exception.args[0]["details"]) + + str(exception.args[0]["details"]), ) from exception except Exception as exception: raise UnhandledThreadException( "Unhandled exception in thread" + UNHANDLED_THREAD_EXCEPTION_MESSAGE - + traceback.format_exc() + + traceback.format_exc(), ) from exception @@ -276,7 +283,4 @@ def get_duplicates(array: list): def has_duplicates(array: list): counter = Counter(array) - for key in counter: - if counter[key] > 1: - return True - return False + return any(counter[key] > 1 for key in counter) diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..8301096a --- /dev/null +++ b/mypy.ini @@ -0,0 +1,16 @@ +; We don't run mypy in the CI. This is just to help anyone who would like to use it manually. +; Namely, the mypy_primer tool. +[mypy] +; TODO: Fix everything and set to strict +strict=false +check_untyped_defs=true +explicit_package_bases=true +; Implicit return types ! +disallow_untyped_calls=false +disallow_untyped_defs=false +disallow_incomplete_defs=false + +; Of course my stubs are going to be incomplete. Otherwise they'd be on typeshed! +; Mypy becomes really whack with its errors inside these stubs though +mypy_path=typings,backend +exclude=.*typings/.* diff --git a/pyproject.toml b/pyproject.toml index aa0287c0..005c7510 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,6 +4,10 @@ max_line_length = 120 recursive = true aggressive = 3 +ignore = [ + "E124", # Closing bracket may not match multi-line method invocation style (enforced by add-trailing-comma) + "E70" # Allow ... 
on same line as def +] # https://github.com/microsoft/pyright/blob/main/docs/configuration.md#sample-pyprojecttoml-file [tool.pyright] @@ -16,8 +20,10 @@ reportMissingSuperCall="none" # False positives on base classes reportPropertyTypeMismatch="error" reportUninitializedInstanceVariable="error" reportUnnecessaryTypeIgnoreComment="error" -exclude = [ - "typings/", +# Ignore must be specified for Pylance to stop displaying errors +ignore = [ + # We expect stub files to be incomplete or contain useless statements + "**/*.pyi", ] reportUnusedCallResult="none" # Type stubs may not be completable @@ -28,29 +34,30 @@ reportImportCycles = "information" reportUnnecessaryComparison = "warning" # Flake8 does a better job reportUnusedImport = "none" -# numpy has way too many complex types that triggers this -reportUnknownMemberType = "none" # https://github.com/PyCQA/pylint/blob/main/examples/pylintrc # https://pylint.pycqa.org/en/latest/technical_reference/features.html [tool.pylint.REPORTS] -# Just like default but any error will make drop to 9 or less. warning and conventions are worth more -evaluation = "10.0 - error - ((float((warning + convention) * 10 + refactor ) / statement) * 10)" +# Just like the default, but any single error, warning or convention drops the score to 9 or less; refactors are worth more +evaluation = "10.0 - error - warning - convention - ((10 * refactor) / statement) * 10" [tool.pylint.MASTER] fail-under = 9.0 -# Needed for Pylint to discover our own modules -init-hook = "import os, sys; sys.path.append(os.path.dirname('.'))" # https://pylint.pycqa.org/en/latest/technical_reference/extensions.html load-plugins = [ - "pylint.extensions.emptystring", + "pylint.extensions.bad_builtin", + "pylint.extensions.check_elif", + "pylint.extensions.comparison_placement", "pylint.extensions.confusing_elif", "pylint.extensions.consider_ternary_expression", - "pylint.extensions.bad_builtin", + "pylint.extensions.empty_comment", + "pylint.extensions.emptystring", + "pylint.extensions.for_any_all", + "pylint.extensions.eq_without_hash", "pylint.extensions.mccabe", - "pylint.extensions.check_elif", - "pylint.extensions.redefined_variable_type", "pylint.extensions.overlapping_exceptions", - "pylint.extensions.empty_comment", + "pylint.extensions.private_import", + # "pylint.extensions.redefined_loop_name", # 2.16 + "pylint.extensions.redefined_variable_type", "pylint.extensions.set_membership", "pylint.extensions.typing", # External plugins @@ -64,13 +71,10 @@ load-plugins = [ # "pylint.extensions.comparetozero", # "pylint.extensions.docstyle", # "pylint.extensions.while_used", - # Didn't work - # "pylint.extensions.comparison_placement", - # "pylint.extensions.for_any_all", ] ignore-paths = [ # We expect stub files to be incomplete or contain useless statements - "^.*\\.pyi$", + "^.*.pyi$", ] # Dynamic/Generated members from SQLAlchemy ignored-classes = ["scoped_session"] @@ -90,35 +94,30 @@ max-locals = 15 max-complexity = 15 # At least same as max-complexity max-branches = 15 -# https://pylint.pycqa.org/en/latest/user_guide/options.html#naming-styles -module-naming-style = "any" -# Can't make private class with PascalCase -class-rgx = "_?_?[a-zA-Z]+?$" -# https://github.com/PyCQA/pylint/issues/2018 -good-names = ["id", "x", "y", "a0", "i", "t0", "t1", "fn", "pb", "wr"] disable = [ # No need to mention the fixmes "fixme", "missing-docstring", - # We group imports + # Already taken care of by isort + "ungrouped-imports", + "unused-import", + "wrong-import-order", "wrong-import-position", + # Already 
taken care of by Flake8-naming, which does a better job + "invalid-name", # Already taken care of and grayed out. Also conflicts with Pylance reportIncompatibleMethodOverride "unused-argument", - # Already taken care of by Flake8 - "unused-import", - # Similar lines in 2 files, doesn't really work - "R0801", + # Only reports a single instance. Pyright does a better job anyway + "cyclic-import", # TODO: Diabled until using *proper* class based logic "protected-access", - # Strings are ok. Pylance also doesn't seem to see our overriden Exception classes + # Strings are ok. Pylance also doesn't seem to see our overridden Exception classes (TODO: Make it better?) "invalid-sequence-index", # Happens too often with Flask, child classes should not be affected by this rule # See: https://github.com/PyCQA/pylint/issues/4352 "too-few-public-methods", - # Too many false positives with Columns[] - # https://github.com/PyCQA/pylint/issues/3979#issuecomment-1036936661 - # https://github.com/PyCQA/pylint/issues/4369#issuecomment-1036932987 - "unsubscriptable-object", + # Similar lines in 2 files, doesn't really work + "R0801", ] [tool.isort] diff --git a/scripts/install.ps1 b/scripts/install.ps1 index 2ffbff04..c39ef4f6 100644 --- a/scripts/install.ps1 +++ b/scripts/install.ps1 @@ -5,5 +5,7 @@ If ($IsWindows) { New-Item -ItemType SymbolicLink -Path $python3 -Target $python -ErrorAction SilentlyContinue } +# Ensures installation tools are up to date. This also aliases pip to pip3 on macOS. python3 -m pip install wheel pip setuptools --upgrade -python3 -m pip install -r "$PSScriptRoot/requirements.txt" +pip install -r "$PSScriptRoot/requirements.txt" --upgrade +npm i --global pyright@latest diff --git a/scripts/lint.ps1 b/scripts/lint.ps1 index e9cd2a43..e40d139d 100644 --- a/scripts/lint.ps1 +++ b/scripts/lint.ps1 @@ -3,14 +3,14 @@ Set-Location "$PSScriptRoot/.." $exitCodes = 0 Write-Host "`nRunning autofixes..." -isort src/ typings/ +isort backend/ typings/ autopep8 $(git ls-files '**.py*') --in-place -unify src/ --recursive --in-place --quote='"""' +unify backend/ --recursive --in-place --quote='"' add-trailing-comma $(git ls-files '**.py*') --py36-plus Write-Host "`nRunning Pyright..." $Env:PYRIGHT_PYTHON_FORCE_VERSION = 'latest' -pyright src/ --warnings +pyright backend/ --warnings $exitCodes += $LastExitCode if ($LastExitCode -gt 0) { Write-Host "`Pyright failed ($LastExitCode)" -ForegroundColor Red @@ -20,7 +20,7 @@ else { } Write-Host "`nRunning Pylint..." -pylint src/ --output-format=colorized +pylint backend/ --output-format=colorized $exitCodes += $LastExitCode if ($LastExitCode -gt 0) { Write-Host "`Pylint failed ($LastExitCode)" -ForegroundColor Red @@ -30,7 +30,7 @@ else { } Write-Host "`nRunning Flake8..." -flake8 src/ typings/ +flake8 backend/ typings/ $exitCodes += $LastExitCode if ($LastExitCode -gt 0) { Write-Host "`Flake8 failed ($LastExitCode)" -ForegroundColor Red @@ -40,7 +40,7 @@ else { } Write-Host "`nRunning Bandit..." 
-bandit src/ -f custom --silent --recursive +bandit backend/ -f custom --silent --recursive # $exitCodes += $LastExitCode # Returns 1 on low if ($LastExitCode -gt 0) { Write-Host "`Bandit warning ($LastExitCode)" -ForegroundColor Yellow diff --git a/scripts/requirements.txt b/scripts/requirements.txt index 2106e6c3..1c1de14b 100644 --- a/scripts/requirements.txt +++ b/scripts/requirements.txt @@ -4,26 +4,34 @@ httplib2 mysql-connector pyjwt ratelimiter -redislite +redislite ; sys_platform != 'win32' requests requests-cache>=0.9.3 sqlalchemy -# Linting and Types +# Linters bandit -flake8>=5 # flake8-pyi deprecation warnings +flake8>=5,<6 # flake8-pyi deprecation warnings # flake8-quotes doesn't support v6 yet flake8-builtins flake8-bugbear flake8-class-attributes-order flake8-comprehensions>=3.8 # flake8 5 support flake8-datetimez -flake8-isort>=4.2 # flake8 5 support -flake8-pyi>=22.8.1 # flake8 5 support +flake8-pyi>=22.11.0 # flake8 6 support flake8-quotes flake8-simplify pep8-naming -pylint>=2.13.9,<3.0.0 # Respect ignore configuration options with --recursive=y # 3.0 still in pre-release +pylint>=2.14,<3.0.0 # New checks # 3.0 still in pre-release pylint-flask pylint-flask-sqlalchemy -pyright +# Formatters +add-trailing-comma>=2.3.0 # Added support for with statement +autopep8>=2.0.0 # New checks +isort unify + +# types +types-Flask # TODO: Deprecated, update Flask instead! +types-Flask-SQLAlchemy +types-httplib2 +types-requests diff --git a/typings/ratelimiter.pyi b/typings/ratelimiter.pyi index c78a96ec..cc752a4a 100644 --- a/typings/ratelimiter.pyi +++ b/typings/ratelimiter.pyi @@ -1,15 +1,16 @@ -from typing import Optional -from types import TracebackType -from threading import Lock -from collections.abc import Callable -from collections import deque import sys +from collections import deque +from collections.abc import Callable +from threading import Lock +from types import TracebackType + +from typing_extensions import Literal __author__: str __version__: str __license__: str __description__: str -PY35 = sys.version_info >= (3, 5) +PY35: Literal[False] class RateLimiter: @@ -17,14 +18,14 @@ class RateLimiter: period: int max_calls: int - callback: Optional[Callable, None] + callback: Callable | None _lock: Lock _alock: Lock # Lock to protect creation of self._alock s_init_lock: Lock - def __init__(self, max_calls: int, period: int = ..., callback: Optional[Callable, None] = ...) -> None: + def __init__(self, max_calls: int, period: int = ..., callback: Callable | None = ...) -> None: ... def __call__(self, f: Callable) -> Callable: @@ -35,16 +36,12 @@ class RateLimiter: def __exit__( self, - exc_type: Optional[type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType] + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, ) -> None: ... - if PY35: - aenter_code: str - __aexit__ = __exit__ - @property def _timespan(self) -> float: ...
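Editor's note, not part of the patch: a minimal sketch of what the new [tool.pylint.REPORTS] evaluation expression in pyproject.toml computes. Each error, warning or convention message now costs a full point, while refactor messages are normalized by statement count. The function name pylint_score and the sample message counts are illustrative assumptions only.

def pylint_score(error: int, warning: int, convention: int, refactor: int, statement: int) -> float:
    # Mirrors the configured expression:
    # "10.0 - error - warning - convention - ((10 * refactor) / statement) * 10"
    return 10.0 - error - warning - convention - ((10 * refactor) / statement) * 10


# Hypothetical run: 200 statements, 1 warning, 2 refactor messages.
score = pylint_score(error=0, warning=1, convention=0, refactor=2, statement=200)
print(score)         # 8.0
print(score >= 9.0)  # False: this combination falls below the fail-under = 9.0 gate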
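Likewise, a small illustrative sketch (assumptions only, not project code) of the platform guard that backend/services/cached_requests.py now applies before enabling the Redis cache backend; the "redislite ; sys_platform != 'win32'" marker added to scripts/requirements.txt in this same patch suggests redislite is simply not installed on Windows, so the SQLite backend of requests-cache remains the fallback. The pick_cache_backend helper and its return values are hypothetical stand-ins for configs.cached_session_backend handling.

import sys


def pick_cache_backend(configured_backend: str) -> str:
    # Only pick Redis when it is both configured and the platform can have redislite installed.
    if sys.platform != "win32" and configured_backend == "redis":
        return "redis"
    return "sqlite"


print(pick_cache_backend("redis"))   # "sqlite" on Windows, "redis" elsewhere
print(pick_cache_backend("sqlite"))  # always "sqlite"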