diff --git a/Makefile b/Makefile index d35afc79..9a746398 100644 --- a/Makefile +++ b/Makefile @@ -135,7 +135,7 @@ devserver: py-venv-check # Run a local Celery instance for background tasks. celery: - celery -A ambuda.tasks worker --loglevel=INFO + celery -A make_celery worker --loglevel=INFO # Docker commands diff --git a/ambuda/__init__.py b/ambuda/__init__.py index c5c62c4f..f7ac007f 100644 --- a/ambuda/__init__.py +++ b/ambuda/__init__.py @@ -13,16 +13,16 @@ from flask import Flask, session from flask_babel import Babel, pgettext from sentry_sdk.integrations.flask import FlaskIntegration -from sqlalchemy import exc import config from ambuda import admin as admin_manager from ambuda import auth as auth_manager -from ambuda import checks, filters, queries +from ambuda import checks, filters from ambuda.consts import LOCALES from ambuda.mail import mailer +from ambuda.models.base import db +from ambuda.tasks import celery_init_app from ambuda.utils import assets -from ambuda.utils.json_serde import AmbudaJSONEncoder from ambuda.utils.url_converters import ListConverter from ambuda.views.about import bp as about from ambuda.views.api import bp as api @@ -42,32 +42,6 @@ def _initialize_sentry(sentry_dsn: str): ) -def _initialize_db_session(app, config_name: str): - """Ensure that our SQLAlchemy session behaves well. - - The Flask-SQLAlchemy library manages all of this boilerplate for us - automatically, but Flask-SQLAlchemy has relatively poor support for using - our models outside of the application context, e.g. when running seed - scripts or other batch jobs. So instead of using that extension, we manage - the boilerplate ourselves. - """ - - @app.teardown_appcontext - def shutdown_session(exception=None): - """Reset session state to prevent caching and memory leaks.""" - queries.get_session_class().remove() - - if config_name == config.PRODUCTION: - # The hook below hides database errors. So, install the hook only if - # we're in production. - - @app.errorhandler(exc.SQLAlchemyError) - def handle_db_exceptions(error): - """Rollback errors so that the db can handle future requests.""" - session = queries.get_session() - session.rollback() - - def _initialize_logger(log_level: int) -> None: """Initialize a simple logger for all requests.""" handler = logging.StreamHandler(sys.stderr) @@ -79,7 +53,11 @@ def _initialize_logger(log_level: int) -> None: def create_app(config_env: str): - """Initialize the Ambuda application.""" + """Initialize the Ambuda application. + + :param config_env: the config environment to use. For valid values, see + the string constants in `config.py`. + """ # We store all env variables in a `.env` file so that it's easier to manage # different configurations. @@ -99,6 +77,12 @@ def create_app(config_env: str): # Config app.config.from_object(config_spec) + # Database + db.init_app(app) + + # Celery + celery_init_app(app) + # Sanity checks assert config_env == config_spec.AMBUDA_ENVIRONMENT if config_env != config.TESTING: @@ -108,10 +92,7 @@ def create_app(config_env: str): # Logger _initialize_logger(config_spec.LOG_LEVEL) - # Database - _initialize_db_session(app, config_env) - - # Extensions + # Various Flask extensions babel = Babel(app) @babel.localeselector @@ -142,12 +123,17 @@ def get_locale(): # Debug-only routes for local development. 
if app.debug: + from flask_debugtoolbar import DebugToolbarExtension + from ambuda.views.debug import bp as debug_bp + DebugToolbarExtension(app) app.register_blueprint(debug_bp, url_prefix="/debug") # i18n string trimming + # For more, see:https://jinja.palletsprojects.com/en/3.1.x/api/#ext-i18n-trimmed app.jinja_env.policies["ext.i18n.trimmed"] = True + # Template functions and filters app.jinja_env.filters.update( { @@ -167,5 +153,4 @@ def get_locale(): } ) - app.json_encoder = AmbudaJSONEncoder return app diff --git a/ambuda/admin.py b/ambuda/admin.py index 62d68bbf..5dc4994e 100644 --- a/ambuda/admin.py +++ b/ambuda/admin.py @@ -6,7 +6,7 @@ from flask_login import current_user import ambuda.database as db -import ambuda.queries as q +from ambuda.models.base import db as flask_sqla class AmbudaIndexView(AdminIndexView): @@ -68,7 +68,7 @@ class SponsorshipView(ModeratorBaseView): def create_admin_manager(app): - session = q.get_session_class() + session = flask_sqla.session admin = Admin( app, name="Ambuda", diff --git a/ambuda/checks.py b/ambuda/checks.py index 0c3a2d39..2ced183a 100644 --- a/ambuda/checks.py +++ b/ambuda/checks.py @@ -9,7 +9,7 @@ from ambuda import consts, enums from ambuda import database as db from ambuda import queries as q -from ambuda.models.base import Base +from ambuda.models.base import db as flask_sqla def _warn(text: str = ""): @@ -70,7 +70,7 @@ def _check_app_schema_matches_db_schema(database_uri: str) -> list[str]: errors = [] - for table_name, table in Base.metadata.tables.items(): + for table_name, table in flask_sqla.Model.metadata.tables.items(): app_columns = table.columns db_columns = {c["name"]: c for c in inspector.get_columns(table_name)} diff --git a/ambuda/database.py b/ambuda/database.py index ea5b7e45..35d64f80 100644 --- a/ambuda/database.py +++ b/ambuda/database.py @@ -4,7 +4,6 @@ from ambuda.enums import SiteRole # NOQA F401 from ambuda.models.auth import * # NOQA F401,F403 -from ambuda.models.base import Base # NOQA F401,F403 from ambuda.models.blog import * # NOQA F401,F403 from ambuda.models.dictionaries import * # NOQA F401,F403 from ambuda.models.parse import * # NOQA F401,F403 diff --git a/ambuda/models/auth.py b/ambuda/models/auth.py index a811581a..450107f8 100644 --- a/ambuda/models/auth.py +++ b/ambuda/models/auth.py @@ -7,11 +7,11 @@ from sqlalchemy.orm import relationship from werkzeug.security import check_password_hash, generate_password_hash -from ambuda.models.base import Base, foreign_key, pk +from ambuda.models.base import db, foreign_key, pk from ambuda.utils.user_mixins import AmbudaUserMixin -class User(AmbudaUserMixin, Base): +class User(AmbudaUserMixin, db.Model): """A user.""" __tablename__ = "users" @@ -63,7 +63,7 @@ def check_password(self, raw_password: str) -> bool: return check_password_hash(self.password_hash, raw_password) -class Role(Base): +class Role(db.Model): """A role. 
@@ -83,7 +83,7 @@ def __repr__(self): return f"" -class UserRoles(Base): +class UserRoles(db.Model): """Secondary table for users and roles.""" @@ -95,7 +95,7 @@ class UserRoles(Base): role_id = Column(Integer, ForeignKey("roles.id"), primary_key=True, index=True) -class PasswordResetToken(Base): +class PasswordResetToken(db.Model): """Models a "forgot password" recovery token.""" diff --git a/ambuda/models/base.py b/ambuda/models/base.py index c8004e71..52e288f0 100644 --- a/ambuda/models/base.py +++ b/ambuda/models/base.py @@ -1,11 +1,11 @@ """Base model and utilities.""" +from flask_sqlalchemy import SQLAlchemy from sqlalchemy import Column, ForeignKey, Integer -from sqlalchemy.orm import declarative_base -#: The base class for all of Ambuda's models. All new models should inherit -#: from this class. -Base = declarative_base() +# TODO(arun): rename and standardize this across the project. Avoid confusion +# with the `database` module, which is usually imported as `db`. +db = SQLAlchemy(session_options=dict(autoflush=False, autocommit=False)) def pk(): diff --git a/ambuda/models/blog.py b/ambuda/models/blog.py index 8968f8b4..8a01c744 100644 --- a/ambuda/models/blog.py +++ b/ambuda/models/blog.py @@ -4,10 +4,10 @@ from sqlalchemy import Text as Text_ from sqlalchemy.orm import relationship -from ambuda.models.base import Base, foreign_key, pk, same_as +from ambuda.models.base import db, foreign_key, pk, same_as -class BlogPost(Base): +class BlogPost(db.Model): """A blog post.""" diff --git a/ambuda/models/dictionaries.py b/ambuda/models/dictionaries.py index 9787680b..915c6d6d 100644 --- a/ambuda/models/dictionaries.py +++ b/ambuda/models/dictionaries.py @@ -1,10 +1,10 @@ from sqlalchemy import Column, String from sqlalchemy.orm import relationship -from ambuda.models.base import Base, foreign_key, pk +from ambuda.models.base import db, foreign_key, pk -class Dictionary(Base): +class Dictionary(db.Model): """A dictionary that maps Sanskrit expressions to definitions in various languages.""" @@ -21,7 +21,7 @@ class Dictionary(Base): entries = relationship("DictionaryEntry", backref="dictionary", cascade="delete") -class DictionaryEntry(Base): +class DictionaryEntry(db.Model): """Dictionary definitions for a specific entry key. diff --git a/ambuda/models/parse.py b/ambuda/models/parse.py index 447997d8..c91abc7d 100644 --- a/ambuda/models/parse.py +++ b/ambuda/models/parse.py @@ -3,10 +3,10 @@ from sqlalchemy import Column from sqlalchemy import Text as _Text -from ambuda.models.base import Base, foreign_key, pk +from ambuda.models.base import db, foreign_key, pk -class BlockParse(Base): +class BlockParse(db.Model): """Parse data for a `TextBlock`.""" __tablename__ = "block_parses" diff --git a/ambuda/models/proofing.py b/ambuda/models/proofing.py index e2dfebcd..ec7152c9 100644 --- a/ambuda/models/proofing.py +++ b/ambuda/models/proofing.py @@ -6,7 +6,7 @@ from sqlalchemy import Text as Text_ from sqlalchemy.orm import relationship -from ambuda.models.base import Base, foreign_key, pk, same_as +from ambuda.models.base import db, foreign_key, pk, same_as def string(): @@ -19,7 +19,7 @@ def text(): return Column(Text_, nullable=False, default="") -class Project(Base): +class Project(db.Model): """A proofreading project. @@ -69,7 +69,7 @@ class Project(Base): ) -class Page(Base): +class Page(db.Model): """A page in a proofreading project. @@ -115,7 +115,7 @@ class Page(Base): ) -class PageStatus(Base): +class PageStatus(db.Model): """The transcription status of a given page. 
@@ -130,7 +130,7 @@ class PageStatus(Base): name = Column(String, nullable=False, unique=True) -class Revision(Base): +class Revision(db.Model): """A specific page revision. diff --git a/ambuda/models/site.py b/ambuda/models/site.py index 7b97fd68..d6948872 100644 --- a/ambuda/models/site.py +++ b/ambuda/models/site.py @@ -8,10 +8,10 @@ from sqlalchemy import Column, Integer, String from sqlalchemy import Text as Text_ -from ambuda.models.base import Base, pk +from ambuda.models.base import db, pk -class ProjectSponsorship(Base): +class ProjectSponsorship(db.Model): """A project that a donor can sponsor.""" diff --git a/ambuda/models/talk.py b/ambuda/models/talk.py index 45133103..eaaaffc9 100644 --- a/ambuda/models/talk.py +++ b/ambuda/models/talk.py @@ -6,7 +6,7 @@ from sqlalchemy import Text as Text_ from sqlalchemy.orm import relationship -from ambuda.models.base import Base, foreign_key, pk, same_as +from ambuda.models.base import db, foreign_key, pk, same_as def string(): @@ -14,7 +14,7 @@ def string(): return Column(String, nullable=False, default="") -class Board(Base): +class Board(db.Model): """A list of threads.""" @@ -34,7 +34,7 @@ class Board(Base): ) -class Thread(Base): +class Thread(db.Model): """A list of posts.""" @@ -59,7 +59,7 @@ class Thread(Base): posts = relationship("Post", order_by=lambda: Post.created_at, backref="thread") -class Post(Base): +class Post(db.Model): """A post.""" diff --git a/ambuda/models/texts.py b/ambuda/models/texts.py index 38d8a1be..09d5cc1d 100644 --- a/ambuda/models/texts.py +++ b/ambuda/models/texts.py @@ -11,10 +11,10 @@ from sqlalchemy import Text as _Text from sqlalchemy.orm import relationship -from ambuda.models.base import Base, foreign_key, pk +from ambuda.models.base import db, foreign_key, pk -class Text(Base): +class Text(db.Model): """A text with its metadata.""" @@ -32,7 +32,7 @@ class Text(Base): sections = relationship("TextSection", backref="text", cascade="delete") -class TextSection(Base): +class TextSection(db.Model): """Ordered divisions of text content. This represent divisions like kāṇḍas, sargas, etc. @@ -63,7 +63,7 @@ class TextSection(Base): ) -class TextBlock(Base): +class TextBlock(db.Model): """A verse or paragraph. A TextBlock is the "unit of reuse." When we make cross-references between diff --git a/ambuda/queries.py b/ambuda/queries.py index ec7e9122..6eaf15be 100644 --- a/ambuda/queries.py +++ b/ambuda/queries.py @@ -4,54 +4,17 @@ For simple or adhoc queries, you can just write them in their corresponding view. """ -import functools from flask import current_app -from sqlalchemy import create_engine -from sqlalchemy.orm import load_only, scoped_session, selectinload, sessionmaker +from sqlalchemy.orm import load_only, selectinload import ambuda.database as db - -# NOTE: this logic is copied from Flask-SQLAlchemy. We avoid Flask-SQLAlchemy -# because we also need to access the database from a non-Flask context when -# we run database seed scripts. -# ~~~ -# Scope the session to the current greenlet if greenlet is available, -# otherwise fall back to the current thread. -try: - from greenlet import getcurrent as _ident_func -except ImportError: - from threading import get_ident as _ident_func - - -# functools.cache makes this return value a singleton. -@functools.cache -def get_engine(): - database_uri = current_app.config["SQLALCHEMY_DATABASE_URI"] - return create_engine(database_uri) - - -# functools.cache makes this return value a singleton. 
-@functools.cache -def get_session_class(): - # Scoped sessions remove various kinds of errors, e.g. when using database - # objects created on different threads. - # - # For details, see: - # - https://stackoverflow.com/questions/12223335 - # - https://flask.palletsprojects.com/en/2.1.x/patterns/sqlalchemy/ - session_factory = sessionmaker(bind=get_engine(), autoflush=False, autocommit=False) - return scoped_session(session_factory, scopefunc=_ident_func) +from ambuda.models.base import db as flask_sqla def get_session(): - """Instantiate a scoped session. - - If we implemented this right, there should be exactly one unique session - per request. - """ - Session = get_session_class() - return Session() + with current_app.app_context(): + return flask_sqla.session def texts() -> list[db.Text]: diff --git a/ambuda/tasks/__init__.py b/ambuda/tasks/__init__.py index cbf78c6c..42560067 100644 --- a/ambuda/tasks/__init__.py +++ b/ambuda/tasks/__init__.py @@ -1,35 +1,65 @@ """Main entrypoint for Ambuda's background task runner. -The code here shares some utilities with our Flask application, but otherwise -it is an entirely different program that operates outside the Flask application -context. +Our Celery runner and our Flask application run in separate programs. Since +Celery needs access to certain aspects of our Flask app (in particular, to our +flask-sqlalchemy config), we follow a pattern suggested in the Flask +documentation [1]. Use utilities from outside this package with care. For more information, see our "Background tasks with Celery" doc: https://ambuda.readthedocs.io/en/latest/ + +[1]: https://flask.palletsprojects.com/en/2.2.x/patterns/celery/ """ import os -from celery import Celery +from celery import Celery, Task +from flask import Flask # For context on why we use Redis for both the backend and the broker, see the # "Background tasks with Celery" doc. +# +# TODO: move REDIS_URL into `config.py`. redis_url = os.getenv("REDIS_URL", "redis://localhost:6379/0") -app = Celery( - "ambuda-tasks", - backend=redis_url, - broker=redis_url, - include=[ - "ambuda.tasks.projects", - "ambuda.tasks.ocr", - ], -) -app.conf.update( - # Run all tasks asynchronously by default. - task_always_eager=False, - # Force arguments to be plain data by requiring them to be JSON-compatible. - task_serializer="json", -) + + +def celery_init_app(app: Flask) -> Celery: + """Initialize the Celery app against our Flask instance. + + Source: https://flask.palletsprojects.com/en/2.2.x/patterns/celery/ + """ + + class FlaskTask(Task): + def __call__(self, *args: object, **kwargs: object) -> object: + # Run tasks within the application context so that they have full + # database access. + with app.app_context(): + return self.run(*args, **kwargs) + + celery_app = Celery( + "ambuda-tasks", + task_cls=FlaskTask, + backend=redis_url, + broker=redis_url, + include=[ + "ambuda.tasks.projects", + "ambuda.tasks.ocr", + ], + ) + + # Make this app instance the "default" handler so that we can use the + # `shared_task` decorator elsewhere. For details, see the Flask+Celery docs. + celery_app.set_default() + celery_app.conf.update( + # Run all tasks asynchronously by default. + task_always_eager=False, + # Force arguments to be plain data by requiring them to be JSON-compatible. + task_serializer="json", + ) + + # Save this instance on `app` so that we have access to it later. 
+ app.extensions["celery"] = celery_app + return celery_app diff --git a/ambuda/tasks/ocr.py b/ambuda/tasks/ocr.py index d165c843..38da9da4 100644 --- a/ambuda/tasks/ocr.py +++ b/ambuda/tasks/ocr.py @@ -1,80 +1,69 @@ """Background tasks for proofing projects.""" -from celery import group +from celery import group, shared_task from celery.result import GroupResult from ambuda import consts from ambuda import database as db from ambuda import queries as q from ambuda.enums import SitePageStatus -from ambuda.tasks import app from ambuda.utils import google_ocr from ambuda.utils.assets import get_page_image_filepath from ambuda.utils.revisions import add_revision -from config import create_config_only_app def _run_ocr_for_page_inner( - app_env: str, project_slug: str, page_slug: str, ) -> int: """Must run in the application context.""" + bot_user = q.user(consts.BOT_USERNAME) + if bot_user is None: + raise ValueError(f'User "{consts.BOT_USERNAME}" is not defined.') - flask_app = create_config_only_app(app_env) - with flask_app.app_context(): - bot_user = q.user(consts.BOT_USERNAME) - if bot_user is None: - raise ValueError(f'User "{consts.BOT_USERNAME}" is not defined.') + # The actual API call. + image_path = get_page_image_filepath(project_slug, page_slug) + ocr_response = google_ocr.run(image_path) - # The actual API call. - image_path = get_page_image_filepath(project_slug, page_slug) - ocr_response = google_ocr.run(image_path) + session = q.get_session() + project = q.project(project_slug) + page = q.page(project.id, page_slug) - session = q.get_session() - project = q.project(project_slug) - page = q.page(project.id, page_slug) - - page.ocr_bounding_boxes = google_ocr.serialize_bounding_boxes( - ocr_response.bounding_boxes + page.ocr_bounding_boxes = google_ocr.serialize_bounding_boxes( + ocr_response.bounding_boxes + ) + session.add(page) + session.commit() + + summary = "Run OCR" + try: + return add_revision( + page=page, + summary=summary, + content=ocr_response.text_content, + status=SitePageStatus.R0, + version=0, + author_id=bot_user.id, ) - session.add(page) - session.commit() - - summary = "Run OCR" - try: - return add_revision( - page=page, - summary=summary, - content=ocr_response.text_content, - status=SitePageStatus.R0, - version=0, - author_id=bot_user.id, - ) - except Exception as e: - raise ValueError( - f'OCR failed for page "{project.slug}/{page.slug}".' - ) from e + except Exception as e: + raise ValueError(f'OCR failed for page "{project.slug}/{page.slug}".') from e -@app.task(bind=True) +@shared_task(bind=True) def run_ocr_for_page( self, *, - app_env: str, project_slug: str, page_slug: str, ): _run_ocr_for_page_inner( - app_env, project_slug, page_slug, ) def run_ocr_for_project( - app_env: str, project: db.Project, ) -> GroupResult | None: """Create a `group` task to run OCR on a project. @@ -86,14 +75,11 @@ def run_ocr_for_project( :return: the Celery result, or ``None`` if no tasks were run. 
""" - flask_app = create_config_only_app(app_env) - with flask_app.app_context(): - unedited_pages = [p for p in project.pages if p.version == 0] + unedited_pages = [p for p in project.pages if p.version == 0] if unedited_pages: tasks = group( run_ocr_for_page.s( - app_env=app_env, project_slug=project.slug, page_slug=p.slug, ) diff --git a/ambuda/tasks/projects.py b/ambuda/tasks/projects.py index 68f4e874..0cc5ba74 100644 --- a/ambuda/tasks/projects.py +++ b/ambuda/tasks/projects.py @@ -7,13 +7,12 @@ # package called `fitz` (https://pypi.org/project/fitz/) that is completely # unrelated to PDF parsing. import fitz +from celery import shared_task from slugify import slugify from ambuda import database as db from ambuda import queries as q -from ambuda.tasks import app from ambuda.tasks.utils import CeleryTaskStatus, TaskStatus -from config import create_config_only_app def _split_pdf_into_pages( @@ -75,7 +74,6 @@ def create_project_inner( title: str, pdf_path: str, output_dir: str, - app_environment: str, creator_id: int, task_status: TaskStatus, ): @@ -87,18 +85,15 @@ def create_project_inner( :param title: the project title. :param pdf_path: local path to the source PDF. :param output_dir: local path where page images will be stored. - :param app_environment: the app environment, e.g. `"development"`. :param creator_id: the user that created this project. :param task_status: tracks progress on the task. """ logging.info(f'Received upload task "{title}" for path {pdf_path}.') # Tasks must be idempotent. Exit if the project already exists. - app = create_config_only_app(app_environment) - with app.app_context(): - session = q.get_session() - slug = slugify(title) - project = session.query(db.Project).filter_by(slug=slug).first() + session = q.get_session() + slug = slugify(title) + project = session.query(db.Project).filter_by(slug=slug).first() if project: raise ValueError( @@ -109,25 +104,23 @@ def create_project_inner( pages_dir = Path(output_dir) num_pages = _split_pdf_into_pages(Path(pdf_path), Path(pages_dir), task_status) - with app.app_context(): - _add_project_to_database( - title=title, - slug=slug, - num_pages=num_pages, - creator_id=creator_id, - ) + _add_project_to_database( + title=title, + slug=slug, + num_pages=num_pages, + creator_id=creator_id, + ) task_status.success(num_pages, slug) -@app.task(bind=True) +@shared_task(bind=True) def create_project( self, *, title: str, pdf_path: str, output_dir: str, - app_environment: str, creator_id: int, ): """Split the given PDF into pages and register the project on the database. @@ -139,7 +132,6 @@ def create_project( title=title, pdf_path=pdf_path, output_dir=output_dir, - app_environment=app_environment, creator_id=creator_id, task_status=task_status, ) diff --git a/ambuda/templates/macros/proofing.html b/ambuda/templates/macros/proofing.html index f19679da..33263259 100644 --- a/ambuda/templates/macros/proofing.html +++ b/ambuda/templates/macros/proofing.html @@ -170,22 +170,12 @@

 {% endmacro %}
 
-{# List various activity (edits, new projects, etc.) #}
-{% macro activity_log(activity) %}
-
-{% endmacro %}
-
-
-{# List the given revisions. Revisions might correspond to multiple pages. #}
+{# List the given revisions. Revisions might correspond to multiple projects. #}
 {% macro revision_list(revisions) %}
 {% endmacro %}
diff --git a/ambuda/templates/proofing/projects/activity.html b/ambuda/templates/proofing/projects/activity.html
index b79b5dff..972ca51b 100644
--- a/ambuda/templates/proofing/projects/activity.html
+++ b/ambuda/templates/proofing/projects/activity.html
@@ -15,6 +15,6 @@
 {{ m.project_nav(project=project, active='activity') }}
 {% set search_url = url_for("proofing.project.search", slug=project.slug) %}
-{{ m.activity_log(recent_activity) }}
+{{ m.revision_list(recent_revisions) }}
 {% endblock %}
diff --git a/ambuda/templates/proofing/recent-changes.html b/ambuda/templates/proofing/recent-changes.html
index 1882b417..b59cbf35 100644
--- a/ambuda/templates/proofing/recent-changes.html
+++ b/ambuda/templates/proofing/recent-changes.html
@@ -2,6 +2,16 @@
 {% import "macros/proofing.html" as m %}
+{% macro page_link(text, page) %}
+{% if page %}
+  {% set url = url_for("proofing.recent_changes", page=page) %}
+  <a href="{{ url }}">{{ text|safe }}</a>
+{% else %}
+  {{ text|safe }}
+{% endif %}
+{% endmacro %}
+
+
 {% block title %}Recent changes | Ambuda{% endblock %}
@@ -13,8 +23,38 @@
   _("Recent changes"),
   "Recent edits and revisions made by people like you!") }}
 
-{% if recent_activity -%}
-{{ m.activity_log(recent_activity) }}
+{% if recent_revisions -%}
+
+
+{# Page links #}
+{% set revs = recent_revisions %}
+
 {% else %}

 No changes found.
 {% endif %}
diff --git a/ambuda/templates/proofing/user/activity.html b/ambuda/templates/proofing/user/activity.html
index ec6e547f..3a7ef7fc 100644
--- a/ambuda/templates/proofing/user/activity.html
+++ b/ambuda/templates/proofing/user/activity.html
@@ -31,7 +31,7 @@

 Summary
 All changes
-{{ m.activity_log(recent_activity) }}
+{{ m.revision_list(recent_revisions) }}
 {% else %}
 This user has not made any changes.
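
Note: after this change the recent-changes page consumes a Flask-SQLAlchemy Pagination object rather than a list of (kind, date, object) tuples. A minimal sketch of that wiring, assuming Flask-SQLAlchemy 3.x; the standalone app, model, and route below are illustrative stand-ins, not code from this diff (the real view also filters out bot edits):

from flask import Flask, render_template
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
db = SQLAlchemy(app)


class Revision(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    created = db.Column(db.DateTime, index=True)


@app.route("/recent-changes")
def recent_changes():
    # db.paginate() returns a Pagination object: iterating it yields the
    # current page's rows, and has_prev/has_next/prev_num/next_num drive
    # the "newer"/"older" links that the page_link macro renders.
    revs = db.paginate(
        db.select(Revision).order_by(Revision.created.desc()),
        per_page=50,
        max_per_page=50,
    )
    return render_template("proofing/recent-changes.html", recent_revisions=revs)
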

    diff --git a/ambuda/utils/json_serde.py b/ambuda/utils/json_serde.py index 966d401b..e69de29b 100644 --- a/ambuda/utils/json_serde.py +++ b/ambuda/utils/json_serde.py @@ -1,12 +0,0 @@ -import dataclasses - -from flask import json - - -class AmbudaJSONEncoder(json.JSONEncoder): - """Extend Flask's default encoder to support dataclasses.""" - - def default(self, o): - if dataclasses.is_dataclass(o): - return dataclasses.asdict(o) - return super().default(o) diff --git a/ambuda/views/proofing/main.py b/ambuda/views/proofing/main.py index 39b2f135..ca0332bd 100644 --- a/ambuda/views/proofing/main.py +++ b/ambuda/views/proofing/main.py @@ -13,9 +13,10 @@ from wtforms.widgets import TextArea from ambuda import consts -from ambuda import database as db from ambuda import queries as q +from ambuda.database import Page, Project, Revision, User from ambuda.enums import SitePageStatus +from ambuda.models.base import db from ambuda.tasks import projects as project_tasks from ambuda.views.proofing.decorators import moderator_required, p2_required @@ -97,11 +98,9 @@ def index(): # Fetch all project data in a single query for better performance. session = q.get_session() projects = ( - session.query(db.Project) + session.query(Project) .options( - orm.joinedload(db.Project.pages) - .load_only(db.Page.id) - .joinedload(db.Page.status) + orm.joinedload(Project.pages).load_only(Page.id).joinedload(Page.status) ) .all() ) @@ -199,7 +198,6 @@ def create_project(): title=title, pdf_path=str(pdf_path), output_dir=str(page_image_dir), - app_environment=current_app.config["AMBUDA_ENVIRONMENT"], creator_id=current_user.id, ) return render_template( @@ -242,35 +240,25 @@ def create_project_status(task_id): @bp.route("/recent-changes") def recent_changes(): """Show recent changes across all projects.""" - num_per_page = 100 + per_page = 50 # Exclude bot edits, which overwhelm all other edits on the site. bot_user = q.user(consts.BOT_USERNAME) assert bot_user, "Bot user not defined" - session = q.get_session() - recent_revisions = ( - session.query(db.Revision) - .options(orm.defer(db.Revision.content)) - .filter(db.Revision.author_id != bot_user.id) - .order_by(db.Revision.created.desc()) - .limit(num_per_page) - .all() - ) - recent_activity = [("revision", r.created, r) for r in recent_revisions] - - recent_projects = ( - session.query(db.Project) - .order_by(db.Project.created_at.desc()) - .limit(num_per_page) - .all() + q.get_session() + recent_revisions = db.paginate( + db.select(Revision) + # Defer slow columns + .options(orm.defer(Revision.content)) + # Avoid bot changes, which dominate the logs. 
+ .filter(Revision.author_id != bot_user.id).order_by(Revision.created.desc()), + per_page=per_page, + max_per_page=per_page, ) - recent_activity += [("project", p.created_at, p) for p in recent_projects] - recent_activity.sort(key=lambda x: x[1], reverse=True) - recent_activity = recent_activity[:num_per_page] return render_template( - "proofing/recent-changes.html", recent_activity=recent_activity + "proofing/recent-changes.html", recent_revisions=recent_revisions ) @@ -295,16 +283,14 @@ def dashboard(): days_ago_1d = now - timedelta(days=1) session = q.get_session() - bot = session.query(db.User).filter_by(username=consts.BOT_USERNAME).one() + bot = session.query(User).filter_by(username=consts.BOT_USERNAME).one() bot_id = bot.id revisions_30d = ( - session.query(db.Revision) - .filter( - (db.Revision.created >= days_ago_30d) & (db.Revision.author_id != bot_id) - ) - .options(orm.load_only(db.Revision.created, db.Revision.author_id)) - .order_by(db.Revision.created) + session.query(Revision) + .filter((Revision.created >= days_ago_30d) & (Revision.author_id != bot_id)) + .options(orm.load_only(Revision.created, Revision.author_id)) + .order_by(Revision.created) .all() ) revisions_7d = [x for x in revisions_30d if x.created >= days_ago_7d] diff --git a/ambuda/views/proofing/project.py b/ambuda/views/proofing/project.py index 185f5582..ecaf4e87 100644 --- a/ambuda/views/proofing/project.py +++ b/ambuda/views/proofing/project.py @@ -32,7 +32,6 @@ from ambuda import database as db from ambuda import queries as q -from ambuda.tasks import app as celery_app from ambuda.tasks import ocr as ocr_tasks from ambuda.utils import project_utils, proofing_utils from ambuda.utils.revisions import add_revision @@ -42,6 +41,10 @@ LOG = logging.getLogger(__name__) +def get_celery_app(): + return current_app.extensions["celery"] + + def _is_valid_page_number_spec(_, field): try: _ = project_utils.parse_page_number_spec(field.data) @@ -185,13 +188,11 @@ def activity(slug): .limit(100) .all() ) - recent_activity = [("revision", r.created, r) for r in recent_revisions] - recent_activity.append(("project", project_.created_at, project_)) return render_template( "proofing/projects/activity.html", project=project_, - recent_activity=recent_activity, + recent_revisions=recent_revisions, ) @@ -607,10 +608,7 @@ def batch_ocr(slug): abort(404) if request.method == "POST": - task = ocr_tasks.run_ocr_for_project( - app_env=current_app.config["AMBUDA_ENVIRONMENT"], - project=project_, - ) + task = ocr_tasks.run_ocr_for_project(project=project_) if task: return render_template( "proofing/projects/batch-ocr-post.html", @@ -632,7 +630,7 @@ def batch_ocr(slug): @bp.route("/batch-ocr-status/") def batch_ocr_status(task_id): - r = GroupResult.restore(task_id, app=celery_app) + r = GroupResult.restore(task_id, app=get_celery_app()) assert r, task_id if r.results: diff --git a/ambuda/views/proofing/user.py b/ambuda/views/proofing/user.py index 7a2d9aeb..da110d04 100644 --- a/ambuda/views/proofing/user.py +++ b/ambuda/views/proofing/user.py @@ -53,21 +53,13 @@ def activity(username): .limit(100) .all() ) - recent_projects = ( - session.query(db.Project) - .filter_by(creator_id=user_.id) - .order_by(db.Project.created_at.desc()) - .all() - ) - recent_activity = [("revision", r.created, r) for r in recent_revisions] - recent_activity += [("project", p.created_at, p) for p in recent_projects] - hm = heatmap.create(x[1].date() for x in recent_activity) + hm = heatmap.create(r.created.date() for r in recent_revisions) return 
render_template( "proofing/user/activity.html", user=user_, - recent_activity=recent_activity, + recent_revisions=recent_revisions, heatmap=hm, ) diff --git a/ambuda/views/reader/texts.py b/ambuda/views/reader/texts.py index d67ca71c..7b044a00 100644 --- a/ambuda/views/reader/texts.py +++ b/ambuda/views/reader/texts.py @@ -1,15 +1,12 @@ """Views related to texts: title pages, sections, verses, etc.""" -import json - -from flask import Blueprint, abort, jsonify, render_template, url_for +from flask import Blueprint, abort, json, jsonify, render_template, url_for from indic_transliteration import sanscript import ambuda.database as db import ambuda.queries as q from ambuda.consts import TEXT_CATEGORIES from ambuda.utils import xml -from ambuda.utils.json_serde import AmbudaJSONEncoder from ambuda.views.api import bp as api from ambuda.views.reader.schema import Block, Section @@ -150,9 +147,7 @@ def section(text_slug, section_slug): # Fetch with content blocks cur = q.text_section(text_.id, section_slug) - - with q.get_session() as _: - _ = cur.blocks + _ = cur.blocks blocks = [] for block in cur.blocks: @@ -170,7 +165,7 @@ def section(text_slug, section_slug): prev_url=_make_section_url(text_, prev), next_url=_make_section_url(text_, next_), ) - json_payload = json.dumps(data, cls=AmbudaJSONEncoder) + json_payload = json.dumps(data) return render_template( "texts/section.html", diff --git a/cli.py b/cli.py index 608928b9..c1833840 100755 --- a/cli.py +++ b/cli.py @@ -6,17 +6,13 @@ import click from slugify import slugify from sqlalchemy import or_ -from sqlalchemy.orm import Session import ambuda from ambuda import database as db from ambuda import queries as q -from ambuda.seed.utils.data_utils import create_db from ambuda.tasks.projects import create_project_inner from ambuda.tasks.utils import LocalTaskStatus -engine = create_db() - @click.group() def cli(): @@ -33,22 +29,22 @@ def create_user(): raw_password = getpass.getpass("Password: ") email = input("Email: ") - with Session(engine) as session: - u = ( - session.query(db.User) - .where(or_(db.User.username == username, db.User.email == email)) - .first() - ) - if u is not None: - if u.username == username: - raise click.ClickException(f'User "{username}" already exists.') - else: - raise click.ClickException(f'Email "{email}" already exists.') + session = q.get_session() + existing_user = ( + session.query(db.User) + .where(or_(db.User.username == username, db.User.email == email)) + .first() + ) + if existing_user is not None: + if existing_user.username == username: + raise click.ClickException(f'User "{username}" already exists.') + else: + raise click.ClickException(f'Email "{email}" already exists.') - user = db.User(username=username, email=email) - user.set_password(raw_password) - session.add(user) - session.commit() + new_user = db.User(username=username, email=email) + new_user.set_password(raw_password) + session.add(new_user) + session.commit() @cli.command() @@ -60,19 +56,19 @@ def add_role(username, role): In particular, `add-role admin` will give a user administrator privileges and grant them full access to Ambuda's data and content. 
""" - with Session(engine) as session: - u = session.query(db.User).where(db.User.username == username).first() - if u is None: - raise click.ClickException(f'User "{username}" does not exist.') - r = session.query(db.Role).where(db.Role.name == role).first() - if r is None: - raise click.ClickException(f'Role "{role}" does not exist.') - if r in u.roles: - raise click.ClickException(f'User "{username}" already has role "{role}".') - - u.roles.append(r) - session.add(u) - session.commit() + session = q.get_session() + u = session.query(db.User).where(db.User.username == username).first() + if u is None: + raise click.ClickException(f'User "{username}" does not exist.') + r = session.query(db.Role).where(db.Role.name == role).first() + if r is None: + raise click.ClickException(f'Role "{role}" does not exist.') + if r in u.roles: + raise click.ClickException(f'User "{username}" already has role "{role}".') + + u.roles.append(r) + session.add(u) + session.commit() print(f'Added role "{role}" to user "{username}".') @@ -101,7 +97,6 @@ def create_project(title, pdf_path): title=title, pdf_path=pdf_path, output_dir=str(page_image_dir), - app_environment=current_app.config["AMBUDA_ENVIRONMENT"], creator_id=arbitrary_user.id, task_status=LocalTaskStatus(), ) diff --git a/config.py b/config.py index 056ad7f8..2b22d08e 100644 --- a/config.py +++ b/config.py @@ -80,6 +80,9 @@ class BaseConfig: #: https://docs.sqlalchemy.org/en/14/core/engines.html#database-urls SQLALCHEMY_DATABASE_URI = _env("SQLALCHEMY_DATABASE_URI") + #: If set, record queries during a request. + SQLALCHEMY_RECORD_QUERIES = False + #: Where to store user uploads (PDFs, images, etc.). UPLOAD_FOLDER = _env("FLASK_UPLOAD_FOLDER") @@ -173,6 +176,11 @@ class UnitTestConfig(BaseConfig): class DevelopmentConfig(BaseConfig): """For local development.""" + #: Disable redirect intercepts when using flask-debugtoolbar + DEBUG_TB_INTERCEPT_REDIRECTS = False + #: Record queries for debugging + SQLALCHEMY_RECORD_QUERIES = True + AMBUDA_ENVIRONMENT = DEVELOPMENT DEBUG = True #: If set, automatically reload Flask templates (including imports) when @@ -269,7 +277,7 @@ def _validate_config(config: BaseConfig): assert Path(google_creds).exists() -def load_config_object(name: str): +def load_config_object(name: str) -> BaseConfig: """Load and validate an application config.""" config_map = { TESTING: UnitTestConfig, diff --git a/make_celery.py b/make_celery.py new file mode 100644 index 00000000..f49d283e --- /dev/null +++ b/make_celery.py @@ -0,0 +1,19 @@ +"""Entrypoint for our Celery runner. +""" + + +import os + +from dotenv import load_dotenv + +from ambuda import create_app + +# Celery runs various background tasks (PDF parsing, OCR) and needs access to +# our Flask application so that it has the right database context. Per the +# Flask docs [1], the right way to do this is to follow the pattern below. 
+# +# [1]: https://flask.palletsprojects.com/en/2.2.x/patterns/celery/ +load_dotenv(".env") +config_env = os.environ["FLASK_ENV"] +flask_app = create_app(config_env) +celery = flask_app.extensions["celery"] diff --git a/poetry.lock b/poetry.lock index 0e72c079..391fa716 100644 --- a/poetry.lock +++ b/poetry.lock @@ -677,23 +677,40 @@ pathlib2 = "*" pytest = ["mock (>=2.0.0,<3.0)", "pytest (>=3.2.5,<4.0)"] testing = ["mock (>=2.0.0,<3.0)"] +[[package]] +name = "flake8" +version = "5.0.4" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = ">=2.5.0,<2.6.0" + [[package]] name = "flask" -version = "2.1.2" +version = "2.2.3" description = "A simple framework for building complex web applications." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Flask-2.1.2-py3-none-any.whl", hash = "sha256:fad5b446feb0d6db6aec0c3184d16a8c1f6c3e464b511649c8918a9be100b4fe"}, - {file = "Flask-2.1.2.tar.gz", hash = "sha256:315ded2ddf8a6281567edb27393010fe3406188bafbfe65a3339d5787d89e477"}, + {file = "Flask-2.2.3-py3-none-any.whl", hash = "sha256:c0bec9477df1cb867e5a67c9e1ab758de9cb4a3e52dd70681f59fa40a62b3f2d"}, + {file = "Flask-2.2.3.tar.gz", hash = "sha256:7eb373984bf1c770023fce9db164ed0c3353cd0b53f130f4693da0ca756a2e6d"}, ] [package.dependencies] click = ">=8.0" itsdangerous = ">=2.0" Jinja2 = ">=3.0" -Werkzeug = ">=2.0" +Werkzeug = ">=2.2.2" [package.extras] async = ["asgiref (>=3.2)"] @@ -701,13 +718,14 @@ dotenv = ["python-dotenv"] [[package]] name = "flask-admin" -version = "1.6.0" +version = "1.6.1" description = "Simple and extensible admin interface framework for Flask" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "Flask-Admin-1.6.0.tar.gz", hash = "sha256:424ffc79b7b0dfff051555686ea12e86e48dffacac14beaa319fb4502ac40988"}, + {file = "Flask-Admin-1.6.1.tar.gz", hash = "sha256:24cae2af832b6a611a01d7dc35f42d266c1d6c75a426b869d8cb241b78233369"}, + {file = "Flask_Admin-1.6.1-py3-none-any.whl", hash = "sha256:fd8190f1ec3355913a22739c46ed3623f1d82b8112cde324c60a6fc9b21c9406"}, ] [package.dependencies] @@ -755,16 +773,34 @@ files = [ bcrypt = ">=3.1.1" Flask = "*" +[[package]] +name = "flask-debugtoolbar" +version = "0.13.1" +description = "A toolbar overlay for debugging Flask applications." +category = "dev" +optional = false +python-versions = ">=2.7" +files = [ + {file = "Flask-DebugToolbar-0.13.1.tar.gz", hash = "sha256:0c26aa013a9813b8886857bf0ec24d28ab494114a264baf06c951cadc4dd0dae"}, + {file = "Flask_DebugToolbar-0.13.1-py3-none-any.whl", hash = "sha256:491c737f321830c06a2835784acf1fc8488fd257a0ef318810b3b6bed5f600d5"}, +] + +[package.dependencies] +Blinker = "*" +Flask = ">=0.8" +itsdangerous = "*" +werkzeug = "*" + [[package]] name = "flask-login" -version = "0.6.1" +version = "0.6.2" description = "User authentication and session management for Flask." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Flask-Login-0.6.1.tar.gz", hash = "sha256:1306d474a270a036d6fd14f45640c4d77355e4f1c67ca4331b372d3448997b8c"}, - {file = "Flask_Login-0.6.1-py3-none-any.whl", hash = "sha256:b9a4287a2d0067a7a482a23e40075e0d670f371974633fe890222dece4e02a74"}, + {file = "Flask-Login-0.6.2.tar.gz", hash = "sha256:c0a7baa9fdc448cdd3dd6f0939df72eec5177b2f7abe6cb82fc934d29caac9c3"}, + {file = "Flask_Login-0.6.2-py3-none-any.whl", hash = "sha256:1ef79843f5eddd0f143c2cd994c1b05ac83c0401dc6234c143495af9a939613f"}, ] [package.dependencies] @@ -894,14 +930,14 @@ test = ["backports.socketpair", "cffi (>=1.12.2)", "contextvars (==2.4)", "cover [[package]] name = "google-api-core" -version = "2.8.2" +version = "1.34.0" description = "Google API client core library" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "google-api-core-2.8.2.tar.gz", hash = "sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc"}, - {file = "google_api_core-2.8.2-py3-none-any.whl", hash = "sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50"}, + {file = "google-api-core-1.34.0.tar.gz", hash = "sha256:6fb380f49d19ee1d09a9722d0379042b7edb06c0112e4796c7a395078a043e71"}, + {file = "google_api_core-1.34.0-py3-none-any.whl", hash = "sha256:7421474c39d396a74dfa317dddbc69188f2336835f526087c7648f91105e32ff"}, ] [package.dependencies] @@ -909,22 +945,49 @@ google-auth = ">=1.25.0,<3.0dev" googleapis-common-protos = ">=1.56.2,<2.0dev" grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} grpcio-status = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.15.0,<5.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.0.0dev" requests = ">=2.18.0,<3.0.0dev" [package.extras] grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] + +[[package]] +name = "google-api-core" +version = "2.11.0" +description = "Google API client core library" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.11.0.tar.gz", hash = "sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22"}, + {file = "google_api_core-2.11.0-py3-none-any.whl", hash = "sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0dev" +googleapis-common-protos = ">=1.56.2,<2.0dev" +grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} +grpcio-status = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)", "grpcio-status (>=1.49.1,<2.0dev)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] [[package]] name = "google-auth" -version = "2.9.0" +version = "2.17.1" description = "Google Authentication Library" category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" files = [ - {file = 
"google-auth-2.9.0.tar.gz", hash = "sha256:3b2f9d2f436cc7c3b363d0ac66470f42fede249c3bafcc504e9f0bcbe983cff0"}, - {file = "google_auth-2.9.0-py2.py3-none-any.whl", hash = "sha256:75b3977e7e22784607e074800048f44d6a56df589fb2abe58a11d4d20c97c314"}, + {file = "google-auth-2.17.1.tar.gz", hash = "sha256:8f379b46bad381ad2a0b989dfb0c13ad28d3c2a79f27348213f8946a1d15d55a"}, + {file = "google_auth-2.17.1-py2.py3-none-any.whl", hash = "sha256:357ff22a75b4c0f6093470f21816a825d2adee398177569824e37b6c10069e19"}, ] [package.dependencies] @@ -936,46 +999,47 @@ six = ">=1.9.0" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["pyopenssl (>=20.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0dev)"] [[package]] name = "google-cloud-vision" -version = "3.1.1" -description = "Cloud Vision API API client library" +version = "3.4.1" +description = "Google Cloud Vision API client library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-vision-3.1.1.tar.gz", hash = "sha256:5fc611dad4c481c61e37a8f855f5dae00694fae45bc5a6d643275cd1461715b2"}, - {file = "google_cloud_vision-3.1.1-py2.py3-none-any.whl", hash = "sha256:dad3792820cedef8ed8271a503e4bb33b3a227cbf217586f3d993a1e0255ec7f"}, + {file = "google-cloud-vision-3.4.1.tar.gz", hash = "sha256:56d048eecf09bd71b13246dc4597c6b28ac7d1105034675fdced93d259a2e9fd"}, + {file = "google_cloud_vision-3.4.1-py2.py3-none-any.whl", hash = "sha256:30d9017e928fba402af2f925077ddebdf977a2ffbc7df3d8bc2b8e5bee6e8dcb"}, ] [package.dependencies] -google-api-core = {version = ">=1.32.0,<2.0.0 || >=2.8.0,<3.0.0dev", extras = ["grpc"]} -proto-plus = ">=1.22.0,<2.0.0dev" -protobuf = ">=3.19.0,<5.0.0dev" - -[package.extras] -libcst = ["libcst (>=0.2.5)"] +google-api-core = {version = ">=1.34.0,<2.0.0 || >=2.11.0,<3.0.0dev", extras = ["grpc"]} +proto-plus = [ + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, + {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, +] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" [[package]] name = "googleapis-common-protos" -version = "1.56.3" +version = "1.59.0" description = "Common protobufs used in Google APIs" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.56.3.tar.gz", hash = "sha256:6f1369b58ed6cf3a4b7054a44ebe8d03b29c309257583a2bbdc064cd1e4a1442"}, - {file = "googleapis_common_protos-1.56.3-py2.py3-none-any.whl", hash = "sha256:87955d7b3a73e6e803f2572a33179de23989ebba725e05ea42f24838b792e461"}, + {file = "googleapis-common-protos-1.59.0.tar.gz", hash = "sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44"}, + {file = "googleapis_common_protos-1.59.0-py2.py3-none-any.whl", hash = "sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f"}, ] [package.dependencies] -protobuf = ">=3.15.0,<5.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" [package.extras] -grpc = ["grpcio (>=1.0.0,<2.0.0dev)"] +grpc = ["grpcio (>=1.44.0,<2.0.0dev)"] [[package]] name = "greenlet" @@ -1248,6 
+1312,24 @@ files = [ {file = "invoke-1.7.1.tar.gz", hash = "sha256:7b6deaf585eee0a848205d0b8c0014b9bf6f287a8eb798818a642dff1df14b19"}, ] +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + [[package]] name = "itsdangerous" version = "2.1.2" @@ -1869,14 +1951,14 @@ wcwidth = "*" [[package]] name = "proto-plus" -version = "1.22.0" +version = "1.22.2" description = "Beautiful, Pythonic protocol buffers." category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "proto-plus-1.22.0.tar.gz", hash = "sha256:c2e6693fdf68c405a6428226915a8625d21d0513793598ae3287a1210478d8ec"}, - {file = "proto_plus-1.22.0-py3-none-any.whl", hash = "sha256:a27192d8cdc54e044f137b4c9053c9108cf5c065b46d067f1bcd389a911faf5b"}, + {file = "proto-plus-1.22.2.tar.gz", hash = "sha256:0e8cda3d5a634d9895b75c573c9352c16486cb75deb0e078b5fda34db4243165"}, + {file = "proto_plus-1.22.2-py3-none-any.whl", hash = "sha256:de34e52d6c9c6fcd704192f09767cb561bb4ee64e70eede20b0834d841f0be4d"}, ] [package.dependencies] @@ -1887,26 +1969,57 @@ testing = ["google-api-core[grpc] (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.21.6" +version = "3.20.3" +description = "Protocol Buffers" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, + {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"}, + {file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"}, + {file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"}, + {file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"}, + {file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"}, + {file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"}, + {file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"}, + {file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"}, + {file = 
"protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"}, + {file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"}, + {file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"}, + {file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"}, + {file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"}, + {file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"}, + {file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"}, + {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, +] + +[[package]] +name = "protobuf" +version = "4.22.1" description = "" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-4.21.6-cp310-abi3-win32.whl", hash = "sha256:49f88d56a9180dbb7f6199c920f5bb5c1dd0172f672983bb281298d57c2ac8eb"}, - {file = "protobuf-4.21.6-cp310-abi3-win_amd64.whl", hash = "sha256:7a6cc8842257265bdfd6b74d088b829e44bcac3cca234c5fdd6052730017b9ea"}, - {file = "protobuf-4.21.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ba596b9ffb85c909fcfe1b1a23136224ed678af3faf9912d3fa483d5f9813c4e"}, - {file = "protobuf-4.21.6-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4143513c766db85b9d7c18dbf8339673c8a290131b2a0fe73855ab20770f72b0"}, - {file = "protobuf-4.21.6-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:b6cea204865595a92a7b240e4b65bcaaca3ad5d2ce25d9db3756eba06041138e"}, - {file = "protobuf-4.21.6-cp37-cp37m-win32.whl", hash = "sha256:9666da97129138585b26afcb63ad4887f602e169cafe754a8258541c553b8b5d"}, - {file = "protobuf-4.21.6-cp37-cp37m-win_amd64.whl", hash = "sha256:308173d3e5a3528787bb8c93abea81d5a950bdce62840d9760effc84127fb39c"}, - {file = "protobuf-4.21.6-cp38-cp38-win32.whl", hash = "sha256:aa29113ec901281f29d9d27b01193407a98aa9658b8a777b0325e6d97149f5ce"}, - {file = "protobuf-4.21.6-cp38-cp38-win_amd64.whl", hash = "sha256:8f9e60f7d44592c66e7b332b6a7b4b6e8d8b889393c79dbc3a91f815118f8eac"}, - {file = "protobuf-4.21.6-cp39-cp39-win32.whl", hash = "sha256:80e6540381080715fddac12690ee42d087d0d17395f8d0078dfd6f1181e7be4c"}, - {file = "protobuf-4.21.6-cp39-cp39-win_amd64.whl", hash = "sha256:77b355c8604fe285536155286b28b0c4cbc57cf81b08d8357bf34829ea982860"}, - {file = "protobuf-4.21.6-py2.py3-none-any.whl", hash = "sha256:07a0bb9cc6114f16a39c866dc28b6e3d96fa4ffb9cc1033057412547e6e75cb9"}, - {file = "protobuf-4.21.6-py3-none-any.whl", hash = "sha256:c7c864148a237f058c739ae7a05a2b403c0dfa4ce7d1f3e5213f352ad52d57c6"}, - {file = "protobuf-4.21.6.tar.gz", hash = 
"sha256:6b1040a5661cd5f6e610cbca9cfaa2a17d60e2bb545309bc1b278bb05be44bdd"}, + {file = "protobuf-4.22.1-cp310-abi3-win32.whl", hash = "sha256:85aa9acc5a777adc0c21b449dafbc40d9a0b6413ff3a4f77ef9df194be7f975b"}, + {file = "protobuf-4.22.1-cp310-abi3-win_amd64.whl", hash = "sha256:8bc971d76c03f1dd49f18115b002254f2ddb2d4b143c583bb860b796bb0d399e"}, + {file = "protobuf-4.22.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:5917412347e1da08ce2939eb5cd60650dfb1a9ab4606a415b9278a1041fb4d19"}, + {file = "protobuf-4.22.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:9e12e2810e7d297dbce3c129ae5e912ffd94240b050d33f9ecf023f35563b14f"}, + {file = "protobuf-4.22.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:953fc7904ef46900262a26374b28c2864610b60cdc8b272f864e22143f8373c4"}, + {file = "protobuf-4.22.1-cp37-cp37m-win32.whl", hash = "sha256:6e100f7bc787cd0a0ae58dbf0ab8bbf1ee7953f862b89148b6cf5436d5e9eaa1"}, + {file = "protobuf-4.22.1-cp37-cp37m-win_amd64.whl", hash = "sha256:87a6393fa634f294bf24d1cfe9fdd6bb605cbc247af81b9b10c4c0f12dfce4b3"}, + {file = "protobuf-4.22.1-cp38-cp38-win32.whl", hash = "sha256:e3fb58076bdb550e75db06ace2a8b3879d4c4f7ec9dd86e4254656118f4a78d7"}, + {file = "protobuf-4.22.1-cp38-cp38-win_amd64.whl", hash = "sha256:651113695bc2e5678b799ee5d906b5d3613f4ccfa61b12252cfceb6404558af0"}, + {file = "protobuf-4.22.1-cp39-cp39-win32.whl", hash = "sha256:67b7d19da0fda2733702c2299fd1ef6cb4b3d99f09263eacaf1aa151d9d05f02"}, + {file = "protobuf-4.22.1-cp39-cp39-win_amd64.whl", hash = "sha256:b8700792f88e59ccecfa246fa48f689d6eee6900eddd486cdae908ff706c482b"}, + {file = "protobuf-4.22.1-py3-none-any.whl", hash = "sha256:3e19dcf4adbf608924d3486ece469dd4f4f2cf7d2649900f0efcd1a84e8fd3ba"}, + {file = "protobuf-4.22.1.tar.gz", hash = "sha256:dce7a55d501c31ecf688adb2f6c3f763cf11bc0be815d1946a84d74772ab07a7"}, ] [[package]] @@ -2017,6 +2130,18 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +[[package]] +name = "pyflakes" +version = "2.5.0" +description = "passive checker of Python programs" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, +] + [[package]] name = "pygments" version = "2.12.0" @@ -2198,6 +2323,36 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "pytest-cover" +version = "3.0.0" +description = "Pytest plugin for measuring coverage. Forked from `pytest-cov`." +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "pytest-cover-3.0.0.tar.gz", hash = "sha256:5bdb6c1cc3dd75583bb7bc2c57f5e1034a1bfcb79d27c71aceb0b16af981dbf4"}, + {file = "pytest_cover-3.0.0-py2.py3-none-any.whl", hash = "sha256:578249955eb3b5f3991209df6e532bb770b647743b7392d3d97698dc02f39ebb"}, +] + +[package.dependencies] +pytest-cov = ">=2.0" + +[[package]] +name = "pytest-coverage" +version = "0.0" +description = "Pytest plugin for measuring coverage. Forked from `pytest-cov`." 
+category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "pytest-coverage-0.0.tar.gz", hash = "sha256:db6af2cbd7e458c7c9fd2b4207cee75258243c8a81cad31a7ee8cfad5be93c05"}, + {file = "pytest_coverage-0.0-py2.py3-none-any.whl", hash = "sha256:dedd084c5e74d8e669355325916dc011539b190355021b037242514dee546368"}, +] + +[package.dependencies] +pytest-cover = "*" + [[package]] name = "python-dateutil" version = "2.8.2" @@ -2415,25 +2570,25 @@ files = [ [[package]] name = "requests" -version = "2.27.1" +version = "2.28.2" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7, <4" files = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "roman" @@ -2462,33 +2617,6 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" -[[package]] -name = "ruff" -version = "0.0.260" -description = "An extremely fast Python linter, written in Rust." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.0.260-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:c559650b623f3fbdc39c7ed1bcb064765c666a53ee738c53d1461afbf3f23db2"}, - {file = "ruff-0.0.260-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:90ff1479e292a84c388a8a035d223247ddeea5f6760752a9142b88b6d59ac334"}, - {file = "ruff-0.0.260-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25584d1b9f445fde72651caab97e7430a4c5bfd2a0ce9af39868753826cba10d"}, - {file = "ruff-0.0.260-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8032e35357384a29791c75194a71e314031171eb0731fcaa872dfaf4c1f4470a"}, - {file = "ruff-0.0.260-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e4fa7293f97c021825b3b72f2bf53f0eb4f59625608a889678c1fc6660f412d"}, - {file = "ruff-0.0.260-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8bec0271e2c8cd36bcf915cb9f6a93e40797a3ff3d2cda4ca87b7bed9e598472"}, - {file = "ruff-0.0.260-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e075a61aaff8ebe56172217f0ac14c5df9637b289bf161ac697445a9003d5c2"}, - {file = "ruff-0.0.260-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8678f54eb2696481618902a10c3cb28325f3323799af99997ad6f06005ea4f5"}, - {file = "ruff-0.0.260-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57d9f0bfdef739b76aa3112b9182a214f0f34589a2659f88353492c7670fe2fe"}, - {file = "ruff-0.0.260-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ec1f77219ba5adaa194289cb82ba924ff2ed931fd00b8541d66a1724c89fbc9"}, - {file = "ruff-0.0.260-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:aae2170a7ec6f7fc4a73db30aa7aa7fce936176bf66bf85f77f69ddd1dd4a665"}, - {file = "ruff-0.0.260-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5f847b72ef994ab88e9da250c7eb5cbb3f1555b92a9f22c5ed1c27a44b7e98d6"}, - {file = "ruff-0.0.260-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6dd705d4eff405c2b70513188fbff520f49db6df91f0d5e8258c5d469efa58bc"}, - {file = "ruff-0.0.260-py3-none-win32.whl", hash = "sha256:3866a96b2ef92c7d837ba6bf8fc9dd125a67886f1c5512ad6fa5d5fefaceff87"}, - {file = "ruff-0.0.260-py3-none-win_amd64.whl", hash = "sha256:0733d524946decbd4f1e63f7dc26820f5c1e6c31da529ba20fb995057f8e79b1"}, - {file = "ruff-0.0.260-py3-none-win_arm64.whl", hash = "sha256:12542a26f189a5a10c719bfa14d415d0511ac05e5c9ff5e79cc9d5cc50b81bc8"}, - {file = "ruff-0.0.260.tar.gz", hash = "sha256:ea8f94262f33b81c47ee9d81f455b144e94776f5c925748cb0c561a12206eae1"}, -] - [[package]] name = "selenium" version = "4.2.0" @@ -2507,36 +2635,43 @@ urllib3 = {version = ">=1.26,<2.0", extras = ["secure", "socks"]} [[package]] name = "sentry-sdk" -version = "1.6.0" +version = "1.18.0" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.6.0.tar.gz", hash = "sha256:b82ad57306d5546713f15d5d70daea0408cf7f998c7566db16e0e6257e51e561"}, - {file = "sentry_sdk-1.6.0-py2.py3-none-any.whl", hash = "sha256:ddbd191b6f4e696b7845b4d87389898ae1207981faf114f968a57363aa6be03c"}, + {file = "sentry-sdk-1.18.0.tar.gz", hash = "sha256:d07b9569a151033b462f7a7113ada94cc41ecf49daa83d35f5f852a0b9cf3b44"}, + {file = "sentry_sdk-1.18.0-py2.py3-none-any.whl", hash = "sha256:714203a9adcac4a4a35e348dc9d3e294ad0200a66cdca26c068967d728f34fcb"}, ] [package.dependencies] certifi = "*" -urllib3 = ">=1.10.0" +urllib3 = 
{version = ">=1.26.11", markers = "python_version >= \"3.6\""} [package.extras] aiohttp = ["aiohttp (>=3.5)"] +arq = ["arq (>=0.23)"] beam = ["apache-beam (>=2.12)"] bottle = ["bottle (>=0.12.13)"] celery = ["celery (>=3)"] chalice = ["chalice (>=1.16.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] flask = ["blinker (>=1.1)", "flask (>=0.11)"] httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] pure-eval = ["asttokens", "executing", "pure-eval"] +pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] rq = ["rq (>=0.6)"] sanic = ["sanic (>=0.8)"] sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] tornado = ["tornado (>=5)"] [[package]] @@ -2822,6 +2957,32 @@ postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] sqlcipher = ["sqlcipher3-binary"] +[[package]] +name = "swig" +version = "4.1.1" +description = "SWIG is a software development tool that connects programs written in C and C++ with a variety of high-level programming languages." +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "swig-4.1.1-py2.py3-none-macosx_10_9_universal2.whl", hash = "sha256:42c6c203dba1c1afa60c8e3c6a4da7f39ce95a44bf0bf5b0f5cb16aa6caa51fe"}, + {file = "swig-4.1.1-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:fe9b0a49190c484816a5ad020a88fa35b42628fb6c5f3d4a6f3da5f3bb70b31a"}, + {file = "swig-4.1.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:cee47d9f21bc34fcc0bcb529795313041e589bf4b69fc7bffa1ef325dae0972f"}, + {file = "swig-4.1.1-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eb403545dbe2e16a612abc7cfce5e29da2ef9b2e81944fb7669fdd3a22810f4"}, + {file = "swig-4.1.1-py2.py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0bb43aecb5043be8cabd94e391b7c4eed2b900e2183b86be4f9d37da13dc43e"}, + {file = "swig-4.1.1-py2.py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ff3aa551c42aee4170d42667b11f55bdd2ec43532717b03e6a10b97604b438c"}, + {file = "swig-4.1.1-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e669f7d82e34a866aef1c7d7c2e9d05c5dad033fd7094c02898a85d1bc5905d7"}, + {file = "swig-4.1.1-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3f9da5564718bb81234dd497dc265025b0456c6c9378dfa8206cda56a7fa65ba"}, + {file = "swig-4.1.1-py2.py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:326a1e68dc5531f1ccb16d314d33afb8c964294d14111912ef069e90573c0b2a"}, + {file = "swig-4.1.1-py2.py3-none-musllinux_1_1_i686.whl", hash = "sha256:1f8d43e6b29d6024374c4bcafa88cb5149f3e335d13db2556829d94dad1178eb"}, + {file = "swig-4.1.1-py2.py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:c51a5d9d6791151a42bcf5524ab33ace6c206d8fa874f75961e91649ba5adb16"}, + {file = "swig-4.1.1-py2.py3-none-musllinux_1_1_s390x.whl", hash = "sha256:32f91c83ed5cb09b80ef35c38aedbb80bd495d18716e6ca7ff2f9ec1f39ad8bc"}, + {file = "swig-4.1.1-py2.py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:5df85ef91652d928fa2c45c4956def2ae3beae0957013c559152bb0da6643f4b"}, + {file = "swig-4.1.1-py2.py3-none-win32.whl", hash = "sha256:bc753a7417a0a232fc34add810d7e46e1c78f49342f704a1a38a5ac920de6ca0"}, + {file = "swig-4.1.1-py2.py3-none-win_amd64.whl", hash = "sha256:20a00158ddea0c11ce3535f5b9ddd808acdeb4918c89884fba6a697a0e21c33f"}, + {file = "swig-4.1.1.tar.gz", hash = 
"sha256:7507e9ccd394a84dc080896277f72fda8393fa122e2e194f47f32fbf3a4cd564"}, +] + [[package]] name = "text-unidecode" version = "1.3" @@ -3002,14 +3163,14 @@ testing = ["coverage", "pytest", "pytest-randomly", "pytest-xdist"] [[package]] name = "urllib3" -version = "1.26.9" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, - {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, ] [package.dependencies] @@ -3018,12 +3179,25 @@ cryptography = {version = ">=1.3.4", optional = true, markers = "extra == \"secu idna = {version = ">=2.0.0", optional = true, markers = "extra == \"secure\""} pyOpenSSL = {version = ">=0.14", optional = true, markers = "extra == \"secure\""} PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7,<2.0", optional = true, markers = "extra == \"socks\""} +urllib3-secure-extra = {version = "*", optional = true, markers = "extra == \"secure\""} [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "urllib3-secure-extra" +version = "0.1.0" +description = "Marker library to detect whether urllib3 was installed with the deprecated [secure] extra" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "urllib3-secure-extra-0.1.0.tar.gz", hash = "sha256:ee9409cbfeb4b8609047be4c32fb4317870c602767e53fd8a41005ebe6a41dff"}, + {file = "urllib3_secure_extra-0.1.0-py2.py3-none-any.whl", hash = "sha256:f7adcb108b4d12a4b26b99eb60e265d087f435052a76aefa396b6ee85e9a6ef9"}, +] + [[package]] name = "vine" version = "5.0.0" @@ -3104,16 +3278,19 @@ files = [ [[package]] name = "werkzeug" -version = "2.1.2" +version = "2.2.3" description = "The comprehensive WSGI web application library." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, - {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, + {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"}, + {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"}, ] +[package.dependencies] +MarkupSafe = ">=2.1.1" + [package.extras] watchdog = ["watchdog"] @@ -3331,4 +3508,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "01b15e609ba4200b7064935630b59b5212eee0c6602e455e464f160c4292d640" +content-hash = "fa0f653090e10ebd7f0095bdf219475b419de9902174b6bea7074d5b52fafed6" diff --git a/pyproject.toml b/pyproject.toml index 0942c876..f573299a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,19 +42,16 @@ dnspython = "2.2.1" docutils = "0.17.1" email-validator = "1.2.1" fabric = "2.7.0" -Flask = "2.1.2" -Flask-Admin = "1.6.0" +Flask = "2.2.3" +Flask-Admin = "1.6.1" Flask-Babel = "2.0.0" Flask-Bcrypt = "1.0.1" -Flask-Login = "0.6.1" +Flask-Login = "0.6.2" Flask-Mail = "0.9.1" Flask-WTF = "1.0.1" fonttools = "4.33.3" gevent = "21.12.0" -google-api-core = "2.8.2" -google-auth = "2.9.0" -google-cloud-vision = "3.1.1" -googleapis-common-protos = "1.56.3" +google-cloud-vision = "3.4.1" greenlet = "1.1.2" grpcio = "1.47.0" grpcio-status = "1.47.0" @@ -93,8 +90,6 @@ platformdirs = "2.5.2" pluggy = "1.0.0" prometheus-client = "0.14.1" prompt-toolkit = "3.0.30" -proto-plus = "1.22.0" -protobuf = "4.21.6" psutil = "5.9.1" py = "1.11.0" pyasn1 = "0.4.8" @@ -116,11 +111,11 @@ pytz = "2022.2.1" PyYAML = "6.0" redis = "4.3.4" regex = "2022.6.2" -requests = "2.27.1" +requests = "2.28.2" roman = "3.3" rsa = "4.8" selenium = "4.2.0" -sentry-sdk = "1.6.0" +sentry-sdk = "1.18.0" six = "1.16.0" sniffio = "1.2.0" snowballstemmer = "2.2.0" @@ -143,12 +138,11 @@ typer = "0.6.1" typing-extensions = "4.2.0" uc-micro-py = "1.0.1" unicodedata2 = "14.0.0" -urllib3 = "1.26.9" vine = "5.0.0" watchdog = "2.1.9" watchdog-gevent = "0.1.1" wcwidth = "0.2.5" -Werkzeug = "2.1.2" +Werkzeug = "2.2.3" wrapt = "1.14.1" wsproto = "1.1.0" WTForms = "3.0.1" @@ -158,15 +152,16 @@ zipp = "3.8.0" [tool.poetry.dev-dependencies] -pytest = "^7.1.2" black = "^22.3.0" -setuptools = "^57.5.0" -swig = "^4.0.2" -isort = "^5.10.1" flake8 = "5.0.4" +flask-debugtoolbar = "0.13.1" +isort = "^5.10.1" +pytest = "^7.1.2" pytest-cov = "4.0.0" pytest-cover = "3.0.0" pytest-coverage = "0.0" +setuptools = "^57.5.0" +swig = "^4.0.2" [build-system] diff --git a/requirements.txt b/requirements.txt index 40eedcc1..56a8603a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -27,20 +27,21 @@ Deprecated==1.2.13 dnspython==2.2.1 docutils==0.17.1 email-validator==1.2.1 +exceptiongroup==1.1.1 fabric==2.7.0 -Flask==2.1.2 -Flask-Admin==1.6.0 +Flask==2.2.3 +Flask-Admin==1.6.1 Flask-Babel==2.0.0 Flask-Bcrypt==1.0.1 -Flask-Login==0.6.1 +flask-debugToolbar==0.13.1 +Flask-Login==0.6.2 Flask-Mail==0.9.1 +Flask-SQLAlchemy==3.0.3 Flask-WTF==1.0.1 +flask-debugtoolbar==0.13.1 fonttools==4.33.3 gevent==21.12.0 -google-api-core==2.8.2 -google-auth==2.9.0 -google-cloud-vision==3.1.1 -googleapis-common-protos==1.56.3 +google-cloud-vision==3.4.1 greenlet==1.1.2 grpcio==1.47.0 
grpcio-status==1.47.0 @@ -79,14 +80,14 @@ platformdirs==2.5.2 pluggy==1.0.0 prometheus-client==0.14.1 prompt-toolkit==3.0.30 -proto-plus==1.22.0 -protobuf==3.20.1 psutil==5.9.1 py==1.11.0 pyasn1==0.4.8 pyasn1-modules==0.2.8 pycodestyle==2.9.1 pycparser==2.21 +pyee==9.0.4 +pyflakes==2.5.0 Pygments==2.12.0 pykakasi==2.2.1 PyMuPDF==1.20.2 @@ -103,12 +104,12 @@ pytz==2022.2.1 PyYAML==6.0 redis==4.3.4 regex==2022.6.2 -requests==2.27.1 +requests==2.28.2 roman==3.3 ruff==0.0.260 rsa==4.8 selenium==4.2.0 -sentry-sdk==1.6.0 +sentry-sdk==1.18.0 six==1.16.0 sniffio==1.2.0 snowballstemmer==2.2.0 @@ -122,6 +124,7 @@ sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 SQLAlchemy==1.4.37 +swig==4.0.2 text-unidecode==1.3 toml==0.10.2 tomli==2.0.1 @@ -131,12 +134,11 @@ typer==0.6.1 typing_extensions==4.2.0 uc-micro-py==1.0.1 unicodedata2==14.0.0 -urllib3==1.26.9 vine==5.0.0 watchdog==2.1.9 watchdog-gevent==0.1.1 wcwidth==0.2.5 -Werkzeug==2.1.2 +Werkzeug==2.2.3 wrapt==1.14.1 wsproto==1.1.0 WTForms==3.0.1 diff --git a/test/ambuda/conftest.py b/test/ambuda/conftest.py index b8d12dc9..21d9ce9d 100644 --- a/test/ambuda/conftest.py +++ b/test/ambuda/conftest.py @@ -4,7 +4,8 @@ import ambuda.database as db from ambuda import create_app from ambuda.consts import BOT_USERNAME, TEXT_CATEGORIES -from ambuda.queries import get_engine, get_session +from ambuda.models.base import db as flask_sqla +from ambuda.queries import get_session def _add_dictionaries(session): @@ -21,13 +22,10 @@ def initialize_test_db(): - engine = get_engine() - assert ":memory:" in engine.url + flask_sqla.drop_all() + flask_sqla.create_all() - db.Base.metadata.drop_all(engine) - db.Base.metadata.create_all(engine) - - session = get_session() + session = flask_sqla.session # Text and parse data text = db.Text(slug="pariksha", title="parIkSA") @@ -60,20 +58,20 @@ def initialize_test_db(): _add_dictionaries(session) - # Bot + # Bot user bot = db.User(username=BOT_USERNAME, email="ambuda-bot@ambuda.org") bot.set_password("password") session.add(bot) session.flush() - # Auth + # Basic user rama = db.User(username="ramacandra", email="rama@ayodhya.com") rama.set_password("maithili") session.add(rama) session.flush() # Moderator - moderator = db.User(username="user-mod", email="mod@ambuda.org") + moderator = db.User(username="u-mod", email="mod@ambuda.org") moderator.set_password("secret password") session.add(moderator) session.flush() @@ -85,11 +83,11 @@ def initialize_test_db(): session.flush() # Deleted and Banned - deleted_admin = db.User(username="sandrocottus-deleted", email="cgm@ambuda.org") + deleted_admin = db.User(username="u-deleted-banned", email="cgm@ambuda.org") deleted_admin.set_password("maurya") deleted_admin.set_is_deleted(True) - banned = db.User(username="sikander-banned", email="alex@ambuda.org") + banned = db.User(username="u-banned", email="alex@ambuda.org") banned.set_password("onesicritus") banned.set_is_banned(True) @@ -120,7 +118,7 @@ def initialize_test_db(): session.add(banned) session.flush() - # Blog + # Blog posts post = db.BlogPost( title="Sample post", slug="sample-post", @@ -179,39 +177,45 @@ def flask_app(): @pytest.fixture() def client(flask_app): - return flask_app.test_client() + with flask_app.app_context(): + yield flask_app.test_client() @pytest.fixture() def rama_client(flask_app): - session = get_session() - user = session.query(db.User).filter_by(username="ramacandra").first() - return flask_app.test_client(user=user)
+ with flask_app.app_context(): + session = get_session() + user = session.query(db.User).filter_by(username="ramacandra").first() + return flask_app.test_client(user=user) @pytest.fixture() def moderator_client(flask_app): - session = get_session() - moderator = session.query(db.User).filter_by(username="user-mod").first() - return flask_app.test_client(user=moderator) + with flask_app.app_context(): + session = get_session() + moderator = session.query(db.User).filter_by(username="u-mod").first() + return flask_app.test_client(user=moderator) @pytest.fixture() def admin_client(flask_app): - session = get_session() - user = session.query(db.User).filter_by(username="u-admin").first() - return flask_app.test_client(user=user) + with flask_app.app_context(): + session = get_session() + user = session.query(db.User).filter_by(username="u-admin").first() + return flask_app.test_client(user=user) @pytest.fixture() def deleted_client(flask_app): - session = get_session() - user = session.query(db.User).filter_by(username="sandrocottus-deleted").first() - return flask_app.test_client(user=user) + with flask_app.app_context(): + session = get_session() + user = session.query(db.User).filter_by(username="u-deleted-banned").first() + return flask_app.test_client(user=user) @pytest.fixture() def banned_client(flask_app): - session = get_session() - user = session.query(db.User).filter_by(username="sikander-banned").first() - return flask_app.test_client(user=user) + with flask_app.app_context(): + session = get_session() + user = session.query(db.User).filter_by(username="u-banned").first() + return flask_app.test_client(user=user) diff --git a/test/ambuda/tasks/test_projects_tasks.py b/test/ambuda/tasks/test_projects_tasks.py index d178ef3b..841583ff 100644 --- a/test/ambuda/tasks/test_projects_tasks.py +++ b/test/ambuda/tasks/test_projects_tasks.py @@ -29,7 +29,6 @@ def test_create_project_inner(flask_app): title="Cool project", pdf_path=f.name, output_dir=flask_app.config["UPLOAD_FOLDER"], - app_environment=flask_app.config["AMBUDA_ENVIRONMENT"], creator_id=1, task_status=ambuda.tasks.utils.LocalTaskStatus(), ) diff --git a/test/ambuda/utils/test_json_serde.py b/test/ambuda/utils/test_json_serde.py deleted file mode 100644 index 2b529623..00000000 --- a/test/ambuda/utils/test_json_serde.py +++ /dev/null @@ -1,15 +0,0 @@ -import json -from dataclasses import dataclass - -from ambuda.utils.json_serde import AmbudaJSONEncoder - - -@dataclass -class Dummy: - foo: str - bar: str - - -def test_encode(): - dummy = Dummy(foo="oof", bar="rab") - assert json.dumps(dummy, cls=AmbudaJSONEncoder) == '{"foo": "oof", "bar": "rab"}' diff --git a/test/ambuda/views/reader/test_texts.py b/test/ambuda/views/reader/test_texts.py index 3aed5ee8..5fc548fd 100644 --- a/test/ambuda/views/reader/test_texts.py +++ b/test/ambuda/views/reader/test_texts.py @@ -20,6 +20,26 @@ def test_text__missing(client): assert resp.status_code == 404 +def test_about(client): + resp = client.get("/texts/pariksha/about") + assert resp.status_code == 200 + + +def test_about__missing(client): + resp = client.get("/texts/unknown-test/about") + assert resp.status_code == 404 + + +def test_resources(client): + resp = client.get("/texts/pariksha/resources") + assert resp.status_code == 200 + + +def test_resources__missing(client): + resp = client.get("/texts/unknown-test/resources") + assert resp.status_code == 404 + + def test_section(client): resp = client.get("/texts/pariksha/1") assert resp.status_code == 200 diff --git 
a/test/ambuda/views/test_blog.py b/test/ambuda/views/test_blog.py index ca0b2dcf..ee9fb456 100644 --- a/test/ambuda/views/test_blog.py +++ b/test/ambuda/views/test_blog.py @@ -10,16 +10,36 @@ def test_post(client): assert "Sample post" in resp.text +def test_post__missing(client): + resp = client.get("/blog/p/missing") + assert resp.status_code == 404 + + def test_create_post(admin_client): # Test that the "create post" page loads for admin users. resp = admin_client.get("/blog/create") assert resp.status_code == 200 + resp = admin_client.post( + "/blog/create", data={"title": "Title", "content": "Content"} + ) + assert resp.status_code == 302 + def test_edit_post(admin_client): - resp = admin_client.get("/blog/p/sample-post/edit") + resp = admin_client.post( + "/blog/create", data={"title": "some post", "content": "some content"} + ) + assert resp.status_code == 302 + + resp = admin_client.get("/blog/p/some-post/edit") assert resp.status_code == 200 + resp = admin_client.post( + "/blog/p/some-post/edit", data={"title": "Title", "content": "Content"} + ) + assert resp.status_code == 302 + def test_delete_post(admin_client): resp = admin_client.get("/blog/p/sample-post/delete")