From 3f2af6c8a602e2825229b68a0437e4c7888d4b21 Mon Sep 17 00:00:00 2001 From: Antoine LAURENT Date: Thu, 4 Jul 2024 14:32:16 +0200 Subject: [PATCH 1/2] metabase: Remove new sync to c1 analyse database This reverts commits - 80aabad7dccbcfc95b3bf2aa40f0e5952a4aa1e0. - f3ab82f20bade965c40c7f93221431d49cf1d392. --- clevercloud/cron.json | 1 - clevercloud/crons/populate_c1_analyses.sh | 17 --- config/settings/base.py | 7 - config/settings/dev.py | 7 - itou/metabase/dataframes.py | 4 +- itou/metabase/db.py | 49 +++---- .../management/commands/metabase_to_csv.py | 4 +- .../commands/populate_c1_analyses.py | 53 -------- .../commands/populate_metabase_matomo.py | 4 +- itou/metabase/tables/c1_analyses.py | 108 --------------- tests/metabase/conftest.py | 7 +- .../management/test_populate_c1_analyses.py | 127 ------------------ 12 files changed, 23 insertions(+), 365 deletions(-) delete mode 100755 clevercloud/crons/populate_c1_analyses.sh delete mode 100644 itou/metabase/management/commands/populate_c1_analyses.py delete mode 100644 itou/metabase/tables/c1_analyses.py delete mode 100644 tests/metabase/management/test_populate_c1_analyses.py diff --git a/clevercloud/cron.json b/clevercloud/cron.json index 52769714e2..bbe3834714 100644 --- a/clevercloud/cron.json +++ b/clevercloud/cron.json @@ -29,7 +29,6 @@ "0 0 * * 1 $ROOT/clevercloud/run_management_command.sh shorten_active_sessions", "0 2 * * 1 $ROOT/clevercloud/crons/populate_metabase_matomo.sh", - "0 3 * * 1 $ROOT/clevercloud/crons/populate_c1_analyses.sh", "0 0 1 * * $ROOT/clevercloud/run_management_command.sh delete_old_emails --wet-run", "0 0 1 * * $ROOT/clevercloud/run_management_command.sh sync_cities --wet-run", diff --git a/clevercloud/crons/populate_c1_analyses.sh b/clevercloud/crons/populate_c1_analyses.sh deleted file mode 100755 index ff1c82258c..0000000000 --- a/clevercloud/crons/populate_c1_analyses.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -l - -if [[ "$CRON_ENABLED" != "1" ]]; then - exit 0 -fi - -cd 
"$APP_HOME" || exit - -set -o pipefail -set -o errexit - -OUTPUT_PATH=shared_bucket/populate_c1_analyses -mkdir -p $OUTPUT_PATH - -OUTPUT_LOG="$OUTPUT_PATH/output_$(date '+%Y-%m-%d_%H-%M-%S').log" - -django-admin populate_c1_analyses --wet-run |& tee -a "$OUTPUT_LOG" diff --git a/config/settings/base.py b/config/settings/base.py index e7312fbe24..30cb46a5ca 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -420,13 +420,6 @@ METABASE_USER = os.getenv("METABASE_USER") METABASE_PASSWORD = os.getenv("METABASE_PASSWORD") -# use almost the same settings for C1 analyses db as base PG. -C1_METABASE_HOST = os.getenv("C1_METABASE_HOST") -C1_METABASE_PORT = os.getenv("C1_METABASE_PORT") -C1_METABASE_USER = os.getenv("C1_METABASE_USER") -C1_METABASE_PASSWORD = os.getenv("C1_METABASE_PASSWORD") -C1_METABASE_DATABASE = os.getenv("C1_METABASE_DATABASE") - # Embedding signed Metabase dashboard METABASE_SITE_URL = os.getenv("METABASE_SITE_URL") METABASE_SECRET_KEY = os.getenv("METABASE_SECRET_KEY") diff --git a/config/settings/dev.py b/config/settings/dev.py index 1c983266c2..017bd07781 100644 --- a/config/settings/dev.py +++ b/config/settings/dev.py @@ -77,13 +77,6 @@ METABASE_PASSWORD = os.getenv("METABASE_PASSWORD", os.getenv("PGPASSWORD", "password")) # noqa: F405 METABASE_DATABASE = os.getenv("METABASE_DATABASE", os.getenv("PGDATABASE", "metabase")) # noqa: F405 -# use almost the same settings for C1 analyses db as base PG. 
-C1_METABASE_HOST = os.getenv("C1_METABASE_HOST", os.getenv("PGHOST", "127.0.0.1")) -C1_METABASE_PORT = os.getenv("C1_METABASE_PORT", os.getenv("PGPORT", "5432")) -C1_METABASE_USER = os.getenv("C1_METABASE_USER", os.getenv("PGUSER", "postgres")) -C1_METABASE_PASSWORD = os.getenv("C1_METABASE_PASSWORD", os.getenv("PGPASSWORD", "password")) -C1_METABASE_DATABASE = os.getenv("C1_METABASE_DATABASE", os.getenv("PGDATABASE", "metabase")) - FORCE_IC_LOGIN = os.getenv("FORCE_IC_LOGIN", "True") == "True" AWS_STORAGE_BUCKET_NAME = "dev" diff --git a/itou/metabase/dataframes.py b/itou/metabase/dataframes.py index f2894ce5cc..7ce67b956d 100644 --- a/itou/metabase/dataframes.py +++ b/itou/metabase/dataframes.py @@ -8,7 +8,7 @@ from psycopg import sql from tqdm import tqdm -from itou.metabase.db import DB_CURSOR, create_table, get_cursor, get_new_table_name, rename_table_atomically +from itou.metabase.db import MetabaseDatabaseCursor, create_table, get_new_table_name, rename_table_atomically PANDA_DATAFRAME_TO_PSQL_TYPES_MAPPING = { @@ -59,7 +59,7 @@ def store_df(df, table_name, max_attempts=5): create_table(new_table_name, columns, reset=True) for df_chunk in tqdm(df_chunks): rows = df_chunk.replace({np.nan: None}).to_dict(orient="split")["data"] - with get_cursor(DB_CURSOR.C2) as (cursor, conn): + with MetabaseDatabaseCursor() as (cursor, conn): with cursor.copy( sql.SQL("COPY {table_name} FROM STDIN WITH (FORMAT BINARY)").format( table_name=sql.Identifier(new_table_name), diff --git a/itou/metabase/db.py b/itou/metabase/db.py index 4c8d0b3a09..7cac66fee1 100644 --- a/itou/metabase/db.py +++ b/itou/metabase/db.py @@ -3,7 +3,6 @@ """ import copy -import enum import gc import os import urllib @@ -18,21 +17,17 @@ class MetabaseDatabaseCursor: - def __init__(self, settings_prefix): - self.settings_prefix = settings_prefix + def __init__(self): self.cursor = None self.connection = None - def get_setting(self, name): - return getattr(settings, f"{self.settings_prefix}{name}") - def 
__enter__(self): self.connection = psycopg.connect( - host=self.get_setting("HOST"), - port=self.get_setting("PORT"), - dbname=self.get_setting("DATABASE"), - user=self.get_setting("USER"), - password=self.get_setting("PASSWORD"), + host=settings.METABASE_HOST, + port=settings.METABASE_PORT, + dbname=settings.METABASE_DATABASE, + user=settings.METABASE_USER, + password=settings.METABASE_PASSWORD, keepalives=1, keepalives_idle=30, keepalives_interval=5, @@ -48,20 +43,6 @@ def __exit__(self, exc_type, exc_value, exc_traceback): self.connection.close() -class DB_CURSOR(enum.Enum): - C1 = "C1" - C2 = "C2" - - -def get_cursor(cursor_name): - """Allow to easily monkey patch cursor in tests""" - settings_prefix = { - DB_CURSOR.C2: "METABASE_", - DB_CURSOR.C1: "C1_METABASE_", - }[cursor_name] - return MetabaseDatabaseCursor(settings_prefix) - - def get_current_dir(): return os.path.dirname(os.path.realpath(__file__)) @@ -74,7 +55,7 @@ def get_old_table_name(table_name): return f"z_old_{table_name}" -def rename_table_atomically(from_table_name, to_table_name, cursor_name=DB_CURSOR.C2): +def rename_table_atomically(from_table_name, to_table_name): """ Rename from_table_name to to_table_name. Most of the time, we replace an existing table, so we will first rename @@ -84,7 +65,7 @@ def rename_table_atomically(from_table_name, to_table_name, cursor_name=DB_CURSO are deleted as well, they will be rebuilt by the next run of the airflow DAG `dbt_daily`. """ - with get_cursor(cursor_name) as (cur, conn): + with MetabaseDatabaseCursor() as (cur, conn): # CASCADE will drop airflow staging views (e.g. stg_structures) as well. 
cur.execute( sql.SQL("DROP TABLE IF EXISTS {} CASCADE").format(sql.Identifier(get_old_table_name(to_table_name))) @@ -110,9 +91,9 @@ def rename_table_atomically(from_table_name, to_table_name, cursor_name=DB_CURSO conn.commit() -def create_table(table_name: str, columns: list[str, str], reset=False, cursor_name=DB_CURSOR.C2): +def create_table(table_name: str, columns: list[str, str], reset=False): """Create table from columns names and types""" - with get_cursor(cursor_name) as (cursor, conn): + with MetabaseDatabaseCursor() as (cursor, conn): if reset: cursor.execute(sql.SQL("DROP TABLE IF EXISTS {table_name}").format(table_name=sql.Identifier(table_name))) create_table_query = sql.SQL("CREATE TABLE IF NOT EXISTS {table_name} ({fields_with_type})").format( @@ -152,7 +133,7 @@ def create_unversioned_tables_if_needed(): The present function creates these unversioned tables without any content, at least now all the requests can complete locally and we have a good visibility of how many tables are left to be automated. """ - with get_cursor(DB_CURSOR.C2) as (cur, conn): + with MetabaseDatabaseCursor() as (cur, conn): create_table_sql_requests = """ /* TODO @defajait DROP ASAP - use codes_insee_vs_codes_postaux instead */ CREATE TABLE IF NOT EXISTS "commune_gps" ( @@ -268,7 +249,7 @@ def create_unversioned_tables_if_needed(): print("Done.") -def populate_table(table, batch_size, querysets=None, extra_object=None, cursor_name=DB_CURSOR.C2): +def populate_table(table, batch_size, querysets=None, extra_object=None): """ About commits: a single final commit freezes the itou-metabase-db temporarily, making our GUI unable to connect to the db during this commit. 
@@ -309,9 +290,9 @@ def populate_table(table, batch_size, querysets=None, extra_object=None, cursor_ print(f"Injecting {total_rows} rows with {len(table.columns)} columns into table {table_name}:") new_table_name = get_new_table_name(table_name) - create_table(new_table_name, [(c["name"], c["type"]) for c in table.columns], reset=True, cursor_name=cursor_name) + create_table(new_table_name, [(c["name"], c["type"]) for c in table.columns], reset=True) - with get_cursor(cursor_name) as (cur, conn): + with MetabaseDatabaseCursor() as (cur, conn): def inject_chunk(table_columns, chunk, new_table_name): rows = [[c["fn"](row) for c in table_columns] for row in chunk] @@ -358,4 +339,4 @@ def inject_chunk(table_columns, chunk, new_table_name): # Trigger garbage collection to optimize memory use. gc.collect() - rename_table_atomically(new_table_name, table_name, cursor_name=cursor_name) + rename_table_atomically(new_table_name, table_name) diff --git a/itou/metabase/management/commands/metabase_to_csv.py b/itou/metabase/management/commands/metabase_to_csv.py index 5b9c92579b..ce7628832f 100644 --- a/itou/metabase/management/commands/metabase_to_csv.py +++ b/itou/metabase/management/commands/metabase_to_csv.py @@ -18,7 +18,7 @@ from psycopg import sql -from itou.metabase.db import DB_CURSOR, get_cursor +from itou.metabase.db import MetabaseDatabaseCursor from itou.utils.command import BaseCommand @@ -30,7 +30,7 @@ def add_arguments(self, parser): parser.add_argument("--table_name", action="store", dest="table_name", type=str) def handle(self, prefix, table_name, **kwargs): - with get_cursor(DB_CURSOR.C2) as (cursor, _conn): + with MetabaseDatabaseCursor() as (cursor, _conn): self.stdout.write(f"exporting {table_name=}") cursor.execute( sql.SQL( diff --git a/itou/metabase/management/commands/populate_c1_analyses.py b/itou/metabase/management/commands/populate_c1_analyses.py deleted file mode 100644 index 7bccde69ab..0000000000 --- 
a/itou/metabase/management/commands/populate_c1_analyses.py +++ /dev/null @@ -1,53 +0,0 @@ -from itou.approvals.enums import Origin -from itou.job_applications.models import JobApplication -from itou.metabase.db import DB_CURSOR, populate_table -from itou.metabase.tables.c1_analyses import JobApplicationsTable, UsersTable -from itou.users.enums import UserKind -from itou.users.models import User -from itou.utils.command import BaseCommand - - -class Command(BaseCommand): - help = "Populate c1 analyses database with data from itou database." - - def populate_users(self): - queryset = ( - User.objects.filter(kind=UserKind.JOB_SEEKER) - .select_related("created_by") - .only( - "pk", - "kind", - "date_joined", - "first_login", - "last_login", - "created_by__kind", - "created_by__is_staff", - ) - ) - populate_table(UsersTable, batch_size=10_000, querysets=[queryset], cursor_name=DB_CURSOR.C1) - - def populate_job_applications(self): - queryset = ( - JobApplication.objects.select_related("to_company", "sender_company", "sender_prescriber_organization") - .only( - "pk", - "created_at", - "processed_at", - "state", - "refusal_reason", - "origin", - "sender_kind", - "sender_prescriber_organization__kind", - "sender_prescriber_organization__is_authorized", - "sender_company__kind", - "job_seeker_id", - "to_company__kind", - ) - .exclude(origin=Origin.PE_APPROVAL) - .all() - ) - populate_table(JobApplicationsTable, batch_size=10_000, querysets=[queryset], cursor_name=DB_CURSOR.C1) - - def handle(self, **kwargs): - self.populate_users() - self.populate_job_applications() diff --git a/itou/metabase/management/commands/populate_metabase_matomo.py b/itou/metabase/management/commands/populate_metabase_matomo.py index bc1bfe8cf2..60a387b66e 100644 --- a/itou/metabase/management/commands/populate_metabase_matomo.py +++ b/itou/metabase/management/commands/populate_metabase_matomo.py @@ -13,7 +13,7 @@ from psycopg import sql from sentry_sdk.crons import monitor -from itou.metabase.db 
import DB_CURSOR, create_table, get_cursor +from itou.metabase.db import MetabaseDatabaseCursor, create_table from itou.utils import constants from itou.utils.command import BaseCommand @@ -101,7 +101,7 @@ def update_table_at_date(table_name, column_names, at, rows): table_name, [(col_name, "varchar") for col_name in column_names], ) - with get_cursor(DB_CURSOR.C2) as (cursor, conn): + with MetabaseDatabaseCursor() as (cursor, conn): cursor.execute( sql.SQL("""DELETE FROM {table_name} WHERE "Date" = {value}""").format( table_name=sql.Identifier(table_name), diff --git a/itou/metabase/tables/c1_analyses.py b/itou/metabase/tables/c1_analyses.py deleted file mode 100644 index 94244e9854..0000000000 --- a/itou/metabase/tables/c1_analyses.py +++ /dev/null @@ -1,108 +0,0 @@ -from itou.job_applications.enums import JobApplicationState -from itou.metabase.tables.job_applications import get_job_application_detailed_origin -from itou.metabase.tables.job_seekers import get_user_signup_kind -from itou.metabase.tables.utils import MetabaseTable, get_choice - - -UsersTable = MetabaseTable(name="c1_users") -UsersTable.add_columns( - [ - { - "name": "id", - "type": "integer", - "comment": "ID de l'utilisateur", - "fn": lambda o: o.pk, - }, - { - "name": "type", - "type": "varchar", - "comment": "Type d'utilisateur", - "fn": lambda o: o.kind, - }, - { - "name": "date_inscription", - "type": "date", - "comment": "Date de création de compte", - "fn": lambda o: o.date_joined, - }, - { - "name": "date_premiere_connexion", - "type": "date", - "comment": "Date de première connexion", - "fn": lambda o: o.first_login, - }, - { - "name": "date_dernier_connexion", - "type": "date", - "comment": "Date de dernière connexion", - "fn": lambda o: o.last_login, - }, - { - "name": "type_inscription", - "type": "varchar", - "comment": "Type inscription du candidat", - "fn": get_user_signup_kind, - }, - ] -) - - -JobApplicationsTable = MetabaseTable(name="c1_job_applications") 
-JobApplicationsTable.add_columns( - [ - { - "name": "id", - "type": "uuid", - "comment": "ID C1 de la candidature", - "fn": lambda o: o.pk, - }, - { - "name": "date_candidature", - "type": "date", - "comment": "Date de la candidature", - "fn": lambda o: o.created_at, - }, - { - "name": "date_traitement", - "type": "date", - "comment": "Date de dernière traitement de la candidature", - "fn": lambda o: o.processed_at, - }, - { - "name": "état", - "type": "varchar", - "comment": "Etat de la candidature", - "fn": lambda o: get_choice(choices=JobApplicationState.choices, key=o.state), - }, - { - "name": "motif_de_refus", - "type": "varchar", - "comment": "Motif de refus de la candidature", - "fn": lambda o: o.get_refusal_reason_display() if o.refusal_reason != "" else None, - }, - { - "name": "parcours_de_création", - "type": "varchar", - "comment": ( - "Parcours de création de la candidature " - "(Normale, reprise de stock AI, import agrément PE, action support...)" - ), - "fn": lambda o: o.origin, - }, - { - "name": "origine_détaillée", - "type": "varchar", - "comment": ( - "Origine détaillée de la candidature " - "(employeur EI, ACI... 
candidat, orienteur, prescripteur PE, ML...)" - ), - "fn": get_job_application_detailed_origin, - }, - { - "name": "type_structure", - "type": "varchar", - "comment": "Type de la structure destinaire de la candidature", - "fn": lambda o: o.to_company.kind, - }, - ] -) diff --git a/tests/metabase/conftest.py b/tests/metabase/conftest.py index 4caebf0eac..d48ea7631c 100644 --- a/tests/metabase/conftest.py +++ b/tests/metabase/conftest.py @@ -37,11 +37,8 @@ def __exit__(self, exc_type, exc_value, exc_traceback): if self.cursor: self.cursor.close() - def get_cursor(*args): - return FakeMetabase() - - monkeypatch.setattr(dataframes, "get_cursor", get_cursor) - monkeypatch.setattr(db, "get_cursor", get_cursor) + monkeypatch.setattr(dataframes, "MetabaseDatabaseCursor", FakeMetabase) + monkeypatch.setattr(db, "MetabaseDatabaseCursor", FakeMetabase) # This setting need to be editable in `dev` to manually test transferring data from "les emplois" to "pilotage", # but the one used in `test` should be fixed, `dev` inheriting from `test` we can't put it in settings. 
monkeypatch.setattr(settings, "METABASE_HASH_SALT", None) diff --git a/tests/metabase/management/test_populate_c1_analyses.py b/tests/metabase/management/test_populate_c1_analyses.py deleted file mode 100644 index c00f81c5f0..0000000000 --- a/tests/metabase/management/test_populate_c1_analyses.py +++ /dev/null @@ -1,127 +0,0 @@ -import datetime - -import pytest -from django.core import management -from django.db import connection -from django.utils import timezone -from pytest_django.asserts import assertNumQueries - -from itou.users.enums import UserKind -from tests.job_applications.factories import JobApplicationFactory -from tests.users.factories import EmployerFactory, JobSeekerFactory, PrescriberFactory - - -BASE_NUM_QUERIES = ( - 1 # Count users rows - + 1 # COMMIT Queryset counts (autocommit mode) - + 1 # COMMIT Create table - + 1 # Select all users elements ids (chunked_queryset) - # 3 more queries here if there are some users - + 1 # COMMIT (rename_table_atomically DROP TABLE) - + 1 # COMMIT (rename_table_atomically RENAME TABLE) - + 1 # COMMIT (rename_table_atomically DROP TABLE) - + 1 # Count job_appication row - + 1 # COMMIT Queryset counts (autocommit mode) - + 1 # COMMIT Create table - + 1 # Select all job_application elements ids (chunked_queryset) - # 3 more queries here if there are some job applications - + 1 # COMMIT (rename_table_atomically DROP TABLE) - + 1 # COMMIT (rename_table_atomically RENAME TABLE) - + 1 # COMMIT (rename_table_atomically DROP TABLE) -) - - -@pytest.mark.django_db(transaction=True) -@pytest.mark.usefixtures("metabase") -def test_populate_users(): - date_maj = datetime.date.today() + datetime.timedelta(days=-1) - - # First user - # - created by prescriber - # - logged_in recently - user_1 = JobSeekerFactory(created_by=PrescriberFactory(), last_login=timezone.now(), first_login=timezone.now()) - # Second user - # - created by an employer - user_2 = JobSeekerFactory(created_by=EmployerFactory()) - - # Third user - # - self 
created - user_3 = JobSeekerFactory(last_login=timezone.now(), first_login=timezone.now() - datetime.timedelta(days=-1)) - - with assertNumQueries( - BASE_NUM_QUERIES - + 1 # Select last user pk for current chunk - + 1 # Select users chunk - + 1 # COMMIT (inject_chunk) - ): - management.call_command("populate_c1_analyses") - - with connection.cursor() as cursor: - cursor.execute("SELECT * FROM c1_users ORDER BY id") - rows = cursor.fetchall() - - assert rows == [ - ( - user_1.pk, - UserKind.JOB_SEEKER, - datetime.date.today(), - datetime.date.today(), - datetime.date.today(), - "par prescripteur", - date_maj, - ), - ( - user_2.pk, - UserKind.JOB_SEEKER, - datetime.date.today(), - None, - None, - "par employeur", - date_maj, - ), - ( - user_3.pk, - UserKind.JOB_SEEKER, - datetime.date.today(), - datetime.date.today() - datetime.timedelta(days=-1), - datetime.date.today(), - "autonome", - date_maj, - ), - ] - - -@pytest.mark.django_db(transaction=True) -@pytest.mark.usefixtures("metabase") -def test_populate_job_applications(): - date_maj = datetime.date.today() + datetime.timedelta(days=-1) - ja = JobApplicationFactory(state="accepted") - - with assertNumQueries( - BASE_NUM_QUERIES - + 1 # Select last user pk for current chunk - + 1 # Select users chunk - + 1 # COMMIT (inject_chunk) - + 1 # Select last job application pk for current chunk - + 1 # Select job applcations chunk - + 1 # COMMIT (inject_chunk) - ): - management.call_command("populate_c1_analyses") - - with connection.cursor() as cursor: - cursor.execute("SELECT * FROM c1_job_applications ORDER BY id") - rows = cursor.fetchall() - assert len(rows) == 1 - assert rows == [ - ( - ja.pk, - ja.created_at.date(), - ja.processed_at.date(), - "Candidature acceptée", - None, - "default", - "Orienteur sans organisation", - ja.to_company.kind, - date_maj, - ), - ] From 8269c2ee5ac166aa2ce1be5638884f3cf5bad034 Mon Sep 17 00:00:00 2001 From: Antoine LAURENT Date: Thu, 4 Jul 2024 14:45:31 +0200 Subject: [PATCH 2/2] 
metabase: Add new columns to users and job applications --- .../commands/populate_metabase_emplois.py | 4 ++-- itou/metabase/tables/job_applications.py | 6 ++++++ itou/metabase/tables/job_seekers.py | 6 ++++++ .../management/test_populate_metabase_emplois.py | 15 ++++++++++++--- 4 files changed, 26 insertions(+), 5 deletions(-) diff --git a/itou/metabase/management/commands/populate_metabase_emplois.py b/itou/metabase/management/commands/populate_metabase_emplois.py index 06d343fb36..d94e9cd1a7 100644 --- a/itou/metabase/management/commands/populate_metabase_emplois.py +++ b/itou/metabase/management/commands/populate_metabase_emplois.py @@ -269,7 +269,7 @@ def populate_job_seekers(self): """ queryset = ( User.objects.filter(kind=UserKind.JOB_SEEKER) - .select_related("jobseeker_profile") + .select_related("jobseeker_profile", "created_by") .prefetch_related( Prefetch( "eligibility_diagnoses", @@ -297,7 +297,6 @@ def populate_job_seekers(self): "job_applications", queryset=JobApplication.objects.select_related("to_company"), ), - "created_by", ) .annotate( eligibility_diagnoses_count=Count("eligibility_diagnoses", distinct=True), @@ -327,6 +326,7 @@ def populate_job_applications(self): .only( "pk", "created_at", + "processed_at", "hiring_start_at", "origin", "sender_kind", diff --git a/itou/metabase/tables/job_applications.py b/itou/metabase/tables/job_applications.py index 472ec1a606..b47713451c 100644 --- a/itou/metabase/tables/job_applications.py +++ b/itou/metabase/tables/job_applications.py @@ -130,6 +130,12 @@ def get_ja_hiring_date(ja): "comment": "Date de début du contrat", "fn": lambda o: o.hiring_start_at, }, + { + "name": "date_traitement", + "type": "date", + "comment": "Date de dernier traitement de la candidature", + "fn": lambda o: o.processed_at, + }, { "name": "état", "type": "varchar", diff --git a/itou/metabase/tables/job_seekers.py b/itou/metabase/tables/job_seekers.py index 7221688db9..07f362a576 100644 --- a/itou/metabase/tables/job_seekers.py 
+++ b/itou/metabase/tables/job_seekers.py @@ -244,6 +244,12 @@ def get_table(): "comment": "Date de dernière connexion au service du candidat", "fn": lambda o: o.last_login, }, + { + "name": "date_premiere_connexion", + "type": "date", + "comment": "Date de première connexion", + "fn": lambda o: o.first_login, + }, { "name": "actif", "type": "boolean", diff --git a/tests/metabase/management/test_populate_metabase_emplois.py b/tests/metabase/management/test_populate_metabase_emplois.py index 0d8262e5e1..06efc5e18b 100644 --- a/tests/metabase/management/test_populate_metabase_emplois.py +++ b/tests/metabase/management/test_populate_metabase_emplois.py @@ -14,6 +14,7 @@ from itou.companies.models import JobDescription from itou.eligibility.models import AdministrativeCriteria from itou.geo.utils import coords_to_geometry +from itou.job_applications.enums import JobApplicationState from itou.metabase.tables.utils import hash_content from itou.users.enums import IdentityProvider, UserKind from tests.analytics.factories import DatumFactory, StatsDashboardVisitFactory @@ -139,6 +140,7 @@ def test_populate_job_seekers(): identity_provider=IdentityProvider.PE_CONNECT, jobseeker_profile__pole_emploi_id="", last_login=timezone.now(), + first_login=timezone.now(), jobseeker_profile__nir="179038704133768", post_code="33360", geocoding_score=1, @@ -209,7 +211,6 @@ def test_populate_job_seekers(): num_queries += 1 # Select job seekers chunck (with annotations) num_queries += 1 # Prefetch EligibilityDiagnosis with anotations, author_prescriber_organization and author_siae num_queries += 1 # Prefetch JobApplications with Siaes - num_queries += 1 # Prefetch created_by Users num_queries += 1 # Get QPV users num_queries += 1 # Select AI stock approvals pks num_queries += 1 # COMMIT (inject_chunk) @@ -236,6 +237,7 @@ def test_populate_job_seekers(): 1, 0, datetime.date.today(), + datetime.date.today(), 1, "33360", "33", @@ -287,6 +289,7 @@ def test_populate_job_seekers(): 0, 1, None, + 
None, 0, "", "", @@ -338,6 +341,7 @@ def test_populate_job_seekers(): 0, 1, None, + None, 0, "", "", @@ -417,7 +421,11 @@ def test_populate_job_applications(): kind="GEIQ", ) job = JobDescriptionFactory(is_active=True, company=company) - ja = JobApplicationFactory(with_geiq_eligibility_diagnosis=True, contract_type=ContractType.APPRENTICESHIP) + ja = JobApplicationFactory( + with_geiq_eligibility_diagnosis=True, + contract_type=ContractType.APPRENTICESHIP, + state=JobApplicationState.ACCEPTED, + ) ja.selected_jobs.add(job) num_queries = 1 # Select siaes for get_active_companies_pks() @@ -444,7 +452,8 @@ def test_populate_job_applications(): ja.pk, ja.created_at.date(), ja.hiring_start_at, - "Nouvelle candidature", + ja.processed_at.date(), + "Candidature acceptée", "Orienteur", "Orienteur sans organisation", None,