diff --git a/models/problems/fact_problem_responses.sql b/models/problems/fact_problem_responses.sql
index 5b65d660..af187f41 100644
--- a/models/problems/fact_problem_responses.sql
+++ b/models/problems/fact_problem_responses.sql
@@ -1,18 +1,7 @@
 with responses as (
-    select
-        emission_time,
-        org,
-        course_key,
-        object_id,
-        {{ get_problem_id("object_id") }} as problem_id,
-        actor_id,
-        responses,
-        success,
-        attempts,
-        interaction_type
-    from {{ ref("problem_events") }}
-    where verb_id = 'https://w3id.org/xapi/acrossx/verbs/evaluated'
+    select *
+    from {{ ref("problem_responses") }}
 )
 
 select
@@ -44,25 +33,3 @@ join
 )
 left outer join
     {{ ref("dim_user_pii") }} users on toUUID(actor_id) = users.external_user_id
-group by
-    -- multi-part questions include an extra record for the response to the first
-    -- part of the question. this group by clause eliminates the duplicate record
-    emission_time,
-    org,
-    course_key,
-    course_name,
-    course_run,
-    problem_id,
-    problem_name,
-    problem_name_with_location,
-    problem_link,
-    actor_id,
-    responses,
-    success,
-    attempts,
-    course_order,
-    graded,
-    interaction_type,
-    username,
-    name,
-    email
diff --git a/models/problems/int_problem_results.sql b/models/problems/int_problem_results.sql
index bf7d89d8..0f01a137 100644
--- a/models/problems/int_problem_results.sql
+++ b/models/problems/int_problem_results.sql
@@ -11,7 +11,7 @@ with
             problem_id,
             actor_id,
             min(emission_time) as first_success_at
-        from {{ ref("fact_problem_responses") }}
+        from {{ ref("problem_responses") }}
         where
             -- clickhouse throws an error when shortening this to `where success`
             success = true
@@ -26,7 +26,7 @@ with
             problem_id,
             actor_id,
             max(emission_time) as last_response_at
-        from {{ ref("fact_problem_responses") }}
+        from {{ ref("problem_responses") }}
         where actor_id not in (select distinct actor_id from successful_responses)
         group by org, course_key, problem_id, actor_id
     ),
diff --git a/models/problems/int_problem_results_v2.sql b/models/problems/int_problem_results_v2.sql
new file mode 100644
index 00000000..7ce5c459
--- /dev/null
+++ b/models/problems/int_problem_results_v2.sql
@@ -0,0 +1,11 @@
+{{
+    config(
+        materialized="materialized_view",
+        schema=env_var("ASPECTS_XAPI_DATABASE", "xapi"),
+        engine=get_engine("ReplacingMergeTree()"),
+        primary_key="(org, course_key, object_type)",
+        order_by="(org, course_key, object_type, emission_time, actor_id, starting_position, event_id)",
+        partition_by="(toYYYYMM(emission_time))",
+        ttl=env_var("ASPECTS_DATA_TTL_EXPRESSION", ""),
+    )
+}}
diff --git a/models/problems/problem_responses.sql b/models/problems/problem_responses.sql
new file mode 100644
index 00000000..48445d31
--- /dev/null
+++ b/models/problems/problem_responses.sql
@@ -0,0 +1,53 @@
+/*
+TODO: verify that the order key is correct and fixes this issue:
+group by
+    -- multi-part questions include an extra record for the response to the first
+    -- part of the question. this group by clause eliminates the duplicate record
+*/
+{{
+    config(
+        materialized="materialized_view",
+        engine=get_engine("ReplacingMergeTree()"),
+        primary_key="(org, course_key, problem_id)",
+        order_by="(org, course_key, problem_id, actor_id, emission_time, responses, success, attempts, interaction_type)",
+        partition_by="(toYYYYMM(emission_time))",
+        ttl=env_var("ASPECTS_DATA_TTL_EXPRESSION", ""),
+    )
+}}
+
+select
+    emission_time,
+    org,
+    course_key,
+    object_id,
+    {{ get_problem_id("object_id") }} as problem_id,
+    actor_id,
+    responses,
+    success,
+    attempts,
+    interaction_type
+    PROJECTION successful_responses
+    (
+        select
+            org,
+            course_key,
+            problem_id,
+            actor_id,
+            min(emission_time) as first_success_at
+        where
+            success = true
+        group by org, course_key, problem_id, actor_id
+    )
+    PROJECTION unsuccessful_responses
+    (
+        select
+            org,
+            course_key,
+            problem_id,
+            actor_id,
+            max(emission_time) as last_response_at
+        where actor_id not in (select distinct actor_id from successful_responses)
+        group by org, course_key, problem_id, actor_id
+    )
+from {{ ref("problem_events") }}
+where verb_id = 'https://w3id.org/xapi/acrossx/verbs/evaluated'