Merge pull request #4174 from bjester/publish-errors
Skip broken exercises and properly report exceptions
bjester authored Jun 29, 2023
2 parents 8a80874 + dfd6536 commit 893525e
Showing 5 changed files with 54 additions and 23 deletions.
15 changes: 15 additions & 0 deletions contentcuration/contentcuration/tests/test_exportchannel.py
@@ -117,6 +117,18 @@ def setUp(self):
new_exercise.complete = True
new_exercise.parent = current_exercise.parent
new_exercise.save()

bad_container = create_node({'kind_id': 'topic', 'title': 'Bad topic container', 'children': []})
bad_container.complete = True
bad_container.parent = self.content_channel.main_tree
bad_container.save()

# exercise without mastery model, but marked as complete
broken_exercise = create_node({'kind_id': 'exercise', 'title': 'Bad mastery test', 'extra_fields': {}})
broken_exercise.complete = True
broken_exercise.parent = bad_container
broken_exercise.save()

thumbnail_data = create_studio_file(thumbnail_bytes, preset="exercise_thumbnail", ext="png")
file_obj = thumbnail_data["db_file"]
file_obj.contentnode = new_exercise
@@ -247,6 +259,9 @@ def test_contentnode_incomplete_not_published(self):
for node in incomplete_nodes:
assert kolibri_nodes.filter(pk=node.node_id).count() == 0

# bad exercise node should not be published (technically incomplete)
assert kolibri_models.ContentNode.objects.filter(title='Bad mastery test').count() == 0

def test_tags_greater_than_30_excluded(self):
tag_node = kolibri_models.ContentNode.objects.filter(title='kolibri tag test').first()
published_tags = tag_node.tags.all()
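For context on the new "Bad mastery test" fixture: an exercise saved with empty extra_fields carries no completion criteria, so there is nothing to derive a mastery model from. A minimal sketch of the failure mode the publish changes below guard against (plain Python, assuming migrate_extra_fields leaves an empty dict effectively empty):

# sketch: an exercise with extra_fields = {} has no completion criteria
extra_fields = {}
options = extra_fields.get('options')   # -> None
# options.get('completion_criteria') would then raise
#   AttributeError: 'NoneType' object has no attribute 'get'
# which previously could surface mid-publish instead of being reported cleanly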
28 changes: 22 additions & 6 deletions contentcuration/contentcuration/utils/publish.py
@@ -221,6 +221,17 @@ def recurse_nodes(self, node, inherited_fields): # noqa C901

# Only process nodes that are either non-topics or have non-topic descendants
if node.get_descendants(include_self=True).exclude(kind_id=content_kinds.TOPIC).exists() and node.complete:
# early validation to make sure we don't have any exercises without mastery models
# which should be unlikely when the node is complete, but just in case
if node.kind_id == content_kinds.EXERCISE:
try:
# migrates and extracts the mastery model from the exercise
_, mastery_model = parse_assessment_metadata(node)
if not mastery_model:
raise ValueError("Exercise does not have a mastery model")
except Exception as e:
logging.warning("Unable to parse exercise {id} mastery model: {error}".format(id=node.pk, error=str(e)))
return

metadata = {}

@@ -242,12 +253,12 @@ def recurse_nodes(self, node, inherited_fields): # noqa C901

kolibrinode = create_bare_contentnode(node, self.default_language, self.channel_id, self.channel_name, metadata)

if node.kind.kind == content_kinds.EXERCISE:
if node.kind_id == content_kinds.EXERCISE:
exercise_data = process_assessment_metadata(node, kolibrinode)
if self.force_exercises or node.changed or not \
node.files.filter(preset_id=format_presets.EXERCISE).exists():
create_perseus_exercise(node, kolibrinode, exercise_data, user_id=self.user_id)
elif node.kind.kind == content_kinds.SLIDESHOW:
elif node.kind_id == content_kinds.SLIDESHOW:
create_slideshow_manifest(node, user_id=self.user_id)
elif node.kind_id == content_kinds.TOPIC:
for child in node.children.all():
@@ -486,18 +497,23 @@ def create_perseus_exercise(ccnode, kolibrinode, exercise_data, user_id=None):
temppath and os.unlink(temppath)


def process_assessment_metadata(ccnode, kolibrinode):
# Get mastery model information, set to default if none provided
assessment_items = ccnode.assessment_items.all().order_by('order')
def parse_assessment_metadata(ccnode):
extra_fields = ccnode.extra_fields
if isinstance(extra_fields, basestring):
extra_fields = json.loads(extra_fields)
extra_fields = migrate_extra_fields(extra_fields) or {}
randomize = extra_fields.get('randomize') if extra_fields.get('randomize') is not None else True
return randomize, extra_fields.get('options').get('completion_criteria').get('threshold')


def process_assessment_metadata(ccnode, kolibrinode):
# Get mastery model information, set to default if none provided
assessment_items = ccnode.assessment_items.all().order_by('order')
assessment_item_ids = [a.assessment_id for a in assessment_items]

exercise_data = deepcopy(extra_fields.get('options').get('completion_criteria').get('threshold'))
randomize, mastery_criteria = parse_assessment_metadata(ccnode)

exercise_data = deepcopy(mastery_criteria)
exercise_data_type = exercise_data.get('mastery_model', "")

mastery_model = {'type': exercise_data_type or exercises.M_OF_N}
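To illustrate the refactor, here is a rough sketch of what parse_assessment_metadata extracts for a well-formed exercise; the extra_fields shape is inferred from the keys the function reads, not copied from real data:

# hypothetical extra_fields for a complete exercise node
extra_fields = {
    'randomize': True,
    'options': {
        'completion_criteria': {
            'threshold': {'mastery_model': 'm_of_n', 'm': 3, 'n': 5},
        },
    },
}

randomize = extra_fields.get('randomize') if extra_fields.get('randomize') is not None else True
threshold = extra_fields.get('options').get('completion_criteria').get('threshold')
# -> randomize == True, threshold == {'mastery_model': 'm_of_n', 'm': 3, 'n': 5}
# parse_assessment_metadata(node) returns this pair for such a node, and
# process_assessment_metadata deep-copies the threshold dict as exercise_data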
1 change: 1 addition & 0 deletions contentcuration/contentcuration/viewsets/base.py
@@ -960,6 +960,7 @@ def update_progress(progress=None):
task_object.status = states.FAILURE
task_object.traceback = traceback.format_exc()
task_object.save()
raise
finally:
if task_object.status == states.STARTED:
# No error reported, cleanup.
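The single added line matters because the handler records the failure on the task object and previously swallowed the exception. A simplified sketch of the record-then-reraise pattern (run_the_task and task_object stand in for the real wrapped work and model instance):

try:
    run_the_task()                      # placeholder for the wrapped work
except Exception:
    task_object.status = states.FAILURE
    task_object.traceback = traceback.format_exc()
    task_object.save()
    raise                               # propagate so the caller/worker also sees the failure
finally:
    if task_object.status == states.STARTED:
        # no error was recorded, so clean up as before
        ...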
23 changes: 9 additions & 14 deletions contentcuration/contentcuration/viewsets/contentnode.py
@@ -71,7 +71,7 @@
from contentcuration.viewsets.sync.constants import CREATED
from contentcuration.viewsets.sync.constants import DELETED
from contentcuration.viewsets.sync.utils import generate_update_event

from contentcuration.viewsets.sync.utils import log_sync_exception

channel_query = Channel.objects.filter(main_tree__tree_id=OuterRef("tree_id"))

@@ -908,9 +908,11 @@ def copy_from_changes(self, changes):
for copy in changes:
# Copy change will have key, must also have other attributes, defined in `copy`
# Just pass as keyword arguments here to let copy do the validation
copy_errors = self.copy(copy["key"], **copy)
if copy_errors:
copy.update({"errors": copy_errors})
try:
self.copy(copy["key"], **copy)
except Exception as e:
log_sync_exception(e, user=self.request.user, change=copy)
copy["errors"] = [str(e)]
errors.append(copy)
return errors

@@ -924,23 +926,18 @@ def copy(
excluded_descendants=None,
**kwargs
):
try:
target, position = self.validate_targeting_args(target, position)
except ValidationError as e:
return str(e)
target, position = self.validate_targeting_args(target, position)

try:
source = self.get_queryset().get(pk=from_key)
except ContentNode.DoesNotExist:
error = ValidationError("Copy source node does not exist")
return str(error)
raise ValidationError("Copy source node does not exist")

# Affected channel for the copy is the target's channel
channel_id = target.channel_id

if ContentNode.filter_by_pk(pk=pk).exists():
error = ValidationError("Copy pk already exists")
return str(error)
raise ValidationError("Copy pk already exists")

can_edit_source_channel = ContentNode.filter_edit_queryset(
ContentNode.filter_by_pk(pk=source.id), user=self.request.user
@@ -969,8 +966,6 @@ def copy(
created_by_id=self.request.user.id,
)

return None

def perform_create(self, serializer, change=None):
instance = serializer.save()

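Taken together, the changes in this viewset replace return-value error strings with raised exceptions: copy() now raises ValidationError for bad targeting, a missing source, or a pre-existing pk, and copy_from_changes() turns any exception into an error entry on the change while logging it once. A simplified sketch of the resulting flow (framework plumbing elided):

errors = []
for change in changes:
    try:
        viewset.copy(change["key"], **change)   # may raise ValidationError or anything else
    except Exception as e:
        log_sync_exception(e, user=request.user, change=change)
        change["errors"] = [str(e)]
        errors.append(change)
# every failed change ends up in errors with its message attached,
# and the exception is reported instead of being silently dropped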
10 changes: 7 additions & 3 deletions contentcuration/contentcuration/viewsets/sync/utils.py
@@ -1,5 +1,7 @@
import logging

from django.conf import settings

from contentcuration.utils.sentry import report_exception
from contentcuration.viewsets.sync.constants import ALL_TABLES
from contentcuration.viewsets.sync.constants import CHANNEL
@@ -92,7 +94,9 @@ def log_sync_exception(e, user=None, change=None, changes=None):
elif changes is not None:
contexts["changes"] = changes

report_exception(e, user=user, contexts=contexts)
# in production, we'll get duplicates in Sentry if we log the exception here.
if settings.DEBUG:
# make sure we leave a record in the logs just in case.
logging.exception(e)

# make sure we leave a record in the logs just in case.
logging.exception(e)
report_exception(e, user=user, contexts=contexts)
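With the reordering, report_exception always runs, while logging.exception fires only when settings.DEBUG is true, avoiding duplicate Sentry events in production. A usage sketch mirroring the call site added in contentnode.py:

try:
    self.copy(change["key"], **change)
except Exception as e:
    # always forwarded to Sentry via report_exception;
    # logging.exception additionally writes to the app log only when DEBUG is on
    log_sync_exception(e, user=self.request.user, change=change)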
