- {{ isChannel ? details.name : details.title }}
+ {{ isChannel ? _details.name : _details.title }}
- {{ details.description }}
+ {{ _details.description }}
-
+
{{
- details.primary_token.slice(0, 5) + '-' + details.primary_token.slice(5)
+ _details.primary_token.slice(0, 5) + '-' + _details.primary_token.slice(5)
}}
- {{ publishedDate }}
+ {{ publishedDate }}
{{ $tr('unpublishedText') }}
-
- {{ details.version }}
+
+ {{ _details.version }}
{{ defaultText }}
@@ -62,7 +62,7 @@
/>
- {{ $formatNumber(details.resource_count) }}
+ {{ $formatNumber(_details.resource_count) }}
-
+
{{ $tr('coachHeading') }}
-
+
{{ $tr('assessmentsIncludedText') }}
-
+
{{ defaultText }}
@@ -137,7 +137,7 @@
@@ -162,7 +162,7 @@
@@ -172,7 +172,7 @@
@@ -186,7 +186,7 @@
@@ -199,7 +199,7 @@
@@ -212,7 +212,7 @@
@@ -222,7 +222,7 @@
-
+
{{ translateConstant(license) }}
@@ -239,7 +239,7 @@
+ import cloneDeep from 'lodash/cloneDeep';
+ import defaultsDeep from 'lodash/defaultsDeep';
import camelCase from 'lodash/camelCase';
import orderBy from 'lodash/orderBy';
import { SCALE_TEXT, SCALE, CHANNEL_SIZE_DIVISOR } from './constants';
@@ -328,6 +330,39 @@
import Thumbnail from 'shared/views/files/Thumbnail';
import CopyToken from 'shared/views/CopyToken';
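+ // Safe fallbacks for every field the template reads, used while details are still loading.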
+ const DEFAULT_DETAILS = {
+ name: '',
+ title: '',
+ description: '',
+ thumbnail_url: null,
+ thumbnail_src: null,
+ thumbnail_encoding: null,
+ published: false,
+ version: null,
+ primary_token: null,
+ language: null,
+ last_update: null,
+ created: null,
+ last_published: null,
+ resource_count: 0,
+ resource_size: 0,
+ includes: { coach_content: 0, exercises: 0 },
+ kind_count: [],
+ languages: [],
+ accessible_languages: [],
+ licenses: [],
+ tags: [],
+ copyright_holders: [],
+ authors: [],
+ aggregators: [],
+ providers: [],
+ sample_pathway: [],
+ original_channels: [],
+ sample_nodes: [],
+ levels: [],
+ categories: [],
+ };
+
export default {
name: 'Details',
components: {
@@ -364,13 +399,19 @@
},
},
computed: {
+ _details() {
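+ // Work on a deep clone: defaultsDeep mutates its first argument, and the prop must stay untouched.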
+ const details = cloneDeep(this.details);
+ defaultsDeep(details, DEFAULT_DETAILS);
+ details.published = Boolean(details.last_published);
+ return details;
+ },
defaultText() {
// Making this a computed property so it's easier to update
return '---';
},
publishedDate() {
if (this.isChannel) {
- return this.$formatDate(this.details.last_published, {
+ return this.$formatDate(this._details.last_published, {
year: 'numeric',
month: 'long',
day: 'numeric',
@@ -382,7 +423,7 @@
return window.libraryMode;
},
sizeText() {
- const size = (this.details && this.details.resource_size) || 0;
+ const size = this._details.resource_size;
const sizeIndex = Math.max(
1,
Math.min(Math.ceil(Math.log(size / CHANNEL_SIZE_DIVISOR) / Math.log(2)), 10)
@@ -393,10 +434,10 @@
});
},
kindCount() {
- return orderBy(this.details.kind_count, ['count', 'kind_id'], ['desc', 'asc']);
+ return orderBy(this._details.kind_count, ['count', 'kind_id'], ['desc', 'asc']);
},
createdDate() {
- return this.$formatDate(this.details.created, {
+ return this.$formatDate(this._details.created, {
year: 'numeric',
month: 'long',
day: 'numeric',
@@ -406,28 +447,28 @@
return Object.keys(this.details).length;
},
sortedTags() {
- return orderBy(this.details.tags, ['count'], ['desc']);
+ return orderBy(this._details.tags, ['count'], ['desc']);
},
includesPrintable() {
const includes = [];
- if (this.details.includes.coach_content) {
+ if (this._details.includes.coach_content) {
includes.push(this.$tr('coachHeading'));
}
- if (this.details.includes.exercises) {
+ if (this._details.includes.exercises) {
includes.push(this.$tr('assessmentsIncludedText'));
}
return includes.length ? includes.join(', ') : this.defaultText;
},
licensesPrintable() {
- return this.details.licenses.map(this.translateConstant).join(', ');
+ return this._details.licenses.map(this.translateConstant).join(', ');
},
tagPrintable() {
return this.sortedTags.map(tag => tag.tag_name).join(', ');
},
levels() {
- return this.details.levels.map(level => {
+ return this._details.levels.map(level => {
level = LevelsLookup[level];
let translationKey;
if (level === 'PROFESSIONAL') {
@@ -446,7 +487,7 @@
return this.levels.join(', ');
},
categories() {
- return this.details.categories.map(category => {
+ return this._details.categories.map(category => {
category = CategoriesLookup[category];
return this.translateMetadataString(camelCase(category));
});
diff --git a/contentcuration/contentcuration/frontend/shared/views/form/DropdownWrapper.vue b/contentcuration/contentcuration/frontend/shared/views/form/DropdownWrapper.vue
index 56c38b0f58..63c39805cf 100644
--- a/contentcuration/contentcuration/frontend/shared/views/form/DropdownWrapper.vue
+++ b/contentcuration/contentcuration/frontend/shared/views/form/DropdownWrapper.vue
@@ -31,8 +31,12 @@
default: 'div',
},
menuHeight: {
- type: Number,
+ type: [Number, String],
default: 300,
+ validator: value => {
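+ // accept numeric strings too, as long as they parse to a positive integer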
+ const parsed = parseInt(value, 10);
+ return !isNaN(parsed) && parsed > 0;
+ },
},
},
data() {
diff --git a/contentcuration/contentcuration/frontend/shared/vuetify/icons.js b/contentcuration/contentcuration/frontend/shared/vuetify/icons.js
index b119d983fa..fa0ce43e61 100644
--- a/contentcuration/contentcuration/frontend/shared/vuetify/icons.js
+++ b/contentcuration/contentcuration/frontend/shared/vuetify/icons.js
@@ -20,6 +20,7 @@ export const CONTENT_KIND_ICONS = {
[ContentKindsNames.EXERCISE]: 'assignment',
[ContentKindsNames.DOCUMENT]: 'class',
[ContentKindsNames.HTML5]: 'widgets',
+ [ContentKindsNames.ZIM]: 'widgets',
};
export function getContentKindIcon(kind, isEmpty = false) {
diff --git a/contentcuration/contentcuration/frontend/shared/vuetify/theme.js b/contentcuration/contentcuration/frontend/shared/vuetify/theme.js
index 6ea2a7ca5c..a23e99dd25 100644
--- a/contentcuration/contentcuration/frontend/shared/vuetify/theme.js
+++ b/contentcuration/contentcuration/frontend/shared/vuetify/theme.js
@@ -21,6 +21,7 @@ export default function theme() {
document: '#ff3d00',
exercise: '#4db6ac',
html5: '#ff8f00',
+ zim: '#ff8f00',
slideshow: '#4ece90',
channelHighlightDefault: colors.grey.lighten3,
draggableDropZone: '#dddddd',
diff --git a/contentcuration/contentcuration/frontend/shared/vuex/indexedDBPlugin/index.js b/contentcuration/contentcuration/frontend/shared/vuex/indexedDBPlugin/index.js
index 9f4fddfb57..f8afa7445b 100644
--- a/contentcuration/contentcuration/frontend/shared/vuex/indexedDBPlugin/index.js
+++ b/contentcuration/contentcuration/frontend/shared/vuex/indexedDBPlugin/index.js
@@ -116,7 +116,7 @@ export default function IndexedDBPlugin(db, listeners = []) {
}
// omit the last fetched attribute used only in resource layer
const mods = omit(obj, [LAST_FETCHED]);
- if (Object.keys(mods).length === 0) {
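+ // only skip empty change sets for updates; creations and deletions matter even without field mods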
+ if (Object.keys(mods).length === 0 && change.type === CHANGE_TYPES.UPDATED) {
return;
}
events.emit(getEventName(change.table, change.type), {
diff --git a/contentcuration/contentcuration/frontend/shared/vuex/syncProgressPlugin/index.js b/contentcuration/contentcuration/frontend/shared/vuex/syncProgressPlugin/index.js
index 60a2324d2e..0943920c24 100644
--- a/contentcuration/contentcuration/frontend/shared/vuex/syncProgressPlugin/index.js
+++ b/contentcuration/contentcuration/frontend/shared/vuex/syncProgressPlugin/index.js
@@ -1,3 +1,4 @@
+import { liveQuery } from 'dexie';
import syncProgressModule from './syncProgressModule';
import db from 'shared/data/db';
import { CHANGES_TABLE } from 'shared/data/constants';
@@ -5,18 +6,23 @@ import { CHANGES_TABLE } from 'shared/data/constants';
const SyncProgressPlugin = store => {
store.registerModule('syncProgress', syncProgressModule);
- db.on('changes', function(changes) {
- const changesTableUpdated = changes.some(change => change.table === CHANGES_TABLE);
- if (!changesTableUpdated) {
- return;
- }
+ store.listenForIndexedDBChanges = () => {
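+ // liveQuery re-runs this query whenever the tables it reads are written to,
+ // and the query resolves to a boolean: true when an unsynced change exists.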
+ const observable = liveQuery(() => {
+ return db[CHANGES_TABLE].toCollection()
+ .filter(c => !c.synced)
+ .first(Boolean);
+ });
- db[CHANGES_TABLE].toCollection()
- .filter(c => !c.synced)
- .limit(1)
- .count()
- .then(count => store.commit('SET_UNSAVED_CHANGES', count > 0));
- });
+ const subscription = observable.subscribe({
+ next(result) {
+ store.commit('SET_UNSAVED_CHANGES', result);
+ },
+ error() {
+ subscription.unsubscribe();
+ },
+ });
+ store.stopListeningForIndexedDBChanges = subscription.unsubscribe;
+ };
};
export default SyncProgressPlugin;
diff --git a/contentcuration/contentcuration/middleware/locale.py b/contentcuration/contentcuration/middleware/locale.py
index 5d526f3b8b..965312c0fa 100644
--- a/contentcuration/contentcuration/middleware/locale.py
+++ b/contentcuration/contentcuration/middleware/locale.py
@@ -1,4 +1,6 @@
+from django.conf import settings
from django.middleware.locale import LocaleMiddleware
+from django.utils import translation
LOCALE_EXEMPT = "_locale_exempt"
@@ -15,6 +17,8 @@ def _is_exempt(self, obj):
def process_view(self, request, callback, callback_args, callback_kwargs):
if self._is_exempt(callback):
setattr(request, LOCALE_EXEMPT, True)
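+ # locale-exempt views are always rendered in the default language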
+ translation.activate(settings.LANGUAGE_CODE)
+ request.LANGUAGE_CODE = translation.get_language()
return None
def process_response(self, request, response):
diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py
index 2a9f99633f..86a400931c 100644
--- a/contentcuration/contentcuration/models.py
+++ b/contentcuration/contentcuration/models.py
@@ -1971,6 +1971,9 @@ def copy_to(
def copy(self):
return self.copy_to()
+ def is_publishable(self):
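+ # publishable = complete and at least one non-topic node in the subtree (self included)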
+ return self.complete and self.get_descendants(include_self=True).exclude(kind_id=content_kinds.TOPIC).exists()
+
class Meta:
verbose_name = "Topic"
verbose_name_plural = "Topics"
diff --git a/contentcuration/contentcuration/static/img/kolibri_placeholder.png b/contentcuration/contentcuration/static/img/kolibri_placeholder.png
new file mode 100644
index 0000000000..c12008541e
Binary files /dev/null and b/contentcuration/contentcuration/static/img/kolibri_placeholder.png differ
diff --git a/contentcuration/contentcuration/tests/test_contentnodes.py b/contentcuration/contentcuration/tests/test_contentnodes.py
index e2dfc0a846..57496362cf 100644
--- a/contentcuration/contentcuration/tests/test_contentnodes.py
+++ b/contentcuration/contentcuration/tests/test_contentnodes.py
@@ -78,7 +78,7 @@ def _check_files_for_object(source, copy):
def _check_tags_for_node(source, copy):
- source_tags = source.tags.all().order_by("tag_name")
+ source_tags = source.tags.all().order_by("tag_name").distinct("tag_name")
copy_tags = copy.tags.all().order_by("tag_name")
assert len(source_tags) == len(copy_tags)
for source_tag, copy_tag in zip(source_tags, copy_tags):
@@ -415,6 +415,40 @@ def test_duplicate_nodes_with_tags(self):
num_test_tags_before, ContentTag.objects.filter(tag_name="test").count()
)
+ def test_duplicate_nodes_with_duplicate_tags(self):
+ """
+ Ensures that nodes with duplicated tags can be copied without creating extra tags
+ """
+ new_channel = testdata.channel()
+
+ tree = TreeBuilder(tags=True)
+ self.channel.main_tree = tree.root
+ self.channel.save()
+
+ # Add a legacy tag with a set channel to test the tag copying behaviour.
+ legacy_tag = ContentTag.objects.create(tag_name="test", channel=self.channel)
+ # Add an identical tag without a set channel to make sure it gets reused.
+ identical_tag = ContentTag.objects.create(tag_name="test")
+
+ num_test_tags_before = ContentTag.objects.filter(tag_name="test").count()
+
+ # Add both the legacy and the new style tag and ensure that it doesn't break.
+ self.channel.main_tree.get_children().first().tags.add(legacy_tag)
+ self.channel.main_tree.get_children().first().tags.add(identical_tag)
+
+ self.channel.main_tree.copy_to(new_channel.main_tree, batch_size=1000)
+
+ _check_node_copy(
+ self.channel.main_tree,
+ new_channel.main_tree.get_children().last(),
+ original_channel_id=self.channel.id,
+ channel=new_channel,
+ )
+
+ self.assertEqual(
+ num_test_tags_before, ContentTag.objects.filter(tag_name="test").count()
+ )
+
def test_duplicate_nodes_deep(self):
"""
Ensures that when we copy nodes in a deep way, a full copy happens
@@ -533,8 +567,7 @@ def test_duplicate_nodes_freeze_authoring_data_no_edit(self):
def test_duplicate_nodes_no_freeze_authoring_data_edit(self):
"""
- Ensures that when we copy nodes, we can exclude nodes from the descendant
- hierarchy
+ Ensures that when we copy nodes, we can modify fields if they are not frozen for editing
"""
new_channel = testdata.channel()
@@ -553,8 +586,7 @@ def test_duplicate_nodes_no_freeze_authoring_data_edit(self):
def test_duplicate_nodes_freeze_authoring_data_edit(self):
"""
- Ensures that when we copy nodes, we can exclude nodes from the descendant
- hierarchy
+ Ensures that when we copy nodes, we can't modify fields if they are frozen for editing
"""
new_channel = testdata.channel()
diff --git a/contentcuration/contentcuration/tests/test_exportchannel.py b/contentcuration/contentcuration/tests/test_exportchannel.py
index 8eeeb064ec..36e331c713 100644
--- a/contentcuration/contentcuration/tests/test_exportchannel.py
+++ b/contentcuration/contentcuration/tests/test_exportchannel.py
@@ -1,6 +1,5 @@
from __future__ import absolute_import
-import json
import os
import random
import string
@@ -10,6 +9,7 @@
from django.core.management import call_command
from django.db import connections
from kolibri_content import models as kolibri_models
+from kolibri_content.router import cleanup_content_database_connection
from kolibri_content.router import get_active_content_database
from kolibri_content.router import set_active_content_database
from le_utils.constants import exercises
@@ -29,6 +29,7 @@
from .testdata import slideshow
from .testdata import thumbnail_bytes
from contentcuration import models as cc
+from contentcuration.utils.publish import ChannelIncompleteError
from contentcuration.utils.publish import convert_channel_thumbnail
from contentcuration.utils.publish import create_content_database
from contentcuration.utils.publish import create_slideshow_manifest
@@ -118,6 +119,18 @@ def setUp(self):
new_exercise.complete = True
new_exercise.parent = current_exercise.parent
new_exercise.save()
+
+ bad_container = create_node({'kind_id': 'topic', 'title': 'Bad topic container', 'children': []})
+ bad_container.complete = True
+ bad_container.parent = self.content_channel.main_tree
+ bad_container.save()
+
+ # exercise without mastery model, but marked as complete
+ broken_exercise = create_node({'kind_id': 'exercise', 'title': 'Bad mastery test', 'extra_fields': {}})
+ broken_exercise.complete = True
+ broken_exercise.parent = bad_container
+ broken_exercise.save()
+
thumbnail_data = create_studio_file(thumbnail_bytes, preset="exercise_thumbnail", ext="png")
file_obj = thumbnail_data["db_file"]
file_obj.contentnode = new_exercise
@@ -205,8 +218,7 @@ def setUp(self):
def tearDown(self):
# Clean up database connection after the test
- connections[self.tempdb].close()
- del connections.databases[self.tempdb]
+ cleanup_content_database_connection(self.tempdb)
super(ExportChannelTestCase, self).tearDown()
set_active_content_database(None)
if os.path.exists(self.tempdb):
@@ -248,6 +260,9 @@ def test_contentnode_incomplete_not_published(self):
for node in incomplete_nodes:
assert kolibri_nodes.filter(pk=node.node_id).count() == 0
+ # bad exercise node should not be published (technically incomplete)
+ assert kolibri_models.ContentNode.objects.filter(title='Bad mastery test').count() == 0
+
def test_tags_greater_than_30_excluded(self):
tag_node = kolibri_models.ContentNode.objects.filter(title='kolibri tag test').first()
published_tags = tag_node.tags.all()
@@ -263,6 +278,14 @@ def test_duration_override_on_completion_criteria_time(self):
assert completion_criteria_node.duration == 20
assert non_completion_criteria_node.duration == 100
+ def test_completion_criteria_set(self):
+ completion_criteria_node = kolibri_models.ContentNode.objects.filter(title='Completion criteria test').first()
+
+ self.assertEqual(completion_criteria_node.options["completion_criteria"], {
+ "model": "time",
+ "threshold": 20
+ })
+
def test_contentnode_channel_id_data(self):
channel = kolibri_models.ChannelMetadata.objects.first()
nodes = kolibri_models.ContentNode.objects.all()
@@ -294,8 +317,8 @@ def test_channel_icon_encoding(self):
def test_assessment_metadata(self):
for i, exercise in enumerate(kolibri_models.ContentNode.objects.filter(kind="exercise")):
asm = exercise.assessmentmetadata.first()
- self.assertTrue(isinstance(json.loads(asm.assessment_item_ids), list))
- mastery = json.loads(asm.mastery_model)
+ self.assertTrue(isinstance(asm.assessment_item_ids, list))
+ mastery = asm.mastery_model
self.assertTrue(isinstance(mastery, dict))
self.assertEqual(mastery["type"], exercises.DO_ALL if i == 0 else exercises.M_OF_N)
self.assertEqual(mastery["m"], 3 if i == 0 else 1)
@@ -390,6 +413,12 @@ def test_publish_no_modify_exercise_extra_fields(self):
"n": 2,
"mastery_model": exercises.M_OF_N,
})
+ published_exercise = kolibri_models.ContentNode.objects.get(title="Mastery test")
+ self.assertEqual(published_exercise.options["completion_criteria"]["threshold"], {
+ "m": 1,
+ "n": 2,
+ "mastery_model": exercises.M_OF_N,
+ })
def test_publish_no_modify_legacy_exercise_extra_fields(self):
current_exercise = cc.ContentNode.objects.get(title="Legacy Mastery test")
@@ -401,6 +430,29 @@ def test_publish_no_modify_legacy_exercise_extra_fields(self):
})
+class EmptyChannelTestCase(StudioTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(EmptyChannelTestCase, cls).setUpClass()
+ cls.patch_copy_db = patch('contentcuration.utils.publish.save_export_database')
+ cls.patch_copy_db.start()
+
+ @classmethod
+ def tearDownClass(cls):
+ super(EmptyChannelTestCase, cls).tearDownClass()
+ cls.patch_copy_db.stop()
+
+ def test_publish_empty_channel(self):
+ content_channel = channel()
+ set_channel_icon_encoding(content_channel)
+ content_channel.main_tree.complete = True
+ content_channel.main_tree.save()
+ content_channel.main_tree.get_descendants().exclude(kind_id="topic").delete()
+ with self.assertRaises(ChannelIncompleteError):
+ create_content_database(content_channel, True, self.admin_user.id, True)
+
+
class ChannelExportUtilityFunctionTestCase(StudioTestCase):
@classmethod
def setUpClass(cls):
diff --git a/contentcuration/contentcuration/tests/views/test_views_internal.py b/contentcuration/contentcuration/tests/views/test_views_internal.py
index 58a6792884..3a23654ee6 100644
--- a/contentcuration/contentcuration/tests/views/test_views_internal.py
+++ b/contentcuration/contentcuration/tests/views/test_views_internal.py
@@ -231,6 +231,33 @@ def test_tag_greater_than_30_chars_excluded(self):
self.assertEqual(response.status_code, 400, response.content)
+ def test_add_nodes__not_a_topic(self):
+ resource_node = self._make_node_data()
+ test_data = {
+ "root_id": self.root_node.id,
+ "content_data": [
+ resource_node,
+ ],
+ }
+ response = self.admin_client().post(
+ reverse_lazy("api_add_nodes_to_tree"), data=test_data, format="json"
+ )
+ # should succeed
+ self.assertEqual(response.status_code, 200, response.content)
+ resource_node_id = next(iter(response.json().get('root_ids').values()))
+
+ invalid_child = self._make_node_data()
+ test_data = {
+ "root_id": resource_node_id,
+ "content_data": [
+ invalid_child,
+ ],
+ }
+ response = self.admin_client().post(
+ reverse_lazy("api_add_nodes_to_tree"), data=test_data, format="json"
+ )
+ self.assertEqual(response.status_code, 400, response.content)
+
def test_invalid_metadata_label_excluded(self):
invalid_metadata_labels = self._make_node_data()
invalid_metadata_labels["title"] = "invalid_metadata_labels"
diff --git a/contentcuration/contentcuration/utils/publish.py b/contentcuration/contentcuration/utils/publish.py
index 1593fda172..5bfddf4b7b 100644
--- a/contentcuration/contentcuration/utils/publish.py
+++ b/contentcuration/contentcuration/utils/publish.py
@@ -11,6 +11,7 @@
import uuid
import zipfile
from builtins import str
+from copy import deepcopy
from itertools import chain
from django.conf import settings
@@ -31,6 +32,7 @@
from django.utils.translation import gettext_lazy as _
from django.utils.translation import override
from kolibri_content import models as kolibrimodels
+from kolibri_content.base_models import MAX_TAG_LENGTH
from kolibri_content.router import get_active_content_database
from kolibri_content.router import using_content_database
from kolibri_public.utils.mapper import ChannelMapper
@@ -72,6 +74,10 @@ class NoNodesChangedError(Exception):
pass
+class ChannelIncompleteError(Exception):
+ pass
+
+
class SlowPublishError(Exception):
"""
Used to track slow Publishing operations. We don't raise this error,
@@ -193,8 +199,8 @@ def __init__(
force_exercises=False,
progress_tracker=None,
):
- if not root_node.complete:
- raise ValueError("Attempted to publish a channel with an incomplete root node")
+ if not root_node.is_publishable():
+ raise ChannelIncompleteError("Attempted to publish a channel with an incomplete root node or no resources")
self.root_node = root_node
task_percent_total = 80.0
@@ -218,7 +224,18 @@ def recurse_nodes(self, node, inherited_fields): # noqa C901
logging.debug("Mapping node with id {id}".format(id=node.pk))
# Only process nodes that are either non-topics or have non-topic descendants
- if node.get_descendants(include_self=True).exclude(kind_id=content_kinds.TOPIC).exists() and node.complete:
+ if node.is_publishable():
+ # early validation to make sure we don't have any exercises without mastery models
+ # which should be unlikely when the node is complete, but just in case
+ if node.kind_id == content_kinds.EXERCISE:
+ try:
+ # migrates and extracts the mastery model from the exercise
+ _, mastery_model = parse_assessment_metadata(node)
+ if not mastery_model:
+ raise ValueError("Exercise does not have a mastery model")
+ except Exception as e:
+ logging.warning("Unable to parse exercise {id} mastery model: {error}".format(id=node.pk, error=str(e)))
+ return
metadata = {}
@@ -240,12 +257,12 @@ def recurse_nodes(self, node, inherited_fields): # noqa C901
kolibrinode = create_bare_contentnode(node, self.default_language, self.channel_id, self.channel_name, metadata)
- if node.kind.kind == content_kinds.EXERCISE:
+ if node.kind_id == content_kinds.EXERCISE:
exercise_data = process_assessment_metadata(node, kolibrinode)
if self.force_exercises or node.changed or not \
node.files.filter(preset_id=format_presets.EXERCISE).exists():
create_perseus_exercise(node, kolibrinode, exercise_data, user_id=self.user_id)
- elif node.kind.kind == content_kinds.SLIDESHOW:
+ elif node.kind_id == content_kinds.SLIDESHOW:
create_slideshow_manifest(node, user_id=self.user_id)
elif node.kind_id == content_kinds.TOPIC:
for child in node.children.all():
@@ -349,7 +366,7 @@ def create_bare_contentnode(ccnode, default_language, channel_id, channel_name,
'license_description': kolibri_license.license_description if kolibri_license is not None else None,
'coach_content': ccnode.role_visibility == roles.COACH,
'duration': duration,
- 'options': json.dumps(options),
+ 'options': options,
# Fields for metadata labels
"grade_levels": ",".join(grade_levels.keys()) if grade_levels else None,
"resource_types": ",".join(resource_types.keys()) if resource_types else None,
@@ -484,18 +501,23 @@ def create_perseus_exercise(ccnode, kolibrinode, exercise_data, user_id=None):
temppath and os.unlink(temppath)
-def process_assessment_metadata(ccnode, kolibrinode):
- # Get mastery model information, set to default if none provided
- assessment_items = ccnode.assessment_items.all().order_by('order')
+def parse_assessment_metadata(ccnode):
extra_fields = ccnode.extra_fields
if isinstance(extra_fields, basestring):
extra_fields = json.loads(extra_fields)
extra_fields = migrate_extra_fields(extra_fields) or {}
randomize = extra_fields.get('randomize') if extra_fields.get('randomize') is not None else True
+ return randomize, extra_fields.get('options').get('completion_criteria').get('threshold')
+
+
+def process_assessment_metadata(ccnode, kolibrinode):
+ # Get mastery model information, set to default if none provided
+ assessment_items = ccnode.assessment_items.all().order_by('order')
assessment_item_ids = [a.assessment_id for a in assessment_items]
- exercise_data = extra_fields.get('options').get('completion_criteria').get('threshold')
+ randomize, mastery_criteria = parse_assessment_metadata(ccnode)
+ exercise_data = deepcopy(mastery_criteria)
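+ # copy before use: exercise_data may be mutated below, and the node's stored extra_fields must stay untouched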
exercise_data_type = exercise_data.get('mastery_model', "")
mastery_model = {'type': exercise_data_type or exercises.M_OF_N}
@@ -526,9 +548,9 @@ def process_assessment_metadata(ccnode, kolibrinode):
kolibrimodels.AssessmentMetaData.objects.create(
id=uuid.uuid4(),
contentnode=kolibrinode,
- assessment_item_ids=json.dumps(assessment_item_ids),
+ assessment_item_ids=assessment_item_ids,
number_of_assessments=assessment_items.count(),
- mastery_model=json.dumps(mastery_model),
+ mastery_model=mastery_model,
randomize=randomize,
is_manipulable=ccnode.kind_id == content_kinds.EXERCISE,
)
@@ -760,7 +782,7 @@ def map_tags_to_node(kolibrinode, ccnode):
for tag in ccnode.tags.all():
t, _new = kolibrimodels.ContentTag.objects.get_or_create(pk=tag.pk, tag_name=tag.tag_name)
- if len(t.tag_name) <= 30:
+ if len(t.tag_name) <= MAX_TAG_LENGTH:
tags_to_add.append(t)
kolibrinode.tags.set(tags_to_add)
diff --git a/contentcuration/contentcuration/views/internal.py b/contentcuration/contentcuration/views/internal.py
index 4dbc51c8ef..5767928719 100644
--- a/contentcuration/contentcuration/views/internal.py
+++ b/contentcuration/contentcuration/views/internal.py
@@ -2,8 +2,8 @@
import logging
from builtins import str
from collections import namedtuple
-
from distutils.version import LooseVersion
+
from django.core.exceptions import ObjectDoesNotExist
from django.core.exceptions import PermissionDenied
from django.core.exceptions import SuspiciousOperation
@@ -632,11 +632,14 @@ def handle_remote_node(user, node_data, parent_node):
@delay_user_storage_calculation
-def convert_data_to_nodes(user, content_data, parent_node):
+def convert_data_to_nodes(user, content_data, parent_node): # noqa: C901
""" Parse dict and create nodes accordingly """
try:
root_mapping = {}
parent_node = ContentNode.objects.get(pk=parent_node)
+ if parent_node.kind_id != content_kinds.TOPIC:
+ raise NodeValidationError("Parent node must be a topic/folder | actual={}".format(parent_node.kind_id))
+
sort_order = parent_node.children.count() + 1
existing_node_ids = ContentNode.objects.filter(
parent_id=parent_node.pk
diff --git a/contentcuration/contentcuration/viewsets/base.py b/contentcuration/contentcuration/viewsets/base.py
index d83b3f9e9a..18e1771de0 100644
--- a/contentcuration/contentcuration/viewsets/base.py
+++ b/contentcuration/contentcuration/viewsets/base.py
@@ -960,6 +960,7 @@ def update_progress(progress=None):
task_object.status = states.FAILURE
task_object.traceback = traceback.format_exc()
task_object.save()
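+ # re-raise so the failure still propagates to the caller instead of being swallowed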
+ raise
finally:
if task_object.status == states.STARTED:
# No error reported, cleanup.
diff --git a/contentcuration/contentcuration/viewsets/channel.py b/contentcuration/contentcuration/viewsets/channel.py
index 737269acf1..6715cfd3e4 100644
--- a/contentcuration/contentcuration/viewsets/channel.py
+++ b/contentcuration/contentcuration/viewsets/channel.py
@@ -40,6 +40,7 @@
from contentcuration.utils.garbage_collect import get_deleted_chefs_root
from contentcuration.utils.pagination import CachedListPagination
from contentcuration.utils.pagination import ValuesViewsetPageNumberPagination
+from contentcuration.utils.publish import ChannelIncompleteError
from contentcuration.utils.publish import publish_channel
from contentcuration.utils.sync import sync_channel
from contentcuration.viewsets.base import BulkListSerializer
@@ -503,6 +504,13 @@ def publish(self, pk, version_notes="", language=None):
}, channel_id=channel.id
),
], applied=True)
+ except ChannelIncompleteError:
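+ # clear the publishing flag so clients see publishing has stopped, then report the error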
+ Change.create_changes([
+ generate_update_event(
+ channel.id, CHANNEL, {"publishing": False}, channel_id=channel.id
+ ),
+ ], applied=True)
+ raise ValidationError("Channel is not ready to be published")
except Exception:
Change.create_changes([
generate_update_event(
diff --git a/contentcuration/contentcuration/viewsets/contentnode.py b/contentcuration/contentcuration/viewsets/contentnode.py
index ca636c95a4..7366d5f28a 100644
--- a/contentcuration/contentcuration/viewsets/contentnode.py
+++ b/contentcuration/contentcuration/viewsets/contentnode.py
@@ -71,7 +71,7 @@
from contentcuration.viewsets.sync.constants import CREATED
from contentcuration.viewsets.sync.constants import DELETED
from contentcuration.viewsets.sync.utils import generate_update_event
-
+from contentcuration.viewsets.sync.utils import log_sync_exception
channel_query = Channel.objects.filter(main_tree__tree_id=OuterRef("tree_id"))
@@ -908,9 +908,11 @@ def copy_from_changes(self, changes):
for copy in changes:
# Copy change will have key, must also have other attributes, defined in `copy`
# Just pass as keyword arguments here to let copy do the validation
- copy_errors = self.copy(copy["key"], **copy)
- if copy_errors:
- copy.update({"errors": copy_errors})
+ try:
+ self.copy(copy["key"], **copy)
+ except Exception as e:
+ log_sync_exception(e, user=self.request.user, change=copy)
+ copy["errors"] = [str(e)]
errors.append(copy)
return errors
@@ -924,23 +926,18 @@ def copy(
excluded_descendants=None,
**kwargs
):
- try:
- target, position = self.validate_targeting_args(target, position)
- except ValidationError as e:
- return str(e)
+ target, position = self.validate_targeting_args(target, position)
try:
source = self.get_queryset().get(pk=from_key)
except ContentNode.DoesNotExist:
- error = ValidationError("Copy source node does not exist")
- return str(error)
+ raise ValidationError("Copy source node does not exist")
# Affected channel for the copy is the target's channel
channel_id = target.channel_id
if ContentNode.filter_by_pk(pk=pk).exists():
- error = ValidationError("Copy pk already exists")
- return str(error)
+ raise ValidationError("Copy pk already exists")
can_edit_source_channel = ContentNode.filter_edit_queryset(
ContentNode.filter_by_pk(pk=source.id), user=self.request.user
@@ -969,8 +966,6 @@ def copy(
created_by_id=self.request.user.id,
)
- return None
-
def perform_create(self, serializer, change=None):
instance = serializer.save()
diff --git a/contentcuration/contentcuration/viewsets/sync/utils.py b/contentcuration/contentcuration/viewsets/sync/utils.py
index fcfaa8a569..f86b9685d8 100644
--- a/contentcuration/contentcuration/viewsets/sync/utils.py
+++ b/contentcuration/contentcuration/viewsets/sync/utils.py
@@ -1,5 +1,7 @@
import logging
+from django.conf import settings
+
from contentcuration.utils.sentry import report_exception
from contentcuration.viewsets.sync.constants import ALL_TABLES
from contentcuration.viewsets.sync.constants import CHANNEL
@@ -92,7 +94,9 @@ def log_sync_exception(e, user=None, change=None, changes=None):
elif changes is not None:
contexts["changes"] = changes
- report_exception(e, user=user, contexts=contexts)
+ # in production, we'll get duplicates in Sentry if we log the exception here.
+ if settings.DEBUG:
+ # make sure we leave a record in the logs just in case.
+ logging.exception(e)
- # make sure we leave a record in the logs just in case.
- logging.exception(e)
+ report_exception(e, user=user, contexts=contexts)
diff --git a/contentcuration/kolibri_content/base_models.py b/contentcuration/kolibri_content/base_models.py
index 9fb574fa78..0fbe59c710 100644
--- a/contentcuration/kolibri_content/base_models.py
+++ b/contentcuration/kolibri_content/base_models.py
@@ -20,9 +20,12 @@
from mptt.models import TreeForeignKey
+MAX_TAG_LENGTH = 30
+
+
class ContentTag(models.Model):
id = UUIDField(primary_key=True)
- tag_name = models.CharField(max_length=30, blank=True)
+ tag_name = models.CharField(max_length=MAX_TAG_LENGTH, blank=True)
class Meta:
abstract = True
diff --git a/contentcuration/kolibri_content/router.py b/contentcuration/kolibri_content/router.py
index b7b2e69b70..9a991a9df1 100644
--- a/contentcuration/kolibri_content/router.py
+++ b/contentcuration/kolibri_content/router.py
@@ -72,6 +72,16 @@ def get_content_database_connection(alias=None):
return connections[alias].connection
+def cleanup_content_database_connection(alias):
+ try:
+ connection = connections[alias]
+ connection.close()
+ del connections.databases[alias]
+ except (ConnectionDoesNotExist, KeyError):
+ # Already cleaned up, nothing to do here!
+ pass
+
+
class ContentDBRouter(object):
"""A router that decides what content database to read from based on a thread-local variable."""
@@ -158,6 +168,7 @@ def __enter__(self):
def __exit__(self, exc_type, exc_value, traceback):
set_active_content_database(self.previous_alias)
+ cleanup_content_database_connection(self.alias)
def __call__(self, querying_func):
# allow using the context manager as a decorator
diff --git a/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py b/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py
index f20206abf0..076053ad90 100644
--- a/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py
+++ b/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py
@@ -27,6 +27,50 @@
class Command(BaseCommand):
+ def add_arguments(self, parser):
+ parser.add_argument(
+ "--channel-id",
+ type=str,
+ dest="channel_id",
+ help="The channel_id for which generate kolibri_public models [default: all channels]"
+ )
+
+ def handle(self, *args, **options):
+ ids_to_export = []
+
+ if options["channel_id"]:
+ ids_to_export.append(options["channel_id"])
+ else:
+ self._republish_problem_channels()
+ public_channel_ids = set(Channel.objects.filter(public=True, deleted=False, main_tree__published=True).values_list("id", flat=True))
+ kolibri_public_channel_ids = set(ChannelMetadata.objects.all().values_list("id", flat=True))
+ ids_to_export = public_channel_ids.difference(kolibri_public_channel_ids)
+
+ count = 0
+ for channel_id in ids_to_export:
+ try:
+ self._export_channel(channel_id)
+ count += 1
+ except FileNotFoundError:
+ logger.warning("Tried to export channel {} to kolibri_public but its published channel database could not be found".format(channel_id))
+ except Exception as e:
+ logger.exception("Failed to export channel {} to kolibri_public because of error: {}".format(channel_id, e))
+ logger.info("Successfully put {} channels into kolibri_public".format(count))
+
+ def _export_channel(self, channel_id):
+ logger.info("Putting channel {} into kolibri_public".format(channel_id))
+ db_location = os.path.join(settings.DB_ROOT, "{id}.sqlite3".format(id=channel_id))
+ with storage.open(db_location) as storage_file:
+ with tempfile.NamedTemporaryFile(suffix=".sqlite3") as db_file:
+ shutil.copyfileobj(storage_file, db_file)
+ db_file.seek(0)
+ with using_content_database(db_file.name):
+ # Run migration to handle old content databases published prior to current fields being added.
+ call_command("migrate", app_label=KolibriContentConfig.label, database=get_active_content_database())
+ channel = ExportedChannelMetadata.objects.get(id=channel_id)
+ logger.info("Found channel {} for id: {} mapping now".format(channel.name, channel_id))
+ mapper = ChannelMapper(channel)
+ mapper.run()
def _republish_problem_channels(self):
twenty_19 = datetime(year=2019, month=1, day=1)
@@ -46,38 +90,10 @@ def _republish_problem_channels(self):
)
for channel in channel_qs:
- kolibri_temp_db = create_content_database(channel, True, chef_user.id, False)
- os.remove(kolibri_temp_db)
- channel.last_published = timezone.now()
- channel.save()
-
- def _export_channel(self, channel_id):
- logger.info("Putting channel {} into kolibri_public".format(channel_id))
- db_location = os.path.join(settings.DB_ROOT, "{id}.sqlite3".format(id=channel_id))
- with storage.open(db_location) as storage_file:
- with tempfile.NamedTemporaryFile(suffix=".sqlite3") as db_file:
- shutil.copyfileobj(storage_file, db_file)
- db_file.seek(0)
- with using_content_database(db_file.name):
- # Run migration to handle old content databases published prior to current fields being added.
- call_command("migrate", app_label=KolibriContentConfig.label, database=get_active_content_database())
- channel = ExportedChannelMetadata.objects.get(id=channel_id)
- logger.info("Found channel {} for id: {} mapping now".format(channel.name, channel_id))
- mapper = ChannelMapper(channel)
- mapper.run()
-
- def handle(self, *args, **options):
- self._republish_problem_channels()
- public_channel_ids = set(Channel.objects.filter(public=True, deleted=False, main_tree__published=True).values_list("id", flat=True))
- kolibri_public_channel_ids = set(ChannelMetadata.objects.all().values_list("id", flat=True))
- ids_to_export = public_channel_ids.difference(kolibri_public_channel_ids)
- count = 0
- for channel_id in ids_to_export:
try:
- self._export_channel(channel_id)
- count += 1
- except FileNotFoundError:
- logger.warning("Tried to export channel {} to kolibri_public but its published channel database could not be found".format(channel_id))
+ kolibri_temp_db = create_content_database(channel, True, chef_user.id, False)
+ os.remove(kolibri_temp_db)
+ channel.last_published = timezone.now()
+ channel.save()
except Exception as e:
- logger.exception("Failed to export channel {} to kolibri_public because of error: {}".format(channel_id, e))
- logger.info("Successfully put {} channels into kolibri_public".format(count))
+ logger.exception("Failed to export channel {} to kolibri_public because of error: {}".format(channel.id, e))
diff --git a/contentcuration/kolibri_public/models.py b/contentcuration/kolibri_public/models.py
index 6b20c1f1d7..c0056d9cf9 100644
--- a/contentcuration/kolibri_public/models.py
+++ b/contentcuration/kolibri_public/models.py
@@ -29,7 +29,15 @@ def has_all_labels(self, field_name, labels):
annotations = {}
for bitmask_fieldname, bits in bits.items():
annotation_fieldname = "{}_{}".format(bitmask_fieldname, "masked")
- filters[annotation_fieldname + "__gt"] = 0
+ # To get the correct result, i.e. an AND requiring that all the labels are present,
+ # we need to check that the aggregated value is equal to the bits.
+ # If we wanted an OR (which would check for any being present),
+ # we would have to use GREATER THAN 0 here.
+ filters[annotation_fieldname] = bits
+ # The annotation below is the bitwise AND of the field with the bits:
+ # if all the labels are present, the result equals the bits exactly;
+ # if any are missing, the result differs from the bits, and it is only
+ # 0 if none of the bits are present.
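+ # e.g. with bits=0b0110: 0b0111 & 0b0110 == 0b0110 (kept), while 0b0010 & 0b0110 == 0b0010 != bits (excluded)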
annotations[annotation_fieldname] = F(bitmask_fieldname).bitand(bits)
return self.annotate(**annotations).filter(**filters)
diff --git a/contentcuration/kolibri_public/tests/base.py b/contentcuration/kolibri_public/tests/base.py
index 044755591d..1e5f341ab2 100644
--- a/contentcuration/kolibri_public/tests/base.py
+++ b/contentcuration/kolibri_public/tests/base.py
@@ -32,6 +32,17 @@ def choices(sequence, k):
return [random.choice(sequence) for _ in range(0, k)]
+OKAY_TAG = "okay_tag"
+BAD_TAG = "tag_is_too_long_because_it_is_over_30_characters"
+
+PROBLEMATIC_HTML5_NODE = "ab9d3fd905c848a6989936c609405abb"
+
+BUILDER_DEFAULT_OPTIONS = {
+ "problematic_tags": False,
+ "problematic_nodes": False,
+}
+
+
class ChannelBuilder(object):
"""
This class is purely to generate all the relevant data for a single
@@ -49,11 +60,15 @@ class ChannelBuilder(object):
"root_node",
)
- def __init__(self, levels=3, num_children=5, models=kolibri_public_models):
+ def __init__(self, levels=3, num_children=5, models=kolibri_public_models, options=None):
self.levels = levels
self.num_children = num_children
self.models = models
+ self.options = BUILDER_DEFAULT_OPTIONS.copy()
+ if options:
+ self.options.update(options)
+ self.content_tags = {}
self._excluded_channel_fields = None
self._excluded_node_fields = None
@@ -75,8 +90,16 @@ def generate_new_tree(self):
self.channel = self.channel_data()
self.files = {}
self.localfiles = {}
+
self.node_to_files_map = {}
self.localfile_to_files_map = {}
+ self.content_tag_map = {}
+
+ tags = [OKAY_TAG]
+ if self.options["problematic_tags"]:
+ tags.append(BAD_TAG)
+ for tag_name in tags:
+ self.content_tag_data(tag_name)
self.root_node = self.generate_topic()
if "root_id" in self.channel:
@@ -88,6 +111,22 @@ def generate_new_tree(self):
self.root_node["children"] = self.recurse_and_generate(
self.root_node["id"], self.levels
)
+ if self.options["problematic_nodes"]:
+ self.root_node["children"].extend(self.generate_problematic_nodes())
+
+ def generate_problematic_nodes(self):
+ nodes = []
+ html5_not_a_topic = self.contentnode_data(
+ node_id=PROBLEMATIC_HTML5_NODE,
+ kind=content_kinds.HTML5,
+ parent_id=self.root_node["id"],
+ )
+ # the problem: this node is not a topic, but it has children
+ html5_not_a_topic["children"] = [
+ self.contentnode_data(parent_id=PROBLEMATIC_HTML5_NODE)
+ ]
+ nodes.append(html5_not_a_topic)
+ return nodes
def load_data(self):
try:
@@ -117,7 +156,19 @@ def generate_nodes_from_root_node(self):
self.nodes = {n["id"]: n for n in map(to_dict, self._django_nodes)}
def insert_into_default_db(self):
+ self.models.ContentTag.objects.bulk_create(
+ (self.models.ContentTag(**tag) for tag in self.content_tags.values())
+ )
self.models.ContentNode.objects.bulk_create(self._django_nodes)
+ self.models.ContentNode.tags.through.objects.bulk_create(
+ (
+ self.models.ContentNode.tags.through(
+ contentnode_id=node["id"], contenttag_id=tag["id"]
+ )
+ for node in self.nodes.values()
+ for tag in self.content_tags.values()
+ )
+ )
self.models.ChannelMetadata.objects.create(**self.channel)
self.models.LocalFile.objects.bulk_create(
(self.models.LocalFile(**local) for local in self.localfiles.values())
@@ -153,6 +204,7 @@ def data(self):
"content_contentnode": list(self.nodes.values()),
"content_file": list(self.files.values()),
"content_localfile": list(self.localfiles.values()),
+ "content_contenttag": list(self.content_tags.values()),
}
def recurse_and_generate(self, parent_id, levels):
@@ -190,6 +242,8 @@ def generate_leaf(self, parent_id):
thumbnail=True,
preset=format_presets.VIDEO_THUMBNAIL,
)
+ for tag_id in self.content_tags:
+ self.content_tag_map[node["id"]] = [tag_id]
return node
def channel_data(self, channel_id=None, version=1):
@@ -218,6 +272,14 @@ def channel_data(self, channel_id=None, version=1):
del channel_data[field]
return channel_data
+ def content_tag_data(self, tag_name):
+ data = {
+ "id": uuid4_hex(),
+ "tag_name": tag_name,
+ }
+ self.content_tags[data["id"]] = data
+ return data
+
def localfile_data(self, extension="mp4"):
data = {
"file_size": random.randint(1, 1000),
diff --git a/contentcuration/kolibri_public/tests/test_content_app.py b/contentcuration/kolibri_public/tests/test_content_app.py
index c6f4327972..50c4a6a35b 100644
--- a/contentcuration/kolibri_public/tests/test_content_app.py
+++ b/contentcuration/kolibri_public/tests/test_content_app.py
@@ -11,6 +11,7 @@
from django.utils.http import http_date
from kolibri_public import models
from kolibri_public.tests.base import ChannelBuilder
+from kolibri_public.tests.base import OKAY_TAG
from le_utils.constants import content_kinds
from rest_framework.test import APITestCase
@@ -373,6 +374,8 @@ def test_contentnode_tags(self):
response = self.client.get(
reverse("publiccontentnode-detail", kwargs={"pk": self.root.id})
)
+ # added by channel builder
+ tags.append(OKAY_TAG)
self.assertEqual(set(response.data["tags"]), set(tags))
def test_channelmetadata_list(self):
diff --git a/contentcuration/kolibri_public/tests/test_mapper.py b/contentcuration/kolibri_public/tests/test_mapper.py
index 13066ede63..4b56813464 100644
--- a/contentcuration/kolibri_public/tests/test_mapper.py
+++ b/contentcuration/kolibri_public/tests/test_mapper.py
@@ -2,14 +2,16 @@
import tempfile
from django.core.management import call_command
-from django.db import connections
from django.test import TestCase
from kolibri_content import models as kolibri_content_models
+from kolibri_content.router import cleanup_content_database_connection
from kolibri_content.router import get_active_content_database
from kolibri_content.router import using_content_database
from kolibri_public import models as kolibri_public_models
from kolibri_public.tests.base import ChannelBuilder
+from kolibri_public.tests.base import OKAY_TAG
from kolibri_public.utils.mapper import ChannelMapper
+from le_utils.constants import content_kinds
from contentcuration.models import Channel
@@ -19,14 +21,14 @@ class ChannelMapperTest(TestCase):
@property
def overrides(self):
return {
- kolibri_public_models.ContentNode: {
- "available": True,
- "tree_id": self.mapper.tree_id,
- },
- kolibri_public_models.LocalFile: {
- "available": True,
+ kolibri_public_models.ContentNode: {
+ "available": True,
+ "tree_id": self.mapper.tree_id,
+ },
+ kolibri_public_models.LocalFile: {
+ "available": True,
+ }
}
- }
@classmethod
def setUpClass(cls):
@@ -36,7 +38,10 @@ def setUpClass(cls):
with using_content_database(cls.tempdb):
call_command("migrate", "content", database=get_active_content_database(), no_input=True)
- builder = ChannelBuilder(models=kolibri_content_models)
+ builder = ChannelBuilder(models=kolibri_content_models, options={
+ "problematic_tags": True,
+ "problematic_nodes": True,
+ })
builder.insert_into_default_db()
cls.source_root = kolibri_content_models.ContentNode.objects.get(id=builder.root_node["id"])
cls.channel = kolibri_content_models.ChannelMetadata.objects.get(id=builder.channel["id"])
@@ -57,6 +62,10 @@ def _assert_model(self, source, mapped, Model):
self.assertEqual(getattr(source, column), getattr(mapped, column))
def _assert_node(self, source, mapped):
+ """
+ :param source: kolibri_content_models.ContentNode
+ :param mapped: kolibri_public_models.ContentNode
+ """
self._assert_model(source, mapped, kolibri_public_models.ContentNode)
for src, mpd in zip(source.assessmentmetadata.all(), mapped.assessmentmetadata.all()):
@@ -66,13 +75,23 @@ def _assert_node(self, source, mapped):
self._assert_model(src, mpd, kolibri_public_models.File)
self._assert_model(src.local_file, mpd.local_file, kolibri_public_models.LocalFile)
+ # should only map OKAY_TAG and not BAD_TAG
+ for mapped_tag in mapped.tags.all():
+ self.assertEqual(OKAY_TAG, mapped_tag.tag_name)
+
+ self.assertEqual(mapped.ancestors, [{"id": ancestor.id, "title": ancestor.title} for ancestor in source.get_ancestors()])
+
def _recurse_and_assert(self, sources, mappeds, recursion_depth=0):
recursion_depths = []
for source, mapped in zip(sources, mappeds):
self._assert_node(source, mapped)
source_children = source.children.all()
mapped_children = mapped.children.all()
- self.assertEqual(len(source_children), len(mapped_children))
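+ # non-topic nodes (e.g. the problematic HTML5 node) must not have children after mapping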
+ if mapped.kind == content_kinds.TOPIC:
+ self.assertEqual(len(source_children), len(mapped_children))
+ else:
+ self.assertEqual(0, len(mapped_children))
+
recursion_depths.append(
self._recurse_and_assert(
source_children,
@@ -97,8 +116,7 @@ def test_map_replace(self):
@classmethod
def tearDownClass(cls):
# Clean up database connection after the test
- connections[cls.tempdb].close()
- del connections.databases[cls.tempdb]
+ cleanup_content_database_connection(cls.tempdb)
super(ChannelMapperTest, cls).tearDownClass()
if os.path.exists(cls.tempdb):
os.remove(cls.tempdb)
diff --git a/contentcuration/kolibri_public/utils/annotation.py b/contentcuration/kolibri_public/utils/annotation.py
index a2fca0289f..3cb800a3d9 100644
--- a/contentcuration/kolibri_public/utils/annotation.py
+++ b/contentcuration/kolibri_public/utils/annotation.py
@@ -75,6 +75,9 @@ def calculate_next_order(channel, public=False):
# Ensure that this channel is always included in the list.
Q(public=True, deleted=False, main_tree__published=True) | Q(id=channel.id)
).order_by("-priority").values_list("id", flat=True))
- order = channel_list_order.index(channel.id)
- channel.order = order
- channel.save()
+ # this shouldn't happen, but if we're exporting a channel database to Kolibri Public
+ # and the channel does not actually exist locally, then this would fail
+ if channel.id in channel_list_order:
+ order = channel_list_order.index(channel.id)
+ channel.order = order
+ channel.save()
diff --git a/contentcuration/kolibri_public/utils/mapper.py b/contentcuration/kolibri_public/utils/mapper.py
index 2f3db0811f..f7605d7e1b 100644
--- a/contentcuration/kolibri_public/utils/mapper.py
+++ b/contentcuration/kolibri_public/utils/mapper.py
@@ -1,5 +1,7 @@
from django.db import transaction
+from django.db.models.functions import Length
from kolibri_content import models as kolibri_content_models
+from kolibri_content.base_models import MAX_TAG_LENGTH
from kolibri_public import models as kolibri_public_models
from kolibri_public.search import annotate_label_bitmasks
from kolibri_public.utils.annotation import set_channel_metadata_fields
@@ -24,14 +26,14 @@ def __init__(self, channel, public=True):
@property
def overrides(self):
return {
- kolibri_public_models.ContentNode: {
- "available": True,
- "tree_id": self.tree_id,
- },
- kolibri_public_models.LocalFile: {
- "available": True,
+ kolibri_public_models.ContentNode: {
+ "available": True,
+ "tree_id": self.tree_id,
+ },
+ kolibri_public_models.LocalFile: {
+ "available": True,
+ }
}
- }
def _handle_old_tree_if_exists(self):
try:
@@ -49,6 +51,8 @@ def run(self):
self.mapped_channel.public = self.public
self.mapped_channel.save_base(raw=True)
annotate_label_bitmasks(self.mapped_root.get_descendants(include_self=True))
+ # Rather than set the ancestors field after mapping, as is done in Kolibri,
+ # here we set it during mapping, since we are already recursing through the tree.
set_channel_metadata_fields(self.mapped_channel.id, public=self.public)
def _map_model(self, source, Model):
@@ -67,22 +71,30 @@ def _map_and_bulk_create_model(self, sources, Model):
Model.objects.bulk_create(cloned_sources, ignore_conflicts=True)
- def _map_node(self, source):
- return self._map_model(source, kolibri_public_models.ContentNode)
+ def _map_node(self, source, ancestors):
+ node = self._map_model(source, kolibri_public_models.ContentNode)
+ node.ancestors = ancestors
+ return node
+
+ def _extend_ancestors(self, ancestors, new_ancestor):
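+ # returns a new list rather than mutating; quotes are escaped since the titles may end up serialized as JSON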
+ return ancestors + [{"id": new_ancestor.id, "title": new_ancestor.title.replace('"', '\\"')}]
def _recurse_to_create_tree(
self,
source,
nodes_by_parent,
+ ancestors,
):
- nodes_to_create = [self._map_node(source)]
+ nodes_to_create = [self._map_node(source, ancestors)]
if source.kind == content_kinds.TOPIC and source.id in nodes_by_parent:
children = sorted(nodes_by_parent[source.id], key=lambda x: x.lft)
+ ancestors = self._extend_ancestors(ancestors, source)
for child in children:
nodes_to_create.extend(self._recurse_to_create_tree(
child,
nodes_by_parent,
+ ancestors,
))
return nodes_to_create
@@ -108,6 +120,7 @@ def _map(
self,
node,
batch_size,
+ ancestors=[],
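+ # mutable default is safe here: ancestors is never mutated in place (_extend_ancestors returns a new list)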
progress_tracker=None,
):
"""
@@ -116,13 +129,16 @@ def _map(
if node.rght - node.lft < batch_size:
copied_nodes = self._deep_map(
node,
+ ancestors,
)
if progress_tracker:
progress_tracker.increment(len(copied_nodes))
return copied_nodes
node_copy = self._shallow_map(
node,
+ ancestors,
)
+ ancestors = self._extend_ancestors(ancestors, node)
if progress_tracker:
progress_tracker.increment()
children = node.get_children().order_by("lft")
@@ -130,22 +146,37 @@ def _map(
self._map(
child,
batch_size,
+ ancestors=ancestors,
progress_tracker=progress_tracker,
)
return [node_copy]
def _copy_tags(self, node_ids):
- source_tags_mappings = kolibri_content_models.ContentNode.tags.through.objects.filter(
+ initial_source_tag_mappings = kolibri_content_models.ContentNode.tags.through.objects.filter(
contentnode_id__in=node_ids
)
- source_tags = kolibri_content_models.ContentTag.objects.filter(
- id__in=source_tags_mappings.values_list("contenttag_id", flat=True)
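+ # only copy tags that fit Kolibri's MAX_TAG_LENGTH, and only the mappings that reference them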
+ source_tags = (
+ kolibri_content_models.ContentTag.objects
+ .annotate(
+ tag_name_len=Length("tag_name"),
+ )
+ .filter(
+ id__in=initial_source_tag_mappings.values_list("contenttag_id", flat=True),
+ tag_name_len__lte=MAX_TAG_LENGTH,
+ )
+ )
+
+ source_tag_mappings = (
+ initial_source_tag_mappings
+ .filter(
+ contenttag_id__in=source_tags.values_list("id", flat=True),
+ )
)
self._map_and_bulk_create_model(source_tags, kolibri_public_models.ContentTag)
- self._map_and_bulk_create_model(source_tags_mappings, kolibri_public_models.ContentNode.tags.through)
+ self._map_and_bulk_create_model(source_tag_mappings, kolibri_public_models.ContentNode.tags.through)
def _copy_assessment_metadata(self, node_ids):
node_assessmentmetadata = kolibri_content_models.AssessmentMetaData.objects.filter(contentnode_id__in=node_ids)
@@ -173,8 +204,9 @@ def _copy_associated_objects(self, nodes):
def _shallow_map(
self,
node,
+ ancestors,
):
- mapped_node = self._map_node(node)
+ mapped_node = self._map_node(node, ancestors)
mapped_node.save_base(raw=True)
@@ -184,6 +216,7 @@ def _shallow_map(
def _deep_map(
self,
node,
+ ancestors,
):
source_nodes = node.get_descendants(include_self=True)
@@ -196,10 +229,15 @@ def _deep_map(
nodes_to_create = self._recurse_to_create_tree(
node,
nodes_by_parent,
+ ancestors,
)
mapped_nodes = kolibri_public_models.ContentNode.objects.bulk_create(nodes_to_create)
- self._copy_associated_objects(source_nodes)
+ # filter to only the nodes that were created, since some source nodes could have
+ # been problematic
+ self._copy_associated_objects(source_nodes.filter(
+ id__in=[mapped_node.id for mapped_node in mapped_nodes],
+ ))
return mapped_nodes
diff --git a/contentcuration/search/viewsets/contentnode.py b/contentcuration/search/viewsets/contentnode.py
index 676698031d..199c78817d 100644
--- a/contentcuration/search/viewsets/contentnode.py
+++ b/contentcuration/search/viewsets/contentnode.py
@@ -104,38 +104,44 @@ class SearchContentNodeViewSet(ReadOnlyValuesViewset):
"id": "contentnode__id",
"content_id": "contentnode__content_id",
"node_id": "contentnode__node_id",
+ "root_id": "channel__main_tree_id",
"title": "contentnode__title",
"description": "contentnode__description",
"author": "contentnode__author",
"provider": "contentnode__provider",
- "kind__kind": "contentnode__kind__kind",
+ "kind": "contentnode__kind__kind",
"thumbnail_encoding": "contentnode__thumbnail_encoding",
"published": "contentnode__published",
"modified": "contentnode__modified",
- "parent_id": "contentnode__parent_id",
+ "parent": "contentnode__parent_id",
"changed": "contentnode__changed",
+ "public": "channel__public",
}
values = (
+ "channel__public",
+ "channel__main_tree_id",
"contentnode__id",
"contentnode__content_id",
"contentnode__node_id",
- "contentnode__title",
- "contentnode__description",
- "contentnode__author",
- "contentnode__provider",
"contentnode__kind__kind",
- "contentnode__thumbnail_encoding",
- "contentnode__published",
- "contentnode__modified",
"contentnode__parent_id",
- "contentnode__changed",
"channel_id",
"resource_count",
- "thumbnail_checksum",
- "thumbnail_extension",
- "content_tags",
"original_channel_name",
+
+ # TODO: currently loading nodes separately
+ # "thumbnail_checksum",
+ # "thumbnail_extension",
+ # "content_tags",
+ # "contentnode__title",
+ # "contentnode__description",
+ # "contentnode__author",
+ # "contentnode__provider",
+ # "contentnode__changed",
+ # "contentnode__thumbnail_encoding",
+ # "contentnode__published",
+ # "contentnode__modified",
)
def annotate_queryset(self, queryset):
diff --git a/jest_config/jest.conf.js b/jest_config/jest.conf.js
index ff89cf2c24..88a490ebb2 100644
--- a/jest_config/jest.conf.js
+++ b/jest_config/jest.conf.js
@@ -27,7 +27,7 @@ module.exports = {
},
transformIgnorePatterns: ['<rootDir>/node_modules/(?!vuetify|epubjs|kolibri-design-system|kolibri-constants|axios)'],
snapshotSerializers: ['<rootDir>/node_modules/jest-serializer-vue'],
- setupFilesAfterEnv: [path.resolve(__dirname, './setup')],
+ setupFilesAfterEnv: ['<rootDir>/jest_config/setup.js'],
coverageDirectory: '<rootDir>/coverage',
collectCoverageFrom: ['!**/node_modules/**'],
verbose: false,
diff --git a/jest_config/setup.js b/jest_config/setup.js
index c96391f83f..d5fb127156 100644
--- a/jest_config/setup.js
+++ b/jest_config/setup.js
@@ -9,7 +9,16 @@ import KThemePlugin from 'kolibri-design-system/lib/KThemePlugin';
import 'shared/i18n/setup';
// Polyfill indexeddb
import 'fake-indexeddb/auto';
+// Ponyfill webstreams
+import {ReadableStream, WritableStream, TransformStream, CountQueuingStrategy} from 'web-streams-polyfill/ponyfill/es2018';
import jquery from 'jquery';
+
+window.jQuery = window.$ = jquery;
+window.ReadableStream = global.ReadableStream = ReadableStream;
+window.WritableStream = global.WritableStream = WritableStream;
+window.TransformStream = global.TransformStream = TransformStream;
+window.CountQueuingStrategy = global.CountQueuingStrategy = CountQueuingStrategy;
+
import AnalyticsPlugin from 'shared/analytics/plugin';
import { setupSchema } from 'shared/data';
import * as resources from 'shared/data/resources';
@@ -34,8 +43,6 @@ global.afterEach(() => {
});
});
-window.jQuery = window.$ = jquery;
-
window.storageBaseUrl = '/content/storage/';
Vue.use(VueRouter);
diff --git a/package.json b/package.json
index 505d860d73..7ac683e762 100644
--- a/package.json
+++ b/package.json
@@ -116,7 +116,8 @@
"npm-run-all": "^4.1.3",
"stylus": "^0.59.0",
"stylus-loader": "^7.1.3",
- "workbox-webpack-plugin": "^7.0.0"
+ "workbox-webpack-plugin": "^7.0.0",
+ "web-streams-polyfill": "^3.2.1"
},
"false": {},
"peerDependencies": {},
diff --git a/requirements.txt b/requirements.txt
index b809a2390b..8ebd6cc1e1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -137,7 +137,7 @@ googleapis-common-protos[grpc]==1.57.0
# grpc-google-iam-v1
grpc-google-iam-v1==0.12.4
# via google-cloud-kms
-grpcio==1.51.1
+grpcio==1.53.0
# via
# google-api-core
# googleapis-common-protos