diff --git a/Makefile b/Makefile
index 619fcee41e..051053bab3 100644
--- a/Makefile
+++ b/Makefile
@@ -38,7 +38,7 @@ migrate:
# 4) Remove the management command from this `deploy-migrate` recipe
# 5) Repeat!
deploy-migrate:
- python contentcuration/manage.py rectify_incorrect_contentnode_source_fields
+ echo "Nothing to do here!"
contentnodegc:
python contentcuration/manage.py garbage_collect
diff --git a/contentcuration/contentcuration/apps.py b/contentcuration/contentcuration/apps.py
index 6d0fa858ff..2466492feb 100644
--- a/contentcuration/contentcuration/apps.py
+++ b/contentcuration/contentcuration/apps.py
@@ -8,6 +8,9 @@ class ContentConfig(AppConfig):
name = 'contentcuration'
def ready(self):
+ # Import signals
+ import contentcuration.signals # noqa
+
if settings.AWS_AUTO_CREATE_BUCKET and not is_gcs_backend():
from contentcuration.utils.minio_utils import ensure_storage_bucket_public
ensure_storage_bucket_public()
diff --git a/contentcuration/contentcuration/frontend/channelEdit/components/move/MoveModal.vue b/contentcuration/contentcuration/frontend/channelEdit/components/move/MoveModal.vue
index a68c50d51b..5865f28d30 100644
--- a/contentcuration/contentcuration/frontend/channelEdit/components/move/MoveModal.vue
+++ b/contentcuration/contentcuration/frontend/channelEdit/components/move/MoveModal.vue
@@ -98,6 +98,11 @@
+
+
+ {{ showMoreLabel }}
+
+
{
+ }).then(childrenResponse => {
this.loading = false;
+ this.more = childrenResponse.more || null;
});
}
return Promise.resolve();
@@ -302,6 +321,15 @@
});
this.moveNodesInProgress = false;
},
+ loadMore() {
+ if (this.more && !this.moreLoading) {
+ this.moreLoading = true;
+ this.loadContentNodes(this.more).then(response => {
+ this.more = response.more || null;
+ this.moreLoading = false;
+ });
+ }
+ },
},
$trs: {
moveItems:
@@ -350,4 +378,11 @@
}
}
+ .show-more-button-container {
+ display: flex;
+ justify-content: center;
+ width: 100%;
+ margin-bottom: 10px;
+ }
+
diff --git a/contentcuration/contentcuration/frontend/channelEdit/views/CurrentTopicView.vue b/contentcuration/contentcuration/frontend/channelEdit/views/CurrentTopicView.vue
index a994e1c9aa..943e7c2836 100644
--- a/contentcuration/contentcuration/frontend/channelEdit/views/CurrentTopicView.vue
+++ b/contentcuration/contentcuration/frontend/channelEdit/views/CurrentTopicView.vue
@@ -533,6 +533,7 @@
},
inheritanceParent() {
const firstNode = this.currentInheritingNodes[0];
+
if (!firstNode) {
return;
}
@@ -726,6 +727,7 @@
handleDragDrop(drop) {
const { data } = drop;
const { identity, section, relative } = data.target;
+ const targetMetadata = identity.metadata || {};
const isTargetTree =
drop.target && drop.target.region && drop.target.region.id === DraggableRegions.TREE;
@@ -742,7 +744,7 @@
position = this.relativePosition(relative > DraggableFlags.NONE ? relative : section);
} else {
// Safety check
- const { kind } = identity.metadata || {};
+ const { kind } = targetMetadata;
if (kind && kind !== ContentKindsNames.TOPIC) {
return Promise.reject('Cannot set child of non-topic');
}
@@ -756,7 +758,7 @@
const sources = drop.sources || [];
const sourceRegion = sources.length > 0 ? sources[0].region : null;
const payload = {
- target: identity.metadata.id,
+ target: targetMetadata.id,
position,
};
@@ -765,7 +767,7 @@
// `excluded_descendants` by accessing the copy trees through the clipboard node ID
if (sourceRegion && sourceRegion.id === DraggableRegions.CLIPBOARD) {
return Promise.all(
- data.sources.map(source => {
+ sources.map(source => {
// Using `getCopyTrees` we can access the `excluded_descendants` for the node, such
// that we make sure to skip copying nodes that aren't intended to be copied
const trees = this.getCopyTrees(source.metadata.clipboardNodeId, true);
@@ -789,10 +791,27 @@
);
}
+ // We want to avoid launching the inherit modal when the move operation is a prepend or
+ // append move, and target is the current parent. When the move operation is relative to
+ // the target, that is left or right, we only want to launch the modal if the parent is
+ // changing
+ let inherit = false;
+ if (
+ position === RELATIVE_TREE_POSITIONS.FIRST_CHILD ||
+ position === RELATIVE_TREE_POSITIONS.LAST_CHILD
+ ) {
+ inherit = !sources.some(s => s.metadata.parent === targetMetadata.id);
+ } else if (
+ position === RELATIVE_TREE_POSITIONS.LEFT ||
+ position === RELATIVE_TREE_POSITIONS.RIGHT
+ ) {
+ inherit = !sources.some(s => s.metadata.parent === targetMetadata.parent);
+ }
+
return this.moveContentNodes({
...payload,
- id__in: data.sources.map(s => s.metadata.id),
- inherit: !data.sources.some(s => s.metadata.parent === payload.target),
+ id__in: sources.map(s => s.metadata.id),
+ inherit,
});
}
},
diff --git a/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ContentTreeList.vue b/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ContentTreeList.vue
index 72d046d3dd..8b2bf39ac6 100644
--- a/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ContentTreeList.vue
+++ b/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ContentTreeList.vue
@@ -43,6 +43,11 @@
/>
+
+
+ {{ showMoreLabel }}
+
+
@@ -55,6 +60,7 @@
import intersectionBy from 'lodash/intersectionBy';
import { mapActions, mapGetters } from 'vuex';
import find from 'lodash/find';
+ import NodePanel from '../NodePanel';
import { RouteNames } from '../../constants';
import BrowsingCard from './BrowsingCard';
import Breadcrumbs from 'shared/views/Breadcrumbs';
@@ -62,6 +68,9 @@
import LoadingText from 'shared/views/LoadingText';
import { constantsTranslationMixin } from 'shared/mixins';
import { ChannelListTypes } from 'shared/constants';
+ import { crossComponentTranslator } from 'shared/i18n';
+
+ const showMoreTranslator = crossComponentTranslator(NodePanel);
export default {
name: 'ContentTreeList',
@@ -85,6 +94,8 @@
data() {
return {
loading: false,
+ more: null,
+ moreLoading: false,
};
},
computed: {
@@ -142,39 +153,44 @@
...ancestorsLinks,
];
},
+ showMoreLabel() {
+ // eslint-disable-next-line kolibri/vue-no-undefined-string-uses
+ return showMoreTranslator.$tr('showMore');
+ },
},
watch: {
- topicId(parent) {
+ topicId(newTopicId, oldTopicId) {
+ if (newTopicId !== oldTopicId && newTopicId) {
+ this.loadData();
+ }
+ },
+ },
+ created() {
+ this.loadData();
+ },
+ methods: {
+ ...mapActions('contentNode', ['loadChildren', 'loadAncestors', 'loadContentNodes']),
+ loadData() {
this.loading = true;
- return this.loadChildren({
- parent,
+ const params = {
complete: true,
- }).then(() => {
+ };
+ const channelListType = this.$route.query.channel_list || ChannelListTypes.PUBLIC;
+ if (channelListType === ChannelListTypes.PUBLIC) {
+ // TODO: load from public API instead
+ // TODO: challenging because of node_id->id and root_id->channel_id
+ params.published = true;
+ }
+
+ return Promise.all([
+ this.loadChildren({ parent: this.topicId, ...params }).then(childrenResponse => {
+ this.more = childrenResponse.more || null;
+ }),
+ this.loadAncestors({ id: this.topicId }),
+ ]).then(() => {
this.loading = false;
});
},
- },
- mounted() {
- this.loading = true;
- const params = {
- complete: true,
- };
- const channelListType = this.$route.query.channel_list || ChannelListTypes.PUBLIC;
- if (channelListType === ChannelListTypes.PUBLIC) {
- // TODO: load from public API instead
- // TODO: challenging because of node_id->id and root_id->channel_id
- params.published = true;
- }
-
- return Promise.all([
- this.loadChildren({ parent: this.topicId, ...params }),
- this.loadAncestors({ id: this.topicId }),
- ]).then(() => {
- this.loading = false;
- });
- },
- methods: {
- ...mapActions('contentNode', ['loadChildren', 'loadAncestors']),
// @public
scrollToNode(nodeId) {
const ref = this.$refs[nodeId];
@@ -187,6 +203,15 @@
toggleSelected(node) {
this.$emit('change_selected', { nodes: [node], isSelected: !this.isSelected(node) });
},
+ loadMore() {
+ if (this.more && !this.moreLoading) {
+ this.moreLoading = true;
+ this.loadContentNodes(this.more).then(response => {
+ this.more = response.more || null;
+ this.moreLoading = false;
+ });
+ }
+ },
},
$trs: {
allChannelsLabel: 'Channels',
diff --git a/contentcuration/contentcuration/frontend/channelEdit/views/TreeView/TreeViewBase.vue b/contentcuration/contentcuration/frontend/channelEdit/views/TreeView/TreeViewBase.vue
index 2de7cae031..0f8ba07b3c 100644
--- a/contentcuration/contentcuration/frontend/channelEdit/views/TreeView/TreeViewBase.vue
+++ b/contentcuration/contentcuration/frontend/channelEdit/views/TreeView/TreeViewBase.vue
@@ -390,7 +390,7 @@
this.currentChannel.publishing ||
!this.isChanged ||
!this.currentChannel.language ||
- (this.rootNode && !this.rootNode.total_count)
+ (this.rootNode && !this.rootNode.resource_count)
);
},
publishButtonTooltip() {
diff --git a/contentcuration/contentcuration/frontend/channelEdit/views/trash/TrashModal.vue b/contentcuration/contentcuration/frontend/channelEdit/views/trash/TrashModal.vue
index edbd7eea3e..e1ba3e3625 100644
--- a/contentcuration/contentcuration/frontend/channelEdit/views/trash/TrashModal.vue
+++ b/contentcuration/contentcuration/frontend/channelEdit/views/trash/TrashModal.vue
@@ -66,6 +66,11 @@
+
+
+ {{ showMoreLabel }}
+
+
{
- this.loading = false;
- });
},
methods: {
...mapActions('contentNode', [
@@ -234,6 +240,17 @@
'loadContentNodes',
'loadAncestors',
]),
+ loadNodes() {
+ this.loading = true;
+ if (!this.trashId) {
+ this.loading = false;
+ return;
+ }
+ this.loadChildren({ parent: this.trashId }).then(childrenResponse => {
+ this.loading = false;
+ this.more = childrenResponse.more || null;
+ });
+ },
moveNodes(target) {
return this.moveContentNodes({
id__in: this.selected,
@@ -242,6 +259,8 @@
}).then(() => {
this.reset();
this.$refs.moveModal && this.$refs.moveModal.moveComplete();
+ // Reload after this to ensure that anything over the pagination fold is loaded now
+ this.loadNodes();
});
},
reset() {
@@ -254,6 +273,8 @@
this.showConfirmationDialog = false;
this.reset();
this.$store.dispatch('showSnackbar', { text });
+ // Reload after this to ensure that anything over the pagination fold is loaded now
+ this.loadNodes();
});
},
toggleSelectAll(selectAll) {
@@ -262,6 +283,15 @@
getItemBackground(id) {
return this.previewNodeId === id ? this.$vuetify.theme.greyBackground : 'transparent';
},
+ loadMore() {
+ if (this.more && !this.moreLoading) {
+ this.moreLoading = true;
+ this.loadContentNodes(this.more).then(response => {
+ this.more = response.more || null;
+ this.moreLoading = false;
+ });
+ }
+ },
},
$trs: {
trashModalTitle: 'Trash',
@@ -283,4 +313,10 @@
diff --git a/contentcuration/contentcuration/frontend/channelEdit/views/trash/__tests__/trashModal.spec.js b/contentcuration/contentcuration/frontend/channelEdit/views/trash/__tests__/trashModal.spec.js
index fd75caa234..4d4db70575 100644
--- a/contentcuration/contentcuration/frontend/channelEdit/views/trash/__tests__/trashModal.spec.js
+++ b/contentcuration/contentcuration/frontend/channelEdit/views/trash/__tests__/trashModal.spec.js
@@ -55,7 +55,7 @@ function makeWrapper(items) {
methods: {
loadContentNodes: jest.fn(),
loadAncestors: jest.fn(),
- loadChildren: jest.fn(() => Promise.resolve()),
+ loadChildren: jest.fn(() => Promise.resolve({ more: null, results: [] })),
},
stubs: {
ResourceDrawer: true,
diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js
index eebbcdf0ad..3c0c74fbea 100644
--- a/contentcuration/contentcuration/frontend/shared/data/resources.js
+++ b/contentcuration/contentcuration/frontend/shared/data/resources.js
@@ -1998,7 +1998,7 @@ export const User = new Resource({
uuid: false,
updateAsAdmin(id, changes) {
- return client.post(window.Urls.adminUsersAccept(id)).then(() => {
+ return client.patch(window.Urls.adminUsersDetail(id), changes).then(() => {
return this.transaction({ mode: 'rw' }, () => {
return this.table.update(id, changes);
});
diff --git a/contentcuration/contentcuration/frontend/shared/mixins.js b/contentcuration/contentcuration/frontend/shared/mixins.js
index 01095f6df2..58cc27b727 100644
--- a/contentcuration/contentcuration/frontend/shared/mixins.js
+++ b/contentcuration/contentcuration/frontend/shared/mixins.js
@@ -141,6 +141,7 @@ export const constantStrings = createTranslator('ConstantStrings', {
low_res_video: 'Low resolution',
video_subtitle: 'Captions',
html5_zip: 'HTML5 zip',
+ imscp_zip: 'IMSCP zip',
video_thumbnail: 'Thumbnail',
audio_thumbnail: 'Thumbnail',
document_thumbnail: 'Thumbnail',
diff --git a/contentcuration/contentcuration/frontend/shared/views/channel/ChannelDetailsModal.vue b/contentcuration/contentcuration/frontend/shared/views/channel/ChannelDetailsModal.vue
index e336bb5340..017fe11f12 100644
--- a/contentcuration/contentcuration/frontend/shared/views/channel/ChannelDetailsModal.vue
+++ b/contentcuration/contentcuration/frontend/shared/views/channel/ChannelDetailsModal.vue
@@ -21,10 +21,10 @@
-
+
{{ $tr('downloadPDF') }}
-
+
{{ $tr('downloadCSV') }}
@@ -114,14 +114,26 @@
},
mounted() {
this.load();
+ this.$analytics.trackAction('channel_details', 'View', {
+ id: this.channelId,
+ });
},
methods: {
...mapActions('channel', ['loadChannel', 'loadChannelDetails']),
- async generatePdf() {
+ async generatePDF() {
+ this.$analytics.trackEvent('channel_details', 'Download PDF', {
+ id: this.channelId,
+ });
this.loading = true;
await this.generateChannelsPDF([this.channelWithDetails]);
this.loading = false;
},
+ generateCSV() {
+ this.$analytics.trackEvent('channel_details', 'Download CSV', {
+ id: this.channelId,
+ });
+ this.generateChannelsCSV([this.channelWithDetails]);
+ },
load() {
this.loading = true;
const channelPromise = this.loadChannel(this.channelId);
diff --git a/contentcuration/contentcuration/frontend/shared/views/details/Details.vue b/contentcuration/contentcuration/frontend/shared/views/details/Details.vue
index ce8cf89277..03ace0c8fd 100644
--- a/contentcuration/contentcuration/frontend/shared/views/details/Details.vue
+++ b/contentcuration/contentcuration/frontend/shared/views/details/Details.vue
@@ -493,6 +493,14 @@
return this.categories.join(', ');
},
},
+ mounted() {
+ if (!this.isChannel) {
+ // Track node details view only when this is not a channel details view
+ this.$analytics.trackAction('node_details', 'View', {
+ id: this._details.id,
+ });
+ }
+ },
methods: {
channelUrl(channel) {
return window.Urls.channel(channel.id);
diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py
index 690325fc3c..7d1d1ca7ab 100644
--- a/contentcuration/contentcuration/models.py
+++ b/contentcuration/contentcuration/models.py
@@ -22,6 +22,7 @@
from django.core.mail import send_mail
from django.core.validators import MaxValueValidator
from django.core.validators import MinValueValidator
+from django.db import connection
from django.db import IntegrityError
from django.db import models
from django.db.models import Count
@@ -45,6 +46,7 @@
from django.dispatch import receiver
from django.utils import timezone
from django.utils.translation import gettext as _
+from django_cte import CTEManager
from django_cte import With
from le_utils import proquint
from le_utils.constants import content_kinds
@@ -261,9 +263,13 @@ def hard_delete_user_related_data(self):
editable_channels_user_query, field="id")).filter(num_editors=1, public=False)
# Point sole editor non-public channels' contentnodes to orphan tree to let
- # our garbage collection delete the nodes and underlying files.
- ContentNode._annotate_channel_id(ContentNode.objects).filter(channel_id__in=list(
- non_public_channels_sole_editor.values_list("id", flat=True))).update(parent_id=settings.ORPHANAGE_ROOT_ID)
+ # our garbage collection delete the nodes and underlying files.
+ tree_ids_to_update = non_public_channels_sole_editor.values_list('main_tree__tree_id', flat=True)
+
+ for tree_id in tree_ids_to_update:
+ ContentNode.objects.filter(tree_id=tree_id).update(parent_id=settings.ORPHANAGE_ROOT_ID)
+
+ logging.debug("Queries after updating content nodes parent ID: %s", connection.queries)
# Hard delete non-public channels associated with this user (if user is the only editor).
non_public_channels_sole_editor.delete()
@@ -1055,6 +1061,18 @@ def mark_publishing(self, user):
self.main_tree.publishing = True
self.main_tree.save()
+ def get_server_rev(self):
+ changes_cte = With(
+ Change.objects.filter(channel=self).values("server_rev", "applied"),
+ )
+ return (
+ changes_cte.queryset()
+ .with_cte(changes_cte)
+ .filter(applied=True)
+ .values_list("server_rev", flat=True)
+ .order_by("-server_rev").first()
+ ) or 0
+
@property
def deletion_history(self):
return self.history.filter(action=channel_history.DELETION)
@@ -1551,7 +1569,7 @@ def get_nodes_with_title(cls, title, limit_to_children_of=None):
return root.get_descendants().filter(title=title)
return cls.objects.filter(title=title)
- def get_details(self, channel_id=None):
+ def get_details(self, channel=None):
"""
Returns information about the node and its children, including total size, languages, files, etc.
@@ -1570,12 +1588,13 @@ def get_details(self, channel_id=None):
.values("id")
)
- if channel_id:
- channel = Channel.objects.filter(id=channel_id)[0]
- else:
+ # Get resources
+ resources = descendants.exclude(kind=content_kinds.TOPIC).order_by()
+
+ if not channel:
channel = self.get_channel()
- if not descendants.exists():
+ if not resources.exists():
data = {
"last_update": pytz.utc.localize(datetime.now()).strftime(
settings.DATE_TIME_FORMAT
@@ -1604,8 +1623,6 @@ def get_details(self, channel_id=None):
cache.set("details_{}".format(self.node_id), json.dumps(data), None)
return data
- # Get resources
- resources = descendants.exclude(kind=content_kinds.TOPIC).order_by()
nodes = With(
File.objects.filter(contentnode_id__in=Subquery(resources.values("id")))
.values("checksum", "file_size")
@@ -1816,10 +1833,10 @@ def get_details(self, channel_id=None):
"sample_pathway": pathway,
"sample_nodes": sample_nodes,
# source model fields for the below default to an empty string, but can also be null
- "authors": list(filter(bool, node["authors"])),
- "aggregators": list(filter(bool, node["aggregators"])),
- "providers": list(filter(bool, node["providers"])),
- "copyright_holders": list(filter(bool, node["copyright_holders"])),
+ "authors": list(filter(bool, node["authors"] or [])),
+ "aggregators": list(filter(bool, node["aggregators"] or [])),
+ "providers": list(filter(bool, node["providers"] or [])),
+ "copyright_holders": list(filter(bool, node["copyright_holders"] or [])),
"levels": node.get("levels") or [],
"categories": node.get("all_categories") or [],
}
@@ -2567,6 +2584,8 @@ class Change(models.Model):
# will be falsy.
unpublishable = models.BooleanField(null=True, blank=True, default=False)
+ objects = CTEManager()
+
@classmethod
def _create_from_change(
cls,
diff --git a/contentcuration/contentcuration/signals.py b/contentcuration/contentcuration/signals.py
new file mode 100644
index 0000000000..e96446569f
--- /dev/null
+++ b/contentcuration/contentcuration/signals.py
@@ -0,0 +1,18 @@
+from django.db.backends.postgresql.features import DatabaseFeatures
+from django.db.backends.signals import connection_created
+from django.dispatch import receiver
+
+
+@receiver(connection_created)
+def set_jit(sender, connection, **kwargs):
+ """
+ Disable Just-In-Time compilation for PostgreSQL databases >= 11, at least until we can
+ optimize its use.
+ https://www.postgresql.org/docs/12/runtime-config-query.html#GUC-JIT
+ """
+ if connection.vendor == 'postgresql':
+ db_features = DatabaseFeatures(connection)
+ # JIT is new in v11, and for reference this returns True for v11 and following
+ if db_features.is_postgresql_11:
+ with connection.cursor() as cursor:
+ cursor.execute("SET jit = 'off';")
diff --git a/contentcuration/contentcuration/tests/test_models.py b/contentcuration/contentcuration/tests/test_models.py
index 0f910fb7ec..9c093369cd 100644
--- a/contentcuration/contentcuration/tests/test_models.py
+++ b/contentcuration/contentcuration/tests/test_models.py
@@ -15,6 +15,7 @@
from contentcuration.constants import channel_history
from contentcuration.constants import user_history
from contentcuration.models import AssessmentItem
+from contentcuration.models import Change
from contentcuration.models import Channel
from contentcuration.models import ChannelHistory
from contentcuration.models import ChannelSet
@@ -32,6 +33,7 @@
from contentcuration.models import UserHistory
from contentcuration.tests import testdata
from contentcuration.tests.base import StudioTestCase
+from contentcuration.viewsets.sync.constants import DELETED
@pytest.fixture
@@ -251,6 +253,29 @@ def test_filter_edit_queryset__private_channel__anonymous(self):
queryset = Channel.filter_edit_queryset(self.base_queryset, user=self.anonymous_user)
self.assertQuerysetDoesNotContain(queryset, pk=channel.id)
+ def test_get_server_rev(self):
+ channel = testdata.channel()
+
+ def create_change(server_rev, applied):
+ return Change(
+ channel=channel,
+ server_rev=server_rev,
+ user=self.admin_user,
+ created_by=self.admin_user,
+ change_type=DELETED,
+ table=Channel.__name__,
+ applied=applied,
+ kwargs={},
+ )
+
+ Change.objects.bulk_create([
+ create_change(1, True),
+ create_change(2, True),
+ create_change(3, False),
+ ])
+
+ self.assertEqual(channel.get_server_rev(), 2)
+
class ContentNodeTestCase(PermissionQuerysetTestCase):
@property
diff --git a/contentcuration/contentcuration/tests/test_rectify_source_field_migraiton_command.py b/contentcuration/contentcuration/tests/test_rectify_source_field_migraiton_command.py
deleted file mode 100644
index a563fb712a..0000000000
--- a/contentcuration/contentcuration/tests/test_rectify_source_field_migraiton_command.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# DELETE THIS FILE AFTER RUNNING THE MIGRATIONSSS
-import datetime
-import uuid
-
-from django.core.management import call_command
-from django.utils import timezone
-from le_utils.constants import content_kinds
-
-from contentcuration.models import Channel
-from contentcuration.models import ContentNode
-from contentcuration.models import License
-from contentcuration.tests import testdata
-from contentcuration.tests.base import StudioAPITestCase
-from contentcuration.utils.publish import publish_channel
-
-
-class TestRectifyMigrationCommand(StudioAPITestCase):
-
- @classmethod
- def setUpClass(cls):
- super(TestRectifyMigrationCommand, cls).setUpClass()
-
- def tearDown(self):
- super(TestRectifyMigrationCommand, self).tearDown()
-
- def setUp(self):
- super(TestRectifyMigrationCommand, self).setUp()
- self.original_channel = testdata.channel()
- self.license_original = License.objects.all()[0]
- self.original_contentnode = ContentNode.objects.create(
- id=uuid.uuid4().hex,
- title="Original Node",
- parent=self.original_channel.main_tree,
- license=self.license_original,
- original_channel_id=None,
- source_channel_id=None,
- author="old author"
- )
- self.user = testdata.user()
- self.original_channel.editors.add(self.user)
- self.client.force_authenticate(user=self.user)
-
- def create_base_channel_and_contentnode(self, source_contentnode, source_channel):
- base_channel = testdata.channel()
- base_channel.public = True
- base_channel.save()
- license_changed = License.objects.all()[2]
- base_node = ContentNode.objects.create(
- id=uuid.uuid4().hex,
- title="base contentnode",
- parent=base_channel.main_tree,
- kind_id=content_kinds.VIDEO,
- original_channel_id=self.original_channel.id,
- original_source_node_id=self.original_contentnode.node_id,
- source_channel_id=source_channel.id,
- source_node_id=source_contentnode.node_id,
- author="source author",
- license=license_changed,
- )
- return base_node, base_channel
-
- def create_source_channel_and_contentnode(self):
- source_channel = testdata.channel()
- source_channel.public = True
- source_channel.save()
- license_changed = License.objects.all()[1]
- source_node = ContentNode.objects.create(
- id=uuid.uuid4().hex,
- title="base contentnode",
- parent=source_channel.main_tree,
- kind_id=content_kinds.VIDEO,
- license=license_changed,
- original_channel_id=self.original_channel.id,
- source_channel_id=self.original_channel.id,
- source_node_id=self.original_contentnode.node_id,
- original_source_node_id=self.original_contentnode.node_id,
- author="source author",
- )
-
- return source_node, source_channel
-
- def run_migrations(self):
- call_command('rectify_incorrect_contentnode_source_fields', user_id=self.user.id, is_test=True)
-
- def test_two_node_case(self):
- base_node, base_channel = self.create_base_channel_and_contentnode(self.original_contentnode, self.original_channel)
-
- publish_channel(self.user.id, Channel.objects.get(pk=base_channel.pk).id)
-
- # main_tree node still has changed=true even after the publish
- for node in Channel.objects.get(pk=base_channel.pk).main_tree.get_family().filter(changed=True):
- print(node.id)
- print(node.id == base_channel.main_tree.id)
- print("checking if the node is complete or not ", node.complete)
- node.changed = False
- # This should probably again change the changed=true but suprisingly it doesnot
- # Meaning the changed boolean doesnot change for the main_tree no matter what we do
- # through ContentNode model methods like save.
- node.save()
-
- ContentNode.objects.filter(pk=base_node.pk).update(
- modified=datetime.datetime(2023, 7, 5, tzinfo=timezone.utc)
- )
-
- self.run_migrations()
- updated_base_node = ContentNode.objects.get(pk=base_node.pk)
- self.assertEqual(updated_base_node.license, self.original_contentnode.license)
- self.assertEqual(updated_base_node.author, self.original_contentnode.author)
- self.assertEqual(Channel.objects.get(pk=base_channel.id).main_tree.get_family().filter(changed=True).exists(), False)
-
- def test_three_node_case_implicit(self):
- source_node, source_channel = self.create_source_channel_and_contentnode()
- base_node, base_channel = self.create_base_channel_and_contentnode(source_node, source_channel)
- source_node.aggregator = "Nami"
- source_node.save()
- # Implicit case
- base_node.author = source_node.author
- base_node.license = source_node.license
- base_node.aggregator = source_node.aggregator
- base_node.save()
-
- publish_channel(self.user.id, Channel.objects.get(pk=base_channel.pk).id)
-
- for node in Channel.objects.get(pk=base_channel.pk).main_tree.get_family().filter(changed=True):
- print(node.id)
- print(node.id == base_channel.main_tree.id)
- print("checking if the node is complete or not ", node.complete)
- node.changed = False
- node.save()
-
- ContentNode.objects.filter(pk=base_node.pk).update(
- modified=datetime.datetime(2023, 7, 5, tzinfo=timezone.utc)
- )
-
- ContentNode.objects.filter(pk=source_node.pk).update(
- modified=datetime.datetime(2023, 3, 5, tzinfo=timezone.utc)
- )
-
- self.run_migrations()
- updated_base_node = ContentNode.objects.get(pk=base_node.pk)
- updated_source_node = ContentNode.objects.get(pk=source_node.pk)
- self.assertEqual(updated_base_node.license, self.original_contentnode.license)
- self.assertEqual(updated_base_node.author, self.original_contentnode.author)
- self.assertEqual(updated_source_node.license, self.original_contentnode.license)
- self.assertEqual(updated_source_node.author, self.original_contentnode.author)
- self.assertEqual(updated_source_node.aggregator, self.original_contentnode.aggregator)
- self.assertEqual(Channel.objects.get(pk=base_channel.id).main_tree.get_family().filter(changed=True).exists(), False)
-
- def test_three_node_case_explicit(self):
- source_node, source_channel = self.create_source_channel_and_contentnode()
- base_node, base_channel = self.create_base_channel_and_contentnode(source_node, source_channel)
- source_node.provider = "luffy"
- base_node.provider = "zoro"
- base_node.save()
- source_node.save()
- publish_channel(self.user.id, Channel.objects.get(pk=base_channel.pk).id)
-
- for node in Channel.objects.get(pk=base_channel.pk).main_tree.get_family().filter(changed=True):
- print(node.id)
- print(node.id == base_channel.main_tree.id)
- print("checking if the node is complete or not ", node.complete)
- node.changed = False
- node.save()
-
- ContentNode.objects.filter(pk=base_node.pk).update(
- modified=datetime.datetime(2023, 7, 5, tzinfo=timezone.utc)
- )
-
- ContentNode.objects.filter(pk=source_node.pk).update(
- modified=datetime.datetime(2023, 3, 5, tzinfo=timezone.utc)
- )
-
- self.run_migrations()
- updated_base_node = ContentNode.objects.get(pk=base_node.pk)
- updated_source_node = ContentNode.objects.get(pk=source_node.pk)
- self.assertEqual(updated_base_node.license, self.original_contentnode.license)
- self.assertEqual(updated_base_node.author, self.original_contentnode.author)
- self.assertEqual(updated_base_node.provider, self.original_contentnode.provider)
- self.assertEqual(updated_source_node.license, self.original_contentnode.license)
- self.assertEqual(updated_source_node.author, self.original_contentnode.author)
- self.assertEqual(updated_source_node.provider, self.original_contentnode.provider)
- self.assertEqual(Channel.objects.get(pk=base_channel.id).main_tree.get_family().filter(changed=True).exists(), False)
diff --git a/contentcuration/contentcuration/tests/views/test_nodes.py b/contentcuration/contentcuration/tests/views/test_nodes.py
index 2367263275..a981b84ca7 100644
--- a/contentcuration/contentcuration/tests/views/test_nodes.py
+++ b/contentcuration/contentcuration/tests/views/test_nodes.py
@@ -1,5 +1,6 @@
import datetime
import json
+from contextlib import contextmanager
import pytz
from django.conf import settings
@@ -8,11 +9,16 @@
from mock import Mock
from mock import patch
+from contentcuration.models import ContentNode
from contentcuration.tasks import generatenodediff_task
from contentcuration.tests.base import BaseAPITestCase
class NodesViewsTestCase(BaseAPITestCase):
+ def tearDown(self):
+ super().tearDown()
+ cache.clear()
+
def test_get_node_diff__missing_contentnode(self):
response = self.get(reverse("get_node_diff", kwargs=dict(updated_id="abc123", original_id="def456")))
self.assertEqual(response.status_code, 404)
@@ -32,29 +38,139 @@ def test_get_node_diff__task_processing(self, mock_find_incomplete_ids):
response = self.get(reverse("get_node_diff", kwargs=dict(updated_id=pk, original_id=pk)))
self.assertEqual(response.status_code, 302)
- def test_get_channel_details(self):
- url = reverse('get_channel_details', kwargs={"channel_id": self.channel.id})
- response = self.get(url)
+class DetailsTestCase(BaseAPITestCase):
+ def setUp(self):
+ super().setUp()
+ self.default_details = {
+ "resource_count": 5,
+ "resource_size": 100,
+ "kind_count": {"document": 3, "video": 2}
+ }
+ # see tree.json for where this comes from
+ self.node = ContentNode.objects.get(node_id="00000000000000000000000000000001")
+ # required by get_node_details
+ self.channel.make_public()
+
+ def tearDown(self):
+ super().tearDown()
+ cache.clear()
+
+ def _set_cache(self, node, last_update=None):
+ data = self.default_details.copy()
+ if last_update is not None:
+ data.update(last_update=pytz.utc.localize(last_update).strftime(settings.DATE_TIME_FORMAT))
+
+ cache_key = "details_{}".format(node.node_id)
+ cache.set(cache_key, json.dumps(data))
+
+ @contextmanager
+ def _check_details(self, node=None):
+ endpoint = "get_channel_details" if node is None else "get_node_details"
+ param = {"channel_id": self.channel.id} \
+ if endpoint == "get_channel_details" \
+ else {"node_id": node.id}
+ url = reverse(endpoint, kwargs=param)
+ response = self.get(url)
+ print(response.content)
details = json.loads(response.content)
- assert details['resource_count'] > 0
- assert details['resource_size'] > 0
- assert len(details['kind_count']) > 0
+ yield details
+
+ def assertDetailsEqual(self, details, expected):
+ self.assertEqual(details['resource_count'], expected['resource_count'])
+ self.assertEqual(details['resource_size'], expected['resource_size'])
+ self.assertEqual(details['kind_count'], expected['kind_count'])
+
+ @patch("contentcuration.models.ContentNode.get_details")
+ def test_get_channel_details__uncached(self, mock_get_details):
+ mock_get_details.return_value = {
+ "resource_count": 7,
+ "resource_size": 200,
+ "kind_count": {"document": 33, "video": 22}
+ }
+ with self._check_details() as details:
+ self.assertDetailsEqual(details, mock_get_details.return_value)
- def test_get_channel_details_cached(self):
- cache_key = "details_{}".format(self.channel.main_tree.id)
+ mock_get_details.assert_called_once_with(channel=self.channel)
+ @patch("contentcuration.views.nodes.getnodedetails_task")
+ def test_get_channel_details__cached(self, task_mock):
# force the cache to update by adding a very old cache entry. Since Celery tasks run sync in the test suite,
# get_channel_details will return an updated cache value rather than generate it async.
- data = {"last_update": pytz.utc.localize(datetime.datetime(1990, 1, 1)).strftime(settings.DATE_TIME_FORMAT)}
- cache.set(cache_key, json.dumps(data))
+ self._set_cache(self.channel.main_tree, last_update=datetime.datetime(1990, 1, 1))
- with patch("contentcuration.views.nodes.getnodedetails_task") as task_mock:
- url = reverse('get_channel_details', kwargs={"channel_id": self.channel.id})
- self.get(url)
+ with self._check_details() as details:
+ # check cache was returned
+ self.assertDetailsEqual(details, self.default_details)
# Check that the outdated cache prompts an asynchronous cache update
task_mock.enqueue.assert_called_once_with(self.user, node_id=self.channel.main_tree.id)
+ @patch("contentcuration.views.nodes.getnodedetails_task")
+ def test_get_channel_details__cached__not_updated__no_enqueue(self, task_mock):
+ # nothing has changed in the tree, so no async cache update should be enqueued
+ self.channel.main_tree.get_descendants(include_self=False).update(changed=False)
+ self._set_cache(self.channel.main_tree, last_update=datetime.datetime(1990, 1, 1))
+
+ with self._check_details() as details:
+ # check cache was returned
+ self.assertDetailsEqual(details, self.default_details)
+ task_mock.enqueue.assert_not_called()
+
+ @patch("contentcuration.views.nodes.getnodedetails_task")
+ def test_get_channel_details__cached__no_enqueue(self, task_mock):
+ # test last update handling
+ self._set_cache(self.channel.main_tree, last_update=datetime.datetime(2099, 1, 1))
+
+ with self._check_details() as details:
+ # check cache was returned
+ self.assertDetailsEqual(details, self.default_details)
+ task_mock.enqueue.assert_not_called()
+
+ @patch("contentcuration.models.ContentNode.get_details")
+ def test_get_node_details__uncached(self, mock_get_details):
+ mock_get_details.return_value = {
+ "resource_count": 7,
+ "resource_size": 200,
+ "kind_count": {"document": 33, "video": 22}
+ }
+ with self._check_details(node=self.node) as details:
+ self.assertDetailsEqual(details, mock_get_details.return_value)
+
+ mock_get_details.assert_called_once_with(channel=self.channel)
+
+ @patch("contentcuration.views.nodes.getnodedetails_task")
+ def test_get_node_details__cached(self, task_mock):
+ # force the cache to update by adding a very old cache entry. Since Celery tasks run sync in the test suite,
+ # get_node_details will return an updated cache value rather than generate it async.
+ self._set_cache(self.node, last_update=datetime.datetime(1990, 1, 1))
+
+ with self._check_details(node=self.node) as details:
+ # check cache was returned
+ self.assertDetailsEqual(details, self.default_details)
+ # Check that the outdated cache prompts an asynchronous cache update
+ task_mock.enqueue.assert_called_once_with(self.user, node_id=self.node.pk)
+
+ @patch("contentcuration.views.nodes.getnodedetails_task")
+ def test_get_node_details__cached__not_updated__no_enqueue(self, task_mock):
+ # nothing has changed in the tree, so no async cache update should be enqueued
+ self.channel.main_tree.get_descendants(include_self=False).update(changed=False)
+ self._set_cache(self.node, last_update=datetime.datetime(1990, 1, 1))
+
+ with self._check_details(node=self.node) as details:
+ # check cache was returned
+ self.assertDetailsEqual(details, self.default_details)
+ task_mock.enqueue.assert_not_called()
+
+ @patch("contentcuration.views.nodes.getnodedetails_task")
+ def test_get_node_details__cached__no_enqueue(self, task_mock):
+ # test last update handling
+ self._set_cache(self.node, last_update=datetime.datetime(2099, 1, 1))
+
+ with self._check_details(node=self.node) as details:
+ # check cache was returned
+ self.assertDetailsEqual(details, self.default_details)
+ task_mock.enqueue.assert_not_called()
+
class ChannelDetailsEndpointTestCase(BaseAPITestCase):
def test_200_post(self):
diff --git a/contentcuration/contentcuration/tests/views/test_views_internal.py b/contentcuration/contentcuration/tests/views/test_views_internal.py
index 7689bace3a..3a8f50a6d2 100644
--- a/contentcuration/contentcuration/tests/views/test_views_internal.py
+++ b/contentcuration/contentcuration/tests/views/test_views_internal.py
@@ -777,6 +777,23 @@ def test_creates_channel(self):
except Channel.DoesNotExist:
self.fail("Channel was not created")
+ def test_updates_already_created_channel(self):
+ """
+ Test that posting to an already-created (soft-deleted) channel updates and undeletes it
+ """
+ deleted_channel = channel()
+ deleted_channel.deleted = True
+ deleted_channel.save(actor_id=self.user.id)
+ self.channel_data.update({"name": "Updated name", "id": deleted_channel.id})
+ self.admin_client().post(
+ reverse_lazy("api_create_channel"), data={"channel_data": self.channel_data}, format="json"
+ )
+ try:
+ c = Channel.objects.get(id=self.channel_data["id"])
+ self.assertEqual(c.name, "Updated name")
+ except Channel.DoesNotExist:
+ self.fail("Channel was not created")
+
def test_creates_cheftree(self):
"""
Test that it creates a channel with the given id
diff --git a/contentcuration/contentcuration/views/base.py b/contentcuration/contentcuration/views/base.py
index 8ce3966e0c..0c3568dc22 100644
--- a/contentcuration/contentcuration/views/base.py
+++ b/contentcuration/contentcuration/views/base.py
@@ -1,22 +1,17 @@
import json
-from builtins import str
from urllib.parse import urlsplit
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.cache import cache
-from django.core.exceptions import PermissionDenied
from django.db.models import Count
-from django.db.models import Exists
from django.db.models import IntegerField
from django.db.models import OuterRef
from django.db.models import Subquery
from django.db.models import UUIDField
from django.db.models.functions import Cast
from django.http import HttpResponse
-from django.http import HttpResponseBadRequest
from django.http import HttpResponseForbidden
-from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from django.shortcuts import render
from django.urls import is_valid_path
@@ -30,6 +25,7 @@
from django.views.decorators.http import require_POST
from django.views.i18n import LANGUAGE_QUERY_PARAMETER
from django_celery_results.models import TaskResult
+from django_cte import With
from rest_framework.authentication import BasicAuthentication
from rest_framework.authentication import SessionAuthentication
from rest_framework.authentication import TokenAuthentication
@@ -41,13 +37,11 @@
from rest_framework.response import Response
from .json_dump import json_for_parse_from_data
-from .json_dump import json_for_parse_from_serializer
from contentcuration.constants import channel_history
from contentcuration.decorators import browser_is_supported
from contentcuration.models import Change
from contentcuration.models import Channel
from contentcuration.models import ChannelHistory
-from contentcuration.models import ChannelSet
from contentcuration.models import ContentKind
from contentcuration.models import CustomTaskMetadata
from contentcuration.models import DEFAULT_USER_PREFERENCES
@@ -55,7 +49,6 @@
from contentcuration.models import License
from contentcuration.serializers import SimplifiedChannelProbeCheckSerializer
from contentcuration.utils.messages import get_messages
-from contentcuration.viewsets.channelset import PublicChannelSetSerializer
PUBLIC_CHANNELS_CACHE_DURATION = 30 # seconds
PUBLIC_CHANNELS_CACHE_KEYS = {
@@ -317,7 +310,7 @@ def channel(request, channel_id):
if channel.deleted:
channel_error = 'CHANNEL_EDIT_ERROR_CHANNEL_DELETED'
else:
- channel_rev = Change.objects.filter(applied=True, channel=channel).values_list("server_rev", flat=True).order_by("-server_rev").first() or 0
+ channel_rev = channel.get_server_rev()
return render(
request,
diff --git a/contentcuration/contentcuration/views/internal.py b/contentcuration/contentcuration/views/internal.py
index 13eba6376f..f2f268ca1a 100644
--- a/contentcuration/contentcuration/views/internal.py
+++ b/contentcuration/contentcuration/views/internal.py
@@ -519,7 +519,9 @@ def create_channel(channel_data, user):
if files:
map_files_to_node(user, channel.chef_tree, files)
channel.chef_tree.save()
- channel.save()
+ # If the channel was previously deleted, this save will undelete it
+ # so will require the actor_id to be set
+ channel.save(actor_id=user.id)
# Delete chef tree if it already exists
if old_chef_tree and old_chef_tree != channel.staging_tree:
diff --git a/contentcuration/contentcuration/views/nodes.py b/contentcuration/contentcuration/views/nodes.py
index a1e924c0c7..635c7b00c1 100644
--- a/contentcuration/contentcuration/views/nodes.py
+++ b/contentcuration/contentcuration/views/nodes.py
@@ -8,6 +8,7 @@
from django.http import HttpResponse
from django.http import HttpResponseNotFound
from django.shortcuts import get_object_or_404
+from django_cte import With
from rest_framework import status
from rest_framework.authentication import SessionAuthentication
from rest_framework.authentication import TokenAuthentication
@@ -36,7 +37,7 @@ def get_channel_details(request, channel_id):
channel = get_object_or_404(Channel.filter_view_queryset(Channel.objects.all(), request.user), id=channel_id)
if not channel.main_tree:
raise Http404
- data = get_node_details_cached(request.user, channel.main_tree, channel_id=channel_id)
+ data = get_node_details_cached(request.user, channel.main_tree, channel)
return HttpResponse(json.dumps(data))
@@ -47,29 +48,41 @@ def get_node_details(request, node_id):
channel = node.get_channel()
if channel and not channel.public:
return HttpResponseNotFound("No topic found for {}".format(node_id))
- data = get_node_details_cached(request.user, node)
+ data = get_node_details_cached(request.user, node, channel)
return HttpResponse(json.dumps(data))
-def get_node_details_cached(user, node, channel_id=None):
+def get_node_details_cached(user, node, channel):
cached_data = cache.get("details_{}".format(node.node_id))
- if cached_data:
- descendants = (
- node.get_descendants()
- .prefetch_related("children", "files", "tags")
- .select_related("license", "language")
- )
- channel = node.get_channel()
+ if cached_data:
# If channel is a sushi chef channel, use date created for faster query
# Otherwise, find the last time anything was updated in the channel
- last_update = (
- channel.main_tree.created
- if channel and channel.ricecooker_version
- else descendants.filter(changed=True)
- .aggregate(latest_update=Max("modified"))
- .get("latest_update")
- )
+ if channel and channel.ricecooker_version:
+ last_update = channel.main_tree.created
+ else:
+ # The query should never span across multiple trees, so we can filter by tree_id first.
+ # Since the tree_id is indexed, this should be a fast query, and perfect candidate
+ # for the CTE select query.
+ cte = With(
+ ContentNode.objects.filter(tree_id=node.tree_id)
+ .values("id", "modified", "changed", "tree_id", "parent_id", "lft", "rght")
+ .order_by()
+ )
+ last_update_qs = cte.queryset().with_cte(cte).filter(changed=True)
+
+ # If the node is not the root node, the intent is to calculate node details for the
+ # descendants of this node
+ if node.parent_id is not None:
+ # See MPTTModel.get_descendants for details on the lft/rght query
+ last_update_qs = last_update_qs.filter(
+ lft__gte=node.lft + 1, rght__lte=node.rght - 1
+ )
+ else:
+ # Maintain that query should not 'include_self'
+ last_update_qs = last_update_qs.filter(parent_id__isnull=False)
+
+ last_update = last_update_qs.aggregate(latest_update=Max("modified")).get("latest_update")
if last_update:
last_cache_update = datetime.strptime(
@@ -80,7 +93,7 @@ def get_node_details_cached(user, node, channel_id=None):
getnodedetails_task.enqueue(user, node_id=node.pk)
return json.loads(cached_data)
- return node.get_details(channel_id=channel_id)
+ return node.get_details(channel=channel)
@api_view(["GET"])
diff --git a/contentcuration/contentcuration/viewsets/channel.py b/contentcuration/contentcuration/viewsets/channel.py
index 796f7999e8..5cd9b84104 100644
--- a/contentcuration/contentcuration/viewsets/channel.py
+++ b/contentcuration/contentcuration/viewsets/channel.py
@@ -1,5 +1,4 @@
import logging
-import uuid
from functools import reduce
from operator import or_
from typing import Dict
@@ -9,13 +8,10 @@
from django.conf import settings
from django.db import IntegrityError
from django.db.models import Exists
-from django.db.models import F
from django.db.models import OuterRef
from django.db.models import Q
from django.db.models import Subquery
-from django.db.models import UUIDField
from django.db.models import Value
-from django.db.models.functions import Cast
from django.db.models.functions import Coalesce
from django.http import HttpResponse
from django.http import HttpResponseNotFound
@@ -25,7 +21,6 @@
from django_cte import With
from django_filters.rest_framework import BooleanFilter
from django_filters.rest_framework import CharFilter
-from kolibri_public.models import ContentNode as PublicContentNode
from le_utils.constants import content_kinds
from le_utils.constants import roles
from rest_framework import serializers
@@ -744,20 +739,39 @@ def _get_channel_content_languages(self, channel_id, main_tree_id=None) -> List[
if not channel_id:
return []
+ # determine the tree_id for the channel or from the root node (main_tree_id)
+ tree_id = None
+ if main_tree_id:
+ tree_id = (
+ ContentNode.objects.filter(id=main_tree_id)
+ .values_list("tree_id", flat=True)
+ .first()
+ )
+ elif not main_tree_id:
+ try:
+ tree_id = (
+ Channel.objects.filter(pk=channel_id)
+ .values_list("main_tree__tree_id", flat=True)
+ .first()
+ )
+ except Exception as e:
+ logging.error(str(e))
+ return []
+
try:
- ids_to_exclude = [main_tree_id] if main_tree_id else []
- node_ids_subquery = PublicContentNode.objects.filter(
- channel_id=uuid.UUID(channel_id),
- ).values_list("id", flat=True)
- lang_ids = ContentNode.objects.filter(
+ # performance: use a CTE to select just the tree's nodes, without default MPTT ordering,
+ # then filter against the CTE to get the distinct language IDs
+ cte = With(
+ ContentNode.objects.filter(tree_id=tree_id)
+ .values("id", "language_id")
+ .order_by()
+ )
+ qs = cte.queryset().with_cte(cte).filter(
language_id__isnull=False
- ).annotate(
- cast_node_id=Cast(F('node_id'), output_field=UUIDField())
- ).filter(
- cast_node_id__in=Subquery(node_ids_subquery)
- ).exclude(
- id__in=ids_to_exclude
- ).values_list('language_id', flat=True).distinct()
+ )
+ if main_tree_id:
+ qs = qs.exclude(id=main_tree_id)
+ lang_ids = qs.values_list('language_id', flat=True).distinct()
unique_lang_ids = list(set(lang_ids))
except Exception as e:
logging.error(str(e))
diff --git a/contentcuration/kolibri_public/import_metadata_view.py b/contentcuration/kolibri_public/import_metadata_view.py
index 534596cbb4..100ac870e3 100644
--- a/contentcuration/kolibri_public/import_metadata_view.py
+++ b/contentcuration/kolibri_public/import_metadata_view.py
@@ -14,6 +14,7 @@
from kolibri_content.constants.schema_versions import MIN_CONTENT_SCHEMA_VERSION # Use kolibri_content
from kolibri_public import models # Use kolibri_public models
from kolibri_public.views import metadata_cache
+from rest_framework import status
from rest_framework.generics import get_object_or_404
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
@@ -69,6 +70,14 @@ def retrieve(self, request, pk=None): # noqa: C901
:return: an object with keys for each content metadata table and a schema_version key
"""
+ try:
+ UUID(pk)
+ except ValueError:
+ return Response(
+ {"error": "Invalid UUID format."},
+ status=status.HTTP_400_BAD_REQUEST
+ )
+
content_schema = request.query_params.get(
"schema_version", self.default_content_schema
)
diff --git a/contentcuration/kolibri_public/management/commands/rectify_incorrect_contentnode_source_fields.py b/contentcuration/kolibri_public/management/commands/rectify_incorrect_contentnode_source_fields.py
deleted file mode 100644
index 5c9ab9c3aa..0000000000
--- a/contentcuration/kolibri_public/management/commands/rectify_incorrect_contentnode_source_fields.py
+++ /dev/null
@@ -1,160 +0,0 @@
-import datetime
-import logging
-
-from django.core.management.base import BaseCommand
-from django.db.models import Exists
-from django.db.models import F
-from django.db.models import OuterRef
-from django.db.models import Q
-from django.db.models import Value
-from django.db.models.functions import Coalesce
-from django.utils import timezone
-from django_cte import With
-
-from contentcuration.models import Channel
-from contentcuration.models import ContentNode
-from contentcuration.models import User
-from contentcuration.utils.publish import publish_channel
-
-logger = logging.getLogger(__file__)
-
-
-class Command(BaseCommand):
-
- def add_arguments(self, parser):
-
- parser.add_argument(
- '--is_test',
- action='store_true',
- help="Indicate if the command is running in a test environment.",
- )
-
- parser.add_argument(
- '--user_id',
- type=int,
- help="User ID for the operation",
- )
-
- def handle(self, *args, **options):
- # Filter Date : July 9, 2023
- # Link https://github.com/learningequality/studio/pull/4189
- # The PR date for the frontend change is July 10, 2023
- # we would set the filter day one day back just to be sure
-
- is_test = options['is_test']
- user_id = options['user_id']
-
- if not is_test:
- user_id = User.objects.get(email='channeladmin@learningequality.org').pk
-
- filter_date = datetime.datetime(2023, 7, 9, tzinfo=timezone.utc)
- main_trees_cte = With(
- (
- Channel.objects.filter(
- main_tree__isnull=False
- )
- .annotate(channel_id=F("id"))
- .values("channel_id", "deleted", tree_id=F("main_tree__tree_id"))
- ),
- name="main_trees",
- )
-
- nodes = main_trees_cte.join(
- ContentNode.objects.all(),
- tree_id=main_trees_cte.col.tree_id,
- ).annotate(channel_id=main_trees_cte.col.channel_id, deleted=main_trees_cte.col.deleted)
-
- original_source_nodes = (
- nodes.with_cte(main_trees_cte)
- .filter(
- node_id=OuterRef("original_source_node_id"),
- )
- .exclude(
- tree_id=OuterRef("tree_id"),
- )
- .annotate(
- coalesced_provider=Coalesce("provider", Value("")),
- coalesced_author=Coalesce("author", Value("")),
- coalesced_aggregator=Coalesce("aggregator", Value("")),
- coalesced_license_id=Coalesce("license_id", -1),
- )
- )
- # We filter out according to last_modified date before this PR
- # https://github.com/learningequality/studio/pull/4189
- # As we want to lossen up the public=True Filter and open the
- # migration for all the nodes even if they are not published
- diff = (
- nodes.with_cte(main_trees_cte).filter(
- deleted=False, # we dont want the channel to be deleted or else we are fixing ghost nodes
- source_node_id__isnull=False,
- original_source_node_id__isnull=False,
- modified__lt=filter_date
- )
- ).annotate(
- coalesced_provider=Coalesce("provider", Value("")),
- coalesced_author=Coalesce("author", Value("")),
- coalesced_aggregator=Coalesce("aggregator", Value("")),
- coalesced_license_id=Coalesce("license_id", -1),
- )
- diff_combined = diff.annotate(
- original_source_node_f_changed=Exists(
- original_source_nodes.filter(
- ~Q(coalesced_provider=OuterRef("coalesced_provider"))
- | ~Q(coalesced_author=OuterRef("coalesced_author"))
- | ~Q(coalesced_aggregator=OuterRef("coalesced_aggregator"))
- | ~Q(coalesced_license_id=OuterRef("coalesced_license_id"))
- )
- )
- ).filter(original_source_node_f_changed=True)
-
- final_nodes = diff_combined.values(
- "id",
- "channel_id",
- "original_channel_id",
- "original_source_node_id",
- "coalesced_provider",
- "coalesced_author",
- "coalesced_aggregator",
- "coalesced_license_id",
- "original_source_node_f_changed",
- ).order_by()
-
- channel_ids_to_republish = set()
-
- for item in final_nodes:
- base_node = ContentNode.objects.get(pk=item["id"])
-
- original_source_channel_id = item["original_channel_id"]
- original_source_node_id = item["original_source_node_id"]
- tree_id = (
- Channel.objects.filter(pk=original_source_channel_id)
- .values_list("main_tree__tree_id", flat=True)
- .get()
- )
- original_source_node = ContentNode.objects.filter(
- tree_id=tree_id, node_id=original_source_node_id
- )
-
- base_channel = Channel.objects.get(pk=item['channel_id'])
-
- to_be_republished = not (base_channel.main_tree.get_family().filter(changed=True).exists())
-
- if original_source_channel_id is not None and original_source_node.exists():
- # original source node exists and its source fields dont match
- # update the base node
- if base_node.author != original_source_node[0].author:
- base_node.author = original_source_node[0].author
- if base_node.provider != original_source_node[0].provider:
- base_node.provider = original_source_node[0].provider
- if base_node.aggregator != original_source_node[0].aggregator:
- base_node.aggregator = original_source_node[0].aggregator
- if base_node.license != original_source_node[0].license:
- base_node.license = original_source_node[0].license
- base_node.save()
-
- if to_be_republished and base_channel.last_published is not None:
- channel_ids_to_republish.add(base_channel.id)
-
- # we would repbulish the channel
- for channel_id in channel_ids_to_republish:
- publish_channel(user_id, channel_id)
diff --git a/contentcuration/kolibri_public/tests/test_content_app.py b/contentcuration/kolibri_public/tests/test_content_app.py
index 50c4a6a35b..0b6800e0e5 100644
--- a/contentcuration/kolibri_public/tests/test_content_app.py
+++ b/contentcuration/kolibri_public/tests/test_content_app.py
@@ -236,6 +236,17 @@ def _recurse_and_assert(self, data, nodes, recursion_depth=0):
)
return recursion_depth if not recursion_depths else max(recursion_depths)
+ def test_contentnode_tree_invalid_uuid(self):
+ invalid_uuid = "8f0a5b9d89795"
+
+ response = self._get(
+ reverse("publiccontentnode_tree-detail", kwargs={"pk": invalid_uuid})
+ )
+
+ self.assertEqual(response.status_code, 400)
+
+ self.assertEqual(response.data["error"], "Invalid UUID format.")
+
def test_contentnode_tree(self):
response = self._get(
reverse("publiccontentnode_tree-detail", kwargs={"pk": self.root.id})
diff --git a/contentcuration/kolibri_public/tests/test_importmetadata_api.py b/contentcuration/kolibri_public/tests/test_importmetadata_api.py
index b9184f6de1..484fa6c382 100644
--- a/contentcuration/kolibri_public/tests/test_importmetadata_api.py
+++ b/contentcuration/kolibri_public/tests/test_importmetadata_api.py
@@ -85,6 +85,17 @@ def test_import_metadata_through_tags(self):
def test_import_metadata_tags(self):
self._assert_data(public.ContentTag, content.ContentTag, self.tags)
+ def test_import_metadata_invalid_uuid(self):
+ invalid_uuid = "8f0a5b9d89795"
+
+ response = self.client.get(
+ reverse("publicimportmetadata-detail", kwargs={"pk": invalid_uuid})
+ )
+
+ self.assertEqual(response.status_code, 400)
+
+ self.assertEqual(response.data["error"], "Invalid UUID format.")
+
def test_schema_version_too_low(self):
response = self.client.get(
reverse("publicimportmetadata-detail", kwargs={"pk": self.node.id})
diff --git a/contentcuration/kolibri_public/views.py b/contentcuration/kolibri_public/views.py
index 8c66b1993b..faa5588d88 100644
--- a/contentcuration/kolibri_public/views.py
+++ b/contentcuration/kolibri_public/views.py
@@ -10,6 +10,7 @@
import re
from collections import OrderedDict
from functools import reduce
+from uuid import UUID
from django.core.exceptions import ValidationError
from django.db.models import Exists
@@ -35,6 +36,7 @@
from kolibri_public.search import get_available_metadata_labels
from kolibri_public.stopwords import stopwords_set
from le_utils.constants import content_kinds
+from rest_framework import status
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
@@ -45,7 +47,6 @@
from contentcuration.viewsets.base import BaseValuesViewset
from contentcuration.viewsets.base import ReadOnlyValuesViewset
-
logger = logging.getLogger(__name__)
@@ -697,8 +698,15 @@ def retrieve(self, request, pk=None):
:return: an object representing the parent with a pagination object as "children"
"""
- queryset = self.get_tree_queryset(request, pk)
+ try:
+ UUID(pk)
+ except ValueError:
+ return Response(
+ {"error": "Invalid UUID format."},
+ status=status.HTTP_400_BAD_REQUEST
+ )
+ queryset = self.get_tree_queryset(request, pk)
# We explicitly order by lft here, so that the nodes are in tree traversal order, so we can iterate over them and build
# out our nested representation, being sure that any ancestors have already been processed.
nodes = self.serialize(queryset.order_by("lft"))