diff --git a/openedx_learning/core/collections/__init__.py b/openedx_learning/core/collections/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/openedx_learning/core/collections/admin.py b/openedx_learning/core/collections/admin.py new file mode 100644 index 00000000..e2762490 --- /dev/null +++ b/openedx_learning/core/collections/admin.py @@ -0,0 +1,65 @@ +""" +Django admin for Collections. + +This is extremely bare-bones at the moment, and basically gives you just enough +information to let you know whether it's working or not. +""" +from django.contrib import admin + +from openedx_learning.lib.admin_utils import ReadOnlyModelAdmin + +from .models import ( + AddEntity, + Collection, + ChangeSet, + UpdateEntities, + RemoveEntity +) + + +class CollectionChangeSetTabularInline(admin.TabularInline): + model = ChangeSet + fields = ["version_num", "created"] + readonly_fields = ["version_num", "created"] + + +class PublishableEntityInline(admin.TabularInline): + model = Collection.entities.through + + +@admin.register(Collection) +class CollectionAdmin(ReadOnlyModelAdmin): + """ + Read-only admin for LearningPackage model + """ + fields = ["learning_package", "key", "title", "uuid", "created", "created_by"] + readonly_fields = ["learning_package", "key", "title", "uuid", "created", "created_by"] + list_display = ["learning_package", "key", "title", "uuid", "created", "created_by"] + search_fields = ["key", "title", "uuid"] + list_filter = ["learning_package"] + + inlines = [ + CollectionChangeSetTabularInline, + PublishableEntityInline, + ] + + +class AddToCollectionTabularInline(admin.TabularInline): + model = AddEntity + + +class RemoveFromCollectionTabularInline(admin.TabularInline): + model = RemoveEntity + + +class PublishEntityTabularInline(admin.TabularInline): + model = UpdateEntities + + +@admin.register(ChangeSet) +class CollectionChangeSetAdmin(ReadOnlyModelAdmin): + inlines = [ + AddToCollectionTabularInline, + 
RemoveFromCollectionTabularInline,
+        PublishEntityTabularInline,
+    ]
diff --git a/openedx_learning/core/collections/api.py b/openedx_learning/core/collections/api.py
new file mode 100644
index 00000000..85635eec
--- /dev/null
+++ b/openedx_learning/core/collections/api.py
@@ -0,0 +1,166 @@
+"""
+API to manipulate Collections.
+"""
+from __future__ import annotations
+
+from datetime import datetime, timezone
+
+from django.db.models import QuerySet
+from django.db.transaction import atomic
+
+from ..publishing.models import PublishableEntity
+from .models import (
+    Collection, CollectionPublishableEntity, ChangeSet,
+    AddEntity, UpdateEntities,
+)
+
+
+def create_collection(
+    learning_package_id: int,
+    key: str,
+    title: str,
+    pub_entities_qset: QuerySet = PublishableEntity.objects.none(), # default to empty qset
+    created: datetime | None = None,
+    created_by_id: int | None = None,
+) -> Collection:
+    """
+    Create a Collection and populate with a QuerySet of PublishableEntity.
+    """
+    if not created:
+        created = datetime.now(tz=timezone.utc)
+
+    with atomic():
+        collection = Collection(
+            learning_package_id=learning_package_id,
+            key=key,
+            title=title,
+            created=created,
+            created_by_id=created_by_id,
+        )
+        collection.full_clean()
+        collection.save()
+
+        # add_to_collection is what creates our initial CollectionChangeSet, so
+        # we always call it, even if we're just creating an empty Collection.
+        add_to_collection(collection.id, pub_entities_qset, created=created)
+
+    return collection
+
+def get_collection(collection_id: int) -> Collection:
+    """
+    Get a Collection by ID.
+    """
+    return Collection.objects.get(id=collection_id)
+
+def get_collections_matching_entities(entity_ids_qs: QuerySet) -> QuerySet:
+    """
+    Get a QuerySet of Collections that have any of these PublishableEntities.
+ """ + return Collection.objects.filter(publishable_entities__in=entity_ids_qs).distinct() + +def get_last_change_set(collection_id: int) -> ChangeSet | None: + """ + Get the most recent ChangeSet for this Collection. + + This may return None if there is no matching ChangeSet (i.e. this is a newly + created Collection). + """ + return ChangeSet.objects \ + .filter(collection_id=collection_id) \ + .order_by('-version_num') \ + .first() + +def get_next_version_num(collection_id: int) -> int: + last_change_set = get_last_change_set(collection_id=collection_id) + return last_change_set.version_num + 1 if last_change_set else 1 + + +def update_collection_with_publish_log(collection_id: int, publish_log) -> ChangeSet: + change_set = create_next_change_set(collection_id, publish_log.published_at) + UpdateEntities.objects.create(change_set=change_set, publish_log=publish_log) + return change_set + + +def create_next_change_set(collection_id: int, created: datetime | None) -> ChangeSet: + return ChangeSet.objects.create( + collection_id=collection_id, + version_num=get_next_version_num(collection_id), + created=created, + ) + +def create_update_entities(): + pass + + + +def add_to_collection( + collection_id: int, + pub_entities_qset: QuerySet, + created: datetime | None = None +)-> ChangeSet: + """ + Add a QuerySet of PublishableEntities to a Collection. + """ + next_version_num = get_next_version_num(collection_id) + with atomic(): + change_set = ChangeSet.objects.create( + collection_id=collection_id, + version_num=next_version_num, + created=created, + ) + + # Add the joins so we can efficiently query the published versions. + qset = pub_entities_qset.select_related('published', 'published__version') + + # We're going to build our relationship models into big lists and then + # use bulk_create on them in order to reduce the number of queries + # required for this as the size of Collections grow. 
This should be + # reasonable for up to hundreds of PublishableEntities, but we may have + # to look into more complex chunking and async processing if we go + # beyond that. + change_set_adds = [] + collection_pub_entities = [] + for pub_ent in qset.all(): + if hasattr(pub_ent, 'published'): + published_version = pub_ent.published.version + else: + published_version = None + + # These will be associated with the ChangeSet for history tracking. + change_set_adds.append( + AddEntity( + change_set=change_set, + entity=pub_ent, + published_version=published_version, + ) + ) + + # These are the direct Collection <-> PublishableEntity M2M mappings + collection_pub_entities.append( + CollectionPublishableEntity( + collection_id=collection_id, + entity_id=pub_ent.id, + ) + ) + + AddEntity.objects.bulk_create(change_set_adds) + CollectionPublishableEntity.objects.bulk_create(collection_pub_entities) + + return change_set + + +def remove_from_collection( + collection_id: int, + pub_entities_qset: QuerySet, + created: datetime | None = None +) -> ChangeSet: + next_version_num = get_next_version_num(collection_id) + + with atomic(): + change_set = ChangeSet.objects.create( + collection_id=collection_id, + version_num=next_version_num, + created=created, + ) + + return change_set diff --git a/openedx_learning/core/collections/apps.py b/openedx_learning/core/collections/apps.py new file mode 100644 index 00000000..34399c98 --- /dev/null +++ b/openedx_learning/core/collections/apps.py @@ -0,0 +1,27 @@ +""" +Django metadata for the Collections Django application. +""" +from django.apps import AppConfig + + +class CollectionsConfig(AppConfig): + """ + Configuration for the Collections Django application. + """ + + name = "openedx_learning.core.collections" + verbose_name = "Learning Core: Collections" + default_auto_field = "django.db.models.BigAutoField" + label = "oel_collections" + + def ready(self): + """ + Register the ComponentCollection, ComponentCollectionVersion relation. 
+ """ + from ..publishing.signals import PUBLISHED_PRE_COMMIT + from . import handlers + + PUBLISHED_PRE_COMMIT.connect( + handlers.update_collections_from_publish, + dispatch_uid="oel__collections__update_collections_from_publish", + ) diff --git a/openedx_learning/core/collections/handlers.py b/openedx_learning/core/collections/handlers.py new file mode 100644 index 00000000..f56d7262 --- /dev/null +++ b/openedx_learning/core/collections/handlers.py @@ -0,0 +1,29 @@ +""" +Signal handlers for Collections. + +This is to catch updates when things are published. The reason that we use +signals to do this kind of updating is because the ``publishing`` app exists at +a lower layer than the ``collections`` app, i.e. ``publishing`` should not know +that ``collections`` exists. If ``publishing`` updated Collections directly, it +would introduce a circular dependency. +""" +from django.db.transaction import atomic + +from .api import ( + get_collections_matching_entities, + update_collection_with_publish_log, +) + + +def update_collections_from_publish(sender, publish_log=None, **kwargs): + """ + Update all Collections affected by the publish described by publish_log. + """ + # Find all Collections that had at least one PublishableEntity that was + # published in this PublishLog. 
+ affected_collections = get_collections_matching_entities( + publish_log.records.values('entity__id') + ) + with atomic(): + for collection in affected_collections: + update_collection_with_publish_log(collection.id, publish_log) diff --git a/openedx_learning/core/collections/migrations/0001_initial.py b/openedx_learning/core/collections/migrations/0001_initial.py new file mode 100644 index 00000000..18a624e9 --- /dev/null +++ b/openedx_learning/core/collections/migrations/0001_initial.py @@ -0,0 +1,113 @@ +# Generated by Django 3.2.23 on 2024-01-05 15:42 + +from django.conf import settings +import django.core.validators +from django.db import migrations, models +import django.db.models.deletion +import openedx_learning.lib.fields +import openedx_learning.lib.validators +import uuid + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('oel_publishing', '0003_configure_related_names'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='ChangeSet', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('version_num', models.PositiveBigIntegerField(validators=[django.core.validators.MinValueValidator(1)])), + ('created', models.DateTimeField(validators=[openedx_learning.lib.validators.validate_utc_datetime])), + ], + ), + migrations.CreateModel( + name='Collection', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True, verbose_name='UUID')), + ('key', openedx_learning.lib.fields.MultiCollationCharField(db_collations={'mysql': 'utf8mb4_bin', 'sqlite': 'BINARY'}, max_length=500)), + ('title', openedx_learning.lib.fields.MultiCollationCharField(blank=True, db_collations={'mysql': 'utf8mb4_unicode_ci', 'sqlite': 'NOCASE'}, default='', max_length=500)), + ('created', 
models.DateTimeField(validators=[openedx_learning.lib.validators.validate_utc_datetime])), + ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)), + ], + ), + migrations.CreateModel( + name='UpdateEntities', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('change_set', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oel_collections.changeset')), + ('publish_log', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oel_publishing.publishlog')), + ], + ), + migrations.CreateModel( + name='RemoveEntity', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('change_set', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oel_collections.changeset')), + ('entity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oel_publishing.publishableentity')), + ], + ), + migrations.CreateModel( + name='CollectionPublishableEntity', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('collection', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oel_collections.collection')), + ('entity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oel_publishing.publishableentity')), + ], + ), + migrations.AddField( + model_name='collection', + name='entities', + field=models.ManyToManyField(related_name='collections', through='oel_collections.CollectionPublishableEntity', to='oel_publishing.PublishableEntity'), + ), + migrations.AddField( + model_name='collection', + name='learning_package', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oel_publishing.learningpackage'), + ), + migrations.AddField( + model_name='changeset', + name='collection', + 
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='change_sets', to='oel_collections.collection'), + ), + migrations.CreateModel( + name='AddEntity', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('change_set', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oel_collections.changeset')), + ('entity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oel_publishing.publishableentity')), + ('published_version', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='oel_publishing.publishableentityversion')), + ], + ), + migrations.AddConstraint( + model_name='updateentities', + constraint=models.UniqueConstraint(fields=('change_set', 'publish_log'), name='oel_collections_pe_uniq_cs_pl'), + ), + migrations.AddConstraint( + model_name='removeentity', + constraint=models.UniqueConstraint(fields=('change_set', 'entity'), name='oel_collections_refc_uniq_cs_ent'), + ), + migrations.AddConstraint( + model_name='collectionpublishableentity', + constraint=models.UniqueConstraint(fields=('collection', 'entity'), name='oel_collections_cpe_uniq_col_ent'), + ), + migrations.AddConstraint( + model_name='collection', + constraint=models.UniqueConstraint(fields=('learning_package', 'key'), name='oel_collections_col_uniq_lp_key'), + ), + migrations.AddConstraint( + model_name='changeset', + constraint=models.UniqueConstraint(fields=('collection', 'version_num'), name='oel_collections_ccs_uniq_col_vn'), + ), + migrations.AddConstraint( + model_name='addentity', + constraint=models.UniqueConstraint(fields=('change_set', 'entity'), name='oel_collections_aetc_uniq_cs_ent'), + ), + ] diff --git a/openedx_learning/core/collections/migrations/__init__.py b/openedx_learning/core/collections/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/openedx_learning/core/collections/models.py 
b/openedx_learning/core/collections/models.py new file mode 100644 index 00000000..30a52889 --- /dev/null +++ b/openedx_learning/core/collections/models.py @@ -0,0 +1,285 @@ +""" +TLDR Guidelines: + +1. DO NOT modify these models to store full version snapshots. +2. DO NOT use these models to try to reconstruct historical versions of + Collections for fast querying. + +If you're trying to do either of these things, you probably want a new model or +app. For more details, read on. + +The goal of these models is to provide a lightweight method of organizing +PublishableEntities. The first use case for this is modeling the structure of a +v1 Content Library within a LearningPackage. This is what we'll use the +Collection model for. + +An important thing to note here is that Collections are *NOT* publishable +entities themselves. They have no "Draft" or "Published" versions. Collections +are never "published", though the things inside of them are. + +When a LibraryContentBlock makes use of a Content Library, it copies all of +the items it will use into the Course itself. It will also store a version +on the LibraryContentBlock–this is a MongoDB ObjectID in v1 and an integer in +v2 Libraries. Later on, the LibraryContentBlock will want to check back to see +if any updates have been made, using its version as a key. If a new version +exists, the course team has the option of re-copying data from the Library. + +ModuleStore based v1 Libraries and Blockstore-based v2 libraries both version +the entire library in a series of snapshots. This makes it difficult to have +very large libraries, which is an explicit goal for Modular Learning. In +Learning Core, we've moved to tracking the versions of individual Components to +address this issue. But that means we no longer have a single version indicator +for "has anything here changed"? + +We *could* have put that version in the ``publishing`` app's PublishLog, but +that would make it too broad. 
We want the ability to eventually collapse many v1 +Libraries into a single Learning Core backed v2 Library. If we tracked the +versioning in only a central location, then we'd have many false positives where +the version was bumped because something else in the Learning Package changed. +So instead, we're creating a new Collection model inside the LearningPackage to +track that concept. + +A critical takeaway is that we don't have to store snapshots of every version of +a Collection, because that data has been copied over by the LibraryContentBlock. +We only need to store the current state of the Collection, and increment the +version numbers when changes happen. This will allow the LibraryContentBlock to +check in and re-copy over the latest version if the course team desires. + +That's why these models only store the current state of a Collection. Unlike the +``components`` app, ``collections`` does not store fully materialized snapshots +of past versions. This is done intentionally in order to save space and reduce +the cost of writes. Collections may grow to be very large, and we don't want to +be writing N rows with every version, where N is the number of +PublishableEntities in a Collection. + +These models do store changesets, where the number of rows grows in proportion +to the number of things that are actually changing (instead of copying over +everything on every version). This is intended to make it easier to figure out +what changed between two given versions of a Collection. A LibraryContentBlock +in a course will have stored the version number of the last time it copied data +from the Collection, and we can eventually surface this data to the user. + +While it's possible to reconstruct past versions of Collections based off of +this changeset data, it's going to be a very slow process to do so, and it is +strongly discouraged. 
+""" +from __future__ import annotations + +from django.conf import settings +from django.core.validators import MinValueValidator +from django.db import models + +from ..publishing.models import ( + LearningPackage, + PublishableEntity, + PublishableEntityVersion, + PublishLog, +) + +from openedx_learning.lib.fields import ( + case_insensitive_char_field, + immutable_uuid_field, + key_field, + manual_date_time_field, +) + + +class Collection(models.Model): + """ + A Collection is a tracked grouping of PublishableEntities. + """ + uuid = immutable_uuid_field() + learning_package = models.ForeignKey(LearningPackage, on_delete=models.CASCADE) + key = key_field() + + title = case_insensitive_char_field(max_length=500, blank=True, default="") + created = manual_date_time_field() + created_by = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.SET_NULL, + null=True, + blank=True, + ) + + entities = models.ManyToManyField( + PublishableEntity, + through="CollectionPublishableEntity", + related_name="collections", + ) + + class Meta: + constraints = [ + # The version_num must be unique for any given Collection. + models.UniqueConstraint( + fields=[ + "learning_package", + "key", + ], + name="oel_collections_col_uniq_lp_key", + ) + ] + + def __str__(self): + return f"Collection {self.key} ({self.uuid})" + + +class CollectionPublishableEntity(models.Model): + """ + Collection -> PublishableEntity association. + """ + collection = models.ForeignKey( + Collection, + on_delete=models.CASCADE, + ) + entity = models.ForeignKey( + PublishableEntity, + on_delete=models.CASCADE, + ) + + class Meta: + constraints = [ + # Prevent race conditions from making multiple rows associating the + # same Collection to the same Entity. + models.UniqueConstraint( + fields=[ + "collection", + "entity", + ], + name="oel_collections_cpe_uniq_col_ent", + ) + ] + + +class ChangeSet(models.Model): + """ + Represents an atomic set of changes to a Collection. 
+ + There are currently three ways a Collection can change: + + 1. PublishableEntities are added (AddToCollection) + 2. PublishableEntities are removed (RemoveFromCollection) + 3. The published version of a PublishableEntity changes (PublishLogRecord) + + TODO: Does this need a reverse index on collection -version_num, since we're + so often reaching for the most recent? + """ + collection = models.ForeignKey( + Collection, on_delete=models.CASCADE, related_name="change_sets" + ) + version_num = models.PositiveBigIntegerField( + null=False, + validators=[MinValueValidator(1)], + ) + created = manual_date_time_field() + + class Meta: + constraints = [ + # The version_num must be unique for any given Collection. + models.UniqueConstraint( + fields=[ + "collection", + "version_num", + ], + name="oel_collections_ccs_uniq_col_vn", + ) + ] + + +class AddEntity(models.Model): + """ + A record for when a PublishableEntity is added to a Collection. + + We also record the published version of the PublishableEntity at the time + it's added to the Collection. This will make it easier to reconstruct the + state of a given version of a Collection if it's necessary to do so. + + Note that something may be removed from a Collection and then re-added at a + later time. + """ + change_set = models.ForeignKey(ChangeSet, on_delete=models.CASCADE) + entity = models.ForeignKey(PublishableEntity, on_delete=models.CASCADE) + + # We want to capture the published version of the entity at the time it's + # added to the Collection. This may be null for entities that have not yet + # been published. + published_version = models.ForeignKey( + PublishableEntityVersion, on_delete=models.CASCADE, null=True, + ) + + class Meta: + constraints = [ + # We can't add the same Entity more than once in the same ChangeSet. 
+ models.UniqueConstraint( + fields=[ + "change_set", + "entity", + ], + name="oel_collections_aetc_uniq_cs_ent", + ) + ] + + def __str__(self): + return f"Add {self.entity_id} in changeset {self.change_set_id}" + + +class RemoveEntity(models.Model): + """ + A record for when a PublishableEntity is removed from a Collection. + + Note that something may be removed from a Collection, re-added at a later + time, and then removed again. + """ + change_set = models.ForeignKey(ChangeSet, on_delete=models.CASCADE) + entity = models.ForeignKey(PublishableEntity, on_delete=models.CASCADE) + + class Meta: + constraints = [ + # We can't add the same Entity more than once in the same ChangeSet. + models.UniqueConstraint( + fields=[ + "change_set", + "entity", + ], + name="oel_collections_refc_uniq_cs_ent", + ) + ] + + +class UpdateEntities(models.Model): + """ + A record for when the published version of PublishableEntites changes. + + We store a reference to the PublishLog where the publishes happen instead of + storing each PublishLogRecord because many PublishableEntities may get + published at the same time, and they may exist in many Collections. That + would mean that we'd have to create (Collections X PublishableEntities) rows + worth of UpdateEntities for the Collections and PublishableEntities that + were affected. By tying it to the PublishLog, we at least reduce that to the + number of Collections affected. + + If you need to find out which things were published, you can query for the + intersection of the PublishableEntities from the PublishLogRecords tied to + the PublishLog and the PublishableEntities in the Collection. This isn't + completely accurate, since it would not return results for any + PublishableEntities that were removed from the Collection between the time + that the publish happened and you did the query. But this is not a query + pattern that we're optimizing for. 
It's technically possible to still + extract this information by replaying all the RemoveEntity entries between + the UpdateEntities and the time the query is being done, but that's not an + expected use case. + """ + change_set = models.ForeignKey(ChangeSet, on_delete=models.CASCADE) + publish_log = models.ForeignKey(PublishLog, on_delete=models.CASCADE) + + class Meta: + constraints = [ + # The same PublishLogRecord shouldn't show up multiple times for the + # same ChangeSet. + models.UniqueConstraint( + fields=[ + "change_set", + "publish_log", + ], + name="oel_collections_pe_uniq_cs_pl", + ) + ] diff --git a/openedx_learning/core/collections/readme.rst b/openedx_learning/core/collections/readme.rst new file mode 100644 index 00000000..94a3cf29 --- /dev/null +++ b/openedx_learning/core/collections/readme.rst @@ -0,0 +1,22 @@ +Collections App +=============== + +The ``collections`` app ... + +Motivation +---------- + + +Intended Use Cases +------------------ + +* + + + +Architecture Guidelines +----------------------- + +Things to remember: + +* Collections may grow very large. diff --git a/openedx_learning/core/publishing/api.py b/openedx_learning/core/publishing/api.py index ab4099ad..2f7cec08 100644 --- a/openedx_learning/core/publishing/api.py +++ b/openedx_learning/core/publishing/api.py @@ -22,6 +22,7 @@ PublishLog, PublishLogRecord, ) +from .signals import PUBLISHED_PRE_COMMIT def create_learning_package( @@ -112,7 +113,7 @@ def publish_all_drafts( message="", published_at: datetime | None = None, published_by: int | None = None -): +) -> None: """ Publish everything that is a Draft and is not already published. """ @@ -177,6 +178,14 @@ def publish_from_drafts( }, ) + # We are intentionally using ``send`` instead of ``send_robust`` here, + # because we want to allow listeners to throw an exception and rollback + # the publish transaction if necessary. 
If you replace this with more + # sophisticated error catching and reporting later, please remember that + # exceptions should generally be caught outside of the atomic() block: + # https://docs.djangoproject.com/en/4.2/topics/db/transactions/#controlling-transactions-explicitly + PUBLISHED_PRE_COMMIT.send(PublishLogRecord, publish_log=publish_log) + return publish_log diff --git a/openedx_learning/core/publishing/migrations/0003_configure_related_names.py b/openedx_learning/core/publishing/migrations/0003_configure_related_names.py new file mode 100644 index 00000000..95217c89 --- /dev/null +++ b/openedx_learning/core/publishing/migrations/0003_configure_related_names.py @@ -0,0 +1,24 @@ +# Generated by Django 3.2.23 on 2024-01-05 15:39 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('oel_publishing', '0002_alter_fk_on_delete'), + ] + + operations = [ + migrations.AlterField( + model_name='publishableentity', + name='learning_package', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='publishable_entities', to='oel_publishing.learningpackage'), + ), + migrations.AlterField( + model_name='publishlogrecord', + name='publish_log', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='records', to='oel_publishing.publishlog'), + ), + ] diff --git a/openedx_learning/core/publishing/models.py b/openedx_learning/core/publishing/models.py index 1eb6803c..936854d4 100644 --- a/openedx_learning/core/publishing/models.py +++ b/openedx_learning/core/publishing/models.py @@ -140,7 +140,11 @@ class PublishableEntity(models.Model): """ uuid = immutable_uuid_field() - learning_package = models.ForeignKey(LearningPackage, on_delete=models.CASCADE) + learning_package = models.ForeignKey( + LearningPackage, + on_delete=models.CASCADE, + related_name="publishable_entities", + ) key = key_field() created = 
manual_date_time_field() created_by = models.ForeignKey( @@ -362,7 +366,11 @@ class PublishLogRecord(models.Model): and ``new_version`` field values. """ - publish_log = models.ForeignKey(PublishLog, on_delete=models.CASCADE) + publish_log = models.ForeignKey( + PublishLog, + on_delete=models.CASCADE, + related_name="records", + ) entity = models.ForeignKey(PublishableEntity, on_delete=models.RESTRICT) old_version = models.ForeignKey( PublishableEntityVersion, diff --git a/openedx_learning/core/publishing/signals.py b/openedx_learning/core/publishing/signals.py new file mode 100644 index 00000000..d76f01b8 --- /dev/null +++ b/openedx_learning/core/publishing/signals.py @@ -0,0 +1,36 @@ +""" +Publishing related, process-internal signals. +""" +from django.dispatch import Signal + + +# The PUBLISHED_PRE_COMMIT is sent: +# +# * AFTER a set of PublishableEntity models has been published–i.e. its entries +# in the publishing.models.Published model have been updated to new versions +# and a PublishLog entry has been created with associated PublishLogRecords. +# * BEFORE those publishing changes are committed to the database. +# +# This is the signal that you catch if you need to take actions when content is +# published, and failing those actions should cancel/rollback the publish. One +# case in which you might want to do this is if you have data models that need +# to track and add supplemental data to every PublishLog entry. A transient +# failure that occurs during this process might introduce data inconsistencies +# that we want to avoid. It's better to fail the entire request and force the +# system (or user) to try again. +# +# Do NOT try to catch this signal to launch a celery task. It is sent before +# the publishing model additions have been committed to the database, so they +# will not be accessible from another process. 
It may look like it's working +# because your celery processes are running in-process during development, or +# because delays in celery process launch allow the original request to commit +# before the celery task actually tries to run its query. But this kind of usage +# will cause issues in production environments at some point. +# +# Signal handlers should be simple and fast. Handlers should not do external web +# service calls, or anything else that is prone to unpredictable latency. +# +# providing_args=[ +# 'publish_log', # instance of saved PublishLog +# ] +PUBLISHED_PRE_COMMIT = Signal() diff --git a/test_settings.py b/test_settings.py index 28676e13..29c1fe40 100644 --- a/test_settings.py +++ b/test_settings.py @@ -40,9 +40,10 @@ def root(*args): # django-rules based authorization 'rules.apps.AutodiscoverRulesConfig', # Our own apps - "openedx_learning.core.components.apps.ComponentsConfig", - "openedx_learning.core.contents.apps.ContentsConfig", - "openedx_learning.core.publishing.apps.PublishingConfig", + "openedx_learning.core.components", + "openedx_learning.core.contents", + "openedx_learning.core.publishing", + "openedx_learning.core.collections", "openedx_tagging.core.tagging.apps.TaggingConfig", ] diff --git a/tests/openedx_learning/core/collections/__init__.py b/tests/openedx_learning/core/collections/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/openedx_learning/core/collections/test_api.py b/tests/openedx_learning/core/collections/test_api.py new file mode 100644 index 00000000..c2a018b3 --- /dev/null +++ b/tests/openedx_learning/core/collections/test_api.py @@ -0,0 +1,77 @@ +""" +Tests of the Collection app's python API +""" +from datetime import datetime, timezone + + +from openedx_learning.core.collections import api as collections_api +from openedx_learning.core.publishing import api as publishing_api +from openedx_learning.core.publishing.models import PublishableEntity + +from openedx_learning.lib.test_utils 
import TestCase


class CollectionsTestCase(TestCase):
    """
    Test creating Collections
    """
    @classmethod
    def setUpTestData(cls) -> None:
        super().setUpTestData()
        cls.created = datetime(2023, 12, 7, 18, 23, 50, tzinfo=timezone.utc)
        cls.package = publishing_api.create_learning_package(
            "collections_test_learning_pkg_key",
            "Collections Testing LearningPackage 🔥",
            created=cls.created,
        )

        # Make and Publish one PublishableEntity
        cls.published_entity = publishing_api.create_publishable_entity(
            cls.package.id,
            "my_entity_published_example",
            cls.created,
            created_by=None,
        )
        cls.pe_version = publishing_api.create_publishable_entity_version(
            entity_id=cls.published_entity.id,
            version_num=1,
            title="An Entity that we'll Publish 🌴",
            created=cls.created,
            created_by=None,
        )
        publishing_api.publish_all_drafts(
            cls.package.id,
            message="Publish from CollectionsTestCase.setUpTestData",
            published_at=cls.created,
        )

        # Leave another PublishableEntity in Draft.
        cls.draft_entity = publishing_api.create_publishable_entity(
            cls.package.id,
            "my_entity_draft_example",
            cls.created,
            created_by=None,
        )
        cls.de_version = publishing_api.create_publishable_entity_version(
            entity_id=cls.draft_entity.id,
            version_num=1,
            title="An Entity that we'll keep in Draft 🌴",
            created=cls.created,
            created_by=None,
        )

    def test_bootstrap_only_published(self) -> None:
        """
        Normal flow with no errors.
        """
        collection = collections_api.create_collection(
            self.package.id,
            key="test_bootstrap_only_published_collection",
            title="Test Bootstrap 🦃 Only Published Collection",
            pub_entities_qset=PublishableEntity.objects.filter(
                id=self.published_entity.id
            ),
            created=self.created,
        )
        entities = list(collection.entities.all())
        assert len(entities) == 1