diff --git a/olx_importer/management/commands/load_components.py b/olx_importer/management/commands/load_components.py index da51fbae5..a6c59211e 100644 --- a/olx_importer/management/commands/load_components.py +++ b/olx_importer/management/commands/load_components.py @@ -61,12 +61,12 @@ def init_known_types(self): def add_arguments(self, parser): parser.add_argument("course_data_path", type=pathlib.Path) - parser.add_argument("learning_package_key", type=str) + parser.add_argument("learning_package_ref", type=str) - def handle(self, course_data_path, learning_package_key, **options): + def handle(self, course_data_path, learning_package_ref, **options): self.course_data_path = course_data_path - self.learning_package_key = learning_package_key - self.load_course_data(learning_package_key) + self.learning_package_ref = learning_package_ref + self.load_course_data(learning_package_ref) def get_course_title(self): course_type_dir = self.course_data_path / "course" @@ -74,20 +74,20 @@ def get_course_title(self): course_root = ET.parse(course_xml_file).getroot() return course_root.attrib.get("display_name", "Unknown Course") - def load_course_data(self, learning_package_key): + def load_course_data(self, learning_package_ref): print(f"Importing course from: {self.course_data_path}") now = datetime.now(timezone.utc) title = self.get_course_title() - if content_api.learning_package_exists(learning_package_key): + if content_api.learning_package_exists(learning_package_ref): raise CommandError( - f"{learning_package_key} already exists. " + f"{learning_package_ref} already exists. " "This command currently only supports initial import." 
) with transaction.atomic(): self.learning_package = content_api.create_learning_package( - learning_package_key, title, created=now, + learning_package_ref, title, created=now, ) for block_type in SUPPORTED_TYPES: self.import_block_type(block_type, now) #, publish_log_entry) diff --git a/src/openedx_content/applets/backup_restore/api.py b/src/openedx_content/applets/backup_restore/api.py index b4cda8828..1f768b0b0 100644 --- a/src/openedx_content/applets/backup_restore/api.py +++ b/src/openedx_content/applets/backup_restore/api.py @@ -5,26 +5,28 @@ from django.contrib.auth.models import User as UserType # pylint: disable=imported-auth-user -from ..publishing.api import get_learning_package_by_key +from ..publishing.api import get_learning_package_by_ref from .zipper import LearningPackageUnzipper, LearningPackageZipper -def create_zip_file(lp_key: str, path: str, user: UserType | None = None, origin_server: str | None = None) -> None: +def create_zip_file( + package_ref: str, path: str, user: UserType | None = None, origin_server: str | None = None +) -> None: """ Creates a dump zip file for the given learning package key at the given path. The zip file contains a TOML representation of the learning package and its contents. - Can throw a NotFoundError at get_learning_package_by_key + Can throw a NotFoundError at get_learning_package_by_ref """ - learning_package = get_learning_package_by_key(lp_key) + learning_package = get_learning_package_by_ref(package_ref) LearningPackageZipper(learning_package, user, origin_server).create_zip(path) -def load_learning_package(path: str, key: str | None = None, user: UserType | None = None) -> dict: +def load_learning_package(path: str, package_ref: str | None = None, user: UserType | None = None) -> dict: """ Loads a learning package from a zip file at the given path. Restores the learning package and its contents to the database. Returns a dictionary with the status of the operation and any errors encountered. 
""" with zipfile.ZipFile(path, "r") as zipf: - return LearningPackageUnzipper(zipf, key, user).load() + return LearningPackageUnzipper(zipf, package_ref, user).load() diff --git a/src/openedx_content/applets/backup_restore/serializers.py b/src/openedx_content/applets/backup_restore/serializers.py index 8c5f8e955..eb43528cb 100644 --- a/src/openedx_content/applets/backup_restore/serializers.py +++ b/src/openedx_content/applets/backup_restore/serializers.py @@ -1,5 +1,11 @@ """ The serializers module for restoration of authoring data. + +Please note that the serializers are defined from the perspective of the +TOML format, with the models as the "source". That is, when the model fields +and TOML fields differ, we'll declare it like this: + + my_toml_field = serializers.BlahField(source="my_model_field") """ from datetime import timezone @@ -14,11 +20,14 @@ class LearningPackageSerializer(serializers.Serializer): # pylint: disable=abst Serializer for learning packages. Note: - The `key` field is serialized, but it is generally not trustworthy for restoration. - During restore, a new key may be generated or overridden. + The ref/key field is serialized but is generally not trustworthy for + restoration. During restore, a new ref may be generated or overridden. """ + title = serializers.CharField(required=True) - key = serializers.CharField(required=True) + # The model field is now LearningPackage.package_ref, but the archive format + # still uses "key". A future v2 format may align the name. + key = serializers.CharField(required=True, source="package_ref") description = serializers.CharField(required=True, allow_blank=True) created = serializers.DateTimeField(required=True, default_timezone=timezone.utc) @@ -42,8 +51,11 @@ class EntitySerializer(serializers.Serializer): # pylint: disable=abstract-meth """ Serializer for publishable entities. 
""" + can_stand_alone = serializers.BooleanField(required=True) - key = serializers.CharField(required=True) + # The model field is now PublishableEntity.entity_ref, but the archive format + # still uses "key". A future v2 format may align the name. + key = serializers.CharField(required=True, source="entity_ref") created = serializers.DateTimeField(required=True, default_timezone=timezone.utc) @@ -52,10 +64,13 @@ class EntityVersionSerializer(serializers.Serializer): # pylint: disable=abstra Serializer for publishable entity versions. """ title = serializers.CharField(required=True) - entity_key = serializers.CharField(required=True) created = serializers.DateTimeField(required=True, default_timezone=timezone.utc) version_num = serializers.IntegerField(required=True) + # Note: Unlike the fields above, `entity_ref` does not appear on the model + # nor in the TOML. This is just added by the validation pipeline for convenience. + entity_ref = serializers.CharField(required=True) + class ComponentSerializer(EntitySerializer): # pylint: disable=abstract-method """ @@ -68,7 +83,7 @@ def validate(self, attrs): Custom validation logic: parse the entity_key into (component_type, component_code). """ - entity_key = attrs["key"] + entity_key = attrs["entity_ref"] try: component_type_obj, component_code = _get_or_create_component_type_by_entity_key(entity_key) attrs["component_type"] = component_type_obj diff --git a/src/openedx_content/applets/backup_restore/toml.py b/src/openedx_content/applets/backup_restore/toml.py index 520d6b877..b5afa6174 100644 --- a/src/openedx_content/applets/backup_restore/toml.py +++ b/src/openedx_content/applets/backup_restore/toml.py @@ -43,7 +43,9 @@ def toml_learning_package( # Learning package main info section = tomlkit.table() section.add("title", learning_package.title) - section.add("key", learning_package.key) + # The model field is now LearningPackage.package_ref, but the archive format + # still uses "key". 
A future v2 format may align the name. + section.add("key", learning_package.package_ref) section.add("description", learning_package.description) section.add("created", learning_package.created) section.add("updated", learning_package.updated) @@ -89,8 +91,9 @@ def _get_toml_publishable_entity_table( """ entity_table = tomlkit.table() entity_table.add("can_stand_alone", entity.can_stand_alone) - # Add key since the toml filename doesn't show the real key - entity_table.add("key", entity.key) + # The model field is now PublishableEntity.entity_ref, but the archive format + # still uses "key". A future v2 format may align the name. + entity_table.add("key", entity.entity_ref) entity_table.add("created", entity.created) if not include_versions: @@ -191,13 +194,13 @@ def toml_publishable_entity_version(version: PublishableEntityVersion) -> tomlki if hasattr(version, 'containerversion'): # If the version has a container version, add its children container_table = tomlkit.table() - children = containers_api.get_container_children_entities_keys(version.containerversion) + children = containers_api.get_container_children_entity_refs(version.containerversion) container_table.add("children", children) version_table.add("container", container_table) return version_table -def toml_collection(collection: Collection, entity_keys: list[str]) -> str: +def toml_collection(collection: Collection, entity_refs: list[str]) -> str: """ Create a TOML representation of a collection. 
@@ -215,7 +218,7 @@ def toml_collection(collection: Collection, entity_keys: list[str]) -> str: doc = tomlkit.document() entities_array = tomlkit.array() - entities_array.extend(entity_keys) + entities_array.extend(entity_refs) entities_array.multiline(True) collection_table = tomlkit.table() diff --git a/src/openedx_content/applets/backup_restore/zipper.py b/src/openedx_content/applets/backup_restore/zipper.py index 4e2a82cb3..4a5413577 100644 --- a/src/openedx_content/applets/backup_restore/zipper.py +++ b/src/openedx_content/applets/backup_restore/zipper.py @@ -202,7 +202,7 @@ def get_publishable_entities(self) -> QuerySet[PublishableEntity]: to_attr="prefetched_media", ), ) - .order_by("key") + .order_by("entity_ref") ) def get_collections(self) -> QuerySet[Collection]: @@ -238,25 +238,25 @@ def get_versions_to_write( versions_to_write.append(published_version) return versions_to_write, draft_version, published_version - def get_entity_toml_filename(self, entity_key: str) -> str: + def get_entity_toml_filename(self, entity_ref: str) -> str: """ Generate a unique TOML filename for a publishable entity. Ensures that the filename is unique within the zip file. Behavior: - - If the slugified key has not been used yet, use it as the filename. + - If the slugified ref has not been used yet, use it as the filename. - If it has been used, append a short hash to ensure uniqueness. Args: - entity_key (str): The key of the publishable entity. + entity_ref (str): The entity_ref of the publishable entity. Returns: str: A unique TOML filename for the entity. 
""" - slugify_name = slugify(entity_key, allow_unicode=True) + slugify_name = slugify(entity_ref, allow_unicode=True) if slugify_name in self.entities_filenames_already_created: - filename = slugify_hashed_filename(entity_key) + filename = slugify_hashed_filename(entity_ref) else: filename = slugify_name @@ -316,7 +316,7 @@ def create_zip(self, path: str) -> None: ) if hasattr(entity, 'container'): - entity_filename = self.get_entity_toml_filename(entity.key) + entity_filename = self.get_entity_toml_filename(entity.entity_ref) entity_toml_filename = f"{entity_filename}.toml" entity_toml_path = entities_folder / entity_toml_filename self.add_file_to_zip(zipf, entity_toml_path, entity_toml_content, timestamp=latest_modified) @@ -403,11 +403,11 @@ def create_zip(self, path: str) -> None: for collection in collections: collection_hash_slug = self.get_entity_toml_filename(collection.collection_code) collection_toml_file_path = collections_folder / f"{collection_hash_slug}.toml" - entity_keys_related = collection.entities.order_by("key").values_list("key", flat=True) + entity_refs_related = collection.entities.order_by("entity_ref").values_list("entity_ref", flat=True) self.add_file_to_zip( zipf, collection_toml_file_path, - toml_collection(collection, list(entity_keys_related)), + toml_collection(collection, list(entity_refs_related)), timestamp=collection.modified, ) @@ -418,10 +418,10 @@ class RestoreLearningPackageData: Data about the restored learning package. 
""" id: int # The ID of the restored learning package - key: str # The key of the restored learning package (may be different if staged) - archive_lp_key: str # The original key from the archive - archive_org_key: str # The original organization key from the archive - archive_slug: str # The original slug from the archive + package_ref: str # The package_ref of the restored learning package (may be different if staged) + archive_package_ref: str # The original package_ref from the archive + archive_org_code: str | None # The org code parsed from archive_package_ref, or None if unparseable + archive_package_code: str | None # The package code parsed from archive_package_ref, or None if unparseable title: str num_containers: int num_sections: int @@ -454,35 +454,53 @@ class RestoreResult: backup_metadata: BackupMetadata | None = None -def unpack_lp_key(lp_key: str) -> tuple[str, str]: +def unpack_package_ref(package_ref: str) -> tuple[str | None, str | None]: """ - Unpack a learning package key into its components. + Try to parse org_code and package_code from a package_ref. + + By convention, package_refs take the form ``"{prefix}:{org_code}:{package_code}"``, + but this is only a convention — package_ref is opaque and the parse may fail. + Returns ``(None, None)`` if the ref does not match the expected format. """ - parts = lp_key.split(":") + parts = package_ref.split(":") if len(parts) < 3: - raise ValueError(f"Invalid learning package key: {lp_key}") - _, org_key, lp_slug = parts[:3] - return org_key, lp_slug + return None, None + _, org_code, package_code = parts[:3] + return org_code, package_code -def generate_staged_lp_key(archive_lp_key: str, user: UserType) -> str: +def generate_staged_package_ref(archive_package_ref: str, user: UserType) -> str: """ - Generate a staged learning package key based on the given base key. + Generate a staged learning package ref based on the archive's package_ref. 
+ + We can't trust package_ref from the archive directly, because the archive + could specify *any* arbitrary package_ref, and the user may or may not be + permitted to create a Package using that ref. So, instead, this function + generates a unique and semi-human-readable package_ref which is namespaced + to the current user and appropriate to provisionally save the package under. + The package_ref from the archive can then be presented to the user as a + *suggestion*, which they may or may not choose to use. + + Please note that the ref returned by this function is valid for Packages in + a generic sense, but it's not a valid Content Library key. Callers who are + restoring a Package for Library usage will need to replace this staged + package_ref before being able to render the Library's content. Arguments: - archive_lp_key (str): The original learning package key from the archive. + archive_package_ref (str): The original package_ref from the archive. user (UserType | None): The user performing the restore operation. Example: Input: "lib:WGU:LIB_C001" Output: "lp-restore:dave:WGU:LIB_C001:1728575321" - - The timestamp at the end ensures the key is unique.
""" username = user.username - org_key, lp_slug = unpack_lp_key(archive_lp_key) + org_code, package_code = unpack_package_ref(archive_package_ref) timestamp = int(time.time() * 1000) # Current time in milliseconds - return f"lp-restore:{username}:{org_key}:{lp_slug}:{timestamp}" + if org_code and package_code: + return f"lp-restore:{username}:{org_code}:{package_code}:{timestamp}" + # Fallback for non-conventional package_refs + return f"lp-restore:{username}:{archive_package_ref}:{timestamp}" class LearningPackageUnzipper: @@ -507,21 +525,21 @@ class LearningPackageUnzipper: result = unzipper.load() """ - def __init__(self, zipf: zipfile.ZipFile, key: str | None = None, user: UserType | None = None): + def __init__(self, zipf: zipfile.ZipFile, package_ref: str | None = None, user: UserType | None = None): self.zipf = zipf self.user = user self.user_id = getattr(self.user, "id", None) - self.lp_key = key # If provided, use this key for the restored learning package + self.package_ref = package_ref # If provided, use this package_ref for the restored learning package self.learning_package_id: LearningPackage.ID | None = None # Will be set upon restoration self.utc_now: datetime = datetime.now(timezone.utc) self.component_types_cache: dict[tuple[str, str], ComponentType] = {} self.errors: list[dict[str, Any]] = [] # Maps for resolving relationships - self.components_map_by_key: dict[str, Any] = {} - self.units_map_by_key: dict[str, Any] = {} - self.subsections_map_by_key: dict[str, Any] = {} - self.sections_map_by_key: dict[str, Any] = {} - self.all_publishable_entities_keys: set[str] = set() + self.components_map_by_ref: dict[str, Any] = {} + self.units_map_by_ref: dict[str, Any] = {} + self.subsections_map_by_ref: dict[str, Any] = {} + self.sections_map_by_ref: dict[str, Any] = {} + self.all_publishable_entity_refs: set[str] = set() self.all_published_entities_versions: set[tuple[str, int]] = set() # To track published entity versions # -------------------------- 
@@ -574,7 +592,7 @@ def load(self) -> dict[str, Any]: # Step 3.2: Save everything to the DB # All validations passed, we can proceed to save everything # Save the learning package first to get its ID - archive_lp_key = learning_package_validated["key"] + archive_package_ref = learning_package_validated["package_ref"] learning_package = self._save( learning_package_validated, components_validated, @@ -588,16 +606,16 @@ def load(self) -> dict[str, Any]: for container_type in ["section", "subsection", "unit"] ) - org_key, lp_slug = unpack_lp_key(archive_lp_key) + org_code, package_code = unpack_package_ref(archive_package_ref) result = RestoreResult( status="success", log_file_error=None, lp_restored_data=RestoreLearningPackageData( id=learning_package.id, - key=learning_package.key, - archive_lp_key=archive_lp_key, # The original key from the backup archive - archive_org_key=org_key, # The original organization key from the backup archive - archive_slug=lp_slug, # The original slug from the backup archive + package_ref=learning_package.package_ref, + archive_package_ref=archive_package_ref, + archive_org_code=org_code, + archive_package_code=package_code, title=learning_package.title, num_containers=num_containers, num_sections=len(containers_validated.get("section", [])), @@ -678,7 +696,7 @@ def _extract_entities( continue entity_data = serializer.validated_data - self.all_publishable_entities_keys.add(entity_data["key"]) + self.all_publishable_entity_refs.add(entity_data["entity_ref"]) entity_type = entity_data.pop("container_type", "components") results[entity_type].append(entity_data) @@ -716,11 +734,11 @@ def _extract_collections( continue collection_validated = serializer.validated_data entities_list = collection_validated["entities"] - for entity_key in entities_list: - if entity_key not in self.all_publishable_entities_keys: + for entity_ref in entities_list: + if entity_ref not in self.all_publishable_entity_refs: self.errors.append({ "file": file, - 
"errors": f"Entity key {entity_key} not found for collection {collection_validated.get('key')}" + "errors": f"Entity ref {entity_ref} not found for collection {collection_validated.get('key')}" }) results["collections"].append(collection_validated) @@ -741,18 +759,18 @@ def _save( ) -> LearningPackage: """Persist all validated entities in two phases: published then drafts.""" - # Important: If not using a specific LP key, generate a temporary one + # Important: If not using a specific LP ref/key, generate a temporary one # We cannot use the original key because it may generate security issues - if not self.lp_key: - # Generate a tmp key for the staged learning package + if not self.package_ref: + # Generate a tmp ref for the staged learning package if not self.user: - raise ValueError("User is required to create lp_key") - learning_package["key"] = generate_staged_lp_key( - archive_lp_key=learning_package["key"], + raise ValueError("User is required to generate a staged package_ref") + learning_package["package_ref"] = generate_staged_package_ref( + archive_package_ref=learning_package["package_ref"], user=self.user ) else: - learning_package["key"] = self.lp_key + learning_package["package_ref"] = self.package_ref learning_package_obj = publishing_api.create_learning_package(**learning_package) self.learning_package_id = learning_package_obj.id @@ -780,25 +798,30 @@ def _save_collections(self, learning_package, collections): collection = collections_api.add_to_collection( learning_package_id=learning_package.id, collection_code=collection.collection_code, - entities_qset=publishing_api.get_publishable_entities(learning_package.id).filter(key__in=entities) + entities_qset=publishing_api.get_publishable_entities(learning_package.id).filter( + entity_ref__in=entities + ) ) def _save_components(self, learning_package, components, component_static_files): """Save components and published component versions.""" for valid_component in components.get("components", []): - 
entity_key = valid_component.pop("key") + entity_ref = valid_component.pop("entity_ref") component = components_api.create_component(learning_package.id, created_by=self.user_id, **valid_component) - self.components_map_by_key[entity_key] = component + self.components_map_by_ref[entity_ref] = component for valid_published in components.get("components_published", []): - entity_key = valid_published.pop("entity_key") + entity_ref = valid_published.pop("entity_ref") version_num = valid_published["version_num"] # Should exist, validated earlier - media_to_replace = self._resolve_static_files(version_num, entity_key, component_static_files) + component = self.components_map_by_ref[entity_ref] + media_to_replace = self._resolve_static_files( + version_num, entity_ref, component.component_type, component_static_files + ) self.all_published_entities_versions.add( - (entity_key, version_num) + (entity_ref, version_num) ) # Track published version components_api.create_next_component_version( - self.components_map_by_key[entity_key].publishable_entity.id, + component.publishable_entity.id, media_to_replace=media_to_replace, force_version_num=valid_published.pop("version_num", None), created_by=self.user_id, @@ -817,28 +840,28 @@ def _save_container( """Internal logic for _save_units, _save_subsections, and _save_sections""" type_code = container_cls.type_code # e.g. "unit" for data in containers.get(type_code, []): - entity_key = data.get("key") + entity_ref = data.pop("entity_ref") container = containers_api.create_container( learning_package.id, # As of Verawood, the primary identity of a container is its - # `container_code`. By convention, this equals the entity's - # `key` (aka `entity_ref`). It's safe to assume that all "v1" - # archives have an identical `key` and `container_code` for each - # entity-container. This assumpion may not hold true v2+. - container_code=data.pop("key"), + # `container_code`. 
By convention, this equals the `entity_ref` + # (aka `[entity].key`). It's safe to assume that all v1 + # archives have an identical `entity_ref` and `container_code` for each + # entity-container. BUT, this assumption may not hold true for v2+. + container_code=entity_ref, **data, # should this be allowed to override any of the following fields? created_by=self.user_id, container_cls=container_cls, ) - container_map[entity_key] = container # e.g. `self.units_map_by_key[entity_key] = unit` + container_map[entity_ref] = container # e.g. `self.units_map_by_ref[entity_ref] = unit` for valid_published in containers.get(f"{type_code}_published", []): - entity_key = valid_published.pop("entity_key") + entity_ref = valid_published.pop("entity_ref") children = self._resolve_children(valid_published, children_map) version_num = valid_published.pop("version_num", None) - self.all_published_entities_versions.add((entity_key, version_num)) + self.all_published_entities_versions.add((entity_ref, version_num)) containers_api.create_next_container_version( - container_map[entity_key], + container_map[entity_ref], force_version_num=version_num, **valid_published, # should this be allowed to override any of the following fields?
entities=children, @@ -851,8 +874,8 @@ def _save_units(self, learning_package, containers): learning_package, containers, container_cls=Unit, - container_map=self.units_map_by_key, - children_map=self.components_map_by_key, + container_map=self.units_map_by_ref, + children_map=self.components_map_by_ref, ) def _save_subsections(self, learning_package, containers): @@ -861,8 +884,8 @@ def _save_subsections(self, learning_package, containers): learning_package, containers, container_cls=Subsection, - container_map=self.subsections_map_by_key, - children_map=self.units_map_by_key, + container_map=self.subsections_map_by_ref, + children_map=self.units_map_by_ref, ) def _save_sections(self, learning_package, containers): @@ -871,20 +894,23 @@ def _save_sections(self, learning_package, containers): learning_package, containers, container_cls=Section, - container_map=self.sections_map_by_key, - children_map=self.subsections_map_by_key, + container_map=self.sections_map_by_ref, + children_map=self.subsections_map_by_ref, ) def _save_draft_versions(self, components, containers, component_static_files): """Save draft versions for all entity types.""" for valid_draft in components.get("components_drafts", []): - entity_key = valid_draft.pop("entity_key") + entity_ref = valid_draft.pop("entity_ref") version_num = valid_draft["version_num"] # Should exist, validated earlier - if self._is_version_already_exists(entity_key, version_num): + if self._is_version_already_exists(entity_ref, version_num): continue - media_to_replace = self._resolve_static_files(version_num, entity_key, component_static_files) + component = self.components_map_by_ref[entity_ref] + media_to_replace = self._resolve_static_files( + version_num, entity_ref, component.component_type, component_static_files + ) components_api.create_next_component_version( - self.components_map_by_key[entity_key].publishable_entity.id, + component.publishable_entity.id, media_to_replace=media_to_replace, 
force_version_num=valid_draft.pop("version_num", None), # Drafts can diverge from published, so we allow ignoring previous media @@ -900,23 +926,23 @@ def _process_draft_containers( children_map: dict, ): for valid_draft in containers.get(f"{container_cls.type_code}_drafts", []): - entity_key = valid_draft.pop("entity_key") + entity_ref = valid_draft.pop("entity_ref") version_num = valid_draft["version_num"] # Should exist, validated earlier - if self._is_version_already_exists(entity_key, version_num): + if self._is_version_already_exists(entity_ref, version_num): continue children = self._resolve_children(valid_draft, children_map) del valid_draft["version_num"] containers_api.create_next_container_version( - container_map[entity_key], + container_map[entity_ref], **valid_draft, # should this be allowed to override any of the following fields? entities=children, force_version_num=version_num, created_by=self.user_id, ) - _process_draft_containers(Unit, self.units_map_by_key, children_map=self.components_map_by_key) - _process_draft_containers(Subsection, self.subsections_map_by_key, children_map=self.units_map_by_key) - _process_draft_containers(Section, self.sections_map_by_key, children_map=self.subsections_map_by_key) + _process_draft_containers(Unit, self.units_map_by_ref, children_map=self.components_map_by_ref) + _process_draft_containers(Subsection, self.subsections_map_by_ref, children_map=self.units_map_by_ref) + _process_draft_containers(Section, self.sections_map_by_ref, children_map=self.subsections_map_by_ref) # -------------------------- # Utilities @@ -938,9 +964,9 @@ def _write_errors(self) -> StringIO | None: return None return StringIO(content) - def _is_version_already_exists(self, entity_key: str, version_num: int) -> bool: + def _is_version_already_exists(self, entity_ref: str, version_num: int) -> bool: """ - Check if a version already exists for a given entity key and version number. 
+ Check if a version already exists for a given entity_ref and version number. Note: Skip creating draft if this version is already published @@ -949,20 +975,21 @@ def _is_version_already_exists(self, entity_key: str, version_num: int) -> bool: Otherwise, we will raise an IntegrityError on PublishableEntityVersion due to unique constraints between publishable_entity and version_num. """ - identifier = (entity_key, version_num) + identifier = (entity_ref, version_num) return identifier in self.all_published_entities_versions def _resolve_static_files( self, num_version: int, - entity_key: str, + entity_ref: str, + component_type: ComponentType, static_files_map: dict[str, List[str]] ) -> dict[str, bytes | int]: """Resolve static file paths into their binary media content.""" resolved_files: dict[str, bytes | int] = {} - static_file_key = f"{entity_key}:v{num_version}" # e.g., "xblock.v1:html:my_component_123456:v1" - block_type = entity_key.split(":")[1] # e.g., "html" + static_file_key = f"{entity_ref}:v{num_version}" # e.g., "xblock.v1:html:my_component_123456:v1" + block_type = component_type.name # e.g., "html" static_files = static_files_map.get(static_file_key, []) for static_file in static_files: local_key = static_file.split(f"v{num_version}/")[-1] @@ -985,9 +1012,9 @@ def _resolve_static_files( return resolved_files def _resolve_children(self, entity_data: dict[str, Any], lookup_map: dict[str, Any]) -> list[Any]: - """Resolve child entity keys into model instances.""" - children_keys = entity_data.pop("children", []) - return [lookup_map[key] for key in children_keys if key in lookup_map] + """Resolve child entity refs into model instances.""" + children_refs = entity_data.pop("children", []) + return [lookup_map[ref] for ref in children_refs if ref in lookup_map] def _load_entity_data( self, entity_file: str @@ -1007,7 +1034,7 @@ def _validate_versions(self, entity_data, draft, published, serializer_cls, *, f continue serializer = serializer_cls( data={ - 
"entity_key": entity_data["key"], + "entity_ref": entity_data["entity_ref"], "created": self.utc_now, "created_by": None, **version diff --git a/src/openedx_content/applets/collections/api.py b/src/openedx_content/applets/collections/api.py index ef9dae340..6abe5e2b7 100644 --- a/src/openedx_content/applets/collections/api.py +++ b/src/openedx_content/applets/collections/api.py @@ -185,15 +185,15 @@ def remove_from_collection( return collection -def get_entity_collections(learning_package_id: LearningPackage.ID, entity_key: str) -> QuerySet[Collection]: +def get_entity_collections(learning_package_id: LearningPackage.ID, entity_ref: str) -> QuerySet[Collection]: """ Get all collections in the given learning package which contain this entity. Only enabled collections are returned. """ - entity = publishing_api.get_publishable_entity_by_key( + entity = publishing_api.get_publishable_entity_by_ref( learning_package_id, - key=entity_key, + entity_ref=entity_ref, ) return entity.collections.filter(enabled=True).order_by("pk") diff --git a/src/openedx_content/applets/components/admin.py b/src/openedx_content/applets/components/admin.py index 3899fa050..018f21bf1 100644 --- a/src/openedx_content/applets/components/admin.py +++ b/src/openedx_content/applets/components/admin.py @@ -37,16 +37,16 @@ class ComponentAdmin(ReadOnlyModelAdmin): """ Django admin configuration for Component """ - list_display = ("key", "uuid", "component_type", "created") + list_display = ("component_code", "uuid", "component_type", "created") readonly_fields = [ "learning_package", "uuid", "component_type", - "key", + "component_code", "created", ] list_filter = ("component_type", "learning_package") - search_fields = ["publishable_entity__uuid", "publishable_entity__key"] + search_fields = ["component_code"] inlines = [ComponentVersionInline] diff --git a/src/openedx_content/applets/components/api.py b/src/openedx_content/applets/components/api.py index 0a70b8928..814fbf1ef 100644 --- 
a/src/openedx_content/applets/components/api.py +++ b/src/openedx_content/applets/components/api.py @@ -34,7 +34,6 @@ # to be callable only by other apps in the authoring package. __all__ = [ "get_or_create_component_type", - "get_or_create_component_type_by_entity_key", "create_component", "create_component_version", "create_next_component_version", @@ -74,27 +73,6 @@ def get_or_create_component_type(namespace: str, name: str) -> ComponentType: return component_type -def get_or_create_component_type_by_entity_key(entity_key: str) -> tuple[ComponentType, str]: - """ - Get or create a ComponentType based on a full entity key string. - - The entity key is expected to be in the format - ``"{namespace}:{type_name}:{component_code}"``. This function will parse out - the ``namespace`` and ``type_name`` parts and use those to get or create the - ComponentType. - - Raises ValueError if the entity_key is not in the expected format. - """ - try: - namespace, type_name, component_code = entity_key.split(':', 2) - except ValueError as exc: - raise ValueError( - f"Invalid entity_key format: {entity_key!r}. " - "Expected format: '{namespace}:{type_name}:{component_code}'" - ) from exc - return get_or_create_component_type(namespace, type_name), component_code - - def create_component( learning_package_id: LearningPackage.ID, /, @@ -106,13 +84,17 @@ def create_component( can_stand_alone: bool = True, ) -> Component: """ - Create a new Component (an entity like a Problem or Video) + Create a new Component (an entity like a Problem or Video). + + The ``entity_ref`` is conventionally derived as + ``"{namespace}:{type_name}:{component_code}"``, although callers should not assume + that this will always be true. 
""" - key = f"{component_type.namespace}:{component_type.name}:{component_code}" + entity_ref = f"{component_type.namespace}:{component_type.name}:{component_code}" with atomic(): publishable_entity = publishing_api.create_publishable_entity( learning_package_id, - key, + entity_ref, created, created_by, can_stand_alone=can_stand_alone @@ -301,7 +283,7 @@ def create_component_and_version( # pylint: disable=too-many-positional-argumen can_stand_alone: bool = True, ) -> tuple[Component, ComponentVersion]: """ - Create a Component and associated ComponentVersion atomically + Create a Component and associated ComponentVersion atomically. """ with atomic(): component = create_component( @@ -450,13 +432,13 @@ def get_collection_components( def look_up_component_version_media( - learning_package_key: str, - component_key: str, + learning_package_ref: str, + entity_ref: str, version_num: int, key: Path, ) -> ComponentVersionMedia: """ - Look up ComponentVersionMedia by human readable keys. + Look up ComponentVersionMedia by human readable identifiers. Can raise a django.core.exceptions.ObjectDoesNotExist error if there is no matching ComponentVersionMedia. @@ -465,8 +447,8 @@ def look_up_component_version_media( I don't know if we wantto make it a part of the public interface. 
""" queries = ( - Q(component_version__component__learning_package__key=learning_package_key) - & Q(component_version__component__publishable_entity__key=component_key) + Q(component_version__component__learning_package__package_ref=learning_package_ref) + & Q(component_version__component__publishable_entity__entity_ref=entity_ref) & Q(component_version__publishable_entity_version__version_num=version_num) & Q(key=key) ) @@ -529,13 +511,13 @@ def _get_component_version_info_headers(component_version: ComponentVersion) -> learning_package = component.learning_package return { # Component - "X-Open-edX-Component-Key": component.publishable_entity.key, + "X-Open-edX-Component-Ref": component.publishable_entity.entity_ref, "X-Open-edX-Component-Uuid": component.uuid, # Component Version "X-Open-edX-Component-Version-Uuid": component_version.uuid, "X-Open-edX-Component-Version-Num": str(component_version.version_num), # Learning Package - "X-Open-edX-Learning-Package-Key": learning_package.key, + "X-Open-edX-Learning-Package-Ref": learning_package.package_ref, "X-Open-edX-Learning-Package-Uuid": learning_package.uuid, } diff --git a/src/openedx_content/applets/components/models.py b/src/openedx_content/applets/components/models.py index 0fbeaccb0..2e16e7727 100644 --- a/src/openedx_content/applets/components/models.py +++ b/src/openedx_content/applets/components/models.py @@ -22,7 +22,7 @@ from django.db import models from typing_extensions import deprecated -from openedx_django_lib.fields import case_sensitive_char_field, code_field, code_field_check, key_field +from openedx_django_lib.fields import case_sensitive_char_field, code_field, code_field_check, ref_field from openedx_django_lib.managers import WithRelationsManager from ..media.models import Media @@ -178,8 +178,8 @@ def pk(self): component_type = models.ForeignKey(ComponentType, on_delete=models.PROTECT) # component_code is an identifier that is local to the learning_package and - # component_type. 
The publishable.key is derived from component_type and - # component_code. + # component_type. The publishable.entity_ref is derived from component_type + # and component_code. component_code = code_field() class Meta: @@ -274,7 +274,7 @@ class ComponentVersionMedia(models.Model): # alternative name for this would be "path", since it's most often used as # an internal file path. However, we might also want to put special # identifiers that don't map as cleanly to file paths at some point. - key = key_field(db_column="_key") + key = ref_field(db_column="_key") class Meta: constraints = [ diff --git a/src/openedx_content/applets/containers/admin.py b/src/openedx_content/applets/containers/admin.py index 45784bd9c..ae86e83c7 100644 --- a/src/openedx_content/applets/containers/admin.py +++ b/src/openedx_content/applets/containers/admin.py @@ -105,13 +105,13 @@ class ContainerAdmin(ReadOnlyModelAdmin): ] # container_code is a model field; container_type_display is a method readonly_fields = fields # type: ignore[assignment] - search_fields = ["publishable_entity__uuid", "publishable_entity__key", "container_code"] + search_fields = ["publishable_entity__uuid", "publishable_entity__entity_ref", "container_code"] inlines = [ContainerVersionInlineForContainer] def learning_package(self, obj: Container) -> SafeText: return model_detail_link( obj.publishable_entity.learning_package, - obj.publishable_entity.learning_package.key, + obj.publishable_entity.learning_package.package_ref, ) def get_queryset(self, request): @@ -241,7 +241,7 @@ def pinned_version_num(self, obj: EntityListRow): def entity_models(self, obj: EntityListRow): return format_html( "{}", - model_detail_link(obj.entity, obj.entity.key), + model_detail_link(obj.entity, obj.entity.entity_ref), one_to_one_related_model_html(obj.entity), ) @@ -304,7 +304,7 @@ def recent_container(self, obj: EntityList) -> SafeText | None: Link to the Container of the newest ContainerVersion that references this EntityList """ if 
latest := _latest_container_version(obj): - return format_html("of: {}", model_detail_link(latest.container, latest.container.key)) + return format_html("of: {}", model_detail_link(latest.container, latest.container.entity_ref)) else: return None @@ -317,7 +317,7 @@ def recent_container_package(self, obj: EntityList) -> SafeText | None: "in: {}", model_detail_link( latest.container.publishable_entity.learning_package, - latest.container.publishable_entity.learning_package.key, + latest.container.publishable_entity.learning_package.package_ref, ), ) else: diff --git a/src/openedx_content/applets/containers/api.py b/src/openedx_content/applets/containers/api.py index db42a98ea..c6b24a34a 100644 --- a/src/openedx_content/applets/containers/api.py +++ b/src/openedx_content/applets/containers/api.py @@ -55,7 +55,7 @@ "create_next_container_version", "get_container", "get_container_version", - "get_container_by_key", + "get_container_by_code", "get_all_container_subclasses", "get_container_subclass", "get_container_type_code_of", @@ -68,7 +68,7 @@ "contains_unpublished_changes", "get_containers_with_entity", "get_container_children_count", - "get_container_children_entities_keys", + "get_container_children_entity_refs", ] @@ -161,13 +161,14 @@ def create_container( """ assert issubclass(container_cls, Container) assert container_cls is not Container, "Creating plain containers is not allowed; use a subclass of Container" + entity_ref = container_code with atomic(): - # By convention, a Container's entity_key is set to its container_code. + # By convention, a Container's entity_ref is set to its container_code. # Do not bake this assumption into other systems. We may change it at some point. 
- entity_key = container_code publishable_entity = publishing_api.create_publishable_entity( learning_package_id, - entity_key, + entity_ref, created, created_by, can_stand_alone=can_stand_alone, @@ -571,22 +572,22 @@ def get_container_version(container_version_pk: int) -> ContainerVersion: return ContainerVersion.objects.get(pk=container_version_pk) -def get_container_by_key(learning_package_id: LearningPackage.ID, /, key: str) -> Container: +def get_container_by_code(learning_package_id: LearningPackage.ID, /, container_code: str) -> Container: """ [ 🛑 UNSTABLE ] - Get a container by its learning package and primary key. + Get a container by its learning package and container code. Args: learning_package_id: The ID of the learning package that contains the container. - key: The primary key of the container. + container_code: The container code of the container. Returns: - The container with the given primary key (as `Container`, not as its typed subclass). + The container with the given container code (as `Container`, not as its typed subclass). """ try: return Container.objects.select_related("container_type").get( - publishable_entity__learning_package_id=learning_package_id, - publishable_entity__key=key, + publishable_entity__learning_package_id=learning_package_id, + container_code=container_code, ) except Container.DoesNotExist: # Check if it's the container or the learning package that does not exist: @@ -874,15 +875,17 @@ def get_container_children_count( return container_version.entity_list.entitylistrow_set.filter(**filter_deleted).count() -def get_container_children_entities_keys(container_version: ContainerVersion) -> list[str]: +def get_container_children_entity_refs(container_version: ContainerVersion) -> list[str]: """ - Fetch the list of entity keys for all entities in the given container version. + Fetch the list of entity refs for all entities in the given container version.
Args: - container_version: The ContainerVersion to fetch the entity keys for. + container_version: The ContainerVersion to fetch the entity refs for. Returns: - A list of entity keys for all entities in the container version, ordered by entity key. + A list of entity refs for all entities in the container version, ordered by position. """ return list( - container_version.entity_list.entitylistrow_set.values_list("entity__key", flat=True).order_by("order_num") + container_version.entity_list.entitylistrow_set + .values_list("entity__entity_ref", flat=True) + .order_by("order_num") ) diff --git a/src/openedx_content/applets/publishing/admin.py b/src/openedx_content/applets/publishing/admin.py index 85adcc5d9..7488937bf 100644 --- a/src/openedx_content/applets/publishing/admin.py +++ b/src/openedx_content/applets/publishing/admin.py @@ -26,10 +26,10 @@ class LearningPackageAdmin(ReadOnlyModelAdmin): """ Read-only admin for LearningPackage model """ - fields = ["key", "title", "uuid", "created", "updated"] - readonly_fields = ["key", "title", "uuid", "created", "updated"] - list_display = ["key", "title", "uuid", "created", "updated"] - search_fields = ["key", "title", "uuid"] + fields = ["package_ref", "title", "uuid", "created", "updated"] + readonly_fields = ["package_ref", "title", "uuid", "created", "updated"] + list_display = ["package_ref", "title", "uuid", "created", "updated"] + search_fields = ["package_ref", "title", "uuid"] class PublishLogRecordTabularInline(admin.TabularInline): @@ -102,7 +102,7 @@ class PublishableEntityVersionTabularInline(admin.TabularInline): def dependencies_list(self, version: PublishableEntityVersion): identifiers = sorted( - [str(dep.key) for dep in version.dependencies.all()] + [str(dep.entity_ref) for dep in version.dependencies.all()] ) return "\n".join(identifiers) @@ -152,7 +152,7 @@ class PublishableEntityAdmin(ReadOnlyModelAdmin): inlines = [PublishableEntityVersionTabularInline] list_display = [ - "key", + "entity_ref", 
"published_version", "draft_version", "uuid", @@ -162,10 +162,10 @@ class PublishableEntityAdmin(ReadOnlyModelAdmin): "can_stand_alone", ] list_filter = ["learning_package", PublishStatusFilter] - search_fields = ["key", "uuid"] + search_fields = ["entity_ref", "uuid"] fields = [ - "key", + "entity_ref", "published_version", "draft_version", "uuid", @@ -176,7 +176,7 @@ class PublishableEntityAdmin(ReadOnlyModelAdmin): "can_stand_alone", ] readonly_fields = [ - "key", + "entity_ref", "published_version", "draft_version", "uuid", @@ -295,7 +295,7 @@ class DraftChangeLogRecordTabularInline(admin.TabularInline): def get_queryset(self, request): queryset = super().get_queryset(request) return queryset.select_related("entity", "old_version", "new_version") \ - .order_by("entity__key") + .order_by("entity__entity_ref") def old_version_num(self, draft_change: DraftChangeLogRecord): if draft_change.old_version is None: diff --git a/src/openedx_content/applets/publishing/api.py b/src/openedx_content/applets/publishing/api.py index edf55b80e..dc2a0abf0 100644 --- a/src/openedx_content/applets/publishing/api.py +++ b/src/openedx_content/applets/publishing/api.py @@ -43,14 +43,14 @@ # to be callable only by other apps in the authoring package. __all__ = [ "get_learning_package", - "get_learning_package_by_key", + "get_learning_package_by_ref", "create_learning_package", "update_learning_package", "learning_package_exists", "create_publishable_entity", "create_publishable_entity_version", "get_publishable_entity", - "get_publishable_entity_by_key", + "get_publishable_entity_by_ref", "get_publishable_entities", "get_last_publish", "get_all_drafts", @@ -76,22 +76,22 @@ def get_learning_package(learning_package_id: LearningPackage.ID, /) -> Learning return LearningPackage.objects.get(id=learning_package_id) -def get_learning_package_by_key(key: str) -> LearningPackage: +def get_learning_package_by_ref(package_ref: str) -> LearningPackage: """ - Get LearningPackage by key. 
+ Get LearningPackage by its package_ref. Can throw a NotFoundError """ - return LearningPackage.objects.get(key=key) + return LearningPackage.objects.get(package_ref=package_ref) def create_learning_package( - key: str, title: str, description: str = "", created: datetime | None = None + package_ref: str, title: str, description: str = "", created: datetime | None = None ) -> LearningPackage: """ Create a new LearningPackage. - The ``key`` must be unique. + The ``package_ref`` must be unique. Errors that can be raised: @@ -101,7 +101,7 @@ def create_learning_package( created = datetime.now(tz=timezone.utc) package = LearningPackage( - key=key, + package_ref=package_ref, title=title, description=description, created=created, @@ -116,7 +116,7 @@ def create_learning_package( def update_learning_package( learning_package_id: LearningPackage.ID, /, - key: str | None = None, + package_ref: str | None = None, title: str | None = None, description: str | None = None, updated: datetime | None = None, @@ -130,11 +130,11 @@ def update_learning_package( # If no changes were requested, there's nothing to update, so just return # the LearningPackage as-is. - if all(field is None for field in [key, title, description, updated]): + if all(field is None for field in [package_ref, title, description, updated]): return lp - if key is not None: - lp.key = key + if package_ref is not None: + lp.package_ref = package_ref if title is not None: lp.title = title if description is not None: @@ -150,17 +150,17 @@ def update_learning_package( return lp -def learning_package_exists(key: str) -> bool: +def learning_package_exists(package_ref: str) -> bool: """ - Check whether a LearningPackage with a particular key exists. + Check whether a LearningPackage with a particular package_ref exists. 
""" - return LearningPackage.objects.filter(key=key).exists() + return LearningPackage.objects.filter(package_ref=package_ref).exists() def create_publishable_entity( learning_package_id: LearningPackage.ID, /, - key: str, + entity_ref: str, created: datetime, # User ID who created this created_by: int | None, @@ -175,7 +175,7 @@ def create_publishable_entity( """ return PublishableEntity.objects.create( learning_package_id=learning_package_id, - key=key, + entity_ref=entity_ref, created=created, created_by_id=created_by, can_stand_alone=can_stand_alone, @@ -286,10 +286,10 @@ def get_publishable_entity(publishable_entity_id: PublishableEntity.ID, /) -> Pu return PublishableEntity.objects.get(pk=publishable_entity_id) -def get_publishable_entity_by_key(learning_package_id: LearningPackage.ID, /, key: str) -> PublishableEntity: +def get_publishable_entity_by_ref(learning_package_id: LearningPackage.ID, /, entity_ref: str) -> PublishableEntity: return PublishableEntity.objects.get( learning_package_id=learning_package_id, - key=key, + entity_ref=entity_ref, ) diff --git a/src/openedx_content/applets/publishing/models/learning_package.py b/src/openedx_content/applets/publishing/models/learning_package.py index 9266b3a99..9baf9a5f2 100644 --- a/src/openedx_content/applets/publishing/models/learning_package.py +++ b/src/openedx_content/applets/publishing/models/learning_package.py @@ -12,8 +12,8 @@ TypedAutoField, case_insensitive_char_field, immutable_uuid_field, - key_field, manual_date_time_field, + ref_field, ) @@ -45,19 +45,15 @@ def pk(self) -> ID: uuid = immutable_uuid_field() - # "key" is a reserved word for MySQL, so we're temporarily using the column - # name of "_key" to avoid breaking downstream tooling. There's an open - # question as to whether this field needs to exist at all, or whether the - # top level library key it's currently used for should be entirely in the - # LibraryContent model. 
- key = key_field(db_column="_key") + # package_ref is an opaque reference string for the LearningPackage. + package_ref = ref_field() title = case_insensitive_char_field(max_length=500, blank=False) # TODO: We should probably defer this field, since many things pull back # LearningPackage as select_related. Usually those relations only care about - # the UUID and key, so maybe it makes sense to separate the model at some - # point. + # the UUID and package_ref, so maybe it makes sense to separate the model at + # some point. description = MultiCollationTextField( blank=True, null=False, @@ -76,16 +72,13 @@ def pk(self) -> ID: updated = manual_date_time_field() def __str__(self): - return f"{self.key}" + return f"{self.package_ref}" class Meta: constraints = [ - # LearningPackage keys must be globally unique. This is something - # that might be relaxed in the future if this system were to be - # extensible to something like multi-tenancy, in which case we'd tie - # it to something like a Site or Org. + # package_refs must be globally unique. models.UniqueConstraint( - fields=["key"], + fields=["package_ref"], name="oel_publishing_lp_uniq_key", ) ] diff --git a/src/openedx_content/applets/publishing/models/publishable_entity.py b/src/openedx_content/applets/publishing/models/publishable_entity.py index c897b26cb..106d88859 100644 --- a/src/openedx_content/applets/publishing/models/publishable_entity.py +++ b/src/openedx_content/applets/publishing/models/publishable_entity.py @@ -19,8 +19,8 @@ TypedBigAutoField, case_insensitive_char_field, immutable_uuid_field, - key_field, manual_date_time_field, + ref_field, ) from openedx_django_lib.managers import WithRelationsManager @@ -85,7 +85,7 @@ class PublishableEntity(models.Model): * Published things need to have the right identifiers so they can be used throughout the system, and the UUID is serving the role of ISBN in physical book publishing. 
- * We want to be able to enforce the idea that "key" is locally unique across + * We want to be able to enforce the idea that "entity_ref" is locally unique across all PublishableEntities within a given LearningPackage. Component and Unit can't do that without a shared model. @@ -128,10 +128,11 @@ class IDField(TypedBigAutoField[ID]): # Boilerplate for fully-typed ID field. related_name="publishable_entities", ) - # "key" is a reserved word for MySQL, so we're temporarily using the column - # name of "_key" to avoid breaking downstream tooling. Consider renaming - # this later. - key = key_field(db_column="_key") + # entity_ref is an opaque reference string assigned by the creator of this + # entity (e.g. derived from component_type + component_code for Components). + # Consumers must treat it as an atomic string — do not parse or reconstruct + # it. + entity_ref = ref_field() created = manual_date_time_field() created_by = models.ForeignKey( @@ -152,11 +153,11 @@ def pk(self) -> ID: class Meta: constraints = [ - # Keys are unique within a given LearningPackage. + # entity_refs are unique within a given LearningPackage. models.UniqueConstraint( fields=[ "learning_package", - "key", + "entity_ref", ], name="oel_pub_ent_uniq_lp_key", ) @@ -166,7 +167,7 @@ class Meta: # * Search by key across all PublishableEntities on the site. This # would be a support-oriented tool from Django Admin. 
models.Index( - fields=["key"], + fields=["entity_ref"], name="oel_pub_ent_idx_key", ), # LearningPackage (reverse) Created Index: @@ -183,7 +184,7 @@ class Meta: verbose_name_plural = "Publishable Entities" def __str__(self): - return f"{self.key}" + return f"{self.entity_ref}" class PublishableEntityVersion(models.Model): @@ -250,7 +251,7 @@ class PublishableEntityVersion(models.Model): ) def __str__(self): - return f"{self.entity.key} @ v{self.version_num} - {self.title}" + return f"{self.entity.entity_ref} @ v{self.version_num} - {self.title}" class Meta: constraints = [ @@ -367,8 +368,8 @@ def can_stand_alone(self) -> bool: return self.publishable_entity.can_stand_alone @property - def key(self) -> str: - return self.publishable_entity.key + def entity_ref(self) -> str: + return self.publishable_entity.entity_ref @property def created(self) -> datetime: diff --git a/src/openedx_content/management/commands/add_assets_to_component.py b/src/openedx_content/management/commands/add_assets_to_component.py index ccd5b6f07..03bbaae50 100644 --- a/src/openedx_content/management/commands/add_assets_to_component.py +++ b/src/openedx_content/management/commands/add_assets_to_component.py @@ -9,7 +9,7 @@ from django.core.management.base import BaseCommand -from ...api import create_next_component_version, get_component_by_code, get_learning_package_by_key +from ...api import create_next_component_version, get_component_by_code, get_learning_package_by_ref class Command(BaseCommand): @@ -26,14 +26,14 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( - "learning_package_key", + "learning_package_ref", type=str, - help="LearningPackage.key value for where the Component is located." + help="LearningPackage.package_ref value for where the Component is located." ) parser.add_argument( - "component_key", + "entity_ref", type=str, - help="Component.key that you want to add assets to." + help="Entity ref (e.g. 
'xblock.v1:problem:my_component') of the Component to add assets to." ) parser.add_argument( "file_mappings", @@ -53,13 +53,13 @@ def handle(self, *args, **options): """ Add files to a Component as ComponentVersion -> Content associations. """ - learning_package_key = options["learning_package_key"] - component_key = options["component_key"] + learning_package_ref = options["learning_package_ref"] + entity_ref = options["entity_ref"] file_mappings = options["file_mappings"] - learning_package = get_learning_package_by_key(learning_package_key) + learning_package = get_learning_package_by_ref(learning_package_ref) # Parse something like: "xblock.v1:problem:area_of_circle_1" - namespace, type_name, component_code = component_key.split(":", 2) + namespace, type_name, component_code = entity_ref.split(":", 2) component = get_component_by_code( learning_package.id, namespace, type_name, component_code ) @@ -80,7 +80,7 @@ def handle(self, *args, **options): self.stdout.write( f"Created v{next_version.version_num} of " - f"{next_version.component.key} ({next_version.uuid}):" + f"{next_version.component.entity_ref} ({next_version.uuid}):" ) for cvm in next_version.componentversionmedia_set.all(): self.stdout.write(f"- {cvm.key} ({cvm.uuid})") diff --git a/src/openedx_content/management/commands/lp_dump.py b/src/openedx_content/management/commands/lp_dump.py index c23038b2d..af2c3dea7 100644 --- a/src/openedx_content/management/commands/lp_dump.py +++ b/src/openedx_content/management/commands/lp_dump.py @@ -24,7 +24,7 @@ class Command(BaseCommand): help = 'Export a learning package to a zip file.' 
def add_arguments(self, parser): - parser.add_argument('lp_key', type=str, help='The key of the LearningPackage to dump') + parser.add_argument('package_ref', type=str, help='The package_ref of the LearningPackage to dump') parser.add_argument('file_name', type=str, help='The name of the output zip file') parser.add_argument( '--username', @@ -40,7 +40,7 @@ def add_arguments(self, parser): ) def handle(self, *args, **options): - lp_key = options['lp_key'] + package_ref = options['package_ref'] file_name = options['file_name'] username = options['username'] origin_server = options['origin_server'] @@ -52,18 +52,18 @@ def handle(self, *args, **options): if username: user = User.objects.get(username=username) start_time = time.time() - create_zip_file(lp_key, file_name, user=user, origin_server=origin_server) + create_zip_file(package_ref, file_name, user=user, origin_server=origin_server) elapsed = time.time() - start_time - message = f'{lp_key} written to {file_name} (create_zip_file: {elapsed:.2f} seconds)' + message = f'{package_ref} written to {file_name} (create_zip_file: {elapsed:.2f} seconds)' self.stdout.write(self.style.SUCCESS(message)) except LearningPackage.DoesNotExist as exc: - message = f"Learning package with key {lp_key} not found" + message = f"Learning package {package_ref!r} not found" raise CommandError(message) from exc except Exception as e: - message = f"Failed to export learning package '{lp_key}': {e}" + message = f"Failed to export learning package {package_ref!r}: {e}" logger.exception( - "Failed to create zip file %s (learning‑package key %s)", + "Failed to create zip file %s (package_ref %s)", file_name, - lp_key, + package_ref, ) raise CommandError(message) from e diff --git a/src/openedx_content/migrations/0011_rename_entity_key_and_package_key_to_refs.py b/src/openedx_content/migrations/0011_rename_entity_key_and_package_key_to_refs.py new file mode 100644 index 000000000..90db00a16 --- /dev/null +++ 
b/src/openedx_content/migrations/0011_rename_entity_key_and_package_key_to_refs.py @@ -0,0 +1,84 @@ +""" +Rename PublishableEntity.key -> entity_ref and LearningPackage.key -> package_ref. + +Both fields previously had db_column='_key'; the AlterField steps drop that +override, which causes Django's schema editor to rename the DB column too. +""" +from django.db import migrations, models + +import openedx_django_lib.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('openedx_content', '0010_add_container_code'), + ] + + operations = [ + # ---- PublishableEntity.key -> entity_ref ---- + migrations.RemoveConstraint( + model_name='publishableentity', + name='oel_pub_ent_uniq_lp_key', + ), + migrations.RemoveIndex( + model_name='publishableentity', + name='oel_pub_ent_idx_key', + ), + migrations.RenameField( + model_name='publishableentity', + old_name='key', + new_name='entity_ref', + ), + # RenameField only changes the Django field name; the DB column is still + # '_key' (set via db_column). AlterField drops db_column, so Django sees + # old column='_key' vs new column='entity_ref' and renames it. 
+ migrations.AlterField( + model_name='publishableentity', + name='entity_ref', + field=openedx_django_lib.fields.MultiCollationCharField( + db_collations={'mysql': 'utf8mb4_bin', 'sqlite': 'BINARY'}, + max_length=500, + ), + ), + migrations.AddConstraint( + model_name='publishableentity', + constraint=models.UniqueConstraint( + fields=['learning_package', 'entity_ref'], + name='oel_pub_ent_uniq_lp_key', + ), + ), + migrations.AddIndex( + model_name='publishableentity', + index=models.Index( + fields=['entity_ref'], + name='oel_pub_ent_idx_key', + ), + ), + + # ---- LearningPackage.key -> package_ref ---- + migrations.RemoveConstraint( + model_name='learningpackage', + name='oel_publishing_lp_uniq_key', + ), + migrations.RenameField( + model_name='learningpackage', + old_name='key', + new_name='package_ref', + ), + migrations.AlterField( + model_name='learningpackage', + name='package_ref', + field=openedx_django_lib.fields.MultiCollationCharField( + db_collations={'mysql': 'utf8mb4_bin', 'sqlite': 'BINARY'}, + max_length=500, + ), + ), + migrations.AddConstraint( + model_name='learningpackage', + constraint=models.UniqueConstraint( + fields=['package_ref'], + name='oel_publishing_lp_uniq_key', + ), + ), + ] diff --git a/src/openedx_django_lib/fields.py b/src/openedx_django_lib/fields.py index 3374ac12b..20f7a0125 100644 --- a/src/openedx_django_lib/fields.py +++ b/src/openedx_django_lib/fields.py @@ -172,16 +172,13 @@ class Meta: ) -def key_field(**kwargs) -> MultiCollationCharField: +def ref_field(**kwargs) -> MultiCollationCharField: """ - Externally created Identifier fields. + Opaque reference string fields. - These will often be local to a particular scope, like within a - LearningPackage. It's up to the application as to whether they're - semantically meaningful or look more machine-generated. - - Other apps should *not* make references to these values directly, since - these values may in theory change (even if this is rare in practice). 
+ These hold externally-created identifiers that are local to a particular + scope, like within a LearningPackage. Consumers must treat the value as + an atomic string and must never parse or reconstruct it. """ return case_sensitive_char_field(max_length=500, blank=False, **kwargs) diff --git a/tests/openedx_content/applets/backup_restore/test_backup.py b/tests/openedx_content/applets/backup_restore/test_backup.py index 8f60ff20a..74c4b320e 100644 --- a/tests/openedx_content/applets/backup_restore/test_backup.py +++ b/tests/openedx_content/applets/backup_restore/test_backup.py @@ -51,7 +51,7 @@ def setUpTestData(cls): # Create a Learning Package for the test cls.learning_package = api.create_learning_package( - key="ComponentTestCase-test-key", + package_ref="ComponentTestCase-test-key", title="Components Test Case Learning Package", description="This is a test learning package for components.", ) @@ -216,8 +216,8 @@ def check_zip_file_structure(self, zip_path: Path): self.assertIn(expected_path, zip_name_list) def test_lp_dump_command(self): - lp_key = self.learning_package.key - file_name = f"{lp_key}.zip" + package_ref = self.learning_package.package_ref + file_name = f"{package_ref}.zip" try: out = StringIO() @@ -225,7 +225,7 @@ def test_lp_dump_command(self): # Call the management command to dump the learning package call_command( - "lp_dump", lp_key, file_name, username=self.user.username, origin_server=origin_server, stdout=out + "lp_dump", package_ref, file_name, username=self.user.username, origin_server=origin_server, stdout=out ) # Check that the zip file was created @@ -242,7 +242,7 @@ def test_lp_dump_command(self): Path("package.toml"), [ '[learning_package]', - f'key = "{self.learning_package.key}"', + f'key = "{self.learning_package.package_ref}"', f'title = "{self.learning_package.title}"', f'description = "{self.learning_package.description}"', '[meta]', @@ -278,7 +278,7 @@ def test_lp_dump_command(self): self.check_toml_file(zip_path, 
Path(file_path), expected_content) # Check the output message - message = f'{lp_key} written to {file_name}' + message = f'{package_ref} written to {file_name}' self.assertIn(message, out.getvalue()) except Exception as e: # pylint: disable=broad-exception-caught self.fail(f"lp_dump command failed with error: {e}") @@ -289,11 +289,11 @@ def test_lp_dump_command(self): def test_dump_nonexistent_learning_package(self): out = StringIO() - lp_key = "nonexistent_lp" - file_name = f"{lp_key}.zip" + package_ref = "nonexistent_lp" + file_name = f"{package_ref}.zip" with self.assertRaises(CommandError): # Attempt to dump a learning package that does not exist - call_command("lp_dump", lp_key, file_name, stdout=out) + call_command("lp_dump", package_ref, file_name, stdout=out) self.assertIn("Learning package 'nonexistent_lp' does not exist", out.getvalue()) def test_queries_n_plus_problem(self): diff --git a/tests/openedx_content/applets/backup_restore/test_restore.py b/tests/openedx_content/applets/backup_restore/test_restore.py index 46d2f94ea..ef8156a92 100644 --- a/tests/openedx_content/applets/backup_restore/test_restore.py +++ b/tests/openedx_content/applets/backup_restore/test_restore.py @@ -8,7 +8,7 @@ from django.core.management import call_command from django.test import TestCase -from openedx_content.applets.backup_restore.zipper import LearningPackageUnzipper, generate_staged_lp_key +from openedx_content.applets.backup_restore.zipper import LearningPackageUnzipper, generate_staged_package_ref from openedx_content.applets.collections import api as collections_api from openedx_content.applets.components import api as components_api from openedx_content.applets.containers import api as containers_api @@ -25,7 +25,7 @@ def setUp(self): super().setUp() self.fixtures_folder = os.path.join(os.path.dirname(__file__), "fixtures/library_backup") self.zip_file = folder_to_inmemory_zip(self.fixtures_folder) - self.lp_key = "lib:WGU:LIB_C001" + self.package_ref = 
"lib:WGU:LIB_C001" self.user = User.objects.create_user(username='lp_user', password='12345') @@ -42,14 +42,14 @@ def test_restore_command(self, mock_load_learning_package): # You can pass any dummy path, since load_learning_package is mocked call_command("lp_load", "dummy.zip", "lp_user", stdout=out) - lp = self.verify_lp(restore_result["lp_restored_data"]["key"]) + lp = self.verify_lp(restore_result["lp_restored_data"]["package_ref"]) self.verify_containers(lp) self.verify_components(lp) self.verify_collections(lp) - def verify_lp(self, key): + def verify_lp(self, package_ref): """Verify the learning package was restored correctly.""" - lp = publishing_api.LearningPackage.objects.filter(key=key).first() + lp = publishing_api.LearningPackage.objects.filter(package_ref=package_ref).first() assert lp is not None, "Learning package was not restored." assert lp.title == "Library test" assert lp.description == "" @@ -61,7 +61,7 @@ def verify_containers(self, lp): expected_container_keys = ["unit1-b7eafb", "subsection1-48afa3", "section1-8ca126"] for container in container_qs: - assert container.key in expected_container_keys + assert container.entity_ref in expected_container_keys draft_version = publishing_api.get_draft_version(container.publishable_entity.id) published_version = publishing_api.get_published_version(container.publishable_entity.id) assert container.created_by is not None @@ -69,7 +69,7 @@ def verify_containers(self, lp): # The unit has been published. The other two containers haven't been. # It's important that we test with at least one published container in order to # fully cover _create_container in zipper.py. 
- if container.key == "unit1-b7eafb": + if container.entity_ref == "unit1-b7eafb": assert containers_api.get_container_type_code_of(container) == "unit" assert draft_version is not None assert draft_version.version_num == 2 @@ -79,14 +79,14 @@ def verify_containers(self, lp): assert published_version.version_num == 2 assert published_version.created_by is not None assert published_version.created_by.username == "lp_user" - elif container.key == "subsection1-48afa3": + elif container.entity_ref == "subsection1-48afa3": assert containers_api.get_container_type_code_of(container) == "subsection" assert draft_version is not None assert draft_version.version_num == 2 assert draft_version.created_by is not None assert draft_version.created_by.username == "lp_user" assert published_version is None - elif container.key == "section1-8ca126": + elif container.entity_ref == "section1-8ca126": assert containers_api.get_container_type_code_of(container) == "section" assert draft_version is not None assert draft_version.version_num == 2 @@ -94,7 +94,7 @@ def verify_containers(self, lp): assert draft_version.created_by.username == "lp_user" assert published_version is None else: - assert False, f"Unexpected container key: {container.key}" + assert False, f"Unexpected container key: {container.entity_ref}" def verify_components(self, lp): # pylint: disable=too-many-statements @@ -110,12 +110,12 @@ def verify_components(self, lp): "xblock.v1:html:c22b9f97-f1e9-4e8f-87f0-d5a3c26083e2" ] for component in component_qs: - assert component.key in expected_component_keys + assert component.entity_ref in expected_component_keys draft_version = publishing_api.get_draft_version(component.publishable_entity.id) published_version = publishing_api.get_published_version(component.publishable_entity.id) assert component.created_by is not None assert component.created_by.username == "lp_user" - if component.key == "xblock.v1:drag-and-drop-v2:4d1b2fac-8b30-42fb-872d-6b10ab580b27": + if 
component.entity_ref == "xblock.v1:drag-and-drop-v2:4d1b2fac-8b30-42fb-872d-6b10ab580b27": assert component.component_type.name == "drag-and-drop-v2" assert component.component_type.namespace == "xblock.v1" assert draft_version is not None @@ -130,7 +130,7 @@ def verify_components(self, lp): assert " None: cls.learning_package = api.create_learning_package( - key="ComponentTestCase-test-key", + package_ref="ComponentTestCase-test-key", title="Components Test Case Learning Package", ) cls.learning_package_2 = api.create_learning_package( - key="ComponentTestCase-test-key-2", + package_ref="ComponentTestCase-test-key-2", title="Components Test Case another Learning Package", ) cls.now = datetime(2024, 8, 5, tzinfo=timezone.utc) @@ -434,7 +434,7 @@ def test_get_entity_collections(self): """ collections = api.get_entity_collections( self.learning_package.id, - self.published_component.publishable_entity.key, + self.published_component.publishable_entity.entity_ref, ) assert list(collections) == [ self.collection1, @@ -624,11 +624,11 @@ def test_delete(self): # ...and the entities have been removed from this collection assert list(api.get_entity_collections( self.learning_package.id, - self.published_component.publishable_entity.key, + self.published_component.publishable_entity.entity_ref, )) == [self.collection1] assert not list(api.get_entity_collections( self.learning_package.id, - self.draft_component.publishable_entity.key, + self.draft_component.publishable_entity.entity_ref, )) def test_restore(self): @@ -736,7 +736,7 @@ def test_set_collection_wrong_learning_package(self): We cannot set collections with a different learning package than the component. 
""" learning_package_3 = api.create_learning_package( - key="ComponentTestCase-test-key-3", + package_ref="ComponentTestCase-test-key-3", title="Components Test Case Learning Package-3", ) collection = api.create_collection( diff --git a/tests/openedx_content/applets/components/test_api.py b/tests/openedx_content/applets/components/test_api.py index c46746061..b55a97730 100644 --- a/tests/openedx_content/applets/components/test_api.py +++ b/tests/openedx_content/applets/components/test_api.py @@ -35,7 +35,7 @@ class ComponentTestCase(TestCase): @classmethod def setUpTestData(cls) -> None: cls.learning_package = publishing_api.create_learning_package( - key="ComponentTestCase-test-key", + package_ref="ComponentTestCase-test-key", title="Components Test Case Learning Package", ) cls.now = datetime(2023, 5, 8, tzinfo=timezone.utc) @@ -342,9 +342,9 @@ def test_simple_get(self): with self.assertRaises(ObjectDoesNotExist): components_api.get_component(-1) - def test_publishing_entity_key_convention(self): - """Our mapping convention is {namespace}:{component_type}:{component_code}""" - assert self.problem.key == "xblock.v1:problem:my_component" + def test_publishing_entity_ref_convention(self): + """entity_ref convention: {namespace}:{component_type}:{component_code}""" + assert self.problem.entity_ref == "xblock.v1:problem:my_component" def test_stand_alone_flag(self): """Check if can_stand_alone flag is set""" @@ -696,31 +696,19 @@ class TestComponentTypeUtils(TestCase): Test the component type utility functions. 
""" - def test_get_or_create_component_type_by_entity_key_creates_new(self): - comp_type, component_code = components_api.get_or_create_component_type_by_entity_key( - "video:youtube:abcd1234" - ) + def test_get_or_create_component_type_creates_new(self): + comp_type = components_api.get_or_create_component_type("video", "youtube") assert isinstance(comp_type, ComponentType) assert comp_type.namespace == "video" assert comp_type.name == "youtube" - assert component_code == "abcd1234" assert ComponentType.objects.count() == 1 - def test_get_or_create_component_type_by_entity_key_existing(self): + def test_get_or_create_component_type_existing(self): ComponentType.objects.create(namespace="video", name="youtube") - comp_type, component_code = components_api.get_or_create_component_type_by_entity_key( - "video:youtube:efgh5678" - ) + comp_type = components_api.get_or_create_component_type("video", "youtube") assert comp_type.namespace == "video" assert comp_type.name == "youtube" - assert component_code == "efgh5678" assert ComponentType.objects.count() == 1 - - def test_get_or_create_component_type_by_entity_key_invalid_format(self): - with self.assertRaises(ValueError) as ctx: - components_api.get_or_create_component_type_by_entity_key("not-enough-parts") - - self.assertIn("Invalid entity_key format", str(ctx.exception)) diff --git a/tests/openedx_content/applets/components/test_assets.py b/tests/openedx_content/applets/components/test_assets.py index f146b3298..2c10984cc 100644 --- a/tests/openedx_content/applets/components/test_assets.py +++ b/tests/openedx_content/applets/components/test_assets.py @@ -53,7 +53,7 @@ def setUpTestData(cls) -> None: cls.html_media_type = media_api.get_or_create_media_type("text/html") cls.learning_package = publishing_api.create_learning_package( - key="ComponentTestCase-test-key", + package_ref="ComponentTestCase-test-key", title="Components Test Case Learning Package", ) cls.component, cls.component_version = 
components_api.create_component_and_version( @@ -125,11 +125,11 @@ def _assert_has_component_version_headers(self, headers): Note: The request header values in an HttpResponse will all have been serialized to strings. """ - assert headers["X-Open-edX-Component-Key"] == self.component.key + assert headers["X-Open-edX-Component-Ref"] == self.component.entity_ref assert headers["X-Open-edX-Component-Uuid"] == str(self.component.uuid) assert headers["X-Open-edX-Component-Version-Uuid"] == str(self.component_version.uuid) assert headers["X-Open-edX-Component-Version-Num"] == str(self.component_version.version_num) - assert headers["X-Open-edX-Learning-Package-Key"] == self.learning_package.key + assert headers["X-Open-edX-Learning-Package-Ref"] == self.learning_package.package_ref assert headers["X-Open-edX-Learning-Package-Uuid"] == str(self.learning_package.uuid) def test_404s_with_component_version_info(self): diff --git a/tests/openedx_content/applets/containers/test_api.py b/tests/openedx_content/applets/containers/test_api.py index a902578f7..01fa81664 100644 --- a/tests/openedx_content/applets/containers/test_api.py +++ b/tests/openedx_content/applets/containers/test_api.py @@ -92,18 +92,18 @@ def _other_user(django_user_model): @pytest.fixture(name="lp") def _lp() -> LearningPackage: """Get a Learning Package.""" - return publishing_api.create_learning_package(key="containers-test-lp", title="Testing Containers Main LP") + return publishing_api.create_learning_package(package_ref="containers-test-lp", title="Testing Containers Main LP") @pytest.fixture(name="lp2") def _lp2() -> LearningPackage: """Get a Second Learning Package.""" - return publishing_api.create_learning_package(key="containers-test-lp2", title="Testing Containers (📦 2)") + return publishing_api.create_learning_package(package_ref="containers-test-lp2", title="Testing Containers (📦 2)") -def create_test_entity(learning_package: LearningPackage, key: str, title: str) -> TestEntity: +def 
create_test_entity(learning_package: LearningPackage, ref: str, title: str) -> TestEntity: """Create a TestEntity with a draft version""" - pe = publishing_api.create_publishable_entity(learning_package.id, key, created=now, created_by=None) + pe = publishing_api.create_publishable_entity(learning_package.id, ref, created=now, created_by=None) new_entity = TestEntity.objects.create(publishable_entity=pe) pev = publishing_api.create_publishable_entity_version( new_entity.pk, @@ -119,25 +119,25 @@ def create_test_entity(learning_package: LearningPackage, key: str, title: str) @pytest.fixture(name="child_entity1") def _child_entity1(lp: LearningPackage) -> TestEntity: """An example entity, such as a component""" - return create_test_entity(lp, key="child_entity1", title="Child 1 🌴") + return create_test_entity(lp, ref="child_entity1", title="Child 1 🌴") @pytest.fixture(name="child_entity2") def _child_entity2(lp: LearningPackage) -> TestEntity: """An example entity, such as a component""" - return create_test_entity(lp, key="child_entity2", title="Child 2 🌈") + return create_test_entity(lp, ref="child_entity2", title="Child 2 🌈") @pytest.fixture(name="child_entity3") def _child_entity3(lp: LearningPackage) -> TestEntity: """An example entity, such as a component""" - return create_test_entity(lp, key="child_entity3", title="Child 3 ⛵️") + return create_test_entity(lp, ref="child_entity3", title="Child 3 ⛵️") @pytest.fixture(name="other_lp_child") def _other_lp_child(lp2: LearningPackage) -> TestEntity: """An example entity, such as a component""" - return create_test_entity(lp2, key="other_lp_child", title="Child in other Learning Package 📦") + return create_test_entity(lp2, ref="other_lp_child", title="Child in other Learning Package 📦") def create_test_container( @@ -336,7 +336,7 @@ def test_create_generic_empty_container(lp: LearningPackage, admin_user) -> None assert isinstance(container_v1, TestContainerVersion) assert container.versioning.draft == container_v1 
assert container.versioning.published is None - assert container.key == "new-container-1" + assert container.entity_ref == "new-container-1" assert container.versioning.draft.title == "Test Container 1" assert container.created == now assert container.created_by == admin_user @@ -420,9 +420,9 @@ def test_create_next_container_version_no_changes(parent_of_two: TestContainer, assert version_2.entity_list_id == original_version.entity_list_id assert version_2.created == v2_date assert version_2.created_by == other_user - assert containers_api.get_container_children_entities_keys( + assert containers_api.get_container_children_entity_refs( original_version - ) == containers_api.get_container_children_entities_keys(version_2) + ) == containers_api.get_container_children_entity_refs(version_2) def test_create_next_container_version_with_changes( @@ -454,7 +454,7 @@ def test_create_next_container_version_with_changes( assert version_5.created_by is None assert version_5.title == "New Title - children reversed" assert version_5.entity_list_id != original_version.entity_list_id - assert containers_api.get_container_children_entities_keys(version_5) == ["child_entity2", "child_entity1"] + assert containers_api.get_container_children_entity_refs(version_5) == ["child_entity2", "child_entity1"] def test_create_next_container_version_with_append( @@ -757,29 +757,29 @@ def test_get_container_version_nonexistent() -> None: containers_api.get_container_version(-500) -# get_container_by_key +# get_container_by_code -def test_get_container_by_key(lp: LearningPackage, parent_of_two: TestContainer) -> None: +def test_get_container_by_code(lp: LearningPackage, parent_of_two: TestContainer) -> None: """ - Test getting a specific container by key + Test getting a specific container by container code. 
""" - result = containers_api.get_container_by_key(lp.id, parent_of_two.key) + result = containers_api.get_container_by_code(lp.id, parent_of_two.container_code) assert result == parent_of_two.container # The API always returns "Container", not specific subclasses like TestContainer: assert result.__class__ is Container -def test_get_container_by_key_nonexistent(lp: LearningPackage) -> None: +def test_get_container_by_code_nonexistent(lp: LearningPackage) -> None: """ - Test getting a specific container by key, where the key and/or learning package is invalid + Test getting a specific container by container code, where the code and/or learning package is invalid. """ FAKE_ID = cast(LearningPackage.ID, -500) with pytest.raises(LearningPackage.DoesNotExist): - containers_api.get_container_by_key(FAKE_ID, "invalid-key") + containers_api.get_container_by_code(FAKE_ID, "invalid-code") with pytest.raises(Container.DoesNotExist): - containers_api.get_container_by_key(lp.id, "invalid-key") + containers_api.get_container_by_code(lp.id, "invalid-code") # get_container_subclass @@ -990,7 +990,8 @@ def test_no_publish_parent(parent_of_two: TestContainer, child_entity1: TestEnti Test that publishing an entity does NOT publish changes to its parent containers """ # "child_entity1" is a child of "parent_of_two" - assert child_entity1.key in containers_api.get_container_children_entities_keys(parent_of_two.versioning.draft) + children_refs = containers_api.get_container_children_entity_refs(parent_of_two.versioning.draft) + assert child_entity1.entity_ref in children_refs # Neither are published: assert child_entity1.versioning.published is None assert parent_of_two.versioning.published is None @@ -1148,7 +1149,7 @@ def test_publishing_shared_component(lp: LearningPackage): Everything is "unpinned". 
""" # 1️⃣ Create the units and publish them: - c1, c2, c3, c4, c5 = [create_test_entity(lp, key=f"C{i}", title=f"Component {i}") for i in range(1, 6)] + c1, c2, c3, c4, c5 = [create_test_entity(lp, ref=f"C{i}", title=f"Component {i}") for i in range(1, 6)] c1_v1 = c1.versioning.draft c3_v1 = c3.versioning.draft c4_v1 = c4.versioning.draft @@ -1231,7 +1232,7 @@ def test_shallow_publish_log( ) -> None: """Simple test of publishing a container plus children and reviewing the publish log""" publish_log = publish_entity(parent_of_two) - assert list(publish_log.records.order_by("entity__pk").values_list("entity__key", flat=True)) == [ + assert list(publish_log.records.order_by("entity__pk").values_list("entity__entity_ref", flat=True)) == [ # The container and its two children should be the only things published: "child_entity1", "child_entity2", @@ -1250,7 +1251,7 @@ def test_uninstalled_publish( with django_assert_num_queries(50): publish_log = publish_entity(container_of_uninstalled_type) # Nothing else should have been affected by the publish: - assert list(publish_log.records.order_by("entity__pk").values_list("entity__key", flat=True)) == [ + assert list(publish_log.records.order_by("entity__pk").values_list("entity__entity_ref", flat=True)) == [ "child_entity1", "abandoned-container", ] @@ -1288,7 +1289,7 @@ def test_deep_publish_log( with django_assert_num_queries(50): publish_log = publish_entity(container_of_uninstalled_type) # Nothing else should have been affected by the publish: - assert list(publish_log.records.order_by("entity__pk").values_list("entity__key", flat=True)) == [ + assert list(publish_log.records.order_by("entity__pk").values_list("entity__entity_ref", flat=True)) == [ "child_entity1", "abandoned-container", ] @@ -1296,7 +1297,7 @@ def test_deep_publish_log( # Publish great_grandparent. Should publish the whole tree. 
with django_assert_num_queries(127): publish_log = publish_entity(great_grandparent) - assert list(publish_log.records.order_by("entity__pk").values_list("entity__key", flat=True)) == [ + assert list(publish_log.records.order_by("entity__pk").values_list("entity__entity_ref", flat=True)) == [ "child_entity2", "parent_of_two", "parent_of_three", @@ -1615,21 +1616,21 @@ def test_get_container_children_count_queries( assert containers_api.get_container_children_count(parent_of_six, published=True) == 6 -# get_container_children_entities_keys +# get_container_children_entity_refs -def test_get_container_children_entities_keys(grandparent: ContainerContainer, parent_of_six: TestContainer) -> None: - """Test `get_container_children_entities_keys()`""" +def test_get_container_children_entity_refs(grandparent: ContainerContainer, parent_of_six: TestContainer) -> None: + """Test `get_container_children_entity_refs()`""" - # TODO: is get_container_children_entities_keys() a useful API method? It's not used in edx-platform. + # TODO: is get_container_children_entity_refs() a useful API method? It's not used in edx-platform. - assert containers_api.get_container_children_entities_keys(grandparent.versioning.draft) == [ + assert containers_api.get_container_children_entity_refs(grandparent.versioning.draft) == [ # These are the two children of "grandparent" - see diagram near the top of this file. 
"parent_of_two", "parent_of_three", ] - assert containers_api.get_container_children_entities_keys(parent_of_six.versioning.draft) == [ + assert containers_api.get_container_children_entity_refs(parent_of_six.versioning.draft) == [ "child_entity3", "child_entity2", "child_entity1", diff --git a/tests/openedx_content/applets/media/test_file_storage.py b/tests/openedx_content/applets/media/test_file_storage.py index 7cf6ea664..a4aec1ba5 100644 --- a/tests/openedx_content/applets/media/test_file_storage.py +++ b/tests/openedx_content/applets/media/test_file_storage.py @@ -29,7 +29,7 @@ def setUp(self) -> None: """ super().setUp() learning_package = publishing_api.create_learning_package( - key="ContentFileStorageTestCase-test-key", + package_ref="ContentFileStorageTestCase-test-key", title="Content File Storage Test Case Learning Package", ) self.html_media_type = media_api.get_or_create_media_type("text/html") diff --git a/tests/openedx_content/applets/publishing/test_api.py b/tests/openedx_content/applets/publishing/test_api.py index b39a74ea3..b76e30c0b 100644 --- a/tests/openedx_content/applets/publishing/test_api.py +++ b/tests/openedx_content/applets/publishing/test_api.py @@ -39,13 +39,13 @@ def test_normal(self) -> None: # Note: we must specify '-> None' to opt in to t created = datetime(2023, 4, 2, 15, 9, 0, tzinfo=timezone.utc) description = "A fun Description!" package = publishing_api.create_learning_package( - key=key, + package_ref=key, title=title, description=description, created=created ) - assert package.key == "my_key" + assert package.package_ref == "my_key" assert package.title == "My Excellent Title with Emoji 🔥" assert package.description == "A fun Description!" assert package.created == created @@ -60,11 +60,11 @@ def test_normal(self) -> None: # Note: we must specify '-> None' to opt in to t # Now test editing the fields. 
updated_package = publishing_api.update_learning_package( package.id, - key="new_key", + package_ref="new_key", title="new title", description="new description", ) - assert updated_package.key == "new_key" + assert updated_package.package_ref == "new_key" assert updated_package.title == "new title" assert updated_package.description == "new description" assert updated_package.created == created @@ -78,7 +78,7 @@ def test_auto_datetime(self) -> None: title = "My Excellent Title with Emoji 🔥" package = publishing_api.create_learning_package(key, title) - assert package.key == "my_key" + assert package.package_ref == "my_key" assert package.title == "My Excellent Title with Emoji 🔥" # Auto-generated datetime checking... @@ -98,7 +98,7 @@ def test_non_utc_time(self) -> None: """ with pytest.raises(ValidationError) as excinfo: publishing_api.create_learning_package( - key="my_key", + package_ref="my_key", title="A Title", created=datetime(2023, 4, 2) ) @@ -116,7 +116,7 @@ def test_already_exists(self) -> None: with pytest.raises(ValidationError) as excinfo: publishing_api.create_learning_package("my_key", "Duplicate") message_dict = excinfo.value.message_dict - assert "key" in message_dict + assert "package_ref" in message_dict class DraftTestCase(TestCase): diff --git a/tests/openedx_tagging/test_system_defined_models.py b/tests/openedx_tagging/test_system_defined_models.py index dc58e7b8b..5fd8565c9 100644 --- a/tests/openedx_tagging/test_system_defined_models.py +++ b/tests/openedx_tagging/test_system_defined_models.py @@ -42,7 +42,7 @@ def tag_class_model(self): @property def tag_class_value_field(self) -> str: - return "key" + return "package_ref" @property def tag_class_key_field(self) -> str: @@ -87,8 +87,8 @@ def _create_learning_pkg(**kwargs) -> LearningPackage: def setUpClass(cls): super().setUpClass() # Create two learning packages and a taxonomy that can tag any object using learning packages as tags: - cls.learning_pkg_1 = cls._create_learning_pkg(key="p1", 
title="Learning Package 1")
-        cls.learning_pkg_2 = cls._create_learning_pkg(key="p2", title="Learning Package 2")
+        cls.learning_pkg_1 = cls._create_learning_pkg(package_ref="p1", title="Learning Package 1")
+        cls.learning_pkg_2 = cls._create_learning_pkg(package_ref="p2", title="Learning Package 2")
         cls.lp_taxonomy = LPTaxonomyTest.objects.create(
             taxonomy_class=LPTaxonomyTest,
             name="LearningPackage Taxonomy",
@@ -108,11 +108,11 @@ def test_lp_taxonomy_validation(self):
         """
         Test that the validation methods of the Learning Package Taxonomy are working
         """
         # Create a new LearningPackage - we know no Tag instances will exist for it yet.
-        valid_lp = self._create_learning_pkg(key="valid-lp", title="New Learning Packacge")
+        valid_lp = self._create_learning_pkg(package_ref="valid-lp", title="New Learning Package")
         # The taxonomy can validate tags by value which we've defined as they 'key' of the LearningPackage:
-        assert self.lp_taxonomy.validate_value(self.learning_pkg_2.key) is True
-        assert self.lp_taxonomy.validate_value(self.learning_pkg_2.key) is True
-        assert self.lp_taxonomy.validate_value(valid_lp.key) is True
+        assert self.lp_taxonomy.validate_value(self.learning_pkg_1.package_ref) is True
+        assert self.lp_taxonomy.validate_value(self.learning_pkg_2.package_ref) is True
+        assert self.lp_taxonomy.validate_value(valid_lp.package_ref) is True
         assert self.lp_taxonomy.validate_value("foo") is False
         # The taxonomy can also validate tags by external_id, which we've defined as the UUID of the LearningPackage:
         assert self.lp_taxonomy.validate_external_id(self.learning_pkg_2.uuid) is True