diff --git a/CHANGES/1175.bugfix b/CHANGES/1175.bugfix
new file mode 100644
index 000000000..0b9116e42
--- /dev/null
+++ b/CHANGES/1175.bugfix
@@ -0,0 +1 @@
+Properly return a 400 error when trying to create/upload a duplicate Collection.
diff --git a/CHANGES/1175.feature b/CHANGES/1175.feature
new file mode 100644
index 000000000..e95bb2b6f
--- /dev/null
+++ b/CHANGES/1175.feature
@@ -0,0 +1 @@
+An existing artifact or upload object can now be used to create a Collection.
diff --git a/CHANGES/1176.removal b/CHANGES/1176.removal
new file mode 100644
index 000000000..4549bdc37
--- /dev/null
+++ b/CHANGES/1176.removal
@@ -0,0 +1,5 @@
+Renamed the CollectionVersion upload fields [namespace, name, version] to expected_[namespace, name, version].
+
+Deprecated the /ansible/collections/ upload endpoint. Use /pulp/api/v3/content/ansible/collection_versions/ instead.
+
+Deprecated the Galaxy V2 Collection upload endpoint. Use the Galaxy V3 Collection Artifact upload endpoint instead.
diff --git a/pulp_ansible/app/galaxy/mixins.py b/pulp_ansible/app/galaxy/mixins.py
index 403d0ce1d..235121347 100644
--- a/pulp_ansible/app/galaxy/mixins.py
+++ b/pulp_ansible/app/galaxy/mixins.py
@@ -19,3 +19,9 @@ def _dispatch_import_collection_task(self, temp_file_pk, repository=None, **kwar
             kwargs["repository_pk"] = repository.pk
 
         return dispatch(import_collection, exclusive_resources=locks, kwargs=kwargs)
+
+    def get_deferred_context(self, request):
+        context = {}
+        if "file" in request.data:
+            context["filename"] = request.data["file"].name
+        return context
diff --git a/pulp_ansible/app/galaxy/serializers.py b/pulp_ansible/app/galaxy/serializers.py
index 54687c1fe..f0b5c3720 100644
--- a/pulp_ansible/app/galaxy/serializers.py
+++ b/pulp_ansible/app/galaxy/serializers.py
@@ -6,6 +6,7 @@
 from rest_framework.reverse import reverse
 from rest_framework import serializers
 
+from pulpcore.plugin.models import Artifact
 from pulp_ansible.app.models import Collection, CollectionVersion, Role
 from pulp_ansible.app.galaxy.v3.serializers import CollectionMetadataSerializer
 
@@ -187,3 +188,11 @@ class GalaxyCollectionUploadSerializer(serializers.Serializer):
     file = serializers.FileField(
         help_text=_("The file containing the Artifact binary data."), required=True
     )
+
+    def validate(self, data):
+        """Ensure a duplicate artifact isn't uploaded."""
+        sha256 = data["file"].hashers["sha256"].hexdigest()
+        artifact = Artifact.objects.filter(sha256=sha256).first()
+        if artifact:
+            raise serializers.ValidationError(_("Artifact already exists"))
+        return data
diff --git a/pulp_ansible/app/galaxy/v3/views.py b/pulp_ansible/app/galaxy/v3/views.py
index 18b0d2097..fdd0516d9 100644
--- a/pulp_ansible/app/galaxy/v3/views.py
+++ b/pulp_ansible/app/galaxy/v3/views.py
@@ -26,10 +26,13 @@
 from rest_framework.exceptions import NotFound
 from rest_framework import status
 
-from pulpcore.plugin.exceptions import DigestValidationError
-from pulpcore.plugin.models import PulpTemporaryFile, Content
+from pulpcore.plugin.models import Content
 from pulpcore.plugin.serializers import AsyncOperationResponseSerializer
-from pulpcore.plugin.viewsets import BaseFilterSet, OperationPostponedResponse
+from pulpcore.plugin.viewsets import (
+    BaseFilterSet,
+    OperationPostponedResponse,
+    SingleArtifactContentUploadViewSet,
+)
 from pulpcore.plugin.tasking import add_and_remove, dispatch
 
 from pulp_ansible.app.galaxy.v3.exceptions import ExceptionHandlerMixin
@@ -50,8 +53,9 @@
     CollectionImport,
 )
 from pulp_ansible.app.serializers import (
-    CollectionOneShotSerializer,
     CollectionImportDetailSerializer,
+    CollectionOneShotSerializer,
+    CollectionVersionUploadSerializer,
 )
 
 from pulp_ansible.app.galaxy.mixins import UploadGalaxyCollectionMixin
@@ -454,13 +458,14 @@ def urlpattern(*args, **kwargs):
 
 
 class CollectionUploadViewSet(
-    ExceptionHandlerMixin, viewsets.GenericViewSet, UploadGalaxyCollectionMixin
+    ExceptionHandlerMixin, UploadGalaxyCollectionMixin, SingleArtifactContentUploadViewSet
 ):
     """
     ViewSet for Collection Uploads.
     """
 
-    serializer_class = CollectionOneShotSerializer
+    queryset = CollectionVersion.objects.all()
+    serializer_class = CollectionVersionUploadSerializer
     pulp_tag_name = "Pulp_Ansible: Artifacts Collections V3"
 
     DEFAULT_ACCESS_POLICY = _PERMISSIVE_ACCESS_POLICY
@@ -481,44 +486,28 @@ def create(self, request, distro_base_path):
         """
         Dispatch a Collection creation task.
         """
         distro = get_object_or_404(AnsibleDistribution, base_path=distro_base_path)
-        serializer = self.get_serializer(data=request.data, context={"request": request})
-        serializer.is_valid(raise_exception=True)
-
-        expected_digests = {}
-        if serializer.validated_data["sha256"]:
-            expected_digests["sha256"] = serializer.validated_data["sha256"]
-        try:
-            temp_file = PulpTemporaryFile.init_and_validate(
-                serializer.validated_data["file"],
-                expected_digests=expected_digests,
-            )
-        except DigestValidationError:
+        repo = distro.repository or (
+            distro.repository_version and distro.repository_version.repository
+        )
+        if repo is None:
             raise serializers.ValidationError(
-                _("The provided sha256 value does not match the sha256 of the uploaded file.")
+                _("Distribution must have either repository or repository_version set")
             )
-
-        temp_file.save()
-
-        kwargs = {}
-
-        if serializer.validated_data["expected_namespace"]:
-            kwargs["expected_namespace"] = serializer.validated_data["expected_namespace"]
-
-        if serializer.validated_data["expected_name"]:
-            kwargs["expected_name"] = serializer.validated_data["expected_name"]
-
-        if serializer.validated_data["expected_version"]:
-            kwargs["expected_version"] = serializer.validated_data["expected_version"]
+        # Check that invalid fields were not specified
+        serializer = CollectionOneShotSerializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+
+        request.data["repository"] = reverse("repositories-ansible/ansible-detail", args=[repo.pk])
+        async_response = super().create(request)
+        # Extract the task PK from the returned task href
+        task = async_response.data["task"].rstrip("/").rsplit("/", maxsplit=1)[-1]
 
-        async_result = self._dispatch_import_collection_task(
-            temp_file.pk, distro.repository, **kwargs
-        )
-        CollectionImport.objects.create(task_id=async_result.pk)
+        CollectionImport.objects.create(task_id=task)
         data = {
             "task": reverse(
                 settings.ANSIBLE_URL_NAMESPACE + "collection-imports-detail",
-                kwargs={"pk": async_result.pk},
+                kwargs={"pk": task},
                 request=None,
             )
         }
diff --git a/pulp_ansible/app/galaxy/views.py b/pulp_ansible/app/galaxy/views.py
index 3148ecadc..dd9bd8df9 100644
--- a/pulp_ansible/app/galaxy/views.py
+++ b/pulp_ansible/app/galaxy/views.py
@@ -2,6 +2,7 @@
 
 from django.conf import settings
 from django.shortcuts import get_object_or_404, HttpResponse
+from drf_spectacular.utils import extend_schema
 from rest_framework import generics, pagination, response, views
 
 from pulpcore.plugin.models import PulpTemporaryFile
@@ -167,6 +168,7 @@ def get_queryset(self):
         """
         return Collection.objects.filter(versions__pk__in=self._distro_content).distinct()
 
+    @extend_schema(deprecated=True)
     def post(self, request, path):
         """
         Queues a task that creates a new Collection from an uploaded artifact.
diff --git a/pulp_ansible/app/serializers.py b/pulp_ansible/app/serializers.py
index 026b7a369..fe5c1ee99 100644
--- a/pulp_ansible/app/serializers.py
+++ b/pulp_ansible/app/serializers.py
@@ -4,7 +4,7 @@
 from jsonschema import Draft7Validator
 from rest_framework import serializers
 
-from pulpcore.plugin.models import Artifact, PulpTemporaryFile, SigningService
+from pulpcore.plugin.models import Artifact, SigningService
 from pulpcore.plugin.serializers import (
     DetailRelatedField,
     ContentChecksumSerializer,
@@ -16,7 +16,6 @@
     RepositorySyncURLSerializer,
     SingleArtifactContentSerializer,
     SingleArtifactContentUploadSerializer,
-    SingleContentArtifactField,
     DistributionSerializer,
     RepositoryVersionRelatedField,
     validate_unknown_fields,
@@ -37,8 +36,12 @@
     Tag,
 )
 from pulp_ansible.app.schema import COPY_CONFIG_SCHEMA
-from pulp_ansible.app.tasks.utils import parse_collections_requirements_file
+from pulp_ansible.app.tasks.utils import (
+    parse_collections_requirements_file,
+    parse_collection_filename,
+)
 from pulp_ansible.app.tasks.signature import verify_signature_upload
+from pulp_ansible.app.tasks.upload import process_collection_artifact, finish_collection_upload
 
 
 class RoleSerializer(SingleArtifactContentSerializer):
@@ -285,6 +288,15 @@ class CollectionOneShotSerializer(serializers.Serializer):
         default=None,
     )
 
+    def validate(self, data):
+        """Ensure a duplicate artifact isn't uploaded."""
+        data = super().validate(data)
+        sha256 = data["file"].hashers["sha256"].hexdigest()
+        artifact = Artifact.objects.filter(sha256=sha256).first()
+        if artifact:
+            raise ValidationError(_("Artifact already exists"))
+        return data
+
 
 class AnsibleDistributionSerializer(DistributionSerializer):
     """
@@ -361,116 +373,221 @@ class Meta:
 
 
 class CollectionVersionUploadSerializer(SingleArtifactContentUploadSerializer):
     """
-    A serializer for CollectionVersion Content.
+    A serializer with the logic necessary to upload a CollectionVersion.
+
+    Used in ``.viewsets.CollectionVersionViewSet`` and ``.galaxy.v3.views.CollectionUploadViewSet``
+    to perform the creation and validation of a CollectionVersion and add it to a repository if
+    necessary. This serializer is meant to be compliant with
+    ``pulpcore.plugin.viewsets.SingleArtifactContentUploadViewSet`` and thus follows these steps
+    on creation:
+
+    1. ``SingleArtifactContentUploadViewSet.create()`` calls ``validate()`` with the request in
+       the context.
+    2. The task payload is created, converting the uploaded file (if present) to an artifact.
+    3. ``pulpcore.plugin.tasks.general_create`` is dispatched with this serializer, the task
+       payload, and a deferred context determined by the viewset (an empty context by default).
+    4. ``general_create`` calls ``validate()`` again, now with the deferred context.
+       ``deferred_validate()`` is then called and the upload object (if present) is converted to
+       an artifact.
+    5. ``general_create`` calls ``save()``, which calls ``create()``. ``create()`` uses the
+       validated data to create and save the CollectionVersion. If a repository is specified,
+       the CollectionVersion is then added to the repository.
     """
 
-    name = serializers.CharField(help_text=_("The name of the collection."), max_length=64)
+    sha256 = serializers.CharField(
+        help_text=_("An optional sha256 checksum of the uploaded file."),
+        required=False,
+        write_only=True,
+    )
 
-    namespace = serializers.CharField(
-        help_text=_("The namespace of the collection."), max_length=64
+    expected_name = serializers.CharField(
+        help_text=_("The name of the collection."),
+        max_length=64,
+        required=False,
+        write_only=True,
+    )
+
+    expected_namespace = serializers.CharField(
+        help_text=_("The namespace of the collection."),
+        max_length=64,
+        required=False,
+        write_only=True,
     )
 
-    version = serializers.CharField(help_text=_("The version of the collection."), max_length=128)
+    expected_version = serializers.CharField(
+        help_text=_("The version of the collection."),
+        max_length=128,
+        required=False,
+        write_only=True,
+    )
 
     def validate(self, data):
-        """Validate that we have a file or can create one."""
-        if "artifact" in data:
-            raise serializers.ValidationError(_("Only 'file' may be specified."))
-
-        if "request" not in self.context:
-            data = self.deferred_validate(data)
+        """Check and set the namespace, name, and version."""
+        fields = ("namespace", "name", "version")
+        if not all((f"expected_{x}" in data for x in fields)):
+            if not ("file" in data or "filename" in self.context):
+                raise ValidationError(
+                    _(
+                        "expected_namespace, expected_name, and expected_version must be "
+                        "specified when using artifact or upload objects"
+                    )
+                )
+            filename = self.context.get("filename") or data["file"].name
+            try:
+                collection = parse_collection_filename(filename)
+            except ValueError:
+                raise ValidationError(
+                    _("Failed to parse Collection file upload '{}'").format(filename)
+                )
+            data["expected_namespace"] = collection.namespace
+            data["expected_name"] = collection.name
+            data["expected_version"] = collection.version
+
+        if CollectionVersion.objects.filter(**{f: data[f"expected_{f}"] for f in fields}).exists():
+            raise ValidationError(
+                _("Collection {}.{}-{} already exists").format(
+                    data["expected_namespace"], data["expected_name"], data["expected_version"]
+                )
+            )
 
-        sha256 = data["file"].hashers["sha256"].hexdigest()
-        artifact = Artifact.objects.filter(sha256=sha256).first()
-        if artifact:
-            ValidationError(_("Artifact already exists"))
-        temp_file = PulpTemporaryFile.init_and_validate(data.pop("file"))
-        temp_file.save()
-        data["temp_file_pk"] = str(temp_file.pk)
+        # Super will call deferred_validate on the second call, in task context
+        return super().validate(data)
+
+    def deferred_validate(self, data):
+        """Import the CollectionVersion, extracting the metadata from its artifact."""
+        # Call super to ensure that data contains the artifact
+        data = super().deferred_validate(data)
+        artifact = data.get("artifact")
+        if (sha256 := data.pop("sha256", None)) and sha256 != artifact.sha256:
+            raise ValidationError(_("Expected sha256 did not match the uploaded artifact's sha256"))
+
+        collection_info = process_collection_artifact(
+            artifact=artifact,
+            namespace=data.pop("expected_namespace"),
+            name=data.pop("expected_name"),
+            version=data.pop("expected_version"),
+        )
+        # The importer's "repository" metadata clashes with the upload's "repository" field
+        collection_info["origin_repository"] = collection_info.pop("repository", None)
+        data.update(collection_info)
         return data
 
+    def create(self, validated_data):
+        """Final step in creating the CollectionVersion."""
+        tags = validated_data.pop("tags")
+        origin_repository = validated_data.pop("origin_repository")
+        # Create the CollectionVersion from its metadata; adds to a repository if specified
+        content = super().create(validated_data)
+
+        # Now add the tags and update the latest CollectionVersion
+        finish_collection_upload(content, tags=tags, origin_repository=origin_repository)
+
+        return content
+
     class Meta:
         fields = tuple(
-            set(SingleArtifactContentUploadSerializer.Meta.fields) - {"artifact", "relative_path"}
+            set(SingleArtifactContentUploadSerializer.Meta.fields) - {"relative_path"}
         ) + (
-            "name",
-            "namespace",
-            "version",
+            "sha256",
+            "expected_name",
+            "expected_namespace",
+            "expected_version",
        )
         model = CollectionVersion
 
 
-class CollectionVersionSerializer(SingleArtifactContentSerializer, ContentChecksumSerializer):
+class CollectionVersionSerializer(ContentChecksumSerializer, CollectionVersionUploadSerializer):
     """
     A serializer for CollectionVersion Content.
     """
 
-    artifact = SingleContentArtifactField(
-        help_text=_("Artifact file representing the physical content"),
-        required=False,
-    )
-
-    id = serializers.UUIDField(source="pk", help_text="A collection identifier.")
+    id = serializers.UUIDField(source="pk", help_text="A collection identifier.", read_only=True)
 
     authors = serializers.ListField(
         help_text=_("A list of the CollectionVersion content's authors."),
         child=serializers.CharField(max_length=64),
+        read_only=True,
     )
 
-    contents = serializers.JSONField(help_text=_("A JSON field with data about the contents."))
+    contents = serializers.JSONField(
+        help_text=_("A JSON field with data about the contents."), read_only=True
+    )
 
     dependencies = serializers.JSONField(
         help_text=_(
             "A dict declaring Collections that this collection requires to be installed for it to "
             "be usable."
-        )
+        ),
+        read_only=True,
     )
 
     description = serializers.CharField(
-        help_text=_("A short summary description of the collection."), allow_blank=True
+        help_text=_("A short summary description of the collection."),
+        allow_blank=True,
+        read_only=True,
     )
 
     docs_blob = serializers.JSONField(
-        help_text=_("A JSON field holding the various documentation blobs in the collection.")
+        help_text=_("A JSON field holding the various documentation blobs in the collection."),
+        read_only=True,
     )
 
-    manifest = serializers.JSONField(help_text=_("A JSON field holding MANIFEST.json data."))
+    manifest = serializers.JSONField(
+        help_text=_("A JSON field holding MANIFEST.json data."), read_only=True
+    )
 
-    files = serializers.JSONField(help_text=_("A JSON field holding FILES.json data."))
+    files = serializers.JSONField(
+        help_text=_("A JSON field holding FILES.json data."), read_only=True
+    )
 
     documentation = serializers.CharField(
-        help_text=_("The URL to any online docs."), allow_blank=True, max_length=2000
+        help_text=_("The URL to any online docs."),
+        allow_blank=True,
+        max_length=2000,
+        read_only=True,
     )
 
     homepage = serializers.CharField(
         help_text=_("The URL to the homepage of the collection/project."),
         allow_blank=True,
         max_length=2000,
+        read_only=True,
     )
 
     issues = serializers.CharField(
-        help_text=_("The URL to the collection issue tracker."), allow_blank=True, max_length=2000
+        help_text=_("The URL to the collection issue tracker."),
+        allow_blank=True,
+        max_length=2000,
+        read_only=True,
     )
 
     license = serializers.ListField(
         help_text=_("A list of licenses for content inside of a collection."),
         child=serializers.CharField(max_length=32),
+        read_only=True,
     )
 
-    name = serializers.CharField(help_text=_("The name of the collection."), max_length=64)
+    name = serializers.CharField(
+        help_text=_("The name of the collection."), max_length=64, read_only=True
+    )
 
     namespace = serializers.CharField(
-        help_text=_("The namespace of the collection."), max_length=64
+        help_text=_("The namespace of the collection."), max_length=64, read_only=True
     )
 
-    repository = serializers.CharField(
-        help_text=_("The URL of the originating SCM repository."), allow_blank=True, max_length=2000
+    origin_repository = serializers.CharField(
+        help_text=_("The URL of the originating SCM repository."),
+        source="repository",
+        allow_blank=True,
+        max_length=2000,
+        read_only=True,
     )
 
     tags = TagNestedSerializer(many=True, read_only=True)
 
-    version = serializers.CharField(help_text=_("The version of the collection."), max_length=128)
+    version = serializers.CharField(
+        help_text=_("The version of the collection."), max_length=128, read_only=True
+    )
 
     requires_ansible = serializers.CharField(
         help_text=_(
@@ -478,34 +595,76 @@ class CollectionVersionSerializer(SingleArtifactContentSerializer, ContentChecks
             "Multiple versions can be separated with a comma."
         ),
         allow_null=True,
-        required=False,
+        read_only=True,
         max_length=255,
     )
 
+    # Set to False by is_valid() when the serializer is used to validate an existing model
+    creating = True
+
+    def validate(self, data):
+        """Run the super() validation when creating; otherwise return the data unchanged."""
+        # This validation is for creating CollectionVersions
+        if self.creating or not self.instance:
+            return super().validate(data)
+        return data
+
+    def is_valid(self, raise_exception=False):
+        """
+        Allow this serializer to be used for validating before saving a model.
+
+        See Validating Models:
+        https://docs.pulpproject.org/pulpcore/plugins/plugin-writer/concepts/index.html
+        """
+        write_fields = set(CollectionVersionUploadSerializer.Meta.fields) - {"pulp_created"}
+        if hasattr(self, "initial_data"):
+            if any((x in self.initial_data for x in self.Meta.read_fields)):
+                # Pop the shared fields: artifact & repository
+                artifact = self.initial_data.pop("artifact", None)
+                repository = self.initial_data.pop("repository", None)
+                if any((x in self.initial_data for x in write_fields)):
+                    if raise_exception:
+                        raise ValidationError(
+                            _("Read and write fields cannot be used at the same time")
+                        )
+                    return False
+                # Only read fields are set; clear read_only on each so they are validated
+                for name, field in self.fields.items():
+                    if name in self.Meta.read_fields:
+                        field.read_only = False
+                # Put the shared fields back
+                if artifact is not None:
+                    self.initial_data["artifact"] = artifact
+                if repository is not None:
+                    self.initial_data["origin_repository"] = repository
+                self.creating = False
+
+        return super().is_valid(raise_exception=raise_exception)
+
     class Meta:
+        read_fields = (
+            "id",
+            "authors",
+            "contents",
+            "dependencies",
+            "description",
+            "docs_blob",
+            "manifest",
+            "files",
+            "documentation",
+            "homepage",
+            "issues",
+            "license",
+            "name",
+            "namespace",
+            "origin_repository",
+            "tags",
+            "version",
+            "requires_ansible",
+        )
         fields = (
-            tuple(set(SingleArtifactContentSerializer.Meta.fields) - {"relative_path"})
+            CollectionVersionUploadSerializer.Meta.fields
             + ContentChecksumSerializer.Meta.fields
-            + (
-                "id",
-                "authors",
-                "contents",
-                "dependencies",
-                "description",
-                "docs_blob",
-                "manifest",
-                "files",
-                "documentation",
-                "homepage",
-                "issues",
-                "license",
-                "name",
-                "namespace",
-                "repository",
-                "tags",
-                "version",
-                "requires_ansible",
-            )
+            + read_fields
         )
 
         model = CollectionVersion
diff --git a/pulp_ansible/app/tasks/upload.py b/pulp_ansible/app/tasks/upload.py
new file mode 100644
index 000000000..397ba1ca0
--- /dev/null
+++ b/pulp_ansible/app/tasks/upload.py
@@ -0,0 +1,88 @@
+import json
+import logging
+import tarfile
+
+from django.db import transaction
+from django.urls import reverse
+
+from galaxy_importer.collection import import_collection
+from pulpcore.plugin.models import Task
+
+from pulp_ansible.app.models import Collection, CollectionImport, Tag
+from pulp_ansible.app.tasks.utils import CollectionFilename, get_file_obj_from_tarball
+
+log = logging.getLogger(__name__)
+
+
+def process_collection_artifact(artifact, namespace, name, version):
+    """
+    Helper method to extract a Collection's metadata.
+
+    This is called from ``CollectionVersionUploadSerializer.deferred_validate()``.
+    """
+    # Avoid circular import
+    from .collections import _get_backend_storage_url
+
+    # Set up logging for the CollectionImport object
+    CollectionImport.objects.get_or_create(task_id=Task.current().pulp_id)
+    user_facing_logger = logging.getLogger("pulp_ansible.app.tasks.collection.import_collection")
+
+    artifact_url = reverse("artifacts-detail", args=[artifact.pk])
+    filename = CollectionFilename(namespace, name, version)
+    log.info(f"Processing collection {filename} from {artifact_url}")
+
+    # Extract the CollectionVersion metadata
+    with artifact.file.open() as artifact_file:
+        url = _get_backend_storage_url(artifact_file)
+        importer_result = import_collection(
+            artifact_file, filename=filename, file_url=url, logger=user_facing_logger
+        )
+        artifact_file.seek(0)
+        with tarfile.open(fileobj=artifact_file, mode="r") as tar:
+            # TODO: verify that this tarball read works with remote storage backends (e.g. S3)
+            manifest_data = json.load(
+                get_file_obj_from_tarball(tar, "MANIFEST.json", artifact.file.name)
+            )
+            files_data = json.load(get_file_obj_from_tarball(tar, "FILES.json", artifact.file.name))
+
+    # Set the CollectionVersion metadata
+    collection_info = importer_result["metadata"]
+
+    with transaction.atomic():
+        collection, created = Collection.objects.get_or_create(
+            namespace=collection_info["namespace"], name=collection_info["name"]
+        )
+    collection_info["collection"] = collection
+    collection_info["manifest"] = manifest_data
+    collection_info["files"] = files_data
+    collection_info["requires_ansible"] = importer_result.get("requires_ansible")
+    collection_info["contents"] = importer_result["contents"]
+    collection_info["docs_blob"] = importer_result["docs_blob"]
+    # Remove fields not used by this model
+    collection_info.pop("license_file")
+    collection_info.pop("readme")
+    # The importer returns many None values; let the defaults in the model prevail
+    for key in ["description", "documentation", "homepage", "issues", "repository"]:
+        if collection_info[key] is None:
+            collection_info.pop(key)
+
+    collection_info["relative_path"] = (
+        f"{collection_info['namespace']}-{collection_info['name']}-{collection_info['version']}"
+        ".tar.gz"
+    )
+    return collection_info
+
+
+def finish_collection_upload(collection_version, tags, origin_repository):
+    """After the CollectionVersion has been created, update its tags and latest_version."""
+    # Avoid circular import
+    from .collections import _update_highest_version
+
+    for name in tags:
+        tag, created = Tag.objects.get_or_create(name=name)
+        collection_version.tags.add(tag)
+
+    _update_highest_version(collection_version)
+    if origin_repository is not None:
+        collection_version.repository = origin_repository
+        collection_version.save()
diff --git a/pulp_ansible/app/tasks/utils.py b/pulp_ansible/app/tasks/utils.py
index 62eccfb57..47134f9a9 100644
--- a/pulp_ansible/app/tasks/utils.py
+++ b/pulp_ansible/app/tasks/utils.py
@@ -8,10 +8,66 @@
 from rest_framework.serializers import ValidationError
 from yaml.error import YAMLError
 
+from galaxy_importer.schema import MAX_LENGTH_NAME, MAX_LENGTH_VERSION
 from pulp_ansible.app.constants import PAGE_SIZE
 
 log = logging.getLogger(__name__)
 
+CollectionFilename = namedtuple("CollectionFilename", ["namespace", "name", "version"])
+FILENAME_REGEXP = re.compile(
+    r"^(?P<namespace>\w+)-(?P<name>\w+)-" r"(?P<version>[0-9a-zA-Z.+-]+)\.tar\.gz$"
+)
+VERSION_REGEXP = re.compile(
+    r"""
+^
+(?P<major>0|[1-9][0-9]*)
+\.
+(?P<minor>0|[1-9][0-9]*)
+\.
+(?P<patch>0|[1-9][0-9]*)
+(?:
+    -(?P<pre>[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)
+)?
+(?:
+    \+(?P<build>[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)
+)?
+$
+""",
+    re.VERBOSE | re.ASCII,
+)
+
+
+def parse_collection_filename(filename):
+    """
+    Parse and validate a collection filename. (Copied from GalaxyNG.)
+    Returns a CollectionFilename named tuple; raises ValueError if the filename
+    is not a valid collection filename.
+    """
+    match = FILENAME_REGEXP.match(filename)
+
+    if not match:
+        msg = _("Invalid filename {filename}. Expected format: namespace-name-version.tar.gz")
+        raise ValueError(msg.format(filename=filename))
+
+    namespace, name, version = match.groups()
+
+    match = VERSION_REGEXP.match(version)
+    if not match:
+        msg = _(
+            "Invalid version string {version} from filename {filename}. "
+            "Expected semantic version format."
+        )
+        raise ValueError(msg.format(version=version, filename=filename))
+
+    if len(namespace) > MAX_LENGTH_NAME:
+        raise ValueError(_("Expected namespace to be at most %s characters") % MAX_LENGTH_NAME)
+    if len(name) > MAX_LENGTH_NAME:
+        raise ValueError(_("Expected name to be at most %s characters") % MAX_LENGTH_NAME)
+    if len(version) > MAX_LENGTH_VERSION:
+        raise ValueError(_("Expected version to be at most %s characters") % MAX_LENGTH_VERSION)
+
+    return CollectionFilename(namespace, name, version)
+
 
 def get_api_version(url):
     """Get API version."""
diff --git a/pulp_ansible/app/viewsets.py b/pulp_ansible/app/viewsets.py
index 87ab58629..f0ac68c12 100644
--- a/pulp_ansible/app/viewsets.py
+++ b/pulp_ansible/app/viewsets.py
@@ -53,7 +53,6 @@
     CollectionSerializer,
     CollectionVersionSerializer,
     CollectionVersionSignatureSerializer,
-    CollectionVersionUploadSerializer,
     CollectionRemoteSerializer,
     CollectionOneShotSerializer,
     CopySerializer,
@@ -172,7 +171,7 @@ class Meta:
         fields = ["namespace", "name", "version", "q", "is_highest", "tags"]
 
 
-class CollectionVersionViewSet(SingleArtifactContentUploadViewSet, UploadGalaxyCollectionMixin):
+class CollectionVersionViewSet(UploadGalaxyCollectionMixin, SingleArtifactContentUploadViewSet):
     """
     ViewSet for Ansible Collection.
     """
@@ -183,33 +182,6 @@ class CollectionVersionViewSet(SingleArtifactContentUploadViewSet, UploadGalaxyC
     filterset_class = CollectionVersionFilter
     ordering_fields = ("pulp_created", "name", "version", "namespace")
 
-    @extend_schema(
-        description="Trigger an asynchronous task to create content,"
-        "optionally create new repository version.",
-        request=CollectionVersionUploadSerializer,
-        responses={202: AsyncOperationResponseSerializer},
-    )
-    def create(self, request):
-        """Create a content unit."""
-        serializer = CollectionVersionUploadSerializer(data=request.data)
-        serializer.is_valid(raise_exception=True)
-
-        kwargs = {}
-        if serializer.validated_data["namespace"]:
-            kwargs["expected_namespace"] = serializer.validated_data["namespace"]
-
-        if serializer.validated_data["name"]:
-            kwargs["expected_name"] = serializer.validated_data["name"]
-
-        if serializer.validated_data["version"]:
-            kwargs["expected_version"] = serializer.validated_data["version"]
-
-        temp_file_pk = serializer.validated_data["temp_file_pk"]
-        repository = serializer.validated_data.get("repository")
-        async_result = self._dispatch_import_collection_task(temp_file_pk, repository, **kwargs)
-
-        return OperationPostponedResponse(async_result, request)
-
 
 class SignatureFilter(ContentFilter):
     """
@@ -425,6 +397,7 @@ class CollectionUploadViewSet(viewsets.ViewSet, UploadGalaxyCollectionMixin):
         operation_id="upload_collection",
         request=CollectionOneShotSerializer,
         responses={202: AsyncOperationResponseSerializer},
+        deprecated=True,  # deprecated in favor of the V3 Collection Artifact upload endpoint
     )
     def create(self, request):
         """
diff --git a/pulp_ansible/tests/functional/api/collection/test_crud_collection_versions.py b/pulp_ansible/tests/functional/api/collection/test_crud_collection_versions.py
index 43ffc6609..c44303c52 100644
--- a/pulp_ansible/tests/functional/api/collection/test_crud_collection_versions.py
+++ b/pulp_ansible/tests/functional/api/collection/test_crud_collection_versions.py
@@ -101,7 +101,10 @@ def upload_collection(self, namespace="pulp", name="squeezer", version="0.0.9"):
         with NamedTemporaryFile() as temp_file:
             temp_file.write(collection_content)
             return self.cv_content_api.create(
-                file=temp_file.name, namespace=namespace, name=name, version=version
+                file=temp_file.name,
+                expected_namespace=namespace,
+                expected_name=name,
+                expected_version=version,
             )
 
     def test_01_create_content_unit(self):
@@ -175,6 +178,4 @@ def test_05_duplicate_raise_error(self):
         attrs = dict(namespace="pulp", name="squeezer", version="0.0.9")
         with self.assertRaises(ApiException) as ctx:
             self.upload_collection(**attrs)
-        self.assertIn(
-            "The fields namespace, name, version must make a unique set.", ctx.exception.body
-        )
+        self.assertIn("Collection pulp.squeezer-0.0.9 already exists", ctx.exception.body)
diff --git a/pulp_ansible/tests/functional/api/collection/v2/test_upload.py b/pulp_ansible/tests/functional/api/collection/v2/test_upload.py
index 7ab39a223..b0db9e183 100644
--- a/pulp_ansible/tests/functional/api/collection/v2/test_upload.py
+++ b/pulp_ansible/tests/functional/api/collection/v2/test_upload.py
@@ -2,10 +2,14 @@
 import hashlib
 from tempfile import NamedTemporaryFile
 
-from pulp_smash.pulp3.bindings import delete_orphans, monitor_task, PulpTestCase, PulpTaskError
+from pulp_smash.pulp3.bindings import delete_orphans, monitor_task, PulpTestCase
 from pulp_smash.utils import http_get
 
-from pulpcore.client.pulp_ansible import AnsibleCollectionsApi, ContentCollectionVersionsApi
+from pulpcore.client.pulp_ansible import (
+    AnsibleCollectionsApi,
+    ContentCollectionVersionsApi,
+    ApiException,
+)
 
 from pulp_ansible.tests.functional.utils import gen_ansible_client
 from pulp_ansible.tests.functional.utils import set_up_module as setUpModule  # noqa:F401
@@ -51,13 +55,10 @@ def test_collection_upload(self):
 
         self.assertEqual(response.sha256, self.collection_sha256, response)
 
-        with self.assertRaises(PulpTaskError) as exc:
+        with self.assertRaises(ApiException) as exc:
             self.upload_collection()
 
-        task_result = exc.exception.task.to_dict()
-        self.assertEqual(task_result["state"], "failed")
-        error = task_result["error"]
-        for key in ("artifact", "already", "exists"):
-            self.assertIn(key, task_result["error"]["description"].lower(), error)
+        assert exc.exception.status == 400
+        assert "Artifact already exists" in exc.exception.body
 
         delete_orphans()
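
The upload serializer also gained an optional write-only sha256 field, which deferred validation compares against the created artifact's digest. Computing it client-side might look like this (hypothetical path, same cv_api client as above):

    import hashlib

    def file_sha256(path, chunk_size=1024 * 1024):
        """Stream the file so large tarballs are not read into memory at once."""
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                digest.update(chunk)
        return digest.hexdigest()

    response = cv_api.create(
        file="/tmp/pulp-squeezer-0.0.9.tar.gz",
        sha256=file_sha256("/tmp/pulp-squeezer-0.0.9.tar.gz"),
    )
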
diff --git a/pulp_ansible/tests/functional/api/collection/v3/test_collection.py b/pulp_ansible/tests/functional/api/collection/v3/test_collection.py
index 1ba00b77d..a18968a55 100644
--- a/pulp_ansible/tests/functional/api/collection/v3/test_collection.py
+++ b/pulp_ansible/tests/functional/api/collection/v3/test_collection.py
@@ -20,7 +20,6 @@
     ANSIBLE_COLLECTION_UPLOAD_FIXTURE_URL,
     ANSIBLE_DISTRIBUTION_PATH,
     ANSIBLE_REPO_PATH,
-    COLLECTION_METADATA,
 )
 from pulp_ansible.tests.functional.utils import set_up_module as setUpModule  # noqa:F401
 
@@ -59,7 +58,7 @@ def get_galaxy_url(base, path):
 
 
 @pytest.fixture(scope="session")
-def artifact():
+def collection_artifact():
     """Generate a randomized collection for testing."""
     # build_collection will only store one collection, so copy to new location and delete later
     artifact = build_collection("skeleton")
@@ -69,7 +68,7 @@ def artifact():
 
 
 @pytest.fixture(scope="session")
-def artifact2():
+def collection_artifact2():
     """
     Generate a second randomized collection for testing.
 
@@ -87,39 +86,40 @@ def get_metadata_published(pulp_client, pulp_dist):
     return datetime.strptime(metadata["published"], "%Y-%m-%dT%H:%M:%S.%fZ")
 
 
+def upload_collection(client, filename, base_path):
+    """Helper to upload collections to pulp_ansible/galaxy."""
+    UPLOAD_PATH = get_galaxy_url(base_path, "/v3/artifacts/collections/")
+    collection = {"file": (open(filename, "rb"))}
+
+    return client.using_handler(upload_handler).post(UPLOAD_PATH, files=collection)
+
+
 @pytest.fixture(scope="session")
-def collection_upload(pulp_client, artifact, pulp_dist):
+def collection_upload(pulp_client, collection_artifact, pulp_dist):
     """Publish a new collection and return the processed response data."""
-    UPLOAD_PATH = get_galaxy_url(pulp_dist["base_path"], "/v3/artifacts/collections/")
     published_before_upload = get_metadata_published(pulp_client, pulp_dist)
-    logging.info(f"Uploading collection to '{UPLOAD_PATH}'...")
-    collection = {"file": (ANSIBLE_COLLECTION_FILE_NAME, open(artifact.filename, "rb"))}
-
-    response = pulp_client.using_handler(upload_handler).post(UPLOAD_PATH, files=collection)
+    response = upload_collection(pulp_client, collection_artifact.filename, pulp_dist["base_path"])
     published_after_upload = get_metadata_published(pulp_client, pulp_dist)
     assert published_after_upload > published_before_upload
     return response
 
 
 @pytest.fixture(scope="session")
-def collection_upload2(pulp_client, artifact2, pulp_dist):
+def collection_upload2(pulp_client, collection_artifact2, pulp_dist):
     """Publish the second new collection and return the processed response data."""
-    UPLOAD_PATH = get_galaxy_url(pulp_dist["base_path"], "/v3/artifacts/collections/")
     published_before_upload = get_metadata_published(pulp_client, pulp_dist)
-    logging.info(f"Uploading collection to '{UPLOAD_PATH}'...")
-    collection = {"file": (open(artifact2.filename, "rb"))}
-
-    response = pulp_client.using_handler(upload_handler).post(UPLOAD_PATH, files=collection)
+    response = upload_collection(pulp_client, collection_artifact2.filename, pulp_dist["base_path"])
     published_after_upload = get_metadata_published(pulp_client, pulp_dist)
     assert published_after_upload > published_before_upload
     return response
 
 
 @pytest.fixture(scope="session")
-def collection_detail(collection_upload, pulp_client, pulp_dist, artifact):
+def collection_detail(collection_upload, pulp_client, pulp_dist, collection_artifact):
     """Fetch and parse a collection details response from an uploaded collection."""
     url = get_galaxy_url(
-        pulp_dist["base_path"], f"/v3/collections/{artifact.namespace}/{artifact.name}/"
+        pulp_dist["base_path"],
+        f"/v3/collections/{collection_artifact.namespace}/{collection_artifact.name}/",
     )
     response = pulp_client.using_handler(api.json_handler).get(url)
     return response
@@ -196,13 +196,19 @@ def test_collection_upload(collection_upload):
     assert "finished_at" in collection_upload
     assert "messages" in collection_upload
 
-    for key, value in collection_upload.items():
-        if key in COLLECTION_METADATA.keys():
-            assert COLLECTION_METADATA[key] == value, collection_upload
+    # TODO: Add this back when namespace, name, and version are part of the CollectionImport
+    # for key, value in collection_upload.items():
+    #     if key in COLLECTION_METADATA.keys():
+    #         assert COLLECTION_METADATA[key] == value, collection_upload
 
 
 def test_collection_list(
-    artifact, artifact2, collection_upload, collection_upload2, pulp_client, pulp_dist
+    collection_artifact,
+    collection_artifact2,
+    collection_upload,
+    collection_upload2,
+    pulp_client,
+    pulp_dist,
 ):
     """Tests the collection list endpoint after uploading both collections."""
     url = get_galaxy_url(pulp_dist["base_path"], "v3/collections/")
@@ -211,20 +217,20 @@ def test_collection_list(
     assert response["meta"]["count"] >= 2
     present_collections = {c["href"].split("collections/")[1] for c in response["data"]}
     uploaded_collections = {
-        f"index/{artifact.namespace}/{artifact.name}/",
-        f"index/{artifact2.namespace}/{artifact2.name}/",
+        f"index/{collection_artifact.namespace}/{collection_artifact.name}/",
+        f"index/{collection_artifact2.namespace}/{collection_artifact2.name}/",
     }
     assert uploaded_collections.issubset(present_collections)
 
 
-def test_collection_detail(artifact, collection_detail, pulp_dist):
+def test_collection_detail(collection_artifact, collection_detail, pulp_dist):
     """Test collection detail resulting from a successful upload of one version.
 
     Includes information of the most current version.
     """
     url = (
         f"plugin/ansible/content/{pulp_dist['base_path']}"
-        f"/collections/index/{artifact.namespace}/{artifact.name}/"
+        f"/collections/index/{collection_artifact.namespace}/{collection_artifact.name}/"
     )
 
     assert not collection_detail["deprecated"]
@@ -232,12 +238,14 @@ def test_collection_detail(artifact, collection_detail, pulp_dist):
     # Check that the URL ends with the correct path so that this test doesn't fail
     # when galaxy_ng is installed
     assert collection_detail["href"].endswith(url)
-    assert collection_detail["namespace"] == artifact.namespace
-    assert collection_detail["name"] == artifact.name
+    assert collection_detail["namespace"] == collection_artifact.namespace
+    assert collection_detail["name"] == collection_artifact.name
     assert collection_detail["highest_version"]["version"] == "1.0.0"
 
 
-def test_collection_version_list(artifact, pulp_client, collection_detail, collection_upload2):
+def test_collection_version_list(
+    collection_artifact, pulp_client, collection_detail, collection_upload2
+):
     """Test the versions endpoint, listing the available versions of a given collection."""
     # Version List Endpoint
     versions = pulp_client.using_handler(api.json_handler).get(collection_detail["versions_url"])
@@ -248,7 +256,7 @@ def test_collection_version_list(artifact, pulp_client, collection_detail, colle
     assert version["href"] == collection_detail["highest_version"]["href"]
 
 
-def test_collection_version(artifact, pulp_client, collection_detail):
+def test_collection_version(collection_artifact, pulp_client, collection_detail):
     """Test collection version endpoint.
 
     Each collection version details a specific uploaded artifact for the collection.
@@ -258,15 +266,15 @@ def test_collection_version(artifact, pulp_client, collection_detail):
         collection_detail["highest_version"]["href"]
     )
 
-    assert version["name"] == artifact.name
-    assert version["namespace"] == {"name": artifact.namespace}
+    assert version["name"] == collection_artifact.name
+    assert version["namespace"] == {"name": collection_artifact.namespace}
     assert version["version"] == "1.0.0"
 
-    tarball = open(artifact.filename, "rb").read()
+    tarball = open(collection_artifact.filename, "rb").read()
     assert version["artifact"]["sha256"] == hashlib.sha256(tarball).hexdigest()
     assert version["artifact"]["size"] == len(tarball)
 
-    # assert version['artifact']['filename'] == artifact.filename
+    assert version["artifact"]["filename"] == collection_artifact.filename.strip("/tmp/")
 
     assert "updated_at" in version
     assert "created_at" in version
@@ -289,7 +297,7 @@ def test_collection_version(artifact, pulp_client, collection_detail):
 
 
 @pytest.mark.skip("Blocked by open ticket: https://pulp.plan.io/issues/5647")
-def test_collection_download(artifact, pulp_client, collection_detail):
+def test_collection_download(collection_artifact, pulp_client, collection_detail):
     """Test collection download URL.
 
     Should require authentication and redirect to a download location.
@@ -301,7 +309,7 @@ def test_collection_download(artifact, pulp_client, collection_detail):
     # Artifact Download Endoint
     url = version["download_url"]
 
-    tarball = open(artifact.filename, "rb").read()
+    tarball = open(collection_artifact.filename, "rb").read()
 
     c = pulp_client.using_handler(api.echo_handler)
     f = c.get(url)
@@ -309,27 +317,17 @@ def test_collection_download(artifact, pulp_client, collection_detail):
     assert f.content == tarball
 
 
-def test_collection_upload_repeat(pulp_client, known_collection, pulp_dist):
+def test_collection_upload_repeat(pulp_client, collection_artifact, pulp_dist, collection_upload):
     """Upload a duplicate collection.
 
     Should fail, because of the conflict of collection name and version.
     """
-    cfg = config.get_config()
-    url = urljoin(cfg.get_base_url(), f"api/{pulp_dist['base_path']}/v3/artifacts/collections/")
-
     with pytest.raises(HTTPError) as ctx:
-        response = pulp_client.post(url, files=known_collection)
-
-        assert ctx.exception.response.json()["errors"][0] == {
-            "status": "400",
-            "code": "invalid",
-            "title": "Invalid input.",
-            "detail": "Artifact already exists.",
-        }
+        upload_collection(pulp_client, collection_artifact.filename, pulp_dist["base_path"])
 
-        for key, value in collection_upload.items():
-            if key in COLLECTION_METADATA.keys():
-                assert COLLECTION_METADATA[key] == value, response
-
-        collection_sha256 = hashlib.sha256(known_collection["files"][1]).hexdigest()
-        assert response["sha256"] == collection_sha256, response
+    assert ctx.value.response.json()["errors"][0] == {
+        "status": "400",
+        "code": "invalid",
+        "title": "Invalid input.",
+        "detail": "Artifact already exists",
+    }
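
The same upload without bindings, against the Galaxy V3 artifact endpoint these tests exercise; a sketch assuming a local Pulp at http://localhost:24817, the default GALAXY_API_ROOT layout, and made-up credentials:

    import requests

    base_path = "my_base_path"  # the AnsibleDistribution base_path
    url = f"http://localhost:24817/pulp_ansible/galaxy/{base_path}/api/v3/artifacts/collections/"

    with open("/tmp/pulp-squeezer-0.0.9.tar.gz", "rb") as f:
        resp = requests.post(url, files={"file": f}, auth=("admin", "password"))

    resp.raise_for_status()
    print(resp.json()["task"])  # href of the CollectionImport task to poll
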
diff --git a/pulp_ansible/tests/functional/api/collection/v3/test_deletion.py b/pulp_ansible/tests/functional/api/collection/v3/test_deletion.py
index 24606a9b7..d04b67e69 100644
--- a/pulp_ansible/tests/functional/api/collection/v3/test_deletion.py
+++ b/pulp_ansible/tests/functional/api/collection/v3/test_deletion.py
@@ -1,30 +1,44 @@
 import unittest
 
 from pulp_ansible.tests.functional.utils import (
-    gen_collection_in_distribution,
     SyncHelpersMixin,
     TestCaseUsingBindings,
 )
 
 from pulpcore.client.pulp_ansible.exceptions import ApiException
 from pulp_smash.pulp3.bindings import monitor_task
+from orionutils.generator import build_collection, randstr
 
 
 class CollectionDeletionTestCase(TestCaseUsingBindings, SyncHelpersMixin):
     """Test collection deletion."""
 
+    def upload_to_distro(self, collection_config=None, distro=None):
+        """Helper method to upload a collection to the distribution."""
+        distro = distro or self.distribution
+        repo = distro.repository
+
+        collection_config = collection_config or {}
+        config = {"namespace": randstr(), "name": randstr(), "version": "1.0.0"}
+        config.update(collection_config)
+
+        col = build_collection("skeleton", config=config)
+        response = self.cv_api.create(file=col.filename, repository=repo)
+        monitor_task(response.task)
+        return {"namespace": col.namespace, "name": col.name, "version": col.version}
+
     def setUp(self):
         """Set up the collection deletion tests."""
         (self.repo, self.distribution) = self._create_empty_repo_and_distribution()
 
         self.collection_versions = ["1.0.0", "1.0.1"]
+        self.collection_name = randstr()
+        self.collection_namespace = randstr()
+        config = {"namespace": self.collection_namespace, "name": self.collection_name}
 
-        collection = gen_collection_in_distribution(
-            self.distribution.base_path, versions=self.collection_versions
-        )
-
-        self.collection_name = collection["name"]
-        self.collection_namespace = collection["namespace"]
+        for version in self.collection_versions:
+            config["version"] = version
+            self.upload_to_distro(collection_config=config)
 
     def test_collection_deletion(self):
         """Test deleting an entire collection."""
@@ -56,7 +70,7 @@ def test_collection_deletion(self):
                 namespace=self.collection_namespace,
             )
 
-            assert e.status == 404
+        assert e.exception.status == 404
 
     def test_collection_version_deletion(self):
         """Test deleting a specific collection version."""
@@ -132,14 +146,17 @@ def test_collection_version_deletion(self):
                 name=self.collection_name,
                 namespace=self.collection_namespace,
             )
-            assert e.status == 404
+        assert e.exception.status == 404
 
     def test_invalid_deletion(self):
         """Test deleting collections that are dependencies for other collections."""
         dependent_version = self.collection_versions.pop()
-        dependent_collection = gen_collection_in_distribution(
-            self.distribution.base_path,
-            dependencies={f"{self.collection_namespace}.{self.collection_name}": dependent_version},
+        dependent_collection = self.upload_to_distro(
+            collection_config={
+                "dependencies": {
+                    f"{self.collection_namespace}.{self.collection_name}": dependent_version
+                }
+            },
         )
 
         err_msg = f"{dependent_collection['namespace']}.{dependent_collection['name']} 1.0.0"
@@ -152,8 +169,8 @@ def test_invalid_deletion(self):
                 namespace=self.collection_namespace,
             )
 
-            # check error message includes collection that's blocking delete
-            assert err_msg in e.body
+        # check error message includes collection that's blocking delete
+        assert err_msg in e.exception.body
 
         # Verify specific version that's used can't be deleted
         with self.assertRaises(ApiException) as e:
@@ -163,10 +180,10 @@ def test_invalid_deletion(self):
                 namespace=self.collection_namespace,
                 version=dependent_version,
             )
-            assert e.status == 400
+        assert e.exception.status == 400
 
-            # check error message includes collection that's blocking delete
-            assert err_msg in e.body
+        # check error message includes collection that's blocking delete
+        assert err_msg in e.exception.body
 
         # Verify non dependent version can be deleted.
         resp = self.collections_versions_v3api.delete(
@@ -234,9 +251,10 @@ def test_delete_signed_content(self):
     def test_version_deletion_with_range_of_versions(self):
         """Verify collections can be deleted when another version satisfies requirements."""
         # Create a collection that depends on any version of an existing collection
-        gen_collection_in_distribution(
-            self.distribution.base_path,
-            dependencies={f"{self.collection_namespace}.{self.collection_name}": "*"},
+        self.upload_to_distro(
+            collection_config={
+                "dependencies": {f"{self.collection_namespace}.{self.collection_name}": "*"}
+            },
         )
 
         to_delete = self.collection_versions.pop()
@@ -262,4 +280,4 @@ def test_version_deletion_with_range_of_versions(self):
                 version=self.collection_versions[0],
             )
 
-            assert e.status == 400
+        assert e.exception.status == 400
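
The orionutils generator used by the new upload_to_distro helper builds a throwaway collection from a template config; roughly (the names here are invented):

    from orionutils.generator import build_collection

    col = build_collection(
        "skeleton",
        config={
            "namespace": "acme",
            "name": "dependent",
            "version": "1.0.0",
            # Depend on any version of an existing collection
            "dependencies": {"acme.base": "*"},
        },
    )
    print(col.filename)   # path to the built .tar.gz
    print(col.namespace, col.name, col.version)
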
diff --git a/pulp_ansible/tests/functional/conftest.py b/pulp_ansible/tests/functional/conftest.py
index 8dbf0b442..beefbe324 100644
--- a/pulp_ansible/tests/functional/conftest.py
+++ b/pulp_ansible/tests/functional/conftest.py
@@ -124,12 +124,12 @@ def _ansible_collection_remote_factory(**kwargs):
 
 
 @pytest.fixture
-def build_and_upload_collection(ansible_collections_api_client):
+def build_and_upload_collection(ansible_collection_version_api_client):
     """A factory to locally create, build, and upload a collection."""
 
     def _build_and_upload_collection():
         collection = build_collection("skeleton")
-        response = ansible_collections_api_client.upload_collection(collection.filename)
+        response = ansible_collection_version_api_client.create(file=collection.filename)
         task = monitor_task(response.task)
         return collection, task.created_resources[0]
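
A test consuming the updated fixture could then look like this (hypothetical test body; the fixture returns the built collection and the href of the created content):

    def test_build_and_upload(build_and_upload_collection, ansible_collection_version_api_client):
        collection, content_href = build_and_upload_collection()
        content = ansible_collection_version_api_client.read(content_href)
        assert content.namespace == collection.namespace
        assert content.name == collection.name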