Skip to content

Commit

Permalink
Merge branch 'feature/drs-imports' into 'develop'
Browse files Browse the repository at this point in the history
Feature/drs imports

See merge request core/sevenbridges-python!91
  • Loading branch information
Perica Prokic committed Sep 14, 2021
2 parents 1a9f718 + 8341dfe commit ff08a0a
Show file tree
Hide file tree
Showing 8 changed files with 251 additions and 4 deletions.
8 changes: 4 additions & 4 deletions sevenbridges/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
AppCopyStrategy, AppRawFormat, AsyncFileOperations, AsyncJobStates,
AutomationRunActions, DivisionRole, FileStorageType, ImportExportState,
TaskStatus, TransferState, VolumeAccessMode, VolumeType, PartSize,
AutomationStatus,
AutomationStatus
)
from sevenbridges.errors import (
SbgError, ResourceNotModified, ReadOnlyPropertyError, ValidationError,
Expand All @@ -66,9 +66,9 @@
# Enums
'AppCopyStrategy', 'AppRawFormat', 'AppCopyStrategy',
'AsyncFileOperations', 'AsyncJobStates', 'AutomationRunActions',
'DivisionRole', 'FileStorageType', 'ImportExportState', 'TaskStatus',
'TransferState', 'VolumeAccessMode', 'VolumeType', 'PartSize',
'AutomationStatus',
'DivisionRole', 'FileStorageType', 'ImportExportState',
'TaskStatus', 'TransferState', 'VolumeAccessMode', 'VolumeType',
'PartSize', 'AutomationStatus',
# Errors
'SbgError', 'ResourceNotModified', 'ReadOnlyPropertyError',
'ValidationError', 'TaskValidationError', 'PaginationError', 'BadRequest',
Expand Down
2 changes: 2 additions & 0 deletions sevenbridges/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
from sevenbridges.models.rate_limit import RateLimit
from sevenbridges.models.storage_export import Export
from sevenbridges.models.storage_import import Import
from sevenbridges.models.drs_import import DRSImportBulk
from sevenbridges.models.enums import RequestParameters
from sevenbridges.models.billing_group import BillingGroup
from sevenbridges.models.automation import (
Expand All @@ -41,6 +42,7 @@ class Api(HttpClient):
billing_groups = BillingGroup
datasets = Dataset
divisions = Division
drs_imports = DRSImportBulk
endpoints = Endpoints
exports = Export
files = File
Expand Down
15 changes: 15 additions & 0 deletions sevenbridges/models/compound/import_result.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
from sevenbridges.meta.fields import CompoundField
from sevenbridges.meta.resource import Resource
from sevenbridges.models.file import File
from sevenbridges.models.compound.error import Error


class FileImportResult(Resource):
    """
    File result resource used for actions that may return resource or error out
    """
    # Exactly one of these is expected to be populated per result entry:
    # ``error`` when the import failed, ``resource`` when it succeeded.
    error = CompoundField(Error, read_only=True)
    resource = CompoundField(File, read_only=True)

    def __str__(self):
        # Render whichever of the two compound fields is present. The
        # original wrapped this conditional in a redundant f-string; the
        # branches already produce strings.
        return str(self.error) if self.error else str(self.resource)
126 changes: 126 additions & 0 deletions sevenbridges/models/drs_import.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
import logging

from sevenbridges.errors import SbgError
from sevenbridges.meta.fields import (
HrefField, StringField, DateTimeField, CompoundListField
)
from sevenbridges.meta.resource import Resource
from sevenbridges.meta.transformer import Transform
from sevenbridges.models.compound.import_result import FileImportResult
from sevenbridges.models.file import File

logger = logging.getLogger(__name__)


class DRSImportBulk(Resource):
    """
    Central resource for managing DRS imports.
    """
    _URL = {
        'get': '/bulk/drs/imports/{id}',
        'create': '/bulk/drs/imports/create',
    }

    id = StringField(read_only=True)
    href = HrefField(read_only=True)
    result = CompoundListField(FileImportResult, read_only=True)
    # Per-instance cache for the ``result_files`` property. Declared as None
    # at class level and lazily replaced with a list on first access: a
    # class-level list literal here would be shared — and mutated — across
    # every DRSImportBulk instance.
    _result_files = None
    state = StringField(read_only=True)
    started_on = DateTimeField(read_only=True)
    finished_on = DateTimeField(read_only=True)

    def __str__(self):
        return f'<DRSBulkImport: id={self.id}>'

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    @property
    def result_files(self):
        """
        Retrieve files that were successfully imported.

        Successfully fetched files are cached on the instance; only files
        not yet cached are retrieved from the server in one bulk request.
        :return: List of File objects, or None when there are no results.
        """
        if self._result_files is None:
            # Lazily create the per-instance cache (see class-level note).
            self._result_files = []
        try:
            cached_file_ids = {
                file.resource.id for file in self._result_files
            }
            imported_file_ids = {
                file.resource.id
                for file in self.result if file.resource
            }
            file_ids_to_retrieve = imported_file_ids - cached_file_ids
            if file_ids_to_retrieve:
                files = File.bulk_get(
                    files=file_ids_to_retrieve, api=self._api
                )
                self._result_files.extend(files)
            return self._result_files if self._result_files else None
        except TypeError:
            # self.result is None (job not finished / no results yet), so
            # iterating it raises TypeError.
            return None

    @classmethod
    def bulk_get(cls, import_job_id, api=None):
        """
        Retrieve DRS bulk import details
        :param import_job_id: Import id to be retrieved.
        :param api: Api instance.
        :return: DRSImportBulk object.
        :raises SbgError: If the id is missing or is not a string.
        """
        api = api or cls._API

        if not import_job_id:
            raise SbgError('DRS import is required!')
        elif not isinstance(import_job_id, str):
            raise SbgError('Invalid DRS import parameter!')

        response = api.get(
            url=cls._URL['get'].format(id=import_job_id)
        ).json()
        # Use cls so that subclasses get instances of their own type.
        return cls(api=api, **response)

    @classmethod
    def bulk_submit(
            cls, imports, tags=None, conflict_resolution='SKIP', api=None
    ):
        """
        Submit DRS bulk import
        :param imports: List of dicts describing a wanted import.
        :param tags: list of tags to be applied.
        :param conflict_resolution: Type of file naming conflict resolution.
        :param api: Api instance.
        :return: DRSImportBulk object.
        :raises SbgError: If imports are missing, or an item carries both
            (or neither) of the project/parent identifiers.
        """
        if not imports:
            raise SbgError('Imports are required')

        api = api or cls._API

        items = []
        for import_ in imports:
            # Work on a shallow copy so the identifier transforms below do
            # not mutate the caller's dicts in place.
            import_ = dict(import_)
            project = import_.get('project')
            parent = import_.get('parent')

            if project and parent:
                raise SbgError(
                    'Project and parent identifiers are mutually exclusive'
                )
            elif project:
                import_['project'] = Transform.to_project(project)
            elif parent:
                import_['parent'] = Transform.to_file(parent)
            else:
                raise SbgError('Project or parent identifier is required.')

            items.append(import_)

        data = {
            'conflict_resolution': conflict_resolution,
            'tags': tags,
            'items': items
        }
        response = api.post(url=cls._URL['create'], data=data).json()
        return cls(api=api, **response)
4 changes: 4 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,9 @@ def __init__(self, request_mocker, base_url):
request_mocker, base_url
)
self.uploads = providers.FileUploadProvider(request_mocker, base_url)
self.drs_imports = providers.DRSImportProvider(
request_mocker, base_url
)


class Verifier:
Expand Down Expand Up @@ -93,6 +96,7 @@ def __init__(self, request_mocker):
self.automation_packages = verifiers.AutomationPackageVerifier(
request_mocker
)
self.drs_imports = verifiers.DRSImportsVerifier(request_mocker)


@pytest.fixture
Expand Down
35 changes: 35 additions & 0 deletions tests/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -1857,3 +1857,38 @@ def deleted(self, failed=False):
regx = f'^{self.base_url}/upload/multipart/.*'
matcher = re.compile(regx)
self.request_mocker.delete(matcher, status_code=status_code)


class DRSImportProvider:
    """Registers mocked HTTP endpoints for DRS bulk import requests."""

    def __init__(self, request_mocker, base_url):
        self.request_mocker = request_mocker
        self.base_url = base_url

    @staticmethod
    def default_import():
        """Return a response payload with defaults for a DRS import job."""
        return {
            'id': generator.uuid4(),
            'href': generator.url(),
            'result': [],
            'state': generator.state(),
            'failed_files': 0,
            'completed_files': 2,
            'total_files': 2,
            'started_on': generator.time(),
            'finished_on': generator.time()
        }

    def can_be_retrieved_in_bulk(self, import_data):
        """Mock the GET endpoint for a single bulk DRS import job."""
        payload = self.default_import()
        payload.update(import_data)
        self.request_mocker.get(
            '/bulk/drs/imports/{id}'.format(id=import_data['id']),
            json=payload,
        )

    def can_be_submitted_in_bulk(self, imports_data):
        """Mock the POST endpoint; one result entry per submitted import."""
        payload = self.default_import()
        payload['result'] = [
            {'id': generator.uuid4(), 'href': generator.url()}
            for _ in imports_data
        ]
        self.request_mocker.post('/bulk/drs/imports/create', json=payload)
53 changes: 53 additions & 0 deletions tests/test_drs_import.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import faker

generator = faker.Factory.create()


def test_imports_bulk_get(api, given, verifier):
    """A bulk DRS import fetch yields one result entry per imported file."""
    # preconditions
    total = 10
    _import = {
        'id': generator.uuid4(),
        'result': [
            {'resource': {'id': generator.uuid4(), 'href': generator.url()}}
            for _ in range(total)
        ]
    }

    given.drs_imports.can_be_retrieved_in_bulk(_import)

    # action
    response = api.drs_imports.bulk_get(_import['id'])

    # verification
    assert len(response.result) == total
    verifier.drs_imports.bulk_retrieved(response.id)


def test_imports_bulk_submit(api, given, verifier):
    """Submitting DRS imports in bulk yields a result entry per import."""
    # preconditions
    total = 10

    imports = []
    for _ in range(total):
        imports.append({
            'drs_uri': generator.name(),
            'project': generator.name(),
            'metadata': {
                generator.name(): generator.name(),
                generator.name(): generator.name()
            },
            'name': generator.name()
        })
    tags = [generator.name()]

    given.drs_imports.can_be_submitted_in_bulk(imports)

    # action
    response = api.drs_imports.bulk_submit(imports, tags)

    # verification
    assert len(response.result) == total
    verifier.drs_imports.bulk_submitted()
12 changes: 12 additions & 0 deletions tests/verifiers.py
Original file line number Diff line number Diff line change
Expand Up @@ -603,3 +603,15 @@ def file_move_job_fetched(self, id):

def async_files_moved(self):
self.checker.check_url('/async/files/move')


class DRSImportsVerifier:
    """Asserts that the DRS bulk import endpoints were actually called."""

    def __init__(self, request_mocker):
        self.request_mocker = request_mocker
        self.checker = Assert(self.request_mocker)

    def bulk_retrieved(self, _id):
        # The job-specific GET endpoint must have been hit.
        self.checker.check_url(f'/bulk/drs/imports/{_id}')

    def bulk_submitted(self):
        # The bulk-create POST endpoint must have been hit.
        self.checker.check_url('/bulk/drs/imports/create')

0 comments on commit ff08a0a

Please sign in to comment.