Supposing you wanted to use Django instead of SQLAlchemy and Flask, how might things look?
First thing is to choose where to install it. I put it in a separate package next to our main allocation code:
├── src
│ ├── allocation
│ │ ├── config.py
│ │ ├── model.py
│ │ ├── repository.py
│ │ ├── services.py
│ │ └── unit_of_work.py
│ ├── djangoproject
│ │ ├── alloc
│ │ │ ├── apps.py
│ │ │ ├── __init__.py
│ │ │ ├── migrations
│ │ │ │ ├── 0001_initial.py
│ │ │ │ └── __init__.py
│ │ │ ├── models.py
│ │ │ └── views.py
│ │ ├── django_project
│ │ │ ├── __init__.py
│ │ │ ├── settings.py
│ │ │ ├── urls.py
│ │ │ └── wsgi.py
│ │ └── manage.py
│ └── setup.py
└── tests
├── conftest.py
├── e2e
│ └── test_api.py
├── integration
│ ├── test_repository.py
#...
I used a plugin called pytest-django to help with test database management.
Rewriting the first repository test was a minimal change — just replacing some raw SQL with calls to the Django ORM / QuerySet language:
from djangoproject.alloc import models as django_models
@pytest.mark.django_db
def test_repository_can_save_a_batch():
    """A batch added through the repository ends up persisted by the Django ORM."""
    batch = model.Batch("batch1", "RUSTY-SOAPDISH", 100, eta=date(2011, 12, 25))

    repository.DjangoRepository().add(batch)

    # Unpacking a one-element list implicitly asserts exactly one row exists.
    [saved_batch] = django_models.Batch.objects.all()
    assert saved_batch.reference == batch.reference
    assert saved_batch.sku == batch.sku
    # The domain's "private" _purchased_quantity maps to the ORM's qty column.
    assert saved_batch.qty == batch._purchased_quantity
    assert saved_batch.eta == batch.eta
The second test is a bit more involved since it has allocations, but it is still made up of familiar-looking Django code:
@pytest.mark.django_db
def test_repository_can_retrieve_a_batch_with_allocations():
    """Retrieving a batch rebuilds its domain allocations from the ORM rows."""
    sku = "PONY-STATUE"
    orm_line = django_models.OrderLine.objects.create(orderid="order1", sku=sku, qty=12)
    orm_batch1 = django_models.Batch.objects.create(reference="batch1", sku=sku, qty=100, eta=None)
    # A second, unallocated batch, to check we fetch the right one.
    django_models.Batch.objects.create(reference="batch2", sku=sku, qty=100, eta=None)
    django_models.Allocation.objects.create(line=orm_line, batch=orm_batch1)

    retrieved = repository.DjangoRepository().get("batch1")

    expected = model.Batch("batch1", sku, 100, eta=None)
    assert retrieved == expected  # Batch.__eq__ only compares reference
    assert retrieved.sku == expected.sku
    assert retrieved._purchased_quantity == expected._purchased_quantity
    assert retrieved._allocations == {model.OrderLine("order1", sku, 12)}
Here’s how the actual repository ends up looking:
class DjangoRepository(AbstractRepository):
    """Repository backed by the Django ORM.

    Translation to and from the domain model is delegated to custom methods
    on the Django model classes (update_from_domain / to_domain).
    """

    def add(self, batch):
        super().add(batch)
        self.update(batch)

    def update(self, batch):
        # Persistence is delegated to the ORM model's translation method.
        django_models.Batch.update_from_domain(batch)

    def _get(self, reference):
        # NOTE(review): .first() returns None when no row matches, so
        # .to_domain() would raise AttributeError for an unknown reference —
        # presumably callers only request batches that exist; confirm.
        orm_batch = django_models.Batch.objects.filter(reference=reference).first()
        return orm_batch.to_domain()

    def list(self):
        rows = django_models.Batch.objects.all()
        return [row.to_domain() for row in rows]
You can see that the implementation relies on the Django models having some custom methods for translating to and from our domain model.
Note
|
As in [chapter_02_repository], we use dependency inversion: the ORM (Django) depends on the model, and not the other way around. |
Those custom methods look something like this:
from django.db import models
from allocation import model as domain_model
class Batch(models.Model):
    """Django ORM twin of the domain Batch, with explicit translation methods.

    The ORM depends on the domain model (dependency inversion), never the
    other way around.
    """

    reference = models.CharField(max_length=255)
    sku = models.CharField(max_length=255)
    qty = models.IntegerField()
    eta = models.DateField(blank=True, null=True)

    @staticmethod
    def update_from_domain(batch: domain_model.Batch):
        """Upsert this row (and its allocations) from a domain Batch."""
        # Entities need an explicit try-get/except upsert;
        # objects.get_or_create only suits value objects.
        try:
            row = Batch.objects.get(reference=batch.reference)
        except Batch.DoesNotExist:
            row = Batch(reference=batch.reference)
        row.sku = batch.sku
        row.qty = batch._purchased_quantity  # reaching into a "private" domain field
        row.eta = batch.eta
        row.save()
        # Replace the full allocation set so the ORM mirrors the domain state.
        row.allocation_set.set(
            [Allocation.from_domain(line, row) for line in batch._allocations]
        )

    def to_domain(self) -> domain_model.Batch:
        """Build a domain Batch, including its allocations, from this row."""
        batch = domain_model.Batch(
            ref=self.reference, sku=self.sku, qty=self.qty, eta=self.eta
        )
        batch._allocations = {
            allocation.line.to_domain()
            for allocation in self.allocation_set.all()
        }
        return batch
class OrderLine(models.Model):
# (definition elided in this excerpt; mirrors the domain OrderLine with
# the same to_domain/from_domain translation pattern as Batch)
#...
-
For value objects,
objects.get_or_create
can work, but for entities, you need an explicit try-get/except to handle the upsert. -
I’ve shown the most complex example here. If you do decide to do this, be aware that there will be boilerplate! Thankfully it’s not very complex boilerplate…
-
Relationships also need some careful, custom handling.
The tests don’t change too much
def insert_batch(ref, sku, qty, eta):  #(1)
    """Test helper: persist a Batch row directly through the Django ORM."""
    fields = dict(reference=ref, sku=sku, qty=qty, eta=eta)
    django_models.Batch.objects.create(**fields)
def get_allocated_batch_ref(orderid, sku):  #(1)
    """Test helper: find which batch reference an order line was allocated to."""
    allocation = django_models.Allocation.objects.get(
        line__orderid=orderid, line__sku=sku
    )
    return allocation.batch.reference
@pytest.mark.django_db(transaction=True)
def test_uow_can_retrieve_a_batch_and_allocate_to_it():
    """Committing through the UoW persists an allocation made on a domain batch."""
    insert_batch('batch1', 'HIPSTER-WORKBENCH', 100, None)

    uow = unit_of_work.DjangoUnitOfWork()
    with uow:
        line = model.OrderLine('o1', 'HIPSTER-WORKBENCH', 10)
        batch = uow.batches.get(reference='batch1')
        batch.allocate(line)
        uow.commit()

    assert get_allocated_batch_ref('o1', 'HIPSTER-WORKBENCH') == 'batch1'
@pytest.mark.django_db(transaction=True) #(2) transaction=True is needed to exercise real rollback behavior
def test_rolls_back_uncommitted_work_by_default():
    # (body elided in this excerpt)
    ...
@pytest.mark.django_db(transaction=True) #(2) transaction=True is needed to exercise real rollback behavior
def test_rolls_back_on_error():
    # (body elided in this excerpt)
    ...
-
Because we had little helper functions in these tests, the actual main body of the tests is pretty much the same as it was with SQLAlchemy
-
the pytest-django
mark.django_db(transaction=True)
is required to test our custom transaction/rollback behaviors.
And the implementation is quite simple, although it took me a few goes to find what actual invocation of Django’s transaction magic would work:
class DjangoUnitOfWork(AbstractUnitOfWork):
    """Unit of Work that maps UoW semantics onto Django's transaction API."""

    def __init__(self):
        self.init_repositories(repository.DjangoRepository())

    def __enter__(self):
        # Stop Django auto-committing each ORM call; open a transaction instead. #(1)
        transaction.set_autocommit(False)
        return super().__enter__()

    def __exit__(self, *args):
        super().__exit__(*args)
        transaction.set_autocommit(True)

    def commit(self):
        # Unlike SQLAlchemy, Django isn't instrumenting the domain instances,
        # so every object a repository has seen must be written back by hand. #(3)
        for dirty_batch in self.batches.seen:
            self.batches.update(dirty_batch)
        transaction.commit()  #(2)

    def rollback(self):
        transaction.rollback()  #(2)
-
set_autocommit(False)
was the best way to tell Django to stop automatically committing each ORM operation immediately, and begin a transaction. -
Then we use the explicit rollback and commits.
-
One difficulty: because, unlike with SQLAlchemy, we’re not instrumenting the domain model instances themselves, the
commit()
command needs to explicitly go through all the objects that have been touched by every repository and manually update them back to the ORM.
The Django views.py file ends up being almost identical to the old flask_app.py, because our architecture means it’s a very thin wrapper around our service layer (which didn’t change at all btw).
# Point Django at the project settings and initialise the app registry
# before any ORM models are used.
os.environ['DJANGO_SETTINGS_MODULE'] = 'djangoproject.django_project.settings'
django.setup()
@csrf_exempt
def add_batch(request):
    """Thin view wrapper: parse the JSON body and hand off to the service layer."""
    data = json.loads(request.body)
    eta = data['eta']
    if eta is not None:
        # ETA arrives as an ISO date string; the service layer expects a date.
        eta = datetime.fromisoformat(eta).date()
    uow = unit_of_work.DjangoUnitOfWork()
    services.add_batch(data['ref'], data['sku'], data['qty'], eta, uow)
    return HttpResponse('OK', status=201)
@csrf_exempt
def allocate(request):
    """Thin view wrapper: delegate allocation to the service layer,
    mapping known domain/service errors to a 400 response."""
    data = json.loads(request.body)
    uow = unit_of_work.DjangoUnitOfWork()
    try:
        batchref = services.allocate(
            data['orderid'], data['sku'], data['qty'], uow,
        )
    except (model.OutOfStock, services.InvalidSku) as e:
        return JsonResponse({'message': str(e)}, status=400)
    return JsonResponse({'batchref': batchref}, status=201)
OK it works but it does feel like more effort than Flask/SQLAlchemy. Why is that, and when might you still choose Django?
-
It’s hard because the ORM doesn’t work in the same way. We don’t have an equivalent of the SQLAlchemy classical mapper, so our ActiveRecord and our domain model can’t be the same object. Instead, we have to build a manual translation layer behind the repository. That’s more work (although once it’s done, the ongoing maintenance burden shouldn’t be too high).
-
it’s also hard because you need to integrate
pytest-django
and think carefully about test databases, etc.
So why might you still do it?
-
when migrating an existing project that has Django?
-
or because you want the Django Admin? (but we’d have to say that’s likely to be a bad idea, it goes against the grain of wanting to decouple your model and business logic from the ORM…)