Commit

Attempting refactor of priority requirement. Peer review feedback
chrispenny committed Apr 4, 2023
1 parent 92f4617 commit 9dd92ab
Showing 14 changed files with 280 additions and 192 deletions.
README.md: 40 additions & 0 deletions
@@ -20,6 +20,46 @@ It generates the cache files using the [QueuedJobs module](https://github.com/symbiote/silverstripe-queuedjobs)
* "symbiote/silverstripe-queuedjobs": "^5",
* "silverstripe/versioned": "^2"

## Migrating from v4

### Changes to `PublishableSiteTree::urlsToCache()`

**Please be aware that there is one significant change between v4 and v5 that will not be immediately obvious.**

`PublishableSiteTree::urlsToCache()` used to require you to return an associative array with URLs as keys and
priorities (int) as values.

`PublishableSiteTree::urlsToCache()` now requires you to simply return an array of URLs (URLs as values). There is no
longer any support for "priority".
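
For example, here is a minimal before/after sketch. The `MyPage` class and the URLs it returns are illustrative
assumptions, not part of the module; the interface namespace is inferred from the file path further down this commit:

```php
<?php

use SilverStripe\StaticPublishQueue\Contract\StaticallyPublishable;

// Hypothetical page class, for illustration only
class MyPage extends \Page implements StaticallyPublishable
{
    // v4 (old) shape: URLs as keys, priority (int) as values
    // public function urlsToCache(): array
    // {
    //     return [
    //         $this->Link() => 1,           // this page
    //         $this->Parent()->Link() => 2, // its parent
    //     ];
    // }

    // v5 (new) shape: a plain array of URLs (URLs as values)
    public function urlsToCache(): array
    {
        return [
            $this->Link(),
            $this->Parent()->Link(),
        ];
    }
}
```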

### Changes to `PublishableSiteTree::objectsToUpdate()` & `PublishableSiteTree::objectsToDelete()`

No changes have been made to `objectsToUpdate()` or `objectsToDelete()`. These still function exactly as they did
in v4.
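
For reference, a sketch of an implementation that is unaffected by the migration. `MyNewsPage`, its parent-refresh
behaviour, and the imports are assumptions for illustration; only the two method names come from the module:

```php
<?php

use SilverStripe\StaticPublishQueue\Contract\StaticPublishingTrigger;
use SilverStripe\StaticPublishQueue\Extension\Engine\SiteTreePublishingEngine;

// Hypothetical page type, for illustration only
class MyNewsPage extends \Page implements StaticPublishingTrigger
{
    // Still returns the objects whose URLs should be (re)generated
    public function objectsToUpdate($context)
    {
        $objects = [$this];

        // Illustrative: also refresh the parent listing page
        if ($this->Parent() && $this->Parent()->exists()) {
            $objects[] = $this->Parent();
        }

        return $objects;
    }

    // Still returns the objects whose URLs should be purged
    public function objectsToDelete($context)
    {
        if (($context['action'] ?? null) === SiteTreePublishingEngine::ACTION_UNPUBLISH) {
            return [$this];
        }

        return [];
    }
}
```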

### `SiteTreePublishingEngine` changes

The following methods have been removed:

* `getToUpdate()`
* `setToUpdate()`
* `getToDelete()`
* `setToDelete()`

These methods used to deal with `DataObjects`. We now collect URLs instead of `DataObjects`, and those URLs are
stored and manipulated by the following methods (see the sketch after this list):

* `getUrlsToUpdate()`
* `addUrlsToUpdate()`
* `setUrlsToUpdate()`
* `getUrlsToDelete()`
* `addUrlsToDelete()`
* `setUrlsToDelete()`
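
Since these methods are now `protected`, they are meant to be used by the engine itself (or a subclass of it), rather
than called from outside. A minimal sketch of a hypothetical subclass follows; the class name and the extra URL are
illustrative assumptions:

```php
<?php

use SilverStripe\StaticPublishQueue\Extension\Engine\SiteTreePublishingEngine;

// Hypothetical subclass, for illustration only
class MyPublishingEngine extends SiteTreePublishingEngine
{
    public function onAfterPublishRecursive($original)
    {
        // Queue one extra URL for regeneration alongside the page's own URLs.
        // addUrlsToUpdate() merges with any URLs already collected.
        $this->addUrlsToUpdate(['/news/']);

        // The parent implementation collects the page's changes and flushes
        // everything (including our extra URL) into queued jobs.
        parent::onAfterPublishRecursive($original);
    }
}
```

If you previously called `setToUpdate()` or `setToDelete()` from outside the extension, that entry point is gone;
provide your URLs via `urlsToCache()` (or your `objectsToUpdate()` / `objectsToDelete()` implementations) instead.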

Why did this change happen? TL;DR: We realised that purging URLs for pages whose URL changes (e.g. the parent page
changes, or the `URLSegment` changes) was not functioning as expected. Collecting URLs, rather than `DataObjects`, was
really the only way to make this functionality work correctly while maintaining cache integrity.

## Unit-testing with StaticPublisherState to disable queued jobs

You can use `StaticPublisherState` to disable queued job queueing and logging in unit tests to improve performance.
src/Contract/StaticallyPublishable.php: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ interface StaticallyPublishable
      *
      * Note: include the URL of the object itself!
      *
-     * @return array associative array of URL (string) => Priority (int)
+     * @return array associative array of URL (string)
      */
     public function urlsToCache(): array;
 }
src/Extension/Engine/SiteTreePublishingEngine.php: 68 additions & 62 deletions
@@ -41,12 +41,12 @@ class SiteTreePublishingEngine extends SiteTreeExtension implements Resettable
     /**
      * Queues the urls to be flushed into the queue.
      */
-    private array|SS_List $toUpdate = [];
+    private array $urlsToUpdate = [];
 
     /**
      * Queues the urls to be deleted as part of a next flush operation.
      */
-    private array|SS_List $toDelete = [];
+    private array $urlsToDelete = [];
 
     public static function reset(): void
     {
@@ -57,50 +57,41 @@ public static function reset(): void
     /**
      * Force inject queue service
      * Used for unit tests only to cover edge cases where Injector doesn't cover
      *
-     *
      * @param QueuedJobService $service
      */
     public static function setQueueService(QueuedJobService $service): void
     {
         static::$queueService = $service;
     }
 
-    /**
-     * @return array
-     */
-    public function getToUpdate()
+    protected function getUrlsToUpdate(): array
     {
-        return $this->toUpdate;
+        return $this->urlsToUpdate;
     }
 
-    /**
-     * @return array
-     */
-    public function getToDelete()
+    protected function getUrlsToDelete(): array
     {
-        return $this->toDelete;
+        return $this->urlsToDelete;
     }
 
-    /**
-     * @param array $toUpdate
-     * @return $this
-     */
-    public function setToUpdate($toUpdate)
+    protected function addUrlsToUpdate(array $urlsToUpdate): void
     {
-        $this->toUpdate = $toUpdate;
+        $this->urlsToUpdate = array_merge($this->getUrlsToUpdate(), $urlsToUpdate);
+    }
 
-        return $this;
+    protected function addUrlsToDelete(array $urlsToDelete): void
+    {
+        $this->urlsToDelete = array_merge($this->getUrlsToDelete(), $urlsToDelete);
     }
 
-    /**
-     * @param array $toDelete
-     * @return $this
-     */
-    public function setToDelete($toDelete)
+    protected function setUrlsToUpdate(array $urlsToUpdate): void
     {
-        $this->toDelete = $toDelete;
+        $this->urlsToUpdate = $urlsToUpdate;
+    }
 
-        return $this;
+    protected function setUrlsToDelete(array $urlsToDelete): void
+    {
+        $this->urlsToDelete = $urlsToDelete;
     }
 
     /**
@@ -113,20 +104,20 @@ public function onBeforePublishRecursive($original)
             return;
         }
 
+        $owner = $this->getOwner();
+
         // We want to find out if the URL for this page has changed at all. That can happen 2 ways: Either the page is
         // moved in the SiteTree (ParentID changes), or the URLSegment is updated
         // Apparently ParentID can sometimes be string, so make sure we cast to (int) for our comparison
-        if ((int) $original->ParentID !== (int) $this->getOwner()->ParentID
-            || $original->URLSegment !== $this->getOwner()->URLSegment
+        if ((int) $original->ParentID !== (int) $owner->ParentID
+            || $original->URLSegment !== $owner->URLSegment
         ) {
             // We have detected a change to the URL. We need to purge the old URLs for this page and any children
             $context = [
                 'action' => self::ACTION_UNPUBLISH,
             ];
+            // We'll collect these changes now, but they won't be actioned until onAfterPublishRecursive()
             $original->collectChanges($context);
+            // We need to flushChanges() immediately so that Jobs are queued for the Pages while they still have their
+            // old URLs
+            $original->flushChanges();
         }
     }

@@ -138,18 +129,22 @@ public function onAfterPublishRecursive($original)
         $parentId = $original->ParentID ?? null;
         $urlSegment = $original->URLSegment ?? null;
 
+        $owner = $this->getOwner();
+
         // Apparently ParentID can sometimes be string, so make sure we cast to (int) for our comparison
-        $parentChanged = $parentId && (int) $parentId !== (int) $this->getOwner()->ParentID;
-        $urlChanged = $urlSegment && $original->URLSegment !== $this->getOwner()->URLSegment;
+        $parentChanged = $parentId && (int) $parentId !== (int) $owner->ParentID;
+        $urlSegmentChanged = $urlSegment && $original->URLSegment !== $owner->URLSegment;
 
         $context = [
             'action' => self::ACTION_PUBLISH,
             // If a URL change has been detected, then we need to force the recursive regeneration of all child
             // pages
-            'urlChanged' => $parentChanged || $urlChanged,
+            'urlSegmentChanged' => $parentChanged || $urlSegmentChanged,
         ];
 
+        // Collect any additional changes (noting that some could already have been added in onBeforePublishRecursive())
         $this->collectChanges($context);
+        // Flush any/all changes that we have detected
         $this->flushChanges();
     }

@@ -158,7 +153,13 @@ public function onBeforeUnpublish()
         $context = [
             'action' => self::ACTION_UNPUBLISH,
         ];
+        // We'll collect these changes now, but they won't be actioned until onAfterUnpublish()
         $this->collectChanges($context);
     }
 
+    public function onAfterUnpublish()
+    {
+        // Flush any/all changes that we have detected
+        $this->flushChanges();
+    }

@@ -174,24 +175,38 @@ public function collectChanges(array $context): void
             // Collection of changes needs to happen within the context of our Published/LIVE state
             Versioned::set_stage(Versioned::LIVE);
 
+            $owner = $this->getOwner();
+
             // Re-fetch our page, now within a LIVE context
-            $page = DataObject::get($this->getOwner()->ClassName)->byID($this->getOwner()->ID);
+            $siteTree = DataObject::get($owner->ClassName)->byID($owner->ID);
 
             // This page isn't LIVE/Published, so there is nothing for us to do here
-            if (!$page?->exists()) {
+            if (!$siteTree?->exists()) {
                 return;
             }
 
             // The page does not include the required extension, and it doesn't implement a Trigger
-            if (!$page->hasExtension(PublishableSiteTree::class) && !$page instanceof StaticPublishingTrigger) {
+            if (!$siteTree->hasExtension(PublishableSiteTree::class) && !$siteTree instanceof StaticPublishingTrigger) {
                 return;
             }
 
-            $toUpdate = $page->objectsToUpdate($context);
-            $this->setToUpdate($toUpdate);
+            // Fetch our objects to be actioned
+            $objectsToUpdate = $siteTree->objectsToUpdate($context);
+            $objectsToDelete = $siteTree->objectsToDelete($context);
+            // Start collecting URLs from the above objects
+            $urlsToUpdate = [];
+            $urlsToDelete = [];
+
+            foreach ($objectsToUpdate as $objectToUpdate) {
+                $urlsToUpdate = array_merge($urlsToUpdate, $objectToUpdate->urlsToCache());
+            }
 
-            $toDelete = $page->objectsToDelete($context);
-            $this->setToDelete($toDelete);
+            foreach ($objectsToDelete as $objectToDelete) {
+                $urlsToDelete = array_merge($urlsToDelete, $objectToDelete->urlsToCache());
+            }
+
+            $this->addUrlsToUpdate($urlsToUpdate);
+            $this->addUrlsToDelete($urlsToDelete);
         });
     }

@@ -201,45 +216,36 @@ public function collectChanges(array $context): void
     public function flushChanges()
     {
         $queueService = static::$queueService ?? QueuedJobService::singleton();
+        $owner = $this->getOwner();
+        $urlsToUpdate = $this->getUrlsToUpdate();
+        $urlsToDelete = $this->getUrlsToDelete();
 
-        if ($this->toUpdate) {
+        if ($urlsToUpdate) {
             /** @var UrlBundleInterface $urlService */
             $urlService = Injector::inst()->create(UrlBundleInterface::class);
+            $urlService->addUrls($urlsToUpdate);
 
-            foreach ($this->toUpdate as $item) {
-                $urls = $item->urlsToCache();
-                ksort($urls);
-                $urls = array_keys($urls);
-                $urlService->addUrls($urls);
-            }
-
-            $jobs = $urlService->getJobsForUrls(GenerateStaticCacheJob::class, 'Building URLs', $this->owner);
+            $jobs = $urlService->getJobsForUrls(GenerateStaticCacheJob::class, 'Building URLs', $owner);
 
             foreach ($jobs as $job) {
                 $queueService->queueJob($job);
             }
 
-            $this->toUpdate = [];
+            $this->setUrlsToUpdate([]);
         }
 
-        if ($this->toDelete) {
+        if ($urlsToDelete) {
             /** @var UrlBundleInterface $urlService */
             $urlService = Injector::inst()->create(UrlBundleInterface::class);
+            $urlService->addUrls($urlsToDelete);
 
-            foreach ($this->toDelete as $item) {
-                $urls = $item->urlsToCache();
-                ksort($urls);
-                $urls = array_keys($urls);
-                $urlService->addUrls($urls);
-            }
-
-            $jobs = $urlService->getJobsForUrls(DeleteStaticCacheJob::class, 'Purging URLs', $this->owner);
+            $jobs = $urlService->getJobsForUrls(DeleteStaticCacheJob::class, 'Purging URLs', $owner);
 
             foreach ($jobs as $job) {
                 $queueService->queueJob($job);
             }
 
-            $this->toDelete = [];
+            $this->setUrlsToDelete([]);
         }
     }
 }