Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

WIP: Add quick migration, if no fields are modified/added/removed (only other schema properties changed) #1489

Open
wants to merge 1 commit into
base: hotfix-1.8.x
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import com.gentics.mesh.core.data.branch.HibBranch;
import com.gentics.mesh.core.data.dao.ContentDao;
import com.gentics.mesh.core.data.dao.PersistingBranchDao;
import com.gentics.mesh.core.data.schema.HibAddFieldChange;
import com.gentics.mesh.core.data.schema.HibFieldSchemaVersionElement;
import com.gentics.mesh.core.data.schema.HibFieldTypeChange;
import com.gentics.mesh.core.data.schema.HibMicroschemaVersion;
Expand Down Expand Up @@ -77,8 +78,10 @@ public AbstractMigrationHandler(Database db, BinaryUploadHandlerImpl binaryField
* Container which contains the expected migration changes
* @param touchedFields
* Set of touched fields (will be modified)
* @param addedFields
* Set of added fields (will be modified)
*/
protected void prepareMigration(HibFieldSchemaVersionElement<?, ?, ?, ?, ?> fromVersion, Set<String> touchedFields) {
protected void prepareMigration(HibFieldSchemaVersionElement<?, ?, ?, ?, ?> fromVersion, Set<String> touchedFields, Set<String> addedFields) {
HibSchemaChange<?> change = fromVersion.getNextChange();
while (change != null) {
// if either the type changes or the field is removed, the field is "touched"
Expand All @@ -88,6 +91,8 @@ protected void prepareMigration(HibFieldSchemaVersionElement<?, ?, ?, ?, ?> from
touchedFields.add(((HibFieldTypeChange) change).getFieldName());
} else if (change instanceof HibRemoveFieldChange) {
touchedFields.add(((HibRemoveFieldChange) change).getFieldName());
} else if (change instanceof HibAddFieldChange) {
addedFields.add(((HibAddFieldChange) change).getFieldName());
}

change = change.getNextChange();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,10 @@ public Completable migrateMicronodes(MicronodeMigrationContext context) {
// Collect the migration scripts
NodeMigrationActionContextImpl ac = new NodeMigrationActionContextImpl();
Set<String> touchedFields = new HashSet<>();
Set<String> addedFields = new HashSet<>();
try {
db.tx(() -> {
prepareMigration(reloadVersion(fromVersion), touchedFields);
prepareMigration(reloadVersion(fromVersion), touchedFields, addedFields);
ac.setProject(branch.getProject());
ac.setBranch(branch);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -159,12 +159,14 @@ public Completable migrateNodes(NodeMigrationActionContext context) {
String branchUuid = db.tx(() -> branch.getUuid());
String fromUuud = db.tx(() -> fromVersion.getUuid());
String toUuid = db.tx(() -> context.getToVersion().getUuid());
boolean autoPurge = db.tx(() -> context.getToVersion().isAutoPurgeEnabled());

// Prepare the migration - Collect the migration scripts
Set<String> touchedFields = new HashSet<>();
Set<String> addedFields = new HashSet<>();
try {
db.tx(() -> {
prepareMigration(reloadVersion(fromVersion), touchedFields);
prepareMigration(reloadVersion(fromVersion), touchedFields, addedFields);
if (status != null) {
status.setStatus(RUNNING);
status.commit();
Expand Down Expand Up @@ -209,14 +211,23 @@ public Completable migrateNodes(NodeMigrationActionContext context) {
return Completable.complete();
}

// determine, whether we can do a "quick" migration (which will not create new
// versions of the content, but just modify existing contents to point to the
// new schema version)
boolean doQuickMigration = touchedFields.isEmpty() && addedFields.isEmpty() && autoPurge;

errorsDetected = migrateLoop(containers, cause, status, (batch, containerList, errors) -> {
try (WriteLock lock = writeLock.lock(context)) {
beforeBatchMigration(containerList, context);
List<Pair<HibNodeFieldContainer, HibNodeFieldContainer>> toPurge = new ArrayList<>();
for (HibNodeFieldContainer container : containerList) {
Pair<HibNodeFieldContainer, HibNodeFieldContainer> toPurgePair = migrateContainer(context, batch, container, errors, touchedFields);
if (toPurgePair != null) {
toPurge.add(toPurgePair);
if (doQuickMigration) {
migrateContainerQuick(context, batch, container, errors);
} else {
Pair<HibNodeFieldContainer, HibNodeFieldContainer> toPurgePair = migrateContainer(context, batch, container, errors, touchedFields);
if (toPurgePair != null) {
toPurge.add(toPurgePair);
}
}
}

Expand Down Expand Up @@ -434,4 +445,53 @@ private void cloneUntouchedFields(HibNodeFieldContainer oldContainer, HibNodeFie
}
}
}

/**
 * Do a quick migration by only changing the reference to the schema version (content itself stays untouched).
 * This is used when no fields were added, modified or removed, so no new content version needs to be created.
 *
 * @param ac action context (provides branch and target schema version)
 * @param batch event queue batch to which update events are added
 * @param container draft container to migrate
 * @param errorsDetected list collecting the detected errors
 */
private void migrateContainerQuick(NodeMigrationActionContext ac, EventQueueBatch batch,
		HibNodeFieldContainer container, List<Exception> errorsDetected) {
	ContentDao contentDao = Tx.get().contentDao();

	String containerUuid = container.getUuid();
	HibNode node = contentDao.getNode(container);
	String parentNodeUuid = node.getUuid();
	if (log.isDebugEnabled()) {
		log.debug("Migrating container {" + containerUuid + "} of node {" + parentNodeUuid + "}");
	}

	HibBranch branch = reloadBranch(ac.getBranch());
	HibSchemaVersion toVersion = reloadVersion(ac.getToVersion());
	try {
		String languageTag = container.getLanguageTag();
		ac.getNodeParameters().setLanguages(languageTag);
		ac.getVersioningParameters().setVersion("draft");

		HibNodeFieldContainer oldPublished = contentDao.getFieldContainer(node, languageTag, branch.getUuid(),
			PUBLISHED);

		// 1. Check whether there is any other published container which we need to handle separately
		if (oldPublished != null && !oldPublished.equals(container)) {
			// We only need to migrate the published container if its schema version is also the "old" one.
			// Fixed: the original compared the draft container's version with itself (always true); the
			// published container's version must be compared against the draft's (old) version instead.
			boolean hasSameOldSchemaVersion = oldPublished.getSchemaContainerVersion().getId()
				.equals(container.getSchemaContainerVersion().getId());
			if (hasSameOldSchemaVersion) {
				oldPublished.setSchemaContainerVersion(toVersion);
				batch.add(contentDao.onUpdated(oldPublished, branch.getUuid(), PUBLISHED));
			}
		}

		// 2. Migrate the draft container. This will also update the draft edge.
		container.setSchemaContainerVersion(toVersion);
		batch.add(contentDao.onUpdated(container, branch.getUuid(), DRAFT));
	} catch (Exception e1) {
		log.error("Error while handling container {" + containerUuid + "} of node {" + parentNodeUuid
			+ "} during schema migration.", e1);
		errorsDetected.add(e1);
	}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1297,6 +1297,50 @@ public void testMicroschemaMigrationMixedList() throws Throwable {

}

/**
 * Assert that swapping the order of two fields in a schema triggers a quick migration:
 * the node keeps its content version while being moved to the new schema version.
 */
@Test
public void testChangeFieldOrder() {
	// Set up a schema containing two string fields with auto purge enabled
	SchemaCreateRequest schemaCreateRequest = new SchemaCreateRequest();
	schemaCreateRequest.setName("dummy");
	schemaCreateRequest.addField(FieldUtil.createStringFieldSchema("first_text"));
	schemaCreateRequest.addField(FieldUtil.createStringFieldSchema("second_text"));
	schemaCreateRequest.setAutoPurge(true);
	schemaCreateRequest.validate();
	SchemaResponse createdSchema = call(() -> client().createSchema(schemaCreateRequest));
	call(() -> client().assignSchemaToProject(PROJECT_NAME, createdSchema.getUuid()));
	call(() -> client().assignBranchSchemaVersions(PROJECT_NAME, initialBranchUuid(), createdSchema.toReference()));

	// Create an english node using the schema, filling both fields
	String parentUuid = tx(() -> folder("2015").getUuid());
	NodeCreateRequest nodeRequest = new NodeCreateRequest();
	nodeRequest.setLanguage("en");
	nodeRequest.setSchemaName("dummy");
	nodeRequest.setParentNodeUuid(parentUuid);
	nodeRequest.getFields().put("second_text", new StringFieldImpl().setString("value of second_text"));
	nodeRequest.getFields().put("first_text", new StringFieldImpl().setString("value of first_text"));
	NodeResponse createdNode = call(() -> client().createNode(PROJECT_NAME, nodeRequest));
	assertThat(createdNode).hasVersion("0.1");

	// Post a schema update which only reverses the field order
	SchemaUpdateRequest schemaUpdateRequest = new SchemaUpdateRequest();
	schemaUpdateRequest.setName("dummy");
	schemaUpdateRequest.addField(FieldUtil.createStringFieldSchema("second_text"));
	schemaUpdateRequest.addField(FieldUtil.createStringFieldSchema("first_text"));
	schemaUpdateRequest.validate();

	waitForJobs(() -> {
		call(() -> client().updateSchema(createdSchema.getUuid(), schemaUpdateRequest));
	}, COMPLETED, 1);

	// The node must now reference schema version 2.0 but keep version 0.1 and all field values
	assertThat(call(() -> client().findNodeByUuid(PROJECT_NAME, createdNode.getUuid())))
		.hasSchemaVersion("dummy", "2.0")
		.hasVersion("0.1")
		.hasStringField("first_text", "value of first_text")
		.hasStringField("second_text", "value of second_text");
}

private HibSchema createDummySchemaWithChanges(String oldFieldName, String newFieldName, boolean setAddRaw) {
PersistingSchemaDao schemaDao = CommonTx.get().schemaDao();

Expand Down