Further separated the way the PublishDatasetCommand and the FinalizeDatasetPublicationCommand update the dataset object;

to avoid the condition where the first command is still holding onto the same object as it initiates the execution of the
second command asynchronously, which can result in concurrency and optimistic lock issues.
Also removed the redundant/unnecessary merge() calls. (#2438)
landreev committed Apr 30, 2018
1 parent b0e1b7a commit 804643b
Showing 4 changed files with 24 additions and 21 deletions.
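The heart of the change is the hand-off between the two commands: instead of passing the managed Dataset entity into the @Asynchronous service method, PublishDatasetCommand now passes only the dataset id, and DatasetServiceBean re-fetches a fresh instance inside its own transaction before submitting the FinalizeDatasetPublicationCommand. A minimal sketch of that pattern, using hypothetical bean and entity names rather than the actual Dataverse classes:

    import javax.ejb.Asynchronous;
    import javax.ejb.Stateless;
    import javax.persistence.Entity;
    import javax.persistence.EntityManager;
    import javax.persistence.Id;
    import javax.persistence.PersistenceContext;

    // Hypothetical stand-in for the real Dataset entity.
    @Entity
    class PublishableDataset {
        @Id
        Long id;
        boolean publicationFinalized;
    }

    @Stateless
    public class AsyncFinalizerBean {

        @PersistenceContext
        private EntityManager em;

        // The caller hands over only the primary key. @Asynchronous methods run in
        // their own transaction, so re-fetching here guarantees the async work uses
        // a fresh managed instance rather than the same object the scheduling
        // transaction may still be flushing, which is what caused the concurrency
        // and optimistic lock failures this commit addresses.
        @Asynchronous
        public void finalizePublicationAsync(Long datasetId) {
            PublishableDataset fresh = em.find(PublishableDataset.class, datasetId);
            if (fresh == null) {
                return; // the scheduling transaction may not have committed yet
            }
            fresh.publicationFinalized = true;
            // No em.merge() needed: 'fresh' is managed, so changes are flushed on commit.
        }
    }

The Thread.sleep() added to DatasetServiceBean below works toward the same goal from the other side: it gives the transaction that scheduled the asynchronous call time to commit before the fresh lookup runs.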
4 changes: 2 additions & 2 deletions src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -1609,9 +1609,9 @@ private String init(boolean initFull) {
//This is a hack to remove dataset locks for File PID registration if
//the dataset is released
//in testing we had cases where datasets with 1000 files were remaining locked after being published successfully
if(dataset.getLatestVersion().isReleased() && dataset.isLockedFor(DatasetLock.Reason.pidRegister)){
/*if(dataset.getLatestVersion().isReleased() && dataset.isLockedFor(DatasetLock.Reason.pidRegister)){
datasetService.removeDatasetLocks(dataset.getId(), DatasetLock.Reason.pidRegister);
}
}*/
if (dataset.isLockedFor(DatasetLock.Reason.pidRegister)) {
JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.pidRegister.workflow.inprogress"),
BundleUtil.getStringFromBundle("dataset.publish.workflow.inprogress"));
13 changes: 12 additions & 1 deletion src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -912,8 +912,19 @@ public WorkflowComment addWorkflowComment(WorkflowComment workflowComment) {
}

@Asynchronous
public void callFinalizePublishCommandAsynchronously(Dataset theDataset, CommandContext ctxt, DataverseRequest request) throws CommandException {
public void callFinalizePublishCommandAsynchronously(Long datasetId, CommandContext ctxt, DataverseRequest request) throws CommandException {

// Since we are calling the next command asynchronously anyway - sleep here
// for a few seconds, just in case, to make sure the database update of
// the dataset initiated by the PublishDatasetCommand has finished,
// to avoid any concurrency/optimistic lock issues.
try {
Thread.sleep(15000);
} catch (Exception ex) {
logger.warning("Failed to sleep for 15 seconds.");
}
logger.fine("Running FinalizeDatasetPublicationCommand, asynchronously");
Dataset theDataset = find(datasetId);
String nonNullDefaultIfKeyNotFound = "";
String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
commandEngine.submit(new FinalizeDatasetPublicationCommand(theDataset, doiProvider, request));
FinalizeDatasetPublicationCommand.java
@@ -174,11 +174,12 @@ private void publicizeExternalIdentifier(CommandContext ctxt) throws CommandException
theDataset.setGlobalIdCreateTime(new Date());
theDataset.setIdentifierRegistered(true);
for (DataFile df : theDataset.getFiles()) {
logger.fine("registering global id for file "+df.getId());
idServiceBean.publicizeIdentifier(df);
df.setGlobalIdCreateTime(new Date());
df.setIdentifierRegistered(true);
DataFile merged = ctxt.em().merge(df);
merged = null;
// this merge() on an individual file below is unnecessary:
//DataFile merged = ctxt.em().merge(df);
}
} catch (Throwable e) {
//if publicize fails remove the lock for registration
PublishDatasetCommand.java
@@ -14,8 +14,8 @@
import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
import java.util.Date;
import java.util.Optional;
import java.util.logging.Logger;
import static java.util.stream.Collectors.joining;
import javax.ejb.Asynchronous;

/**
* Kick-off a dataset publication process. The process may complete immediately,
@@ -30,6 +30,7 @@
*/
@RequiredPermissions(Permission.PublishDataset)
public class PublishDatasetCommand extends AbstractPublishDatasetCommand<PublishDatasetResult> {
private static final Logger logger = Logger.getLogger(PublishDatasetCommand.class.getName());

boolean minorRelease;
DataverseRequest request;
@@ -65,12 +66,11 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException
theDataset.getEditVersion().setVersionNumber(new Long(theDataset.getVersionNumber() + 1));
theDataset.getEditVersion().setMinorVersionNumber(new Long(0));
}

theDataset = ctxt.em().merge(theDataset);

Optional<Workflow> prePubWf = ctxt.workflows().getDefaultWorkflow(TriggerType.PrePublishDataset);
if ( prePubWf.isPresent() ) {
// We start a workflow
theDataset = ctxt.em().merge(theDataset);
ctxt.workflows().start(prePubWf.get(), buildContext(doiProvider, TriggerType.PrePublishDataset) );
return new PublishDatasetResult(theDataset, false);

@@ -85,24 +85,15 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException
lock.setStartTime(new Date());
theDataset.getLocks().add(lock);
theDataset = ctxt.em().merge(theDataset);
return callFinalizeAsync(ctxt);
ctxt.datasets().callFinalizePublishCommandAsynchronously(theDataset.getId(), ctxt, request);
return new PublishDatasetResult(theDataset, false);
}
// Synchronous publishing (no workflow involved)
theDataset = ctxt.engine().submit(new FinalizeDatasetPublicationCommand(theDataset, doiProvider, getRequest()));
return new PublishDatasetResult(ctxt.em().merge(theDataset), true);
theDataset = ctxt.engine().submit(new FinalizeDatasetPublicationCommand(ctxt.em().merge(theDataset), doiProvider, getRequest()));
return new PublishDatasetResult(theDataset, true);
}
}


private PublishDatasetResult callFinalizeAsync(CommandContext ctxt) throws CommandException {
try {
ctxt.datasets().callFinalizePublishCommandAsynchronously(theDataset, ctxt, request);
return new PublishDatasetResult(theDataset, false);
} catch (CommandException ce){
throw new CommandException("Publish Dataset failed", this);
}
}

/**
* See that publishing the dataset in the requested manner makes sense, at
* the given state of the dataset.
