Skip to content

Commit 45dc099

Browse files
committed
lock in workflow again
1 parent c0e4aeb commit 45dc099

4 files changed

Lines changed: 6 additions & 42 deletions

File tree

src/main/java/edu/harvard/iq/dataverse/api/Datasets.java

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1399,16 +1399,9 @@ public Response publishMigratedDataset(@Context ContainerRequestContext crc, Str
13991399
Optional<Workflow> prePubWf = wfService.getDefaultWorkflow(TriggerType.PrePublishDataset);
14001400
DataverseRequest dataverseRequest = createDataverseRequest(user);
14011401
try {
1402-
// ToDo - should this be in onSuccess()? May relate to todo above
14031402
if (prePubWf.isPresent()) {
1404-
// Create the workflow lock BEFORE starting the workflow
1405-
DatasetLock workflowLock = new DatasetLock(DatasetLock.Reason.Workflow, user);
1406-
workflowLock.setDataset(ds);
1407-
datasetSvc.addDatasetLock(ds, workflowLock);
1408-
1409-
// Build context with the lock attached
1403+
// Build context
14101404
WorkflowContext context = new WorkflowContext(dataverseRequest, ds, TriggerType.PrePublishDataset, !contactPIDProvider);
1411-
context.setLockId(ds.getLockFor(DatasetLock.Reason.Workflow).getId());
14121405
// Start the workflow, the workflow will call FinalizeDatasetPublication later
14131406
wfService.start(prePubWf.get(),
14141407
new WorkflowContext(dataverseRequest, ds, TriggerType.PrePublishDataset, !contactPIDProvider),

src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java

Lines changed: 0 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -22,16 +22,12 @@
2222
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
2323
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
2424
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
25-
import edu.harvard.iq.dataverse.export.ExportService;
2625
import edu.harvard.iq.dataverse.pidproviders.PidProvider;
27-
import edu.harvard.iq.dataverse.pidproviders.PidUtil;
2826
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
29-
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
3027
import edu.harvard.iq.dataverse.util.BundleUtil;
3128
import edu.harvard.iq.dataverse.workflow.WorkflowContext;
3229
import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
3330

34-
import java.awt.datatransfer.StringSelection;
3531
import java.io.IOException;
3632
import java.sql.Timestamp;
3733
import java.util.*;
@@ -245,13 +241,6 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
245241
//Remove any pre-pub workflow lock (not needed as WorkflowServiceBean.workflowComplete() should already have removed it after setting the finalizePublication lock?)
246242
ctxt.datasets().removeDatasetLocks(ds, DatasetLock.Reason.Workflow);
247243

248-
ctxt.workflows().getDefaultWorkflow(TriggerType.PostPublishDataset).ifPresent(wf -> {
249-
// Create the workflow lock BEFORE starting the workflow
250-
DatasetLock workflowLock = new DatasetLock(DatasetLock.Reason.Workflow, (AuthenticatedUser) getRequest().getUser());
251-
workflowLock.setDataset(ds);
252-
ctxt.datasets().addDatasetLock(ds, workflowLock);
253-
});
254-
255244
Dataset readyDataset = ctxt.em().merge(ds);
256245

257246
setDataset(readyDataset);
@@ -291,7 +280,6 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
291280
ctxt.workflows().getDefaultWorkflow(TriggerType.PostPublishDataset).ifPresent(wf -> {
292281
// Build context with the lock attached
293282
WorkflowContext context = buildContext(ds, TriggerType.PostPublishDataset, datasetExternallyReleased);
294-
context.setLockId(ds.getLockFor(DatasetLock.Reason.Workflow).getId());
295283
try {
296284
ctxt.workflows().start(wf, context, false);
297285
} catch (CommandException e) {

src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -111,10 +111,6 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException
111111
if (prePubWf.isPresent()) {
112112
// We start a workflow
113113
try {
114-
// Create the workflow lock BEFORE starting the workflow
115-
DatasetLock workflowLock = new DatasetLock(DatasetLock.Reason.Workflow, (AuthenticatedUser) getRequest().getUser());
116-
workflowLock.setDataset(theDataset);
117-
ctxt.datasets().addDatasetLock(theDataset, workflowLock);
118114
theDataset = ctxt.em().merge(theDataset);
119115
ctxt.em().flush();
120116

@@ -267,7 +263,6 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
267263
// A pre-publication workflow will call FinalizeDatasetPublicationCommand itself when it completes
268264
if (prePubWf.isPresent()) {
269265
WorkflowContext context = buildContext(ds, TriggerType.PrePublishDataset, datasetExternallyReleased);
270-
context.setLockId(ds.getLockFor(DatasetLock.Reason.Workflow).getId());
271266
try {
272267
ctxt.workflows().start(prePubWf.get(), context, true);
273268
} catch (CommandException e) {

src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java

Lines changed: 5 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
1010
import edu.harvard.iq.dataverse.UserNotification;
1111
import edu.harvard.iq.dataverse.UserNotificationServiceBean;
12-
import edu.harvard.iq.dataverse.DatasetLock.Reason;
1312
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
1413
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
1514
import edu.harvard.iq.dataverse.engine.command.CommandContext;
@@ -79,9 +78,6 @@ public class WorkflowServiceBean {
7978
@EJB
8079
EjbDataverseEngine engine;
8180

82-
@EJB
83-
WorkflowServiceBean self;
84-
8581
@Inject
8682
DataverseRequestServiceBean dvRequestService;
8783

@@ -137,9 +133,7 @@ public void start(Workflow wf, WorkflowContext ctxt, boolean findDataset) throws
137133
* (e.g. if this method is not asynchronous)
138134
*
139135
*/
140-
logger.info("Ctxt lock id is " + ctxt.getLockId());
141-
boolean isLocked = ctxt.getLockId()!=null;
142-
if (!findDataset && !isLocked) {
136+
if (!findDataset) {
143137
/*
144138
* Sleep here briefly to make sure the database update from the callers
145139
* transaction completes which avoids any concurrency/optimistic lock issues.
@@ -157,9 +151,7 @@ public void start(Workflow wf, WorkflowContext ctxt, boolean findDataset) throws
157151
}
158152
//Refresh will only em.find the dataset if findDataset is true. (otherwise the dataset is em.merged)
159153
ctxt = refresh(ctxt, retrieveRequestedSettings( wf.getRequiredSettings()), getCurrentApiToken(ctxt.getRequest().getAuthenticatedUser()), findDataset);
160-
if(!isLocked) {
161-
lockDataset(ctxt, new DatasetLock(DatasetLock.Reason.Workflow, ctxt.getRequest().getAuthenticatedUser()));
162-
}
154+
lockDataset(ctxt, new DatasetLock(DatasetLock.Reason.Workflow, ctxt.getRequest().getAuthenticatedUser()));
163155
forward(wf, ctxt);
164156
}
165157

@@ -274,7 +266,7 @@ private void rollback(Workflow wf, WorkflowContext ctxt, Failure failure, int la
274266

275267
logger.log( Level.INFO, "Removing workflow lock");
276268
try {
277-
self.unlockDataset(ctxt);
269+
unlockDataset(ctxt);
278270
} catch (CommandException ex) {
279271
logger.log(Level.SEVERE, "Error restoring dataset locks state after rollback: " + ex.getMessage(), ex);
280272
}
@@ -355,12 +347,11 @@ void lockDataset(WorkflowContext ctxt, DatasetLock datasetLock) throws CommandEx
355347
em.persist(datasetLock);
356348
//flush creates the id
357349
em.flush();
358-
logger.info("Adding new lock id " + datasetLock.getId());
359350
ctxt.setLockId(datasetLock.getId());
360351
}
361352

362353
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
363-
public void unlockDataset(WorkflowContext ctxt) throws CommandException {
354+
void unlockDataset(WorkflowContext ctxt) throws CommandException {
364355
/*
365356
* Since the lockDataset command above directly persists a lock to the database,
366357
* the ctxt.getDataset() is not updated and its list of locks can't be used.
@@ -372,15 +363,12 @@ public void unlockDataset(WorkflowContext ctxt) throws CommandException {
372363
lockCounter.setParameter("datasetId", ctxt.getDataset().getId());
373364
List<DatasetLock> locks = lockCounter.getResultList();
374365
for (DatasetLock lock : locks) {
375-
logger.info("Found lock id " + lock.getId());
376366
if (lock.getReason() == DatasetLock.Reason.Workflow) {
377367
ctxt.getDataset().removeLock(lock);
378-
logger.info("Removing lock id " + lock.getId());
379368
em.remove(lock);
380369
}
381370
}
382371
em.flush();
383-
logger.info("dataset locked " + (null != ctxt.getDataset().getLockFor(Reason.Workflow)));
384372
}
385373

386374
//
@@ -416,7 +404,7 @@ private void workflowCompleted(Workflow wf, WorkflowContext ctxt) {
416404
lockDataset(ctxt, lock);
417405
ctxt.getDataset().addLock(lock);
418406

419-
self.unlockDataset(ctxt);
407+
unlockDataset(ctxt);
420408
ctxt.setLockId(null); //the workflow lock
421409
//Refreshing merges the dataset
422410
ctxt = refresh(ctxt);

0 commit comments

Comments (0)