diff --git a/conf/docker-aio/configure_doi.bash b/conf/docker-aio/configure_doi.bash
old mode 100755
new mode 100644
diff --git a/doc/sphinx-guides/source/developers/deployment.rst b/doc/sphinx-guides/source/developers/deployment.rst
old mode 100755
new mode 100644
diff --git a/scripts/installer/ec2-destroy-all.sh b/scripts/installer/ec2-destroy-all.sh
old mode 100755
new mode 100644
diff --git a/scripts/installer/ec2-list-all.sh b/scripts/installer/ec2-list-all.sh
old mode 100755
new mode 100644
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index 714fb54313e..e74e054cb4e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -731,9 +731,7 @@ public void callFinalizePublishCommandAsynchronously(Long datasetId, CommandCont
         }
         logger.fine("Running FinalizeDatasetPublicationCommand, asynchronously");
         Dataset theDataset = find(datasetId);
-        String nonNullDefaultIfKeyNotFound = "";
-        String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
-        commandEngine.submit(new FinalizeDatasetPublicationCommand(theDataset, doiProvider, request, isPidPrePublished));
+        commandEngine.submit(new FinalizeDatasetPublicationCommand(theDataset, request, isPidPrePublished));
     }
     
     /*
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractPublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractPublishDatasetCommand.java
index 0e9712777a9..2a753c17c23 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractPublishDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractPublishDatasetCommand.java
@@ -17,8 +17,8 @@ public AbstractPublishDatasetCommand(Dataset datasetIn, DataverseRequest aReques
         super(aRequest, datasetIn);
     }
     
-    protected WorkflowContext buildContext( String doiProvider, WorkflowContext.TriggerType triggerType) {
-        return new WorkflowContext(getRequest(), getDataset(), doiProvider, triggerType);
+    protected WorkflowContext buildContext( Dataset theDataset, WorkflowContext.TriggerType triggerType) {
+        return new WorkflowContext(getRequest(), theDataset, triggerType);
     }
     
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
index fd998b47e70..35651a8f832 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
@@ -41,19 +41,16 @@ public class FinalizeDatasetPublicationCommand extends AbstractPublishDatasetCom
     
     private static final Logger logger = Logger.getLogger(FinalizeDatasetPublicationCommand.class.getName());
     
-    String doiProvider;
-
     /**
      * mirror field from {@link PublishDatasetCommand} of same name
      */
     final boolean datasetExternallyReleased;
     
-    public FinalizeDatasetPublicationCommand(Dataset aDataset, String aDoiProvider, DataverseRequest aRequest) {
-        this( aDataset, aDoiProvider, aRequest, false );
+    public FinalizeDatasetPublicationCommand(Dataset aDataset, DataverseRequest aRequest) {
+        this( aDataset, aRequest, false );
     }
     
-    public FinalizeDatasetPublicationCommand(Dataset aDataset, String aDoiProvider, DataverseRequest aRequest, boolean isPidPrePublished) {
+    public FinalizeDatasetPublicationCommand(Dataset aDataset, DataverseRequest aRequest, boolean isPidPrePublished) {
         super(aDataset, aRequest);
-        doiProvider = aDoiProvider;
         datasetExternallyReleased = isPidPrePublished;
     }
@@ -87,7 +84,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         // comes from there. There's a chance that the final merge, at the end of this
         // command, would be sufficient. -- L.A. Sep. 6 2017
         theDataset = ctxt.em().merge(theDataset);
-        
+        setDataset(theDataset);
         updateDatasetUser(ctxt);
         
         //if the publisher hasn't contributed to this version
@@ -135,9 +132,11 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
                 new RemoveLockCommand(getRequest(), theDataset, DatasetLock.Reason.InReview) );
         }
         
+        final Dataset ds = ctxt.em().merge(theDataset);
+        
         ctxt.workflows().getDefaultWorkflow(TriggerType.PostPublishDataset).ifPresent(wf -> {
             try {
-                ctxt.workflows().start(wf, buildContext(doiProvider, TriggerType.PostPublishDataset));
+                ctxt.workflows().start(wf, buildContext(ds, TriggerType.PostPublishDataset));
             } catch (CommandException ex) {
                 logger.log(Level.SEVERE, "Error invoking post-publish workflow: " + ex.getMessage(), ex);
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
index 4433b23cefb..6e6d03f99ca 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
@@ -81,11 +81,10 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException
         }
         
         Optional<Workflow> prePubWf = ctxt.workflows().getDefaultWorkflow(TriggerType.PrePublishDataset);
-        String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, "");
         if ( prePubWf.isPresent() ) {
             // We start a workflow
             theDataset = ctxt.em().merge(theDataset);
-            ctxt.workflows().start(prePubWf.get(), buildContext(doiProvider, TriggerType.PrePublishDataset) );
+            ctxt.workflows().start(prePubWf.get(), buildContext(theDataset, TriggerType.PrePublishDataset) );
             return new PublishDatasetResult(theDataset, false);
             
         } else{
@@ -125,7 +124,7 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException
             
             } else {
                 // Synchronous publishing (no workflow involved)
-                theDataset = ctxt.engine().submit(new FinalizeDatasetPublicationCommand(ctxt.em().merge(theDataset), doiProvider, getRequest(),datasetExternallyReleased));
+                theDataset = ctxt.engine().submit(new FinalizeDatasetPublicationCommand(theDataset, getRequest(),datasetExternallyReleased));
                 return new PublishDatasetResult(theDataset, true);
             }
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index ceccc574000..a79072d18ae 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -41,7 +41,6 @@
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 import edu.harvard.iq.dataverse.workflow.Workflow;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData;
-import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.Set;
 import javax.json.Json;
@@ -231,7 +230,8 @@ public static JsonObjectBuilder json(Workflow wf){
         for ( WorkflowStepData stp : wf.getSteps() ) {
             arr.add( jsonObjectBuilder().add("stepType", stp.getStepType())
                                .add("provider", stp.getProviderId())
-                               .add("parameters", mapToObject(stp.getStepParameters())) );
+                               .add("parameters", mapToObject(stp.getStepParameters()))
+                               .add("requiredSettings", mapToObject(stp.getStepSettings())) );
         }
         bld.add("steps", arr );
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java b/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
index b2f4171a190..121eaa2f1de 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
@@ -47,7 +47,6 @@ public class PendingWorkflowInvocation implements Serializable {
     
     int pendingStepIdx;
     
-    String doiProvider;
     String userId;
     String ipAddress;
     int typeOrdinal;
@@ -66,15 +65,13 @@ public PendingWorkflowInvocation(Workflow wf, WorkflowContext ctxt, Pending resu
         userId = ctxt.getRequest().getUser().getIdentifier();
         ipAddress = ctxt.getRequest().getSourceAddress().toString();
         localData = new HashMap<>(result.getData());
-        doiProvider = ctxt.getDoiProvider();
         typeOrdinal = ctxt.getType().ordinal();
     }
     
     public WorkflowContext reCreateContext(RoleAssigneeServiceBean roleAssignees) {
         DataverseRequest aRequest = new DataverseRequest((User)roleAssignees.getRoleAssignee(userId), IpAddress.valueOf(ipAddress));
         final WorkflowContext workflowContext = new WorkflowContext(aRequest, dataset, nextVersionNumber,
-                nextMinorVersionNumber, WorkflowContext.TriggerType.values()[typeOrdinal],
-                doiProvider);
+                nextMinorVersionNumber, WorkflowContext.TriggerType.values()[typeOrdinal], null, null);
         workflowContext.setInvocationId(invocationId);
         return workflowContext;
     }
@@ -151,14 +148,6 @@ public void setPendingStepIdx(int pendingStepIdx) {
         this.pendingStepIdx = pendingStepIdx;
     }
     
-    public String getDoiProvider() {
-        return doiProvider;
-    }
-
-    public void setDoiProvider(String doiProvider) {
-        this.doiProvider = doiProvider;
-    }
-
     public int getTypeOrdinal() {
         return typeOrdinal;
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java b/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java
index c72d5a67724..6c73ed0e64b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java
@@ -2,7 +2,9 @@
 
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData;
 import java.io.Serializable;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Objects;
 import javax.persistence.CascadeType;
 import javax.persistence.Entity;
@@ -63,6 +65,14 @@ public void setSteps(List<WorkflowStepData> steps) {
         }
     }
     
+    Map<String, String> getRequiredSettings() {
+        Map<String, String> settings = new HashMap<>();
+        for(WorkflowStepData step: steps) {
+            settings.putAll(step.getStepSettings());
+        }
+        return settings;
+    }
+    
     @Override
     public int hashCode() {
         int hash = 7;
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowContext.java b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowContext.java
index 0cca2bd64a9..3edee57b17b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowContext.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowContext.java
@@ -1,8 +1,11 @@
 package edu.harvard.iq.dataverse.workflow;
 
 import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStep;
+
+import java.util.Map;
 import java.util.UUID;
 
 /**
@@ -25,26 +28,27 @@ public enum TriggerType {
     private final long nextVersionNumber;
     private final long nextMinorVersionNumber;
     private final TriggerType type;
-    private final String doiProvider;
+    private final ApiToken apiToken;
+    private Map<String, Object> settings;
     
     private String invocationId = UUID.randomUUID().toString();
     
-    public WorkflowContext( DataverseRequest aRequest, Dataset aDataset, String doiProvider, TriggerType aTriggerType ) {
+    public WorkflowContext( DataverseRequest aRequest, Dataset aDataset, TriggerType aTriggerType ) {
         this( aRequest, aDataset,
                 aDataset.getLatestVersion().getVersionNumber(),
                 aDataset.getLatestVersion().getMinorVersionNumber(),
-                aTriggerType,
-                doiProvider);
+                aTriggerType, null, null);
     }
     
     public WorkflowContext(DataverseRequest request, Dataset dataset, long nextVersionNumber,
-            long nextMinorVersionNumber, TriggerType type, String doiProvider) {
+            long nextMinorVersionNumber, TriggerType type, Map<String, Object> settings, ApiToken apiToken) {
         this.request = request;
         this.dataset = dataset;
         this.nextVersionNumber = nextVersionNumber;
         this.nextMinorVersionNumber = nextMinorVersionNumber;
         this.type = type;
-        this.doiProvider = doiProvider;
+        this.settings = settings;
+        this.apiToken = apiToken;
     }
     
     public Dataset getDataset() {
@@ -75,12 +79,16 @@ public String getInvocationId() {
         return invocationId;
     }
     
-    public String getDoiProvider() {
-        return doiProvider;
-    }
-
     public TriggerType getType() {
         return type;
     }
     
+    public Map<String, Object> getSettings() {
+        return settings;
+    }
+    
+    public ApiToken getApiToken() {
+        return apiToken;
+    }
+    
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
index 4fc7eb63bd1..e215d2b54b3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
@@ -2,8 +2,12 @@
 
 import edu.harvard.iq.dataverse.DatasetLock;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
+import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
 import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
+import edu.harvard.iq.dataverse.authorization.users.ApiToken;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.impl.FinalizeDatasetPublicationCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RemoveLockCommand;
@@ -15,6 +19,8 @@
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStep;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult;
+
+import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -26,9 +32,11 @@
 import javax.ejb.Stateless;
 import javax.ejb.TransactionAttribute;
 import javax.ejb.TransactionAttributeType;
+import javax.inject.Inject;
 import javax.persistence.EntityManager;
 import javax.persistence.PersistenceContext;
 import javax.persistence.Query;
+import javax.persistence.TypedQuery;
 
 /**
  * Service bean for managing and executing {@link Workflow}s
@@ -56,6 +64,9 @@ public class WorkflowServiceBean {
     @EJB
     EjbDataverseEngine engine;
     
+    @Inject
+    DataverseRequestServiceBean dvRequestService;
+    
     final Map<String, WorkflowStepSPI> providers = new HashMap<>();
     
     public WorkflowServiceBean() {
@@ -86,13 +97,46 @@
      */
     @Asynchronous
     public void start(Workflow wf, WorkflowContext ctxt) throws CommandException {
-        ctxt = refresh(ctxt);
+        ctxt = refresh(ctxt, retrieveRequestedSettings( wf.getRequiredSettings()), getCurrentApiToken(ctxt.getRequest().getAuthenticatedUser()));
         lockDataset(ctxt);
         forward(wf, ctxt);
     }
     
+    private ApiToken getCurrentApiToken(AuthenticatedUser au) {
+        if (au != null) {
+            CommandContext ctxt = engine.getContext();
+            ApiToken token = ctxt.authentication().findApiTokenByUser(au);
+            if ((token == null) || (token.getExpireTime().before(new Date()))) {
+                token = ctxt.authentication().generateApiTokenForUser(au);
+            }
+            return token;
+        }
+        return null;
+    }
+    
+    private Map<String, Object> retrieveRequestedSettings(Map<String, String> requiredSettings) {
+        Map<String, Object> retrievedSettings = new HashMap<String, Object>();
+        for (String setting : requiredSettings.keySet()) {
+            String settingType = requiredSettings.get(setting);
+            switch (settingType) {
+            case "string": {
+                retrievedSettings.put(setting, settings.get(setting));
+                break;
+            }
+            case "boolean": {
+                retrievedSettings.put(setting, settings.isTrue(settingType, false));
+                break;
+            }
+            case "long": {
+                retrievedSettings.put(setting,
+                        settings.getValueForKeyAsLong(SettingsServiceBean.Key.valueOf(setting)));
+                break;
+            }
+            }
+        }
+        return retrievedSettings;
+    }
     
     /**
      * Starting the resume process for a pending workflow. We first delete the
@@ -122,8 +166,8 @@ private void doResume(PendingWorkflowInvocation pending, String body) {
         List<WorkflowStepData> stepsLeft = wf.getSteps().subList(pending.getPendingStepIdx(), wf.getSteps().size());
         
         WorkflowStep pendingStep = createStep(stepsLeft.get(0));
-        final WorkflowContext ctxt = pending.reCreateContext(roleAssignees);
-        
+        WorkflowContext newCtxt = pending.reCreateContext(roleAssignees);
+        final WorkflowContext ctxt = refresh(newCtxt, retrieveRequestedSettings( wf.getRequiredSettings()), getCurrentApiToken(newCtxt.getRequest().getAuthenticatedUser()));
         WorkflowStepResult res = pendingStep.resume(ctxt, pending.getLocalData(), body);
         if (res instanceof Failure) {
             rollback(wf, ctxt, (Failure) res, pending.getPendingStepIdx() - 1);
@@ -186,7 +230,8 @@ private void executeSteps(Workflow wf, WorkflowContext ctxt, int initialStepIdx
             try {
                 if (res == WorkflowStepResult.OK) {
                     logger.log(Level.INFO, "Workflow {0} step {1}: OK", new Object[]{ctxt.getInvocationId(), stepIdx});
-                    
+                    em.merge(ctxt.getDataset());
+                    ctxt = refresh(ctxt);
                 } else if (res instanceof Failure) {
                     logger.log(Level.WARNING, "Workflow {0} failed: {1}", new Object[]{ctxt.getInvocationId(), ((Failure) res).getReason()});
                     rollback(wf, ctxt, (Failure) res, stepIdx-1 );
@@ -231,12 +276,36 @@ void rollbackStep( WorkflowStep step, WorkflowContext ctxt, Failure reason ) {
     @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
     void lockDataset( WorkflowContext ctxt ) throws CommandException {
         final DatasetLock datasetLock = new DatasetLock(DatasetLock.Reason.Workflow, ctxt.getRequest().getAuthenticatedUser());
-//        engine.submit(new AddLockCommand(ctxt.getRequest(), ctxt.getDataset(), datasetLock));
+        /* Note that this method directly adds a lock to the database rather than adding it via
+         * engine.submit(new AddLockCommand(ctxt.getRequest(), ctxt.getDataset(), datasetLock));
+         * which would update the dataset's list of locks, etc.
+         * An em.find() for the dataset would get a Dataset that has an updated list of locks, but this copy would not have any changes
+         * made in a calling command (e.g. for a PostPublication workflow, the fact that the latest version is 'released' is not yet in the
+         * database).
+         */
         datasetLock.setDataset(ctxt.getDataset());
         em.persist(datasetLock);
         em.flush();
     }
     
+    @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
+    void unlockDataset( WorkflowContext ctxt ) {
+        /* Since the lockDataset command above directly persists a lock to the database,
+         * the ctxt.getDataset() is not updated and its list of locks can't be used. Using the named query below will find the workflow
+         * lock and remove it (actually all workflow locks for this Dataset, but only one workflow should be active).
+         */
+        TypedQuery<DatasetLock> lockCounter = em.createNamedQuery("DatasetLock.getLocksByDatasetId", DatasetLock.class);
+        lockCounter.setParameter("datasetId", ctxt.getDataset().getId());
+        List<DatasetLock> locks = lockCounter.getResultList();
+        for(DatasetLock lock: locks) {
+            if(lock.getReason() == DatasetLock.Reason.Workflow) {
+                logger.info("Removing lock");
+                em.remove(lock);
+            }
+        }
+        em.flush();
+    }
+    
     //
     //
     //////////////////////////////////////////////////////////////
@@ -251,13 +320,15 @@ private void workflowCompleted(Workflow wf, WorkflowContext ctxt) {
         logger.log(Level.INFO, "Workflow {0} completed.", ctxt.getInvocationId());
         if ( ctxt.getType() == TriggerType.PrePublishDataset ) {
             try {
-                engine.submit( new FinalizeDatasetPublicationCommand(ctxt.getDataset(), ctxt.getDoiProvider(), ctxt.getRequest()) );
+                engine.submit( new FinalizeDatasetPublicationCommand(ctxt.getDataset(), ctxt.getRequest()) );
                 
             } catch (CommandException ex) {
                 logger.log(Level.SEVERE, "Exception finalizing workflow " + ctxt.getInvocationId() +": " + ex.getMessage(), ex);
                 rollback(wf, ctxt, new Failure("Exception while finalizing the publication: " + ex.getMessage()), wf.steps.size()-1);
             }
         }
+        unlockDataset(ctxt);
+        
     }
     
     public List<Workflow> listWorkflows() {
@@ -350,9 +421,20 @@ private WorkflowStep createStep(WorkflowStepData wsd) {
     }
     
     private WorkflowContext refresh( WorkflowContext ctxt ) {
+        return refresh(ctxt, ctxt.getSettings(), ctxt.getApiToken());
+    }
+    
+    private WorkflowContext refresh( WorkflowContext ctxt, Map<String, Object> settings, ApiToken apiToken ) {
+        /* An earlier version of this class used em.find() to 'refresh' the Dataset in the context.
+         * For a PostPublication workflow, this had the consequence of hiding/removing changes to the Dataset
+         * made in the FinalizeDatasetPublicationCommand (i.e. the fact that the draft version is now released and
+         * has a version number). It is not clear to me if the em.merge below is needed or if it handles the case of
+         * resumed workflows. (The overall method is needed to allow the context to be updated in the start() method with the
+         * settings and ApiToken retrieved by the WorkflowServiceBean.) - JM - 9/18
+         */
         return new WorkflowContext( ctxt.getRequest(),
-                datasets.find( ctxt.getDataset().getId() ), ctxt.getNextVersionNumber(),
-                ctxt.getNextMinorVersionNumber(), ctxt.getType(), ctxt.getDoiProvider() );
+                em.merge(ctxt.getDataset()), ctxt.getNextVersionNumber(),
+                ctxt.getNextMinorVersionNumber(), ctxt.getType(), settings, apiToken);
     }
     
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java b/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java
index 1e10d0ecb7b..a06531a2666 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java
@@ -38,6 +38,11 @@ public class WorkflowStepData implements Serializable {
     @Column(length = 2048)
     private Map<String, String> stepParameters;
     
+    @ElementCollection( fetch=FetchType.EAGER )
+    @Column(length = 2048)
+    private Map<String, String> stepSettings;
+    
+    
     public Workflow getParent() {
         return parent;
     }
@@ -80,7 +85,15 @@ public void setId(long id) {
     
     @Override
     public String toString() {
-        return "WorkflowStepData{" + "parent=" + parent + ", providerId=" + providerId + ", stepType=" + stepType + ", parameters=" + stepParameters + '}';
+        return "WorkflowStepData{" + "parent=" + parent + ", providerId=" + providerId + ", stepType=" + stepType + ", parameters=" + stepParameters + ", settings=" + stepSettings + '}';
+    }
+    
+    public void setStepSettings(Map<String, String> settingsMap) {
+        this.stepSettings=settingsMap;
+    }
+    
+    public Map<String, String> getStepSettings() {
+        return stepSettings;
     }