Workflow enhancements #5048

Closed
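In brief: this diff stops threading a doiProvider string from PublishDatasetCommand and DatasetServiceBean through FinalizeDatasetPublicationCommand into WorkflowContext. buildContext(...) now takes the Dataset itself; Workflow gains a getRequiredSettings() map aggregated from its steps; WorkflowContext replaces the DOI provider with a generic settings map plus an ApiToken; the workflow JSON output gains a per-step requiredSettings object; and four shell/docs files lose their executable bit.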
Empty files modified (mode changed 100755 → 100644):

    conf/docker-aio/configure_doi.bash
    doc/sphinx-guides/source/developers/deployment.rst
    scripts/installer/ec2-destroy-all.sh
    scripts/installer/ec2-list-all.sh
src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -731,9 +731,7 @@ public void callFinalizePublishCommandAsynchronously(Long datasetId, CommandCont
         }
         logger.fine("Running FinalizeDatasetPublicationCommand, asynchronously");
         Dataset theDataset = find(datasetId);
-        String nonNullDefaultIfKeyNotFound = "";
-        String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
-        commandEngine.submit(new FinalizeDatasetPublicationCommand(theDataset, doiProvider, request, isPidPrePublished));
+        commandEngine.submit(new FinalizeDatasetPublicationCommand(theDataset, request, isPidPrePublished));
     }
 
     /*
src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractPublishDatasetCommand.java
@@ -17,8 +17,8 @@ public AbstractPublishDatasetCommand(Dataset datasetIn, DataverseRequest aReques
         super(aRequest, datasetIn);
     }
 
-    protected WorkflowContext buildContext(String doiProvider, WorkflowContext.TriggerType triggerType) {
-        return new WorkflowContext(getRequest(), getDataset(), doiProvider, triggerType);
+    protected WorkflowContext buildContext(Dataset theDataset, WorkflowContext.TriggerType triggerType) {
+        return new WorkflowContext(getRequest(), theDataset, triggerType);
     }
 
 }
src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
@@ -41,19 +41,16 @@ public class FinalizeDatasetPublicationCommand extends AbstractPublishDatasetCom

     private static final Logger logger = Logger.getLogger(FinalizeDatasetPublicationCommand.class.getName());
 
-    String doiProvider;
-
     /**
     * mirror field from {@link PublishDatasetCommand} of same name
     */
     final boolean datasetExternallyReleased;
 
-    public FinalizeDatasetPublicationCommand(Dataset aDataset, String aDoiProvider, DataverseRequest aRequest) {
-        this( aDataset, aDoiProvider, aRequest, false );
+    public FinalizeDatasetPublicationCommand(Dataset aDataset, DataverseRequest aRequest) {
+        this( aDataset, aRequest, false );
     }
-    public FinalizeDatasetPublicationCommand(Dataset aDataset, String aDoiProvider, DataverseRequest aRequest, boolean isPidPrePublished) {
+    public FinalizeDatasetPublicationCommand(Dataset aDataset, DataverseRequest aRequest, boolean isPidPrePublished) {
         super(aDataset, aRequest);
-        doiProvider = aDoiProvider;
         datasetExternallyReleased = isPidPrePublished;
     }

@@ -87,7 +84,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         // comes from there. There's a chance that the final merge, at the end of this
         // command, would be sufficient. -- L.A. Sep. 6 2017
         theDataset = ctxt.em().merge(theDataset);
-
+        setDataset(theDataset);
         updateDatasetUser(ctxt);
 
         //if the publisher hasn't contributed to this version
@@ -135,9 +132,11 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
                 new RemoveLockCommand(getRequest(), theDataset, DatasetLock.Reason.InReview) );
         }
 
+        final Dataset ds = ctxt.em().merge(theDataset);
+
         ctxt.workflows().getDefaultWorkflow(TriggerType.PostPublishDataset).ifPresent(wf -> {
             try {
-                ctxt.workflows().start(wf, buildContext(doiProvider, TriggerType.PostPublishDataset));
+                ctxt.workflows().start(wf, buildContext(ds, TriggerType.PostPublishDataset));
             } catch (CommandException ex) {
                 logger.log(Level.SEVERE, "Error invoking post-publish workflow: " + ex.getMessage(), ex);
             }
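Note the new final Dataset ds local above: theDataset is reassigned earlier in execute(...) (theDataset = ctxt.em().merge(theDataset)), so it is not effectively final and cannot be captured by the ifPresent(...) lambda; the freshly merged ds can.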
src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
@@ -81,11 +81,10 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException
         }
 
         Optional<Workflow> prePubWf = ctxt.workflows().getDefaultWorkflow(TriggerType.PrePublishDataset);
-        String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, "");
         if ( prePubWf.isPresent() ) {
             // We start a workflow
             theDataset = ctxt.em().merge(theDataset);
-            ctxt.workflows().start(prePubWf.get(), buildContext(doiProvider, TriggerType.PrePublishDataset) );
+            ctxt.workflows().start(prePubWf.get(), buildContext(theDataset, TriggerType.PrePublishDataset) );
             return new PublishDatasetResult(theDataset, false);
 
         } else {
@@ -125,7 +124,7 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException

         } else {
             // Synchronous publishing (no workflow involved)
-            theDataset = ctxt.engine().submit(new FinalizeDatasetPublicationCommand(ctxt.em().merge(theDataset), doiProvider, getRequest(), datasetExternallyReleased));
+            theDataset = ctxt.engine().submit(new FinalizeDatasetPublicationCommand(theDataset, getRequest(), datasetExternallyReleased));
             return new PublishDatasetResult(theDataset, true);
         }
     }
src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -41,7 +41,6 @@
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 import edu.harvard.iq.dataverse.workflow.Workflow;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData;
-import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.Set;
 import javax.json.Json;
@@ -231,7 +230,8 @@ public static JsonObjectBuilder json(Workflow wf){
         for ( WorkflowStepData stp : wf.getSteps() ) {
             arr.add( jsonObjectBuilder().add("stepType", stp.getStepType())
                      .add("provider", stp.getProviderId())
-                     .add("parameters", mapToObject(stp.getStepParameters())) );
+                     .add("parameters", mapToObject(stp.getStepParameters()))
+                     .add("requiredSettings", mapToObject(stp.getStepSettings())) );
         }
         bld.add("steps", arr );
     }
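With this change each serialized step carries its requiredSettings next to its parameters. An illustrative fragment of the resulting payload for one step (the step type, provider, and all keys and values here are hypothetical):

    "steps": [
        {
            "stepType": "log",
            "provider": ":internal",
            "parameters": { "message": "publishing dataset" },
            "requiredSettings": { ":ExampleSetting": "example-value" }
        }
    ]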
src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
@@ -47,7 +47,6 @@ public class PendingWorkflowInvocation implements Serializable {

     int pendingStepIdx;
 
-    String doiProvider;
     String userId;
     String ipAddress;
     int typeOrdinal;
@@ -66,15 +65,13 @@ public PendingWorkflowInvocation(Workflow wf, WorkflowContext ctxt, Pending resu
         userId = ctxt.getRequest().getUser().getIdentifier();
         ipAddress = ctxt.getRequest().getSourceAddress().toString();
         localData = new HashMap<>(result.getData());
-        doiProvider = ctxt.getDoiProvider();
         typeOrdinal = ctxt.getType().ordinal();
     }
 
     public WorkflowContext reCreateContext(RoleAssigneeServiceBean roleAssignees) {
         DataverseRequest aRequest = new DataverseRequest((User) roleAssignees.getRoleAssignee(userId), IpAddress.valueOf(ipAddress));
         final WorkflowContext workflowContext = new WorkflowContext(aRequest, dataset, nextVersionNumber,
-                nextMinorVersionNumber, WorkflowContext.TriggerType.values()[typeOrdinal],
-                doiProvider);
+                nextMinorVersionNumber, WorkflowContext.TriggerType.values()[typeOrdinal], null, null);
         workflowContext.setInvocationId(invocationId);
         return workflowContext;
     }
@@ -151,14 +148,6 @@ public void setPendingStepIdx(int pendingStepIdx) {
         this.pendingStepIdx = pendingStepIdx;
     }
 
-    public String getDoiProvider() {
-        return doiProvider;
-    }
-
-    public void setDoiProvider(String doiProvider) {
-        this.doiProvider = doiProvider;
-    }
-
     public int getTypeOrdinal() {
         return typeOrdinal;
     }
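Note that reCreateContext(...) now passes null for both the settings map and the ApiToken; where those get re-resolved when a pending invocation resumes is not shown in this diff, so presumably the workflow engine fills them in before executing further steps.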
src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java (10 additions, 0 deletions)
@@ -2,7 +2,9 @@

 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData;
 import java.io.Serializable;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Objects;
 import javax.persistence.CascadeType;
 import javax.persistence.Entity;
@@ -63,6 +65,14 @@ public void setSteps(List<WorkflowStepData> steps) {
         }
     }
 
+    Map<String, String> getRequiredSettings() {
+        Map<String, String> settings = new HashMap<>();
+        for (WorkflowStepData step : steps) {
+            settings.putAll(step.getStepSettings());
+        }
+        return settings;
+    }
+
     @Override
     public int hashCode() {
         int hash = 7;
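getRequiredSettings() is a plain map union: each step's settings are putAll-ed in step order, so a key repeated by a later step wins. A standalone sketch of the same semantics, with plain maps standing in for WorkflowStepData.getStepSettings() (all keys and values here are made up):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class RequiredSettingsDemo {
        public static void main(String[] args) {
            // Stand-ins for two steps' getStepSettings() results.
            Map<String, String> step1 = new HashMap<>();
            step1.put(":DoiProvider", "FAKE");

            Map<String, String> step2 = new HashMap<>();
            step2.put(":DoiProvider", "DataCite");
            step2.put("WorkflowTimeout", "600");

            // Same union logic as Workflow.getRequiredSettings():
            // putAll in step order, so step2's :DoiProvider overrides step1's.
            Map<String, String> required = new HashMap<>();
            for (Map<String, String> stepSettings : Arrays.asList(step1, step2)) {
                required.putAll(stepSettings);
            }

            System.out.println(required.get(":DoiProvider"));    // DataCite
            System.out.println(required.get("WorkflowTimeout")); // 600
        }
    }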
src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowContext.java
@@ -1,8 +1,11 @@
 package edu.harvard.iq.dataverse.workflow;
 
 import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStep;
+
+import java.util.Map;
 import java.util.UUID;
/**
@@ -25,26 +28,27 @@ public enum TriggerType {
     private final long nextVersionNumber;
     private final long nextMinorVersionNumber;
     private final TriggerType type;
-    private final String doiProvider;
+    private final ApiToken apiToken;
+    private Map<String, Object> settings;
 
     private String invocationId = UUID.randomUUID().toString();
 
-    public WorkflowContext( DataverseRequest aRequest, Dataset aDataset, String doiProvider, TriggerType aTriggerType ) {
+    public WorkflowContext( DataverseRequest aRequest, Dataset aDataset, TriggerType aTriggerType ) {
         this( aRequest, aDataset,
               aDataset.getLatestVersion().getVersionNumber(),
               aDataset.getLatestVersion().getMinorVersionNumber(),
-              aTriggerType,
-              doiProvider);
+              aTriggerType, null, null);
     }
 
     public WorkflowContext(DataverseRequest request, Dataset dataset, long nextVersionNumber,
-            long nextMinorVersionNumber, TriggerType type, String doiProvider) {
+            long nextMinorVersionNumber, TriggerType type, Map<String, Object> settings, ApiToken apiToken) {
         this.request = request;
         this.dataset = dataset;
         this.nextVersionNumber = nextVersionNumber;
         this.nextMinorVersionNumber = nextMinorVersionNumber;
         this.type = type;
-        this.doiProvider = doiProvider;
+        this.settings = settings;
+        this.apiToken = apiToken;
     }
 
     public Dataset getDataset() {
@@ -75,12 +79,16 @@ public String getInvocationId() {
         return invocationId;
     }
 
-    public String getDoiProvider() {
-        return doiProvider;
-    }
-
     public TriggerType getType() {
         return type;
     }
 
+    public Map<String, Object> getSettings() {
+        return settings;
+    }
+
+    public ApiToken getApiToken() {
+        return apiToken;
+    }
+
 }
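To make the new surface concrete, here is a minimal sketch of the two construction paths (the helper class and the setting value are hypothetical; only the WorkflowContext API shown in this diff is assumed):

    import edu.harvard.iq.dataverse.Dataset;
    import edu.harvard.iq.dataverse.authorization.users.ApiToken;
    import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
    import edu.harvard.iq.dataverse.workflow.WorkflowContext;
    import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;

    import java.util.HashMap;
    import java.util.Map;

    public class WorkflowContextSketch {

        // Trigger-time path: version numbers are derived from the dataset's
        // latest version; the settings map and ApiToken start out null.
        static WorkflowContext atTrigger(DataverseRequest request, Dataset dataset) {
            return new WorkflowContext(request, dataset, TriggerType.PrePublishDataset);
        }

        // Fully specified path, e.g. once the engine has resolved the steps'
        // required settings and obtained an API token for the triggering user.
        static WorkflowContext resolved(DataverseRequest request, Dataset dataset, ApiToken token) {
            Map<String, Object> settings = new HashMap<>();
            settings.put(":DoiProvider", "DataCite"); // illustrative value
            return new WorkflowContext(request, dataset, 2L, 0L,
                    TriggerType.PostPublishDataset, settings, token);
        }
    }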