From d28777268f993b6e628666a48428398f1e78d8c1 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Thu, 14 Sep 2017 16:00:48 -0400
Subject: [PATCH 001/483] stub out Docker and OpenShift files #4040

---
 Dockerfile | 1 +
 conf/docker/build.sh | 2 +
 conf/docker/dataverse-glassfish/Dockerfile | 5 +++
 conf/docker/postgresql/Dockerfile | 3 ++
 conf/docker/solr/Dockerfile | 8 ++++
 conf/openshift/openshift.json | 40 +++++++++++++++++++
 .../source/installation/index.rst | 1 +
 .../source/installation/openshift.rst | 24 +++++++++++
 .../source/installation/prep.rst | 5 +++
 9 files changed, 89 insertions(+)
 create mode 100644 Dockerfile
 create mode 100644 conf/docker/build.sh
 create mode 100644 conf/docker/dataverse-glassfish/Dockerfile
 create mode 100644 conf/docker/postgresql/Dockerfile
 create mode 100644 conf/docker/solr/Dockerfile
 create mode 100644 conf/openshift/openshift.json
 create mode 100644 doc/sphinx-guides/source/installation/openshift.rst

diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000000..5f492ea0594
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1 @@
+# See `conf/docker` for Docker images
diff --git a/conf/docker/build.sh b/conf/docker/build.sh
new file mode 100644
index 00000000000..95a3a15979a
--- /dev/null
+++ b/conf/docker/build.sh
@@ -0,0 +1,2 @@
+#!/bin/sh
+echo "TODO: Build docker images and push to DockerHub."
diff --git a/conf/docker/dataverse-glassfish/Dockerfile b/conf/docker/dataverse-glassfish/Dockerfile
new file mode 100644
index 00000000000..603660b5264
--- /dev/null
+++ b/conf/docker/dataverse-glassfish/Dockerfile
@@ -0,0 +1,5 @@
+# Glassfish and Dataverse in one Docker image, as suggested by danmcp.
+#
+# TODO: Start with the Dataverse 4.2.3 version at https://github.com/pdurbin/ndslabs-dataverse/blob/v4.2.3.1/dockerfiles/dataverse/Dockerfile
+#
+# TODO: Once we get Dataverse 4.2.3 working, we can try Dataverse 4.7 from https://github.com/pdurbin/ndslabs-dataverse/blob/upgrade-4.7/dockerfiles/dataverse/Dockerfile
diff --git a/conf/docker/postgresql/Dockerfile b/conf/docker/postgresql/Dockerfile
new file mode 100644
index 00000000000..81ecf0fdeb8
--- /dev/null
+++ b/conf/docker/postgresql/Dockerfile
@@ -0,0 +1,3 @@
+# PostgreSQL for Dataverse (but consider switching to the image from CentOS)
+#
+# See also conf/docker/dataverse-glassfish/Dockerfile
diff --git a/conf/docker/solr/Dockerfile b/conf/docker/solr/Dockerfile
new file mode 100644
index 00000000000..7d4bf9292e2
--- /dev/null
+++ b/conf/docker/solr/Dockerfile
@@ -0,0 +1,8 @@
+# Solr for Dataverse
+#
+# See the note about Dataverse 4.2.3 vs. 4.7 at conf/docker/dataverse-glassfish/Dockerfile
+#
+# Note that there is a separate Dockerfile we can start with:
+#
+# Dataverse 4.2.3: https://github.com/pdurbin/ndslabs-dataverse/blob/v4.2.3.1/dockerfiles/solr/Dockerfile
+# Dataverse 4.7: https://github.com/pdurbin/ndslabs-dataverse/blob/upgrade-4.7/dockerfiles/solr/Dockerfile
diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json
new file mode 100644
index 00000000000..e1f92c0fbdb
--- /dev/null
+++ b/conf/openshift/openshift.json
@@ -0,0 +1,40 @@
+{
+ "kind": "Project",
+ "apiVersion": "v1",
+ "metadata": {
+  "name": "dataverse",
+  "creationTimestamp": null,
+  "labels": {
+   "name": "dataverse"
+  },
+  "annotations": {
+   "openshift.io/description": "Dataverse is open source research data repository software: https://dataverse.org",
+   "openshift.io/display-name": "Dataverse"
+  }
+ },
+ "spec": {
+  "containers": [
+   {
+    "name": "dataverse-glassfish",
+    "image": "ndslabs/dataverse",
+    "ports": [
+     {
+      "containerPort": 8080,
+      "protocol": "TCP"
+     }
+    ]
+   },
+   {
+    "name": "dataverse-solr",
+    "image": "ndslabs/dataverse-solr",
+    "ports": [
+     {
+      "containerPort": 8983,
+      "protocol": "TCP"
+     }
+    ]
+   }
+  ]
+ },
+ "status": {}
+}
diff --git a/doc/sphinx-guides/source/installation/index.rst b/doc/sphinx-guides/source/installation/index.rst
index b8423e77ae5..185fb94b4e3 100755
--- a/doc/sphinx-guides/source/installation/index.rst
+++ b/doc/sphinx-guides/source/installation/index.rst
@@ -20,3 +20,4 @@ Installation Guide
    geoconnect
    shibboleth
    oauth2
+   openshift
diff --git a/doc/sphinx-guides/source/installation/openshift.rst b/doc/sphinx-guides/source/installation/openshift.rst
new file mode 100644
index 00000000000..71758cd23f9
--- /dev/null
+++ b/doc/sphinx-guides/source/installation/openshift.rst
@@ -0,0 +1,24 @@
+=================================
+Installing Dataverse on OpenShift
+=================================
+
+.. contents:: |toctitle|
+	:local:
+
+Familiarize Yourself with OpenShift
+-----------------------------------
+
+https://docs.openshift.com/online/getting_started/index.html
+
+Create OpenShift Account
+------------------------
+
+Create Project
+--------------
+
+Import YAML/JSON
+----------------
+
+Upload ``conf/openshift/openshift.json``.
+
+FIXME: Getting "cannot create projects at the cluster scope"
diff --git a/doc/sphinx-guides/source/installation/prep.rst b/doc/sphinx-guides/source/installation/prep.rst
index 9662b5c40b6..ba8bf08cd4a 100644
--- a/doc/sphinx-guides/source/installation/prep.rst
+++ b/doc/sphinx-guides/source/installation/prep.rst
@@ -14,6 +14,11 @@ We'll try to get you up and running as quickly as possible, but we thought you m
 Choose Your Own Installation Adventure
 --------------------------------------
 
+OpenShift (for Testing Only)
+++++++++++++++++++++++++++++
+
+If you would like to kick the tires on your own installation of Dataverse, you can install it for free on OpenShift Online, a platform as a service (PaaS) offering from Red Hat. For details, please see the :doc:`openshift` section.
+
 Vagrant (for Testing Only)
 ++++++++++++++++++++++++++

From 77b3f67376f6ba340f49a4dcd515bf38c438dfbc Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Fri, 15 Sep 2017 14:24:25 -0400
Subject: [PATCH 002/483] switch from Project to Template, add dev docs #4040

---
 conf/openshift/openshift.json | 39 ++++++-----
 .../source/developers/dev-environment.rst | 66 +++++++++++++++++++
 2 files changed, 85 insertions(+), 20 deletions(-)

diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json
index e1f92c0fbdb..ea09a272347 100644
--- a/conf/openshift/openshift.json
+++ b/conf/openshift/openshift.json
@@ -1,9 +1,8 @@
 {
- "kind": "Project",
+ "kind": "Template",
  "apiVersion": "v1",
  "metadata": {
   "name": "dataverse",
-  "creationTimestamp": null,
   "labels": {
    "name": "dataverse"
   },
@@ -12,29 +11,29 @@
    "openshift.io/description": "Dataverse is open source research data repository software: https://dataverse.org",
    "openshift.io/display-name": "Dataverse"
   }
  },
- "spec": {
-  "containers": [
-   {
-    "name": "dataverse-glassfish",
-    "image": "ndslabs/dataverse",
+ "objects": [
+  {
+   "kind": "Service",
+   "apiVersion": "v1",
+   "metadata": {
+    "name": "dataverse-glassfish-service"
+   },
+   "spec": {
     "ports": [
     {
-     "containerPort": 8080,
-     "protocol": "TCP"
+     "name": "web",
+     "protocol": "TCP",
+     "port": 8080,
+     "targetPort": 8080
     }
-    ]
-   },
-   {
-    "name": "dataverse-solr",
-    "image": "ndslabs/dataverse-solr",
-    "ports": [
+    ],
+    "containers": [
     {
-     "containerPort": 8983,
-     "protocol": "TCP"
+     "name": "dataverse-glassfish",
+     "image": "ndslabs/dataverse"
     }
     ]
    }
-  ]
- },
- "status": {}
+  }
+ ]
 }
diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index 5eda6b1e308..798604cc621 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -328,6 +328,72 @@ If you've reconfigured from EZID to DataCite and are seeing ``Response code: 400
 ``asadmin create-jvm-options '-Ddataverse.siteUrl=http\://demo.dataverse.org'``
 
+OpenShift
+---------
+
+From the Dataverse perspective, we are in the business of providing a "template" for OpenShift that describes how the various components we build our application on (Glassfish, PostgreSQL, Solr, the Dataverse war file itself, etc.) work together. We also plan to publish Docker images to DockerHub but for now we are trying to use the Dataverse images published to https://hub.docker.com/r/ndslabs/
+
+The OpenShift template for Dataverse can be found at ``conf/openshift/openshift.json`` and if you need to hack on the template or related files under ``conf/docker`` it is recommended that you iterate on them using Minishift.
+
+Install Minishift
+~~~~~~~~~~~~~~~~~
+
+Minishift requires a hypervisor, and since we already use VirtualBox for Vagrant, you should install VirtualBox from http://virtualbox.org .
+
+Download the Minishift tarball from https://docs.openshift.org/latest/minishift/getting-started/installing.html and put the ``minishift`` binary in ``/usr/local/bin`` or somewhere in your ``$PATH``. This assumes Mac or Linux.
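+
+For example, on a Mac, unpacking the tarball and putting the binary into place might look something like the following (the exact file and directory names are illustrative and will vary by Minishift version and platform, so adjust them to match the release you downloaded):
+
+``tar xvf minishift-*-darwin-amd64.tgz``
+
+``sudo mv minishift-*/minishift /usr/local/bin``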
+
+At this point, you might want to consider going through the Minishift quickstart to get oriented: https://docs.openshift.org/latest/minishift/getting-started/quickstart.html
+
+Start Minishift
+~~~~~~~~~~~~~~~
+
+``minishift start --vm-driver=virtualbox``
+
+Make the oc Command Executable
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``eval $(minishift oc-env)``
+
+Create a Minishift Project
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``oc new-project project1``
+
+Create a Dataverse App within the Minishift Project
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``oc new-app conf/openshift/openshift.json``
+
+Make the Dataverse App Available to Your Browser
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``oc expose svc/dataverse-glassfish-service``
+
+Log into Minishift and Visit Dataverse in your Browser
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+FIXME: This doesn't work yet. Rather than Dataverse running, for now you are expected to see "Application is not available".
+
+- https://192.168.99.100:8443
+- username: developer
+- password: developer
+
+Visit https://192.168.99.100:8443/console/project/project1/browse/routes and click http://dataverse-glassfish-service-project1.192.168.99.100.nip.io/ or whatever it shows. This assumes you named your project ``project1``.
+
+Cleaning up
+~~~~~~~~~~~
+
+``oc delete project project1``
+
+Minishift Resources
+~~~~~~~~~~~~~~~~~~~
+
+The following resources might be helpful.
+
+- https://blog.openshift.com/part-1-from-app-to-openshift-runtimes-and-templates/
+- https://blog.openshift.com/part-2-creating-a-template-a-technical-walkthrough/
+- https://docs.openshift.com/enterprise/3.0/architecture/core_concepts/templates.html
+
 ----
 
 Previous: :doc:`intro` | Next: :doc:`version-control`

From 4702e0af6e0db773e35b89a804e5eef743e29b65 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Fri, 15 Sep 2017 19:37:18 -0400
Subject: [PATCH 003/483] use config from @danmcp #4040

---
 conf/openshift/openshift.json | 83 ++++++++++++++++++++++++++++++++---
 1 file changed, 78 insertions(+), 5 deletions(-)

diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json
index ea09a272347..6af0d0531ab 100644
--- a/conf/openshift/openshift.json
+++ b/conf/openshift/openshift.json
@@ -26,13 +26,86 @@
     "port": 8080,
     "targetPort": 8080
    }
-   ],
-   "containers": [
+   ]
+  }
+ },
+ {
+  "kind": "ImageStream",
+  "apiVersion": "v1",
+  "metadata": {
+   "name": "ndslabs-dataverse"
+  },
+  "spec": {
+   "dockerImageRepository": "ndslabs/dataverse"
+  }
+ },
+ {
+  "kind": "DeploymentConfig",
+  "apiVersion": "v1",
+  "metadata": {
+   "name": "dataverse-glassfish",
+   "annotations": {
+    "template.alpha.openshift.io/wait-for-ready": "true"
+   }
+  },
+  "spec": {
+   "template": {
+    "metadata": {
+     "labels": {
+      "name": "ndslabs-dataverse"
+     }
+    },
+    "spec": {
+     "containers": [
+      {
+       "name": "ndslabs-dataverse",
+       "image": "ndslabs-dataverse",
+       "ports": [
+        {
+         "containerPort": 8080,
+         "protocol": "TCP"
+        }
+       ],
+       "imagePullPolicy": "IfNotPresent",
+       "securityContext": {
+        "capabilities": {},
+        "privileged": false
+       }
+      }
+     ]
+    }
+   },
+   "strategy": {
+    "type": "Rolling",
+    "rollingParams": {
+     "updatePeriodSeconds": 1,
+     "intervalSeconds": 1,
+     "timeoutSeconds": 120
+    },
+    "resources": {}
+   },
+   "triggers": [
+    {
+     "type": "ImageChange",
+     "imageChangeParams": {
+      "automatic": true,
+      "containerNames": [
+       "ndslabs-dataverse"
+      ],
+      "from": {
+       "kind": "ImageStreamTag",
+       "name": "ndslabs-dataverse:latest"
+      }
+     }
+    },
    {
-    "name": "dataverse-glassfish",
-    "image": "ndslabs/dataverse"
+     "type": "ConfigChange"
    }
-   ]
+   ],
+   "replicas": 1,
+   "selector": {
+    "name": "ndslabs-dataverse"
+   }
  }
 }
]
}

From f41d7533098b256c42a8d7effda645f267f89242 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Mon, 18 Sep 2017 11:41:29 -0400
Subject: [PATCH 004/483] add centos/postgresql-94-centos7 #4040

---
 conf/openshift/openshift.json | 52 +++++++++++++++++++++++++++++
 1 file changed, 52 insertions(+)

diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json
index 6af0d0531ab..f34ae9e95d3 100644
--- a/conf/openshift/openshift.json
+++ b/conf/openshift/openshift.json
@@ -40,6 +40,16 @@
   }
  },
  {
+  "kind": "ImageStream",
+  "apiVersion": "v1",
+  "metadata": {
+   "name": "centos-postgresql-94-centos7"
+  },
+  "spec": {
+   "dockerImageRepository": "centos/postgresql-94-centos7"
+  }
+ },
+ {
  "kind": "DeploymentConfig",
  "apiVersion": "v1",
  "metadata": {
  "name": "dataverse-glassfish",
  "annotations": {
  "template.alpha.openshift.io/wait-for-ready": "true"
  }
  },
  "spec": {
  "template": {
  "metadata": {
  "labels": {
  "name": "ndslabs-dataverse"
  }
  },
  "spec": {
  "containers": [
  {
  "name": "ndslabs-dataverse",
  "image": "ndslabs-dataverse",
  "ports": [
  {
  "containerPort": 8080,
  "protocol": "TCP"
  }
  ],
  "imagePullPolicy": "IfNotPresent",
  "securityContext": {
  "capabilities": {},
  "privileged": false
  }
+  },
+  {
+   "name": "centos-postgresql-94-centos7",
+   "image": "centos-postgresql-94-centos7",
+   "ports": [
+    {
+     "containerPort": 5432,
+     "protocol": "TCP"
+    }
+   ],
+   "env": [
+    {
+     "name": "POSTGRESQL_USER",
+     "value": "pgUserValue"
+    },
+    {
+     "name": "POSTGRESQL_PASSWORD",
+     "value": "pgPasswordValue"
+    },
+    {
+     "name": "POSTGRESQL_DATABASE",
+     "value": "pgDatabaseValue"
+    }
+   ],
+   "imagePullPolicy": "IfNotPresent",
+   "securityContext": {
+    "capabilities": {},
+    "privileged": false
+   }
  }
  ]
  }
  },
+  {
+   "type": "ImageChange",
+   "imageChangeParams": {
+    "automatic": true,
+    "containerNames": [
+     "centos-postgresql-94-centos7"
+    ],
+    "from": {
+     "kind": "ImageStreamTag",
+     "name": "centos-postgresql-94-centos7:latest"
+    }
+   }
+  },
  {
  "type": "ConfigChange"
  }

From c20dd3904e523504d2e1965cb52b0f2a30d9f274 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Mon, 18 Sep 2017 12:31:14 -0400
Subject: [PATCH 005/483] add ndslabs/dataverse-solr image #4040

---
 conf/openshift/openshift.json | 38 +++++++++++++++++++++++++++++
 1 file changed, 38 insertions(+)

diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json
index f34ae9e95d3..c3adbbcfc57 100644
--- a/conf/openshift/openshift.json
+++ b/conf/openshift/openshift.json
@@ -49,6 +49,16 @@
   "dockerImageRepository": "centos/postgresql-94-centos7"
  }
 },
+{
+ "kind": "ImageStream",
+ "apiVersion": "v1",
+ "metadata": {
+  "name": "ndslabs-dataverse-solr"
+ },
+ "spec": {
+  "dockerImageRepository": "ndslabs/dataverse-solr"
+ }
+},
 {
 "kind": "DeploymentConfig",
 "apiVersion": "v1",
 "metadata": {
 "name": "dataverse-glassfish",
 "annotations": {
 "template.alpha.openshift.io/wait-for-ready": "true"
 }
 },
 "spec": {
 "template": {
 "metadata": {
 "labels": {
 "name": "ndslabs-dataverse"
 }
 },
 "spec": {
 "containers": [
 {
 "name": "ndslabs-dataverse",
 "image": "ndslabs-dataverse",
 "ports": [
 {
 "containerPort": 8080,
 "protocol": "TCP"
 }
 ],
 "imagePullPolicy": "IfNotPresent",
 "securityContext": {
 "capabilities": {},
 "privileged": false
 }
+},
+{
+ "name": "ndslabs-dataverse-solr",
+ "image": "ndslabs-dataverse-solr",
+ "ports": [
+  {
+   "containerPort": 8983,
+   "protocol": "TCP"
+  }
+ ],
+ "imagePullPolicy": "IfNotPresent",
+ "securityContext": {
+  "capabilities": {},
+  "privileged": false
+ }
 }
 ]
 }
 },
+{
+ "type": "ImageChange",
+ "imageChangeParams": {
+  "automatic": true,
+  "containerNames": [
+   "ndslabs-dataverse-solr"
+  ],
+  "from": {
+   "kind": "ImageStreamTag",
+   "name": "ndslabs-dataverse-solr:latest"
+  }
+ }
+},
 {
 "type": "ConfigChange"
 }

From 21983673171d6483ab9acdb9065312a953429d12 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 19 Sep 2017 13:36:09 -0400
Subject: [PATCH 006/483] add more handy `oc` commands to docs #4040

---
 .../source/developers/dev-environment.rst | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index 798604cc621..fd27b734ec6 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -364,6 +364,21 @@ Create a Dataverse App within the Minishift Project
 ``oc new-app conf/openshift/openshift.json``
 
+Check Status of Dataverse Deployment to Minishift
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``oc get all``
+
+Review Logs of Dataverse Deployment to Minishift
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``oc logs -c ndslabs-dataverse $(oc get po -o json | jq '.items[] | select(.kind=="Pod").metadata.name' -r | grep -v dataverse-glassfish-1-deploy)``
+
+Get a Shell (ssh/rsh) on Glassfish Server Deployed to Minishift
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``oc rsh $(oc get po -o json | jq '.items[] | select(.kind=="Pod").metadata.name' -r | grep -v dataverse-glassfish-1-deploy)``
+
 Make the Dataverse App Available to Your Browser
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

From 7c81b4e73a3570513dd372b489a66b603f6d594a Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 19 Sep 2017 17:30:03 -0400
Subject: [PATCH 007/483] expose postgres env vars needed by init-glassfish
 #4040

---
 conf/openshift/openshift.json | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json
index c3adbbcfc57..3289b1db453 100644
--- a/conf/openshift/openshift.json
+++ b/conf/openshift/openshift.json
@@ -59,7 +59,7 @@
   "dockerImageRepository": "ndslabs/dataverse-solr"
  }
 },
- {
+ {
  "kind": "DeploymentConfig",
  "apiVersion": "v1",
  "metadata": {
@@ -86,6 +86,24 @@
     "protocol": "TCP"
    }
   ],
+  "env": [
+   {
+    "name": "SMTP_HOST",
+    "value": "localhost"
+   },
+   {
+    "name": "POSTGRES_USER",
+    "value": "dvnapp"
+   },
+   {
+    "name": "POSTGRES_PASSWORD",
+    "value": "dvnappPassword"
+   },
+   {
+    "name": "POSTGRES_DATABASE",
+    "value": "dvndb"
+   }
+  ],
  "imagePullPolicy": "IfNotPresent",
  "securityContext": {
  "capabilities": {},

From 6ec54c2b7f1518a31331788db2d613e12fbfee3c Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 19 Sep 2017 17:43:08 -0400
Subject: [PATCH 008/483] must have selector for expose route to work #4040

---
 conf/openshift/openshift.json | 3 +++
 doc/sphinx-guides/source/developers/dev-environment.rst | 7 +++++--
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json
index 3289b1db453..b9c3ad8f80e 100644
--- a/conf/openshift/openshift.json
+++ b/conf/openshift/openshift.json
@@ -19,6 +19,9 @@
   "name": "dataverse-glassfish-service"
  },
  "spec": {
+  "selector": {
+   "name": "ndslabs-dataverse"
+  },
   "ports": [
   {
    "name": "web",
diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index fd27b734ec6..fbd0866c786 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -379,6 +379,11 @@ Get a Shell (ssh/rsh) on Glassfish Server Deployed to Minishift
 ``oc rsh $(oc get po -o json | jq '.items[] | select(.kind=="Pod").metadata.name' -r | grep -v dataverse-glassfish-1-deploy)``
 
+From the ``rsh`` prompt you could run something like the following to build confidence that Dataverse is running on port 8080:
+
+``curl -L localhost:8080``
+
+
 Make the Dataverse App Available to Your Browser
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -387,8 +392,6 @@ Make the Dataverse App Available to Your Browser
 Log into Minishift and Visit Dataverse in your Browser
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-FIXME: This doesn't work yet. Rather than Dataverse running, for now you are expected to see "Application is not available".
-
 - https://192.168.99.100:8443
 - username: developer
 - password: developer

From e90f7714c352aa6c43ee061384cb9b4abfa0e280 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 19 Sep 2017 17:54:38 -0400
Subject: [PATCH 009/483] set dataverseAdmin password to "admin" #4040

---
 conf/openshift/openshift.json | 4 ++++
 doc/sphinx-guides/source/developers/dev-environment.rst | 9 +++++++++
 2 files changed, 13 insertions(+)

diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json
index b9c3ad8f80e..c2f033c8912 100644
--- a/conf/openshift/openshift.json
+++ b/conf/openshift/openshift.json
@@ -90,6 +90,10 @@
    }
   ],
   "env": [
+   {
+    "name": "ADMIN_PASSWORD",
+    "value": "admin"
+   },
   {
    "name": "SMTP_HOST",
    "value": "localhost"
diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index fbd0866c786..5685751aaf9 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -354,6 +354,13 @@ Make the oc Command Executable
 ``eval $(minishift oc-env)``
 
+Log in to Minishift
+~~~~~~~~~~~~~~~~~~~
+
+``oc login``
+
+Use "developer" as the username and a couple characters as the password.
+
 Create a Minishift Project
 ~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -398,6 +405,8 @@ Log into Minishift and Visit Dataverse in your Browser
 
 Visit https://192.168.99.100:8443/console/project/project1/browse/routes and click http://dataverse-glassfish-service-project1.192.168.99.100.nip.io/ or whatever it shows. This assumes you named your project ``project1``.
 
+You should be able to log in with username "dataverseAdmin" and password "admin".
+
 Cleaning up
 ~~~~~~~~~~~

From 975d38e9a5eaa115db3c25bb17492f19de955096 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 19 Sep 2017 19:02:53 -0400
Subject: [PATCH 010/483] explain how to run containers as root #4040

---
 doc/sphinx-guides/source/developers/dev-environment.rst | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index 5685751aaf9..03b239e6337 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -361,6 +361,13 @@ Log in to Minishift
 
 Use "developer" as the username and a couple characters as the password.
 
+Allow Containers to Run as Root in Minishift
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This isn't ideal, but for now we're allowing containers to run as root. FIXME: Eventually, we should create containers that don't require root.
+
+``oc adm policy add-scc-to-user anyuid -z default --as system:admin``
+
 Create a Minishift Project
 ~~~~~~~~~~~~~~~~~~~~~~~~~~

From 0a444105a2fdb5924e9598f0d2bf5f98b4dff700 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Wed, 20 Sep 2017 06:54:18 -0400
Subject: [PATCH 011/483] build and switch to iqss/dataverse-solr on Docker
 Hub #4040

---
 conf/docker/build.sh | 6 +++++-
 conf/docker/solr/Dockerfile | 36 ++++++++++++++++++++++++++--------
 conf/docker/solr/entrypoint.sh | 10 ++++++++++
 conf/openshift/openshift.json | 12 ++++++------
 4 files changed, 49 insertions(+), 15 deletions(-)
 mode change 100644 => 100755 conf/docker/build.sh
 create mode 100755 conf/docker/solr/entrypoint.sh

diff --git a/conf/docker/build.sh b/conf/docker/build.sh
old mode 100644
new mode 100755
index 95a3a15979a..44100c99aff
--- a/conf/docker/build.sh
+++ b/conf/docker/build.sh
@@ -1,2 +1,6 @@
 #!/bin/sh
-echo "TODO: Build docker images and push to DockerHub."
+# Creates images and pushes them to Docker Hub.
+GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
+# Use "conf" directory as context so we can copy schema.xml into Solr image.
+docker build -t iqss/dataverse-solr:$GIT_BRANCH -f solr/Dockerfile ../../conf
+docker push iqss/dataverse-solr:$GIT_BRANCH
diff --git a/conf/docker/solr/Dockerfile b/conf/docker/solr/Dockerfile
index 7d4bf9292e2..99114ce6a6d 100644
--- a/conf/docker/solr/Dockerfile
+++ b/conf/docker/solr/Dockerfile
@@ -1,8 +1,28 @@
-# Solr for Dataverse
-#
-# See the note about Dataverse 4.2.3 vs. 4.7 at conf/docker/dataverse-glassfish/Dockerfile
-#
-# Note that there is a separate Dockerfile we can start with:
-#
-# Dataverse 4.2.3: https://github.com/pdurbin/ndslabs-dataverse/blob/v4.2.3.1/dockerfiles/solr/Dockerfile
-# Dataverse 4.7: https://github.com/pdurbin/ndslabs-dataverse/blob/upgrade-4.7/dockerfiles/solr/Dockerfile
+FROM centos:7.2.1511
+MAINTAINER Dataverse (support@dataverse.org)
+
+RUN yum install -y wget unzip perl git java-1.8.0-openjdk-devel postgresql.x86_64
+
+# Install Solr 4.6.0
+# The context of the build is the "conf" directory.
+COPY solr/4.6.0/schema.xml /tmp
+
+RUN cd /tmp && wget https://archive.apache.org/dist/lucene/solr/4.6.0/solr-4.6.0.tgz && \
+    tar xvzf solr-4.6.0.tgz && \
+    mv solr-4.6.0 /usr/local/ && \
+    cd /usr/local/solr-4.6.0/example/solr/collection1/conf/ && \
+    mv schema.xml schema.xml.backup && \
+    cp /tmp/schema.xml . && \
+    rm /tmp/solr-4.6.0.tgz
+
+RUN ln -s /usr/local/solr-4.6.0/example/logs /var/log/solr
+
+VOLUME /usr/local/solr-4.6.0/example/solr/collection1/data
+
+EXPOSE 8983
+
+COPY docker/solr/Dockerfile /Dockerfile
+COPY docker/solr/entrypoint.sh /
+
+ENTRYPOINT ["/entrypoint.sh"]
+CMD ["solr"]
diff --git a/conf/docker/solr/entrypoint.sh b/conf/docker/solr/entrypoint.sh
new file mode 100755
index 00000000000..7fd8d6380c2
--- /dev/null
+++ b/conf/docker/solr/entrypoint.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+if [ "$1" = 'solr' ]; then
+    cd /usr/local/solr-4.6.0/example/
+    java -jar start.jar
+elif [ "$1" = 'usage' ]; then
+    echo 'docker run -d iqss/dataverse-solr solr'
+else
+    exec "$@"
+fi
diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json
index c2f033c8912..36a29a87cbc 100644
--- a/conf/openshift/openshift.json
+++ b/conf/openshift/openshift.json
@@ -56,10 +56,10 @@
  "kind": "ImageStream",
  "apiVersion": "v1",
  "metadata": {
-  "name": "ndslabs-dataverse-solr"
+  "name": "iqss-dataverse-solr"
  },
  "spec": {
-  "dockerImageRepository": "ndslabs/dataverse-solr"
+  "dockerImageRepository": "iqss/dataverse-solr"
  }
 },
 {
@@ -147,8 +147,8 @@
  }
 },
 {
-  "name": "ndslabs-dataverse-solr",
-  "image": "ndslabs-dataverse-solr",
+  "name": "iqss-dataverse-solr",
+  "image": "iqss-dataverse-solr",
  "ports": [
   {
    "containerPort": 8983,
@@ -205,11 +205,11 @@
  "imageChangeParams": {
   "automatic": true,
   "containerNames": [
-   "ndslabs-dataverse-solr"
+   "iqss-dataverse-solr"
   ],
   "from": {
    "kind": "ImageStreamTag",
-   "name": "ndslabs-dataverse-solr:latest"
+   "name": "iqss-dataverse-solr:4040-docker-openshift"
   }
  }
 },

From 87b4314e7c5207316ed33fdf5d1d456322acdad3 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Thu, 21 Sep 2017 11:30:50 -0400
Subject: [PATCH 012/483] switch from ndslabs to iqss Dataverse/Glassfish
 image #4040

---
 conf/docker/build.sh | 8 ++
 conf/docker/dataverse-glassfish/.gitignore | 5 +
 conf/docker/dataverse-glassfish/Dockerfile | 102 ++++++++++++++-
 conf/docker/dataverse-glassfish/entrypoint.sh | 122 ++++++++++++++++++
 conf/openshift/openshift.json | 22 ++--
 .../source/developers/dev-environment.rst | 24 +++-
 6 files changed, 266 insertions(+), 17 deletions(-)
 create mode 100644 conf/docker/dataverse-glassfish/.gitignore
 create mode 100755 conf/docker/dataverse-glassfish/entrypoint.sh

diff --git a/conf/docker/build.sh b/conf/docker/build.sh
index 44100c99aff..9eb20f9a857 100755
--- a/conf/docker/build.sh
+++ b/conf/docker/build.sh
@@ -4,3 +4,11 @@ GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
 # Use "conf" directory as context so we can copy schema.xml into Solr image.
 docker build -t iqss/dataverse-solr:$GIT_BRANCH -f solr/Dockerfile ../../conf
 docker push iqss/dataverse-solr:$GIT_BRANCH
+# TODO: Think about if we really need dataverse.war because it's in dvinstall.zip.
+cp ../../target/dataverse*.war dataverse-glassfish/dataverse.war
+cp ../../scripts/installer/dvinstall.zip dataverse-glassfish
+cp ../../doc/sphinx-guides/source/_static/util/default.config dataverse-glassfish
+cp ../../downloads/glassfish-4.1.zip dataverse-glassfish
+cp ../../downloads/weld-osgi-bundle-2.2.10.Final-glassfish4.jar dataverse-glassfish
+docker build -t iqss/dataverse-glassfish:$GIT_BRANCH dataverse-glassfish
+docker push iqss/dataverse-glassfish:$GIT_BRANCH
diff --git a/conf/docker/dataverse-glassfish/.gitignore b/conf/docker/dataverse-glassfish/.gitignore
new file mode 100644
index 00000000000..b0e6e38894f
--- /dev/null
+++ b/conf/docker/dataverse-glassfish/.gitignore
@@ -0,0 +1,5 @@
+glassfish-4.1.zip
+weld-osgi-bundle-2.2.10.Final-glassfish4.jar
+dvinstall.zip
+dataverse.war
+default.config
diff --git a/conf/docker/dataverse-glassfish/Dockerfile b/conf/docker/dataverse-glassfish/Dockerfile
index 603660b5264..a71aabd6ee1 100644
--- a/conf/docker/dataverse-glassfish/Dockerfile
+++ b/conf/docker/dataverse-glassfish/Dockerfile
@@ -1,5 +1,99 @@
-# Glassfish and Dataverse in one Docker image, as suggested by danmcp.
-#
-# TODO: Start with the Dataverse 4.2.3 version at https://github.com/pdurbin/ndslabs-dataverse/blob/v4.2.3.1/dockerfiles/dataverse/Dockerfile
-#
-# TODO: Once we get Dataverse 4.2.3 working, we can try Dataverse 4.7 from https://github.com/pdurbin/ndslabs-dataverse/blob/upgrade-4.7/dockerfiles/dataverse/Dockerfile
+FROM centos:7.2.1511
+MAINTAINER Dataverse (support@dataverse.org)
+
+COPY glassfish-4.1.zip /tmp
+COPY weld-osgi-bundle-2.2.10.Final-glassfish4.jar /tmp
+COPY default.config /tmp
+COPY dvinstall.zip /tmp
+
+# Install dependencies
+#RUN yum install -y unzip
+RUN yum install -y \
+    cronie \
+    git \
+    java-1.8.0-openjdk-devel \
+    nc \
+    perl \
+    postgresql \
+    sha1sum \
+    unzip \
+    wget
+
+ENV GLASSFISH_DOWNLOAD_SHA1 d1a103d06682eb08722fbc9a93089211befaa080
+ENV GLASSFISH_DIRECTORY "/usr/local/glassfish4"
+ENV HOST_DNS_ADDRESS "localhost"
+ENV POSTGRES_DB "dvndb"
+ENV POSTGRES_USER "dvnapp"
+ENV RSERVE_USER "rserve"
+ENV RSERVE_PASSWORD "rserve"
+
+#RUN ls /tmp
+#
+RUN find /tmp
+#
+#RUN exitEarly
+
+# Install Glassfish 4.1
+RUN cd /tmp \
+    && unzip glassfish-4.1.zip \
+    && mv glassfish4 /usr/local \
+    && cd /usr/local/glassfish4/glassfish/modules \
+    && rm weld-osgi-bundle.jar \
+    #&& rm weld-osgi-bundle.jar
+    #&& wget http://central.maven.org/maven2/org/jboss/weld/weld-osgi-bundle/2.2.10.SP1/weld-osgi-bundle-2.2.10.SP1-glassfish4.jar \
+    #&& cp /tmp/weld-osgi-bundle-2.2.10.SP1-glassfish4.jar . \
+    && cp /tmp/weld-osgi-bundle-2.2.10.Final-glassfish4.jar . \
+    #&& cp /tmp/weld-osgi-bundle-2.2.10.SP1-glassfish4.jar .
+    #&& echo "$GLASSFISH_DOWNLOAD_SHA1 weld-osgi-bundle-2.2.10.SP1-glassfish4.jar" | sha1sum -c - \
+    #&& rm -rf /usr/local/glassfish4/glassfish/domains/domain1 \
+    && rm /tmp/glassfish-4.1.zip
+
+#RUN exitEarlyBeforeJq
+RUN yum -y install epel-release
+RUN yum install -y jq
+
+# Install jq
+#RUN cd /tmp \
+#    && wget https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64 \
+#    && mv jq-linux64 /usr/local/bin \
+#    && chmod +x /usr/local/bin/jq-linux64 \
+#    && ln -s /usr/local/bin/jq-linux64 /usr/local/bin/jq
+
+# Customized persistence xml to avoid database recreation
+#RUN mkdir -p /tmp/WEB-INF/classes/META-INF/
+#COPY WEB-INF/classes/META-INF/persistence.xml /tmp/WEB-INF/classes/META-INF/
+
+# Install Dataverse WAR
+RUN cd ~ \
+#    && wget https://github.com/IQSS/dataverse/releases/download/v4.7/dvinstall.zip \
+    && cp /tmp/dvinstall.zip . \
+    && unzip dvinstall.zip
+    #&& unzip dvinstall.zip \
+    #&& jar -uf dvinstall/dataverse.war -C /tmp/ /WEB-INF/classes/META-INF/persistence.xml
+
+# Install iRods iCommands
+#RUN cd /tmp \
+#    && yum -y install epel-release \
+#    && yum -y install ftp://ftp.renci.org/pub/irods/releases/4.1.6/centos7/irods-icommands-4.1.6-centos7-x86_64.rpm
+
+#COPY config-glassfish /root/dvinstall
+#COPY restart-glassfish /root/dvinstall
+#COPY config-dataverse /root/dvinstall
+
+#RUN cd /root/dvinstall && ./config-dataverse
+
+COPY ./entrypoint.sh /
+#COPY ./ddl /root/dvinstall
+#COPY ./init-postgres /root/dvinstall
+#COPY ./init-glassfish /root/dvinstall
+#COPY ./init-dataverse /root/dvinstall
+#COPY ./setup-all.sh /root/dvinstall
+#COPY ./setup-irods.sh /root/dvinstall
+COPY ./Dockerfile /
+
+VOLUME /usr/local/glassfish4/glassfish/domains/domain1/files
+
+EXPOSE 8080
+
+ENTRYPOINT ["/entrypoint.sh"]
+CMD ["dataverse"]
diff --git a/conf/docker/dataverse-glassfish/entrypoint.sh b/conf/docker/dataverse-glassfish/entrypoint.sh
new file mode 100755
index 00000000000..b50790f53ab
--- /dev/null
+++ b/conf/docker/dataverse-glassfish/entrypoint.sh
@@ -0,0 +1,122 @@
+#!/bin/bash -x
+
+# Entrypoint script for Dataverse web application. This script waits
+# for dependent services (Rserve, Postgres, Solr) to start before
+# initializing Glassfish.
+
+
+
+set -e
+
+if [ "$1" = 'dataverse' ]; then
+
+    export GLASSFISH_DIRECTORY=/usr/local/glassfish4
+    export HOST_DNS_ADDRESS=localhost
+
+    TIMEOUT=30
+
+    if [ -n "$RSERVE_SERVICE_HOST" ]; then
+       RSERVE_HOST=$RSERVE_SERVICE_HOST
+    elif [ -n "$RSERVE_PORT_6311_TCP_ADDR" ]; then
+       RSERVE_HOST=$RSERVE_PORT_6311_TCP_ADDR
+    elif [ -z "$RSERVE_HOST" ]; then
+       RSERVE_HOST="localhost"
+    fi
+    export RSERVE_HOST
+
+    if [ -n "$RSERVE_SERVICE_PORT" ]; then
+       RSERVE_PORT=$RSERVE_SERVICE_PORT
+    elif [ -n "$RSERVE_PORT_6311_TCP_PORT" ]; then
+       RSERVE_PORT=$RSERVE_PORT_6311_TCP_PORT
+    elif [ -z "$RSERVE_PORT" ]; then
+       RSERVE_PORT="6311"
+    fi
+    export RSERVE_PORT
+
+    echo "Using Rserve at $RSERVE_HOST:$RSERVE_PORT"
+
+    if ncat $RSERVE_HOST $RSERVE_PORT -w $TIMEOUT --send-only < /dev/null > /dev/null 2>&1 ; then
+       echo Rserve running;
+    else
+       echo Optional service Rserve not running.
+    fi
+
+
+    # postgres
+    if [ -n "$POSTGRES_SERVICE_HOST" ]; then
+       POSTGRES_HOST=$POSTGRES_SERVICE_HOST
+    elif [ -n "$POSTGRES_PORT_5432_TCP_ADDR" ]; then
+       POSTGRES_HOST=$POSTGRES_PORT_5432_TCP_ADDR
+    elif [ -z "$POSTGRES_HOST" ]; then
+       POSTGRES_HOST="localhost"
+    fi
+    export POSTGRES_HOST
+
+    if [ -n "$POSTGRES_SERVICE_PORT" ]; then
+       POSTGRES_PORT=$POSTGRES_SERVICE_PORT
+    elif [ -n "$POSTGRES_PORT_5432_TCP_PORT" ]; then
+       POSTGRES_PORT=$POSTGRES_PORT_5432_TCP_PORT
+    else
+       POSTGRES_PORT=5432
+    fi
+    export POSTGRES_PORT
+
+    echo "Using Postgres at $POSTGRES_HOST:$POSTGRES_PORT"
+
+    if ncat $POSTGRES_HOST $POSTGRES_PORT -w $TIMEOUT --send-only < /dev/null > /dev/null 2>&1 ; then
+       echo Postgres running;
+    else
+       echo Required service Postgres not running. Have you started the required services?
+       exit 1
+    fi
+
+    # solr
+    if [ -n "$SOLR_SERVICE_HOST" ]; then
+       SOLR_HOST=$SOLR_SERVICE_HOST
+    elif [ -n "$SOLR_PORT_8983_TCP_ADDR" ]; then
+       SOLR_HOST=$SOLR_PORT_8983_TCP_ADDR
+    elif [ -z "$SOLR_HOST" ]; then
+       SOLR_HOST="localhost"
+    fi
+    export SOLR_HOST
+
+    if [ -n "$SOLR_SERVICE_PORT" ]; then
+       SOLR_PORT=$SOLR_SERVICE_PORT
+    elif [ -n "$SOLR_PORT_8983_TCP_PORT" ]; then
+       SOLR_PORT=$SOLR_PORT_8983_TCP_PORT
+    else
+       SOLR_PORT=8983
+    fi
+    export SOLR_PORT
+
+    echo "Using Solr at $SOLR_HOST:$SOLR_PORT"
+
+    if ncat $SOLR_HOST $SOLR_PORT -w $TIMEOUT --send-only < /dev/null > /dev/null 2>&1 ; then
+       echo Solr running;
+    else
+       echo Required service Solr not running. Have you started the required services?
+       exit 1
+    fi
+
+    echo changing to dvinstall directory
+    cd ~/dvinstall
+    echo Copying the non-interactive file into place
+    cp /tmp/default.config .
+    echo Looking at first few lines of default.config
+    head default.config
+    # non-interactive install
+    echo Running non-interactive install
+    #./install -y -f > install.out 2> install.err
+    ./install -y -f
+
+#    if [ -n "$DVICAT_PORT_1247_TCP_PORT" ]; then
+#       ./setup-irods.sh
+#    fi
+
+    echo -e "\n\nDataverse started"
+
+    sleep infinity
+else
+    exec "$@"
+fi
+
diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json
index c2f033c8912..bb3930dc59d 100644
--- a/conf/openshift/openshift.json
+++ b/conf/openshift/openshift.json
@@ -20,7 +20,7 @@
 },
 "spec": {
  "selector": {
-  "name": "ndslabs-dataverse"
+  "name": "iqss-dataverse-glassfish"
  },
  "ports": [
  {
@@ -36,10 +36,10 @@
  "kind": "ImageStream",
  "apiVersion": "v1",
  "metadata": {
-  "name": "ndslabs-dataverse"
+  "name": "dataverse-plus-glassfish"
  },
  "spec": {
-  "dockerImageRepository": "ndslabs/dataverse"
+  "dockerImageRepository": "iqss/dataverse-glassfish"
  }
 },
 {
@@ -66,7 +66,7 @@
  "kind": "DeploymentConfig",
  "apiVersion": "v1",
  "metadata": {
-  "name": "dataverse-glassfish",
+  "name": "deploy-dataverse-glassfish",
  "annotations": {
   "template.alpha.openshift.io/wait-for-ready": "true"
  }
@@ -75,14 +75,14 @@
  "template": {
   "metadata": {
    "labels": {
-    "name": "ndslabs-dataverse"
+    "name": "iqss-dataverse-glassfish"
    }
   },
   "spec": {
    "containers": [
    {
-     "name": "ndslabs-dataverse",
-     "image": "ndslabs-dataverse",
+     "name": "dataverse-plus-glassfish",
+     "image": "dataverse-plus-glassfish",
     "ports": [
      {
       "containerPort": 8080,
@@ -169,7 +169,7 @@
  "rollingParams": {
   "updatePeriodSeconds": 1,
   "intervalSeconds": 1,
-  "timeoutSeconds": 120
+  "timeoutSeconds": 300
  },
  "resources": {}
 },
@@ -179,11 +179,11 @@
  "imageChangeParams": {
   "automatic": true,
   "containerNames": [
-   "ndslabs-dataverse"
+   "dataverse-plus-glassfish"
  ],
  "from": {
   "kind": "ImageStreamTag",
-  "name": "ndslabs-dataverse:latest"
+  "name": "dataverse-plus-glassfish:4040-iqss-glassfish"
  }
 }
 },
@@ -219,7 +219,7 @@
 ],
 "replicas": 1,
 "selector": {
-  "name": "ndslabs-dataverse"
+  "name": "iqss-dataverse-glassfish"
 }
 }
 }
diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index 03b239e6337..6827c30aeaa 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -331,7 +331,8 @@ If you've reconfigured from EZID to DataCite and are seeing ``Response code: 400
 OpenShift
 ---------
 
-From the Dataverse perspective, we are in the business of providing a "template" for OpenShift that describes how the various components we build our application on (Glassfish, PostgreSQL, Solr, the Dataverse war file itself, etc.) work together. We also plan to publish Docker images to DockerHub but for now we are trying to use the Dataverse images published to https://hub.docker.com/r/ndslabs/
+From the Dataverse perspective, we are in the business of providing a "template" for OpenShift that describes how the various components we build our application on (Glassfish, PostgreSQL, Solr, the Dataverse war file itself, etc.) work together. We publish Docker images to DockerHub at https://hub.docker.com/u/iqss/ that are used in the OpenShift template. Dataverse's use of Docker is documented below in a separate section.
+
 
 The OpenShift template for Dataverse can be found at ``conf/openshift/openshift.json`` and if you need to hack on the template or related files under ``conf/docker`` it is recommended that you iterate on them using Minishift.
 
@@ -386,7 +387,7 @@ Check Status of Dataverse Deployment to Minishift
 Review Logs of Dataverse Deployment to Minishift
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-``oc logs -c ndslabs-dataverse $(oc get po -o json | jq '.items[] | select(.kind=="Pod").metadata.name' -r | grep -v dataverse-glassfish-1-deploy)``
+``oc logs -c dataverse-plus-glassfish $(oc get po -o json | jq '.items[] | select(.kind=="Pod").metadata.name' -r | grep -v dataverse-glassfish-1-deploy)``
 
 Get a Shell (ssh/rsh) on Glassfish Server Deployed to Minishift
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -428,6 +429,25 @@ The following resources might be helpful.
 - https://blog.openshift.com/part-2-creating-a-template-a-technical-walkthrough/
 - https://docs.openshift.com/enterprise/3.0/architecture/core_concepts/templates.html
 
+Docker
+------
+
+Minishift makes use of Docker images on Docker Hub. To build new Docker images and push them to Docker Hub, you'll need to install Docker.
+
+Installing Docker
+~~~~~~~~~~~~~~~~~
+
+On Linux, you can probably get Docker from your package manager.
+
+On Mac, download the ``.dmg`` from https://www.docker.com and install it. As of this writing it is known as Docker Community Edition for Mac.
+
+We're working with Docker in the context of Minishift so if you haven't installed Minishift yet, follow the instructions above and make sure you get the Dataverse Docker images running in Minishift before you start messing with them.
+
+Get Set Up to Push Docker Images to Minishift Registry
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+FIXME https://docs.openshift.org/latest/minishift/openshift/openshift-docker-registry.html indicates that it should be possible to make use of the builtin registry in Minishift while iterating on Docker images but you may get "unauthorized: authentication required" when trying to push to it as reported at https://github.com/minishift/minishift/issues/817 so until we figure this out, you must push to Docker Hub instead. Run ``docker login`` and use the ``conf/docker/build.sh`` script to push Docker images you create to https://hub.docker.com/u/iqss/
+
 ----
 
 Previous: :doc:`intro` | Next: :doc:`version-control`

From 6e4ef452a172b2b4d56bccee20f03b679438f175 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Fri, 22 Sep 2017 15:30:56 -0400
Subject: [PATCH 013/483] use the right Docker tag for Dataverse/Glassfish
 #4040

---
 conf/openshift/openshift.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json
index bb3930dc59d..6732a08c13c 100644
--- a/conf/openshift/openshift.json
+++ b/conf/openshift/openshift.json
@@ -183,7 +183,7 @@
  ],
  "from": {
   "kind": "ImageStreamTag",
-  "name": "dataverse-plus-glassfish:4040-iqss-glassfish"
+  "name": "dataverse-plus-glassfish:4040-docker-openshift"
  }
 }
 },

From 3ab921c69c08668d1d3f8160c8de6003b5216731 Mon Sep 17 00:00:00 2001
From: Michael Bar-Sinai
Date: Mon, 25 Sep 2017 18:58:42 +0300
Subject: [PATCH 014/483] Make a Dataset have multiple locks. Re-arrange the
 workflow steps execution system from recursive to iterative, in order to
 support single transaction per step execution.

---
 .../dataverse/DOIDataCiteRegisterService.java | 12 +-
 .../iq/dataverse/DataCiteRESTfullClient.java | 5 +-
 .../edu/harvard/iq/dataverse/Dataset.java | 76 ++++++--
 .../edu/harvard/iq/dataverse/DatasetLock.java | 10 +-
 .../edu/harvard/iq/dataverse/DatasetPage.java | 25 +--
 .../iq/dataverse/DatasetServiceBean.java | 51 ++----
 .../harvard/iq/dataverse/DatasetVersion.java | 3 +-
 .../iq/dataverse/EditDatafilesPage.java | 19 +-
 .../harvard/iq/dataverse/api/Datasets.java | 40 +---
 .../harvard/iq/dataverse/api/Workflows.java | 13 ++
 .../api/datadeposit/StatementManagerImpl.java | 12 +-
 .../dataverse/api/datadeposit/SwordUtil.java | 12 +-
 .../filesystem/FileRecordJobListener.java | 9 +-
 .../impl/AbstractPublishDatasetCommand.java | 6 +-
 .../engine/command/impl/AddLockCommand.java | 3 +-
 .../FinalizeDatasetPublicationCommand.java | 10 +-
 .../command/impl/PublishDatasetCommand.java | 27 +--
 .../command/impl/RemoveLockCommand.java | 7 +-
 .../impl/ReturnDatasetToAuthorCommand.java | 5 +-
 .../dataverse/ingest/IngestMessageBean.java | 11 +-
 .../workflow/PendingWorkflowInvocation.java | 4 +-
 .../dataverse/workflow/WorkflowContext.java | 16 +-
 .../workflow/WorkflowServiceBean.java | 173 +++++++++++++-----
 .../HttpSendReceiveClientStep.java | 3 +-
 src/main/resources/META-INF/persistence.xml | 3 +-
 .../edu/harvard/iq/dataverse/DatasetTest.java | 53 ++++++
 .../iq/dataverse/DatasetVersionTest.java | 4 +-
 .../ReturnDatasetToAuthorCommandTest.java | 2 +-
 28 files changed, 378 insertions(+), 236 deletions(-)
 create mode 100644 src/test/java/edu/harvard/iq/dataverse/DatasetTest.java

diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
index 4224b565159..17a6b1759eb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
@@ -15,11 +15,9 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.annotation.PreDestroy;
 import javax.ejb.Stateless;
 import javax.persistence.EntityManager;
 import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
 import javax.persistence.TypedQuery;
 import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
@@ -57,7 +55,7 @@ public String createIdentifier(String identifier, HashMap metada
         metadataTemplate.setPublisherYear(metadata.get("datacite.publicationyear"));
 
         String xmlMetadata = metadataTemplate.generateXML();
-        logger.fine("XML to send to DataCite: " + xmlMetadata);
+        logger.log(Level.FINE, "XML to send to DataCite: {0}", xmlMetadata);
 
         String status = metadata.get("_status").trim();
         String target = metadata.get("_target");
@@ -92,8 +90,14 @@ public String createIdentifier(String identifier, HashMap metada
                 try (DataCiteRESTfullClient client = openClient()) {
                     retString = client.postMetadata(xmlMetadata);
                     client.postUrl(identifier.substring(identifier.indexOf(":") + 1), target);
+
                 } catch (UnsupportedEncodingException ex) {
-                    Logger.getLogger(DOIDataCiteRegisterService.class.getName()).log(Level.SEVERE, null, ex);
+                    logger.log(Level.SEVERE, null, ex);
+
+                } catch ( RuntimeException rte ) {
+                    logger.log(Level.SEVERE, "Error creating DOI at DataCite: {0}", rte.getMessage());
+                    logger.log(Level.SEVERE, "Exception", rte);
+
                 }
             }
         } else if (status.equals("unavailable")) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCiteRESTfullClient.java b/src/main/java/edu/harvard/iq/dataverse/DataCiteRESTfullClient.java
index 93607a56541..a329f663fb5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataCiteRESTfullClient.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataCiteRESTfullClient.java
@@ -169,11 +169,11 @@ public boolean testDOIExists(String doi) {
      * @param metadata
      * @return
      */
-    public String postMetadata(String metadata) throws UnsupportedEncodingException {
+    public String postMetadata(String metadata) {
         HttpPost httpPost = new HttpPost(this.url + "/metadata");
         httpPost.setHeader("Content-Type", "application/xml;charset=UTF-8");
-        httpPost.setEntity(new StringEntity(metadata, "utf-8"));
         try {
+            httpPost.setEntity(new StringEntity(metadata, "utf-8"));
             HttpResponse response = httpClient.execute(httpPost,context);
 
             String data = EntityUtils.toString(response.getEntity(), encoding);
@@ -183,6 +183,7 @@ public String postMetadata(String metadata) throws UnsupportedEncodingException
                 throw new RuntimeException(errMsg);
             }
             return data;
+
         } catch (IOException ioe) {
             logger.log(Level.SEVERE, "IOException when post metadata");
             throw new RuntimeException("IOException when post metadata", ioe);
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
index 144285299ad..5925b6cc48f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -9,9 +9,10 @@
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Date;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
-import java.util.logging.Logger;
+import java.util.Set;
 import javax.persistence.CascadeType;
 import javax.persistence.Column;
 import javax.persistence.Entity;
@@ -73,7 +74,6 @@ sequence. Used when the Dataverse is (optionally) configured to use
     @Index(columnList = "thumbnailfile_id")},
     uniqueConstraints = @UniqueConstraint(columnNames = {"authority,protocol,identifier,doiseparator"}))
 public class Dataset extends DvObjectContainer {
-    private static final Logger logger = Logger.getLogger(Dataset.class.getCanonicalName());
 
     public static final String TARGET_URL = "/citation?persistentId=";
     private static final long serialVersionUID = 1L;
@@ -100,8 +100,8 @@ public class Dataset extends DvObjectContainer {
     @OrderBy("versionNumber DESC, minorVersionNumber DESC")
     private List<DatasetVersion> versions = new ArrayList<>();
 
-    @OneToOne(mappedBy = "dataset", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}, orphanRemoval = true)
-    private DatasetLock datasetLock;
+    @OneToMany(mappedBy = "dataset", cascade = CascadeType.ALL, orphanRemoval = true)
+    private Set<DatasetLock> datasetLocks;
 
     @OneToOne(cascade = {CascadeType.MERGE, CascadeType.PERSIST})
     @JoinColumn(name = "thumbnailfile_id")
@@ -154,7 +154,63 @@ public Dataset() {
         datasetVersion.setMinorVersionNumber((long) 0);
         versions.add(datasetVersion);
     }
+
+    /**
+     * Tests whether {@code this} dataset is locked for a given reason.
+     * @param reason the reason we test for.
+     * @return {@code true} iff the data set is locked for {@code reason}.
+     */
+    public boolean isLockedFor( DatasetLock.Reason reason ) {
+        for ( DatasetLock l : getLocks() ) {
+            if ( l.getReason() == reason ) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Retrieves the dataset lock for the passed reason.
+     * @param reason
+     * @return the dataset lock, or {@code null}.
+     */
+    public DatasetLock getLockFor( DatasetLock.Reason reason ) {
+        for ( DatasetLock l : getLocks() ) {
+            if ( l.getReason() == reason ) {
+                return l;
+            }
+        }
+        return null;
+    }
+
+    public Set<DatasetLock> getLocks() {
+        // lazy set creation
+        if ( datasetLocks == null ) {
+            setLocks( new HashSet<>() );
+        }
+        return datasetLocks;
+    }
+
+    /**
+     * JPA use only!
+     * @param datasetLocks
+     */
+    void setLocks(Set<DatasetLock> datasetLocks) {
+        this.datasetLocks = datasetLocks;
+    }
+
+    public void addLock(DatasetLock datasetLock) {
+        getLocks().add(datasetLock);
+    }
+
+    public void removeLock( DatasetLock aDatasetLock ) {
+        getLocks().remove( aDatasetLock );
+    }
+
+    public boolean isLocked() {
+        return !getLocks().isEmpty();
+    }
+
     public String getProtocol() {
         return protocol;
     }
@@ -240,18 +296,6 @@ public void setFiles(List files) {
         this.files = files;
     }
 
-    public DatasetLock getDatasetLock() {
-        return datasetLock;
-    }
-
-    public void setDatasetLock(DatasetLock datasetLock) {
-        this.datasetLock = datasetLock;
-    }
-
-    public boolean isLocked() {
-        return (getDatasetLock()!=null);
-    }
-
     public boolean isDeaccessioned() {
         // return true, if all published versions were deaccessioned
         boolean hasDeaccessionedVersions = false;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java
index 71d0456fa67..0353039df69 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java
@@ -20,6 +20,7 @@
 
 package edu.harvard.iq.dataverse;
 
+import static edu.harvard.iq.dataverse.DatasetLock.Reason.Workflow;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import java.util.Date;
 import java.io.Serializable;
@@ -33,7 +34,6 @@
 import javax.persistence.Index;
 import javax.persistence.JoinColumn;
 import javax.persistence.ManyToOne;
-import javax.persistence.OneToOne;
 import javax.persistence.Table;
 import javax.persistence.Temporal;
 import javax.persistence.TemporalType;
@@ -52,7 +52,7 @@
 @Table(indexes = {@Index(columnList="user_id"), @Index(columnList="dataset_id")})
 @NamedQueries(
     @NamedQuery(name="DatasetLock.getLocksByDatasetId",
-                query="SELECT l FROM DatasetLock l WHERE l.dataset.id=:datasetId")
+                query="SELECT lock FROM DatasetLock lock WHERE lock.dataset.id=:datasetId")
 )
 public class DatasetLock implements Serializable {
 
@@ -76,13 +76,13 @@ public enum Reason {
     @Temporal(value = TemporalType.TIMESTAMP)
     private Date startTime;
 
-    @OneToOne
+    @ManyToOne
     @JoinColumn(nullable=false)
     private Dataset dataset;
 
     @ManyToOne
     @JoinColumn(nullable=false)
-    private AuthenticatedUser user;
+    private AuthenticatedUser user;
 
     @Enumerated(EnumType.STRING)
     @Column(nullable=false)
@@ -116,7 +116,7 @@ public DatasetLock(Reason aReason, AuthenticatedUser aUser, String infoMessage)
         startTime = new Date();
         user = aUser;
         info = infoMessage;
-        
+
     }
 
     /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 8f6301f0b70..53cd8ec20f0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -78,10 +78,7 @@
 import java.util.logging.Level;
 import edu.harvard.iq.dataverse.datasetutility.TwoRavensHelper;
 import edu.harvard.iq.dataverse.datasetutility.WorldMapPermissionHelper;
-import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
-import edu.harvard.iq.dataverse.engine.command.impl.AddLockCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetResult;
-import edu.harvard.iq.dataverse.engine.command.impl.RemoveLockCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.ReturnDatasetToAuthorCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.SubmitDatasetForReviewCommand;
@@ -1445,9 +1442,10 @@ private String init(boolean initFull) {
                 // when we sync up with the rsync-upload branch, there will be a merge
                 // conflict here; once resolved, there will also be code here for
                 // rsync upload in progress, and maybe other kinds of locks.
-                if (dataset.getDatasetLock().getReason().equals(DatasetLock.Reason.Workflow)) {
+                if (dataset.isLockedFor(DatasetLock.Reason.Workflow)) {
                     JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.publish.workflow.inprogress"));
-                } else if (dataset.getDatasetLock().getReason().equals(DatasetLock.Reason.InReview)) {
+                }
+                if (dataset.isLockedFor(DatasetLock.Reason.InReview)) {
                     JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.inreview.infoMessage"));
                 }
             }
@@ -1798,7 +1796,7 @@ private String releaseDataset(boolean minor) {
                 // has been published. If a publishing workflow is configured, this may have sent the
                 // dataset into a workflow limbo, potentially waiting for a third party system to complete
                 // the process. So it may be premature to show the "success" message at this point.
-                if (dataset.isLocked() && dataset.getDatasetLock().getReason().equals(DatasetLock.Reason.Workflow)) {
+                if (dataset.isLockedFor(DatasetLock.Reason.Workflow)) {
                     JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.publish.workflow.inprogress"));
                 } else {
                     JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.publishSuccess"));
@@ -2596,7 +2594,7 @@ public void refreshLock() {
 
     public boolean isLockedInProgress() {
         if (dataset != null) {
-            logger.fine("checking lock status of dataset " + dataset.getId());
+            logger.log(Level.FINE, "checking lock status of dataset {0}", dataset.getId());
             if (dataset.isLocked()) {
                 return true;
             }
@@ -2605,19 +2603,14 @@ public boolean isLockedInProgress() {
     }
 
     public boolean isDatasetLockedInWorkflow() {
-        if (dataset != null) {
-            if (dataset.isLocked()) {
-                if (dataset.getDatasetLock().getReason().equals(DatasetLock.Reason.Workflow)) {
-                    return true;
-                }
-            }
-        }
-        return false;
+        return (dataset != null)
+                ? dataset.isLockedFor(DatasetLock.Reason.Workflow)
+                : false;
     }
 
     public boolean isStillLocked() {
         if (dataset != null && dataset.getId() != null) {
-            logger.fine("checking lock status of dataset " + dataset.getId());
+            logger.log(Level.FINE, "checking lock status of dataset {0}", dataset.getId());
             if (datasetService.checkDatasetLock(dataset.getId())) {
                 return true;
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index a020a17fde4..c5abe0b5cc1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -1,8 +1,3 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
@@ -24,6 +19,7 @@
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -39,11 +35,8 @@
 import javax.inject.Inject;
 import javax.inject.Named;
 import javax.persistence.EntityManager;
-import javax.persistence.NamedStoredProcedureQuery;
-import javax.persistence.ParameterMode;
 import javax.persistence.PersistenceContext;
 import javax.persistence.Query;
-import javax.persistence.StoredProcedureParameter;
 import javax.persistence.StoredProcedureQuery;
 import javax.persistence.TypedQuery;
 import javax.xml.stream.XMLOutputFactory;
@@ -511,8 +504,9 @@ public boolean checkDatasetLock(Long datasetId) {
 
     @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
     public void addDatasetLock(Dataset dataset, DatasetLock lock) {
-        dataset.setDatasetLock(lock);
-        em.persist(lock);
+        lock.setDataset(dataset);
+        dataset.addLock(lock);
+        em.merge(dataset);
     }
 
     public void addDatasetLock(Long datasetId, DatasetLock.Reason reason, Long userId, String info) {
@@ -536,28 +530,21 @@ public void addDatasetLock(Long datasetId, DatasetLock.Reason reason, Long userI
     }
 
     @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
-    public void removeDatasetLock(Long datasetId) {
+    public void removeDatasetLock(Long datasetId, DatasetLock.Reason aReason) {
         Dataset dataset = em.find(Dataset.class, datasetId);
-        //em.refresh(dataset); (?)
-        DatasetLock lock = dataset.getDatasetLock();
-        if (lock != null) {
-            AuthenticatedUser user = lock.getUser();
-            dataset.setDatasetLock(null);
-            user.getDatasetLocks().remove(lock);
-            /* 
-             * TODO - ?
-             * throw an exception if for whatever reason we can't remove the lock?
-            try {
-            */
-            em.remove(lock);
-            /*
-            } catch (TransactionRequiredException te) {
-                ...
-            } catch (IllegalArgumentException iae) {
-                ...
-            }
-            */
- } - */ - } + Logger.getLogger(DatasetServiceBean.class.getName()).log(Level.INFO, "Removing locks from dataset " + dataset.getId()); + Logger.getLogger(DatasetServiceBean.class.getName()).log(Level.INFO, dataset.getLocks().toString() ); + new HashSet<>(dataset.getLocks()).stream() + .filter( l -> l.getReason() == aReason ) + .forEach( lock -> { + Logger.getLogger(DatasetServiceBean.class.getName()).log(Level.INFO, "Removing lock " + lock); + dataset.removeLock(lock); + + AuthenticatedUser user = lock.getUser(); + user.getDatasetLocks().remove(lock); + + em.remove(lock); + }); } /* @@ -594,7 +581,7 @@ public String getTitleFromLatestVersion(Long datasetId, boolean includeDraft){ + ";").getSingleResult(); } catch (Exception ex) { - logger.info("exception trying to get title from latest version: " + ex); + logger.log(Level.INFO, "exception trying to get title from latest version: {0}", ex); return ""; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 9e97e8d475a..030a10244a2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -221,8 +221,7 @@ public void setDatasetFields(List datasetFields) { */ public boolean isInReview() { if (versionState != null && versionState.equals(VersionState.DRAFT)) { - DatasetLock l = getDataset().getDatasetLock(); - return (l != null) && l.getReason()==DatasetLock.Reason.InReview; + return getDataset().isLockedFor(DatasetLock.Reason.InReview); } else { return false; } diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 8b1aaea7f27..35eca0f1d98 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -2083,19 +2083,16 @@ private boolean isFileAlreadyUploaded(DataFile dataFile) { public boolean isLocked() { if (dataset != null) { - logger.fine("checking lock status of dataset " + dataset.getId()); + logger.log(Level.FINE, "checking lock status of dataset {0}", dataset.getId()); if (dataset.isLocked()) { // refresh the dataset and version, if the current working // version of the dataset is locked: } Dataset lookedupDataset = datasetService.find(dataset.getId()); - DatasetLock datasetLock = null; - if (lookedupDataset != null) { - datasetLock = lookedupDataset.getDatasetLock(); - if (datasetLock != null) { - logger.fine("locked!"); - return true; - } + + if ( (lookedupDataset!=null) && lookedupDataset.isLocked() ) { + logger.fine("locked!"); + return true; } } return false; @@ -2126,12 +2123,12 @@ public void setFileMetadataSelected(FileMetadata fm){ public void setFileMetadataSelected(FileMetadata fm, String guestbook) { fileMetadataSelected = fm; - logger.fine("set the file for the advanced options popup (" + fileMetadataSelected.getLabel() + ")"); + logger.log(Level.FINE, "set the file for the advanced options popup ({0})", fileMetadataSelected.getLabel()); } public FileMetadata getFileMetadataSelected() { if (fileMetadataSelected != null) { - logger.fine("returning file metadata for the advanced options popup (" + fileMetadataSelected.getLabel() + ")"); + logger.log(Level.FINE, "returning file metadata for the advanced options popup ({0})", fileMetadataSelected.getLabel()); } else { logger.fine("file metadata for the advanced options popup is null."); } @@ -2225,7 +2222,7 @@ public void 
saveAsDesignatedThumbnail() { } public void deleteDatasetLogoAndUseThisDataFileAsThumbnailInstead() { - logger.fine("For dataset id " + dataset.getId() + " the current thumbnail is from a dataset logo rather than a dataset file, blowing away the logo and using this FileMetadata id instead: " + fileMetadataSelectedForThumbnailPopup); + logger.log(Level.FINE, "For dataset id {0} the current thumbnail is from a dataset logo rather than a dataset file, blowing away the logo and using this FileMetadata id instead: {1}", new Object[]{dataset.getId(), fileMetadataSelectedForThumbnailPopup}); /** * @todo Rather than deleting and merging right away, try to respect how * this page seems to stage actions and giving the user a chance to diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 6b854f9f30b..eb6abb6576b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -405,26 +405,6 @@ public Response publishDataseUsingGetDeprecated( @PathParam("id") String id, @Qu return publishDataset(id, type); } - // TODO SBG: Delete me - @EJB - WorkflowServiceBean workflows; - - @PUT - @Path("{id}/actions/wf/{wfid}") - public Response DELETEME(@PathParam("id") String id, @PathParam("wfid") String wfid) { - try { - Workflow wf = workflows.getWorkflow(Long.parseLong(wfid)).get(); - Dataset ds = findDatasetOrDie(id); - WorkflowContext ctxt = new WorkflowContext(createDataverseRequest(findUserOrDie()), ds, 0, 0, WorkflowContext.TriggerType.PostPublishDataset, "DataCite"); - workflows.start(wf, ctxt); - return ok("Started workflow " + wf.getName() + " on dataset " + ds.getId() ); - - } catch (WrappedResponse ex) { - return ex.getResponse(); - } - } - // TODO SBG: /Delete me - @POST @Path("{id}/actions/:publish") public Response publishDataset(@PathParam("id") String id, @QueryParam("type") String type) { @@ -655,7 +635,7 @@ public Response getRsync(@PathParam("identifier") String id) { @POST @Path("{identifier}/dataCaptureModule/checksumValidation") public Response receiveChecksumValidationResults(@PathParam("identifier") String id, JsonObject jsonFromDcm) { - logger.fine("jsonFromDcm: " + jsonFromDcm); + logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm); AuthenticatedUser authenticatedUser = null; try { authenticatedUser = findAuthenticatedUserOrDie(); @@ -712,13 +692,7 @@ public Response submitForReview(@PathParam("id") String idSupplied) { Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied))); JsonObjectBuilder result = Json.createObjectBuilder(); - boolean inReview = false; - try{ - inReview = updatedDataset.getDatasetLock().getReason().equals(DatasetLock.Reason.InReview); - } catch (Exception e){ - System.out.print("submit exception: " + e.getMessage()); - // if there's no lock then it can't be in review by definition - } + boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview); result.add("inReview", inReview); result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review."); @@ -747,12 +721,7 @@ public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBo } AuthenticatedUser authenticatedUser = findAuthenticatedUserOrDie(); Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn )); - boolean inReview = 
false; - try{ - inReview = updatedDataset.getDatasetLock().getReason().equals(DatasetLock.Reason.InReview); - } catch (Exception e){ - // if there's no lock then it can't be in review by definition - } + boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview); JsonObjectBuilder result = Json.createObjectBuilder(); result.add("inReview", inReview); @@ -767,9 +736,8 @@ public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBo * Add a File to an existing Dataset * * @param idSupplied - * @param datasetId * @param jsonData - * @param testFileInputStream + * @param fileInputStream * @param contentDispositionHeader * @param formDataBodyPart * @return diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java b/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java index 4269a0215bf..77961369c7b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java @@ -1,14 +1,17 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddressRange; +import edu.harvard.iq.dataverse.engine.command.impl.AddLockCommand; import edu.harvard.iq.dataverse.workflow.PendingWorkflowInvocation; import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean; import java.util.Arrays; import java.util.logging.Level; import java.util.logging.Logger; import javax.ejb.EJB; +import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; @@ -51,6 +54,16 @@ public Response resumeWorkflow( @PathParam("invocationId") String invocationId, return Response.accepted("/api/datasets/" + pending.getDataset().getId() ).build(); } + @Path("lock/{dsId}") + @GET + public Response lockDataset( @PathParam("dsId") String dsId ) { + return response( req -> { + DatasetLock dl = new DatasetLock(DatasetLock.Reason.Workflow, findAuthenticatedUserOrDie()); + execCommand( new AddLockCommand( req, findDatasetOrDie(dsId), dl) ) ; + return ok("locked dataset " + dsId); + }); + } + private boolean isAllowed(IpAddress addr) { if ( System.currentTimeMillis()-lastWhitelistUpdate > 60*1000 ) { updateWhitelist(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java index 5089204f854..f6c9bcca18c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java @@ -14,7 +14,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.logging.Logger; +import static java.util.stream.Collectors.joining; import javax.ejb.EJB; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; @@ -91,14 +93,16 @@ public Statement getStatement(String editUri, Map map, AuthCrede states.put("latestVersionState", dataset.getLatestVersion().getVersionState().toString()); Boolean isMinorUpdate = dataset.getLatestVersion().isMinorUpdate(); states.put("isMinorUpdate", isMinorUpdate.toString()); - DatasetLock lock = dataset.getDatasetLock(); - if (lock != null) { + + if ( dataset.isLocked() ) { states.put("locked", "true"); - 
states.put("lockedDetail", lock.getInfo()); - states.put("lockedStartTime", lock.getStartTime().toString()); + states.put("lockedDetail", dataset.getLocks().stream().map( l-> l.getInfo() ).collect( joining(",")) ); + Optional earliestLock = dataset.getLocks().stream().min((l1, l2) -> (int)Math.signum(l1.getStartTime().getTime()-l2.getStartTime().getTime()) ); + states.put("lockedStartTime", earliestLock.get().getStartTime().toString()); } else { states.put("locked", "false"); } + statement.setStates(states); List fileMetadatas = dataset.getLatestVersion().getFileMetadatas(); for (FileMetadata fileMetadata : fileMetadatas) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java index a35acfb200e..a5efd54559d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java @@ -1,7 +1,7 @@ package edu.harvard.iq.dataverse.api.datadeposit; import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetLock; +import static java.util.stream.Collectors.joining; import org.swordapp.server.SwordError; import org.swordapp.server.UriRegistry; @@ -12,7 +12,7 @@ public class SwordUtil { static String DCTERMS = "http://purl.org/dc/terms/"; - /** + /* * @todo get rid of this method */ public static SwordError throwSpecialSwordErrorWithoutStackTrace(String SwordUriRegistryError, String error) { @@ -28,7 +28,7 @@ public static SwordError throwSpecialSwordErrorWithoutStackTrace(String SwordUri return swordError; } - /** + /* * @todo get rid of this method */ public static SwordError throwRegularSwordErrorWithoutStackTrace(String error) { @@ -42,9 +42,9 @@ public static SwordError throwRegularSwordErrorWithoutStackTrace(String error) { } public static void datasetLockCheck(Dataset dataset) throws SwordError { - DatasetLock datasetLock = dataset.getDatasetLock(); - if (datasetLock != null) { - String message = "Please try again later. Unable to perform operation due to dataset lock: " + datasetLock.getInfo(); + if ( dataset.isLocked() ) { + String message = "Please try again later. 
Unable to perform operation due to dataset lock: " + + dataset.getLocks().stream().map(l->l.getReason().name() + ": " + l.getInfo()).collect( joining(",") ); throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, message); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java index fa941b9a99b..76e57dd94e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java @@ -226,9 +226,12 @@ public void afterJob() throws Exception { } // remove dataset lock - if (dataset != null && dataset.getId() != null) { - datasetServiceBean.removeDatasetLock(dataset.getId()); - } + // Disabled now, see L.A.'s comment at beforeJob() +// if (dataset != null && dataset.getId() != null) { +// datasetServiceBean.removeDatasetLock(dataset.getId(), DatasetLock.Reason.Ingest); +// } + + getJobLogger().log(Level.INFO, "Removing dataset lock."); // job step info diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractPublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractPublishDatasetCommand.java index 38708a8efac..9f04f64e0b6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractPublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractPublishDatasetCommand.java @@ -21,11 +21,7 @@ public AbstractPublishDatasetCommand(Dataset datasetIn, DataverseRequest aReques } protected WorkflowContext buildContext( String doiProvider, WorkflowContext.TriggerType triggerType) { - return new WorkflowContext(getRequest(), theDataset, - theDataset.getLatestVersion().getVersionNumber(), - theDataset.getLatestVersion().getMinorVersionNumber(), - triggerType, - doiProvider); + return new WorkflowContext(getRequest(), theDataset, doiProvider, triggerType); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddLockCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddLockCommand.java index 1f9ee1e96c2..3001d1532e1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddLockCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddLockCommand.java @@ -28,8 +28,9 @@ public AddLockCommand(DataverseRequest aRequest, Dataset aDataset, DatasetLock a @Override public DatasetLock execute(CommandContext ctxt) throws CommandException { - lock.setDataset(dataset); + ctxt.datasets().addDatasetLock(dataset, lock); + return lock; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index a031e047902..a904057a067 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -21,7 +21,6 @@ import edu.harvard.iq.dataverse.export.ExportException; import edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.workflow.Workflow; import 
edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType; @@ -29,7 +28,6 @@ import java.sql.Timestamp; import java.util.Date; import java.util.Optional; -import java.util.ResourceBundle; import java.util.logging.Level; import java.util.logging.Logger; @@ -111,10 +109,12 @@ public Dataset execute(CommandContext ctxt) throws CommandException { ctxt.index().indexDataset(theDataset, doNormalSolrDocCleanUp); ctxt.solrIndex().indexPermissionsForOneDvObject(theDataset); - ctxt.engine().submit(new RemoveLockCommand(getRequest(), theDataset)); + ctxt.engine().submit(new RemoveLockCommand(getRequest(), theDataset, DatasetLock.Reason.Workflow)); + final Optional postPubWorkflow = ctxt.workflows().getDefaultWorkflow(TriggerType.PostPublishDataset); - ctxt.workflows().getDefaultWorkflow(TriggerType.PostPublishDataset) - .ifPresent(wf -> ctxt.workflows().start(wf, buildContext(doiProvider, TriggerType.PostPublishDataset))); + if ( postPubWorkflow.isPresent() ) { + ctxt.workflows().start(postPubWorkflow.get(), buildContext(doiProvider, TriggerType.PostPublishDataset)); + } return ctxt.em().merge(theDataset); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 22732ea34f7..2d67fb5cff7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -1,14 +1,8 @@ package edu.harvard.iq.dataverse.engine.command.impl; -import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetLock; -import edu.harvard.iq.dataverse.DatasetVersionUser; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.UserNotification; -import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; @@ -17,14 +11,11 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType; -import edu.harvard.iq.dataverse.util.BundleUtil; -import java.io.IOException; -import java.sql.Timestamp; -import java.util.Date; import java.util.Optional; +import static java.util.stream.Collectors.joining; /** - * Kick-off a dataset publication process. The process may complete immediatly, + * Kick-off a dataset publication process. The process may complete immediately, * but may also result in a workflow being started and pending on some external * response. Either way, the process will be completed by an instance of * {@link FinalizeDatasetPublicationCommand}. 
@@ -64,7 +55,8 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException theDataset.getEditVersion().setVersionNumber(new Long(theDataset.getVersionNumber())); theDataset.getEditVersion().setMinorVersionNumber(new Long(theDataset.getMinorVersionNumber() + 1)); - } else /* major, non-first release */ { + } else { + // major, non-first release theDataset.getEditVersion().setVersionNumber(new Long(theDataset.getVersionNumber() + 1)); theDataset.getEditVersion().setMinorVersionNumber(new Long(0)); } @@ -72,12 +64,12 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException theDataset = ctxt.em().merge(theDataset); //Move remove lock to after merge... SEK 9/1/17 (why? -- L.A.) - ctxt.engine().submit( new RemoveLockCommand(getRequest(), theDataset)); + ctxt.engine().submit( new RemoveLockCommand(getRequest(), theDataset, DatasetLock.Reason.InReview) ); + ctxt.engine().submit( new RemoveLockCommand(getRequest(), theDataset, DatasetLock.Reason.Ingest) ); Optional prePubWf = ctxt.workflows().getDefaultWorkflow(TriggerType.PrePublishDataset); if ( prePubWf.isPresent() ) { // We start a workflow - ctxt.engine().submit( new AddLockCommand(getRequest(), theDataset, new DatasetLock(DatasetLock.Reason.Workflow, getRequest().getAuthenticatedUser()))); ctxt.workflows().start(prePubWf.get(), buildContext(doiProvider, TriggerType.PrePublishDataset) ); return new PublishDatasetResult(theDataset, false); @@ -100,9 +92,10 @@ private void verifyCommandArguments() throws IllegalCommandException { throw new IllegalCommandException("This dataset may not be published because its host dataverse (" + theDataset.getOwner().getAlias() + ") has not been published.", this); } - if (theDataset.isLocked() && !theDataset.getDatasetLock().getReason().equals(DatasetLock.Reason.InReview)) { - - throw new IllegalCommandException("This dataset is locked. Reason: " + theDataset.getDatasetLock().getReason().toString() + ". Please try publishing later.", this); + if ( theDataset.isLocked() ) { + throw new IllegalCommandException("This dataset is locked. Reason: " + + theDataset.getLocks().stream().map(l -> l.getReason().name()).collect( joining(",") ) + + ". 
Please try publishing later.", this); } if (theDataset.getLatestVersion().isReleased()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RemoveLockCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RemoveLockCommand.java index 669e00ea9ba..9ed38fc6493 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RemoveLockCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RemoveLockCommand.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -16,15 +17,17 @@ public class RemoveLockCommand extends AbstractVoidCommand { private final Dataset dataset; + private final DatasetLock.Reason reason; - public RemoveLockCommand(DataverseRequest aRequest, Dataset aDataset) { + public RemoveLockCommand(DataverseRequest aRequest, Dataset aDataset, DatasetLock.Reason aReason) { super(aRequest, aDataset); dataset = aDataset; + reason = aReason; } @Override protected void executeImpl(CommandContext ctxt) throws CommandException { - ctxt.datasets().removeDatasetLock(dataset.getId()); + ctxt.datasets().removeDatasetLock(dataset.getId(), reason); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java index 3ee601bde30..ba8b0434761 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.DatasetVersionUser; import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.authorization.Permission; @@ -44,7 +45,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { throw new IllegalCommandException("You must enter a reason for returning a dataset to the author(s).", this); } */ - ctxt.engine().submit( new RemoveLockCommand(getRequest(), theDataset)); + ctxt.engine().submit( new RemoveLockCommand(getRequest(), theDataset, DatasetLock.Reason.InReview)); Dataset updatedDataset = save(ctxt); return updatedDataset; @@ -56,7 +57,7 @@ public Dataset save(CommandContext ctxt) throws CommandException { theDataset.getEditVersion().setLastUpdateTime(updateTime); // We set "in review" to false because now the ball is back in the author's court. 
theDataset.setModificationTime(updateTime);
-        theDataset.setDatasetLock(null);
+        // the InReview lock is removed by the RemoveLockCommand submitted in execute() above
 
         Dataset savedDataset = ctxt.em().merge(theDataset);
         ctxt.em().flush();
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
index 46fa7370d3f..48048fd0d74 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
@@ -20,30 +20,23 @@
 
 package edu.harvard.iq.dataverse.ingest;
 
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.DataFileServiceBean;
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
+import edu.harvard.iq.dataverse.DatasetLock;
 
-import java.io.File;
-import java.util.ArrayList;
 import java.util.Iterator;
-import java.util.List;
 import java.util.logging.Logger;
 import javax.ejb.ActivationConfigProperty;
 import javax.ejb.EJB;
 import javax.ejb.MessageDriven;
 import javax.ejb.TransactionAttribute;
 import javax.ejb.TransactionAttributeType;
-import javax.faces.application.FacesMessage;
 import javax.jms.JMSException;
 import javax.jms.Message;
 import javax.jms.MessageListener;
 import javax.jms.ObjectMessage;
-import javax.naming.Context;
-import javax.naming.InitialContext;
 
 /**
  *
@@ -135,7 +128,7 @@ public void onMessage(Message message) {
                 if (datafile != null) {
                     Dataset dataset = datafile.getOwner();
                     if (dataset != null && dataset.getId() != null) {
-                        datasetService.removeDatasetLock(dataset.getId());
+                        datasetService.removeDatasetLock(dataset.getId(), DatasetLock.Reason.Ingest);
                     }
                 }
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java b/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
index c335436f5b7..b2f4171a190 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
@@ -20,7 +20,7 @@
 
 /**
  * A workflow whose current step waits for an external system to complete a
- * (probably lengthy) process. Meanwhile, it sits in the database, pending.
+ * (probably lengthy) process. Meanwhile, it sits in the database, pending away.
  *
  * @author michael
  */
@@ -38,6 +38,7 @@ public class PendingWorkflowInvocation implements Serializable {
 
     @OneToOne
     Dataset dataset;
+    
     long nextVersionNumber;
     long nextMinorVersionNumber;
 
@@ -165,5 +166,4 @@ public int getTypeOrdinal() {
     public void setTypeOrdinal(int typeOrdinal) {
         this.typeOrdinal = typeOrdinal;
     }
-    
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowContext.java b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowContext.java
index 09129a6d796..0cca2bd64a9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowContext.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowContext.java
@@ -6,8 +6,8 @@
 import java.util.UUID;
 
 /**
- * The context in which the workflow is performed. Contains information steps might
- * need, such as the dataset being worked on an version data.
+ * The context in which a workflow is performed. Contains information steps might
+ * need, such as the dataset being worked on and version data.
 *
 * Design-wise, this class allows us to add parameters to {@link WorkflowStep} without
 * changing its method signatures, which would break client code.
@@ -29,7 +29,16 @@ public enum TriggerType {
 
     private String invocationId = UUID.randomUUID().toString();
 
-    public WorkflowContext(DataverseRequest request, Dataset dataset, long nextVersionNumber, long nextMinorVersionNumber, TriggerType type, String doiProvider) {
+    public WorkflowContext( DataverseRequest aRequest, Dataset aDataset, String doiProvider, TriggerType aTriggerType ) {
+        this( aRequest, aDataset,
+                aDataset.getLatestVersion().getVersionNumber(),
+                aDataset.getLatestVersion().getMinorVersionNumber(),
+                aTriggerType,
+                doiProvider);
+    }
+    
+    public WorkflowContext(DataverseRequest request, Dataset dataset, long nextVersionNumber,
+            long nextMinorVersionNumber, TriggerType type, String doiProvider) {
         this.request = request;
         this.dataset = dataset;
         this.nextVersionNumber = nextVersionNumber;
@@ -74,5 +83,4 @@ public TriggerType getType() {
         return type;
     }
-    
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
index 3791e9f3851..d55200ee038 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
@@ -1,8 +1,11 @@
 package edu.harvard.iq.dataverse.workflow;
 
+import edu.harvard.iq.dataverse.DatasetLock;
+import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
 import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.AddLockCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.FinalizeDatasetPublicationCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RemoveLockCommand;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
@@ -17,15 +20,16 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.ServiceConfigurationError;
-import java.util.ServiceLoader;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import javax.ejb.Asynchronous;
 import javax.ejb.EJB;
 import javax.ejb.Stateless;
+import javax.ejb.TransactionAttribute;
+import javax.ejb.TransactionAttributeType;
 import javax.persistence.EntityManager;
 import javax.persistence.PersistenceContext;
+import javax.persistence.Query;
 
 /**
  * Service bean for managing and executing {@link Workflow}s
@@ -38,8 +42,11 @@ public class WorkflowServiceBean {
 
     private static final Logger logger = Logger.getLogger(WorkflowServiceBean.class.getName());
     private static final String WORKFLOW_ID_KEY = "WorkflowServiceBean.WorkflowId:";
 
-    @PersistenceContext
+    @PersistenceContext(unitName = "VDCNet-ejbPU")
     EntityManager em;
+    
+    @EJB
+    DatasetServiceBean datasets;
 
     @EJB
     SettingsServiceBean settings;
@@ -76,9 +83,13 @@ public WorkflowServiceBean() {
      *
      * @param wf the workflow to execute.
      * @param ctxt the context in which the workflow is executed.
+     * @throws CommandException If the dataset could not be locked.
     */
-    public void start(Workflow wf, WorkflowContext ctxt) {
-        forward(wf, ctxt, 0);
+    @Asynchronous
+    public void start(Workflow wf, WorkflowContext ctxt) throws CommandException {
+        ctxt = refresh(ctxt);
+        lockDataset(ctxt);
+        forward(wf, ctxt);
     }
 
     /**
@@ -92,37 +103,22 @@ public void start(Workflow wf, WorkflowContext ctxt) {
      * #doResume(edu.harvard.iq.dataverse.workflow.PendingWorkflowInvocation,
     * java.lang.String)
      */
+    @Asynchronous
     public void resume(PendingWorkflowInvocation pending, String body) {
         em.remove(em.merge(pending));
         doResume(pending, body);
     }
+    
 
     @Asynchronous
-    private void forward(Workflow wf, WorkflowContext ctxt, int idx) {
-        WorkflowStepData wsd = wf.getSteps().get(idx);
-        WorkflowStep step = createStep(wsd);
-        WorkflowStepResult res = step.run(ctxt);
-
-        if (res == WorkflowStepResult.OK) {
-            if (idx == wf.getSteps().size() - 1) {
-                workflowCompleted(wf, ctxt);
-            } else {
-                forward(wf, ctxt, ++idx);
-            }
-
-        } else if (res instanceof Failure) {
-            logger.log(Level.WARNING, "Workflow {0} failed: {1}", new Object[]{ctxt.getInvocationId(), ((Failure) res).getReason()});
-            rollback(wf, ctxt, (Failure) res, idx - 1);
-
-        } else if (res instanceof Pending) {
-            pauseAndAwait(wf, ctxt, (Pending) res, idx);
-        }
+    private void forward(Workflow wf, WorkflowContext ctxt) {
+        executeSteps(wf, ctxt, 0);
     }
 
-    @Asynchronous
     private void doResume(PendingWorkflowInvocation pending, String body) {
         Workflow wf = pending.getWorkflow();
         List<WorkflowStepData> stepsLeft = wf.getSteps().subList(pending.getPendingStepIdx(), wf.getSteps().size());
+        
         WorkflowStep pendingStep = createStep(stepsLeft.get(0));
         final WorkflowContext ctxt = pending.reCreateContext(roleAssignees);
 
@@ -132,52 +128,129 @@ private void doResume(PendingWorkflowInvocation pending, String body) {
         } else if (res instanceof Pending) {
             pauseAndAwait(wf, ctxt, (Pending) res, pending.getPendingStepIdx());
         } else {
-            forward(wf, ctxt, pending.getPendingStepIdx() + 1);
+            executeSteps(wf, ctxt, pending.getPendingStepIdx() + 1);
         }
     }
 
     @Asynchronous
-    private void rollback(Workflow wf, WorkflowContext ctxt, Failure failure, int idx) {
-        WorkflowStepData wsd = wf.getSteps().get(idx);
-        logger.log(Level.INFO, "{0} rollback of step {1}", new Object[]{ctxt.getInvocationId(), idx});
-        try {
-            createStep(wsd).rollback(ctxt, failure);
-        } finally {
-            if (idx > 0) {
-                rollback(wf, ctxt, failure, --idx);
-            } else {
-                unlockDataset(ctxt);
+    private void rollback(Workflow wf, WorkflowContext ctxt, Failure failure, int lastCompletedStepIdx) {
+        ctxt = refresh(ctxt);
+        final List<WorkflowStepData> steps = wf.getSteps();
+        
+        for ( int stepIdx = lastCompletedStepIdx; stepIdx >= 0; --stepIdx ) {
+            WorkflowStepData wsd = steps.get(stepIdx);
+            WorkflowStep step = createStep(wsd);
+            
+            try {
+                logger.log(Level.INFO, "Workflow {0} step {1}: Rollback", new Object[]{ctxt.getInvocationId(), stepIdx});
+                rollbackStep(step, ctxt, failure);
+                
+            } catch (Exception e) {
+                logger.log(Level.WARNING, "Workflow " + ctxt.getInvocationId()
+                        + " step " + stepIdx + ": Rollback error: " + e.getMessage(), e);
             }
+            
         }
+        
+        logger.log( Level.INFO, "Removing workflow lock");
+        try {
+            engine.submit( new RemoveLockCommand(ctxt.getRequest(), ctxt.getDataset(), DatasetLock.Reason.Workflow) );
+            
+            // Corner case - delete locks generated within this same transaction.
+            Query deleteQuery = em.createQuery("DELETE from DatasetLock l WHERE l.dataset.id=:id AND l.reason=:reason");
+            deleteQuery.setParameter("id", ctxt.getDataset().getId() );
+            deleteQuery.setParameter("reason", DatasetLock.Reason.Workflow );
+            deleteQuery.executeUpdate();
+            
+        } catch (CommandException ex) {
+            logger.log(Level.SEVERE, "Error restoring dataset locks state after rollback: " + ex.getMessage(), ex);
         }
     }
 
     /**
-     * Unlocks the dataset after the workflow is over.
-     * @param ctxt
+     * Execute the passed workflow, starting from {@code initialStepIdx}.
+     * @param wf The workflow to run.
+     * @param ctxt Execution context to run the workflow in.
+     * @param initialStepIdx 0-based index of the first step to run.
      */
-    @Asynchronous
-    private void unlockDataset( WorkflowContext ctxt ) {
-        try {
-            engine.submit( new RemoveLockCommand(ctxt.getRequest(), ctxt.getDataset()) );
-        } catch (CommandException ex) {
-            logger.log(Level.SEVERE, "Cannot unlock dataset after rollback: " + ex.getMessage(), ex);
+    private void executeSteps(Workflow wf, WorkflowContext ctxt, int initialStepIdx ) {
+        final List<WorkflowStepData> steps = wf.getSteps();
+        
+        for ( int stepIdx = initialStepIdx; stepIdx < steps.size(); stepIdx++ ) {
+            WorkflowStepData wsd = steps.get(stepIdx);
+            WorkflowStep step = createStep(wsd);
+            WorkflowStepResult res = runStep(step, ctxt);
+            
+            try {
+                if (res == WorkflowStepResult.OK) {
+                    logger.log(Level.INFO, "Workflow {0} step {1}: OK", new Object[]{ctxt.getInvocationId(), stepIdx});
+                    
+                } else if (res instanceof Failure) {
+                    logger.log(Level.WARNING, "Workflow {0} failed: {1}", new Object[]{ctxt.getInvocationId(), ((Failure) res).getReason()});
+                    rollback(wf, ctxt, (Failure) res, stepIdx-1 );
+                    return;
+                    
+                } else if (res instanceof Pending) {
+                    pauseAndAwait(wf, ctxt, (Pending) res, stepIdx);
+                    return;
+                }
+                
+            } catch ( Exception e ) {
+                logger.log(Level.WARNING, "Workflow {0} step {1}: Uncaught exception: {2}", new Object[]{ctxt.getInvocationId(), stepIdx, e.getMessage()});
+                logger.log(Level.WARNING, "Trace:", e);
+                rollback(wf, ctxt, new Failure("Uncaught exception: " + e.getMessage()), stepIdx-1 );
+                return;
+            }
        }
+        
+        workflowCompleted(wf, ctxt);
+        
    }
+    
+    //////////////////////////////////////////////////////////////
+    // Internal methods to run each step in its own transaction.
+    //
+    
+    @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
+    WorkflowStepResult runStep( WorkflowStep step, WorkflowContext ctxt ) {
+        return step.run(ctxt);
+    }
+    
+    @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
+    WorkflowStepResult resumeStep( WorkflowStep step, WorkflowContext ctxt, Map<String, String> localData, String externalData ) {
+        return step.resume(ctxt, localData, externalData);
+    }
+    
+    @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
+    void rollbackStep( WorkflowStep step, WorkflowContext ctxt, Failure reason ) {
+        step.rollback(ctxt, reason);
+    }
+    
+    @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
+    void lockDataset( WorkflowContext ctxt ) throws CommandException {
+        final DatasetLock datasetLock = new DatasetLock(DatasetLock.Reason.Workflow, ctxt.getRequest().getAuthenticatedUser());
+//        engine.submit(new AddLockCommand(ctxt.getRequest(), ctxt.getDataset(), datasetLock));
+        datasetLock.setDataset(ctxt.getDataset());
+        em.persist(datasetLock);
+        em.flush();
     }
+    
+    //
+    //
+    //////////////////////////////////////////////////////////////
     
     private void pauseAndAwait(Workflow wf, WorkflowContext ctxt, Pending pendingRes, int idx) {
         PendingWorkflowInvocation pending = new PendingWorkflowInvocation(wf, ctxt, pendingRes);
         pending.setPendingStepIdx(idx);
         em.persist(pending);
     }
 
-    @Asynchronous
     private void workflowCompleted(Workflow wf, WorkflowContext ctxt) {
         logger.log(Level.INFO, "Workflow {0} completed.", ctxt.getInvocationId());
         if ( ctxt.getType() == TriggerType.PrePublishDataset ) {
             try {
                 engine.submit( new FinalizeDatasetPublicationCommand(ctxt.getDataset(), ctxt.getDoiProvider(), ctxt.getRequest()) );
-                unlockDataset(ctxt);
-                
+                
             } catch (CommandException ex) {
                 logger.log(Level.SEVERE, "Exception finalizing workflow " + ctxt.getInvocationId() +": " + ex.getMessage(), ex);
                 rollback(wf, ctxt, new Failure("Exception while finalizing the publication: " + ex.getMessage()), wf.steps.size()-1);
@@ -273,5 +346,11 @@ private WorkflowStep createStep(WorkflowStepData wsd) {
         }
         return provider.getStep(wsd.getStepType(), wsd.getStepParameters());
     }
+    
+    private WorkflowContext refresh( WorkflowContext ctxt ) {
+        return new WorkflowContext( ctxt.getRequest(),
+                datasets.find( ctxt.getDataset().getId() ), ctxt.getNextVersionNumber(),
+                ctxt.getNextMinorVersionNumber(), ctxt.getType(), ctxt.getDoiProvider() );
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/HttpSendReceiveClientStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/HttpSendReceiveClientStep.java
index 8d882de5303..3bbd294ee72 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/HttpSendReceiveClientStep.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/HttpSendReceiveClientStep.java
@@ -54,7 +54,7 @@ public WorkflowStepResult run(WorkflowContext context) {
             }
             
         } catch (Exception ex) {
-            logger.log(Level.SEVERE, "Error communicating with remote server: " + ex.getMessage(), ex );
+            logger.log(Level.SEVERE, "Error communicating with remote server: " + ex.getMessage(), ex);
             return new Failure("Error executing request: " + ex.getLocalizedMessage(), "Cannot communicate with remote server.");
         }
     }
@@ -66,6 +66,7 @@ public WorkflowStepResult resume(WorkflowContext context, Map<String, String> internalData, String externalData) {
         if ( pat.matcher(response).matches() ) {
             return OK;
         } else {
+            logger.log(Level.WARNING, "Remote system returned a bad response: {0}", externalData);
             return new Failure("Response from remote server did not match expected one (response:" + response + ")");
response + ")"); } } diff --git a/src/main/resources/META-INF/persistence.xml b/src/main/resources/META-INF/persistence.xml index 9303aa98ea4..8b4e33858ac 100644 --- a/src/main/resources/META-INF/persistence.xml +++ b/src/main/resources/META-INF/persistence.xml @@ -15,8 +15,9 @@ - + + +
+
+

+ +

+ + + + + + + + + + + + +
+
+
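The commits above are the heart of the multiple-locks refactoring: DatasetServiceBean, the commands, and WorkflowServiceBean all stop going through a single getDatasetLock() accessor and start asking the dataset about locks of a given reason. The Dataset entity itself is not shown in this patch series, so the following is only a sketch of what the new call sites imply; the method names (getLocks, isLocked, isLockedFor, addLock, removeLock) are taken straight from the diff, but the field, its JPA mapping, and the method bodies are assumptions:

import java.util.HashSet;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.OneToMany;

// Sketch only: the real Dataset entity has many more fields and annotations.
public class Dataset /* extends DvObjectContainer in the real code */ {

    // Assumed mapping: one dataset owns many locks, each carrying a Reason.
    @OneToMany(mappedBy = "dataset", cascade = {CascadeType.MERGE, CascadeType.REMOVE})
    private Set<DatasetLock> datasetLocks = new HashSet<>();

    public Set<DatasetLock> getLocks() {
        return datasetLocks;
    }

    public boolean isLocked() {
        return !datasetLocks.isEmpty();
    }

    // Replaces the getDatasetLock().getReason().equals(...) checks seen throughout the diff.
    public boolean isLockedFor(DatasetLock.Reason reason) {
        return datasetLocks.stream().anyMatch(lock -> lock.getReason() == reason);
    }

    public void addLock(DatasetLock lock) {
        datasetLocks.add(lock);
    }

    public void removeLock(DatasetLock lock) {
        datasetLocks.remove(lock);
    }
}

Under this model a dataset can hold Ingest, InReview, and Workflow locks simultaneously, which is why PublishDatasetCommand removes the InReview and Ingest locks separately while FinalizeDatasetPublicationCommand removes only the Workflow lock.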
From 8a48b4f8aaeb0dae72e8da7f3a158c8eea1edcc7 Mon Sep 17 00:00:00 2001
From: matthew-a-dunlap
Date: Wed, 11 Oct 2017 17:13:52 -0400
Subject: [PATCH 030/483] Documentation edits minishift #4040

---
 .../source/developers/dev-environment.rst    | 20 ++++++++++++-------
 1 file changed, 13 insertions(+), 7 deletions(-)

diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index 7893d64edca..041722fbd6d 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -29,7 +29,7 @@ As a `Java Enterprise Edition
[The rest of this hunk, along with the From/Author lines of the next patch, was lost in extraction when the RST link markup was stripped.]
Date: Thu, 12 Oct 2017 09:50:32 -0400
Subject: [PATCH 031/483] doc fix: all containers run as non-root now #4040

---
 .../source/developers/dev-environment.rst    | 35 ++++++++++---------
 1 file changed, 18 insertions(+), 17 deletions(-)

diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index 041722fbd6d..853c188601d 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -356,7 +356,7 @@ Start Minishift
 ``minishift start --vm-driver=virtualbox``
 
 Make the OpenShift Client Binary (oc) Executable
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 ``eval $(minishift oc-env)``
 
@@ -369,19 +369,6 @@ Note that if you just installed Minishift, you are probably logged in already, b
 
 Use "developer" as the username and a couple characters as the password.
 
-Allow Containers to Run as Root in Minishift
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-For now we're allowing containers to run as root. Until the images are fixed to run as non-root, run the following command:
-
-``oc adm policy add-scc-to-user anyuid -z default --as system:admin``
-
-FIXME: Eventually, we should create containers that don't require root. When we do, run the following command to ensure Dataverse still runs on Minishift after you've stopped allowing containers to run as root:
-
-``oc adm policy remove-scc-from-user anyuid -z default --as system:admin``
-
-For more information on improving Docker images to run as non-root, see "Support Arbitrary User IDs" at https://docs.openshift.org/latest/creating_images/guidelines.html#openshift-origin-specific-guidelines
-
 Create a Minishift Project
 ~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -428,7 +415,7 @@ First, check the IP address of your minishift cluster. If this differs from the
 
 ``minishift ip``
 
-The following curl command is expected to fail until you "expose" the HTTP service.
+The following curl command is expected to fail until you "expose" the HTTP service. Please note that the IP address may be different.
 
 ``curl http://dataverse-glassfish-service-project1.192.168.99.100.nip.io/api/info/version``
 
 Expose the Dataverse web service:
 
@@ -439,7 +426,7 @@ Make Sure the Dataverse API is Working
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-This should show a version number:
+This should show a version number but please note that the IP address may be different:
 
 ``curl http://dataverse-glassfish-service-project1.192.168.99.100.nip.io/api/info/version``
 
@@ -450,7 +437,7 @@ Log into Minishift and Visit Dataverse in your Browser
 
 - username: developer
 - password: developer
 
-Visit https://192.168.99.100:8443/console/project/project1/browse/routes and click http://dataverse-glassfish-service-project1.192.168.99.100.nip.io/ or whatever is shows under "Routes External Traffic". This assumes you named your project ``project1``.
+Visit https://192.168.99.100:8443/console/project/project1/browse/routes and click http://dataverse-glassfish-service-project1.192.168.99.100.nip.io/ or whatever it shows under "Routes External Traffic" (the IP address may be different). This assumes you named your project ``project1``.
 
 You should be able to log in with username "dataverseAdmin" and password "admin".
 
@@ -466,6 +453,20 @@ Making Changes
 
 If you're interested in using Minishift for development and want to change the Dataverse code, you will need to get set up to create Docker images based on your changes and push them to a Docker registry such as Docker Hub. See the section below on Docker for details.
 
+Running Containers as Root in Minishift
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Running containers as root in Minishift is **not** recommended, because for security reasons OpenShift does not support containers that run as root. However, it is good to know how to allow containers to run as root in case you need to work on a Docker image to make it run as non-root.
+
+For more information on improving Docker images to run as non-root, see "Support Arbitrary User IDs" at https://docs.openshift.org/latest/creating_images/guidelines.html#openshift-origin-specific-guidelines
+
+Let's say you have a container that you suspect works fine when it runs as root. You want to see it working as-is before you start hacking on the Dockerfile and entrypoint file. You can configure Minishift to allow containers to run as root with this command:
+
+``oc adm policy add-scc-to-user anyuid -z default --as system:admin``
+
+Once you are done testing you can revert Minishift back to not allowing containers to run as root with this command:
+
+``oc adm policy remove-scc-from-user anyuid -z default --as system:admin``
 
 Minishift Resources
 ~~~~~~~~~~~~~~~~~~~
From a460f3627975341895aa69fa2a159de0aacabe68 Mon Sep 17 00:00:00 2001
From: Michael Heppler
Date: Thu, 12 Oct 2017 13:46:36 -0400
Subject: [PATCH 032/483] Fixed render logic bug for file search box on dataset
 pg. [ref #4184]

---
 src/main/webapp/filesFragment.xhtml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml
index dc1cba80f3e..9f99475d3ea 100644
--- a/src/main/webapp/filesFragment.xhtml
+++ b/src/main/webapp/filesFragment.xhtml
@@ -61,7 +61,7 @@
[The body of this hunk was XHTML and was lost in extraction.]
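The Minishift walkthrough patched above smoke-tests the deployment with curl against /api/info/version. If you prefer to script the same check from Java, here is a minimal sketch; the hostname is the example route from the guide and is an assumption, so substitute whatever route your cluster exposes:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class MinishiftSmokeTest {
    public static void main(String[] args) throws Exception {
        // Example route from the guide; replace with the route your cluster shows.
        URL url = new URL("http://dataverse-glassfish-service-project1.192.168.99.100.nip.io/api/info/version");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line); // expect a JSON payload that includes the version
            }
        }
    }
}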
From e9a62a6d0a39e1605a41b8259224e84ede609b54 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 12 Oct 2017 14:08:21 -0400 Subject: [PATCH 033/483] after three failed login attempts, make them do math #3153 --- .../edu/harvard/iq/dataverse/LoginPage.java | 50 +++++++++++++++++++ .../iq/dataverse/SendFeedbackDialog.java | 2 + src/main/webapp/loginpage.xhtml | 15 ++---- 3 files changed, 56 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java index 99a1af7571a..b119939f47d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java @@ -21,12 +21,15 @@ import java.util.Iterator; import java.util.LinkedList; import java.util.List; +import java.util.Random; import java.util.logging.Level; import java.util.logging.Logger; import javax.ejb.EJB; import javax.faces.application.FacesMessage; +import javax.faces.component.UIComponent; import javax.faces.context.FacesContext; import javax.faces.event.AjaxBehaviorEvent; +import javax.faces.validator.ValidatorException; import javax.faces.view.ViewScoped; import javax.inject.Inject; import javax.inject.Named; @@ -101,6 +104,11 @@ public enum EditMode {LOGIN, SUCCESS, FAILED}; private String redirectPage = "dataverse.xhtml"; private AuthenticationProvider authProvider; + private int numFailedLoginAttempts; + Random random; + long op1; + long op2; + long userSum; public void init() { Iterator credentialsIterator = authSvc.getAuthenticationProviderIdsOfType( CredentialsAuthenticationProvider.class ).iterator(); @@ -109,6 +117,7 @@ public void init() { } resetFilledCredentials(null); authProvider = authSvc.getAuthenticationProvider(systemConfig.getDefaultAuthProvider()); + random = new Random(); } public List listCredentialsAuthenticationProviders() { @@ -179,6 +188,9 @@ public String login() { } catch (AuthenticationFailedException ex) { + numFailedLoginAttempts++; + op1 = new Long(random.nextInt(10)); + op2 = new Long(random.nextInt(10)); AuthenticationResponse response = ex.getResponse(); switch ( response.getStatus() ) { case FAIL: @@ -256,4 +268,42 @@ public String getLoginButtonText() { return BundleUtil.getStringFromBundle("login.button", Arrays.asList("???")); } } + + public int getNumFailedLoginAttempts() { + return numFailedLoginAttempts; + } + + public boolean isRequireExtraValidation() { + if (numFailedLoginAttempts > 2) { + return true; + } else { + return false; + } + } + + public long getOp1() { + return op1; + } + + public long getOp2() { + return op2; + } + + public long getUserSum() { + return userSum; + } + + public void setUserSum(long userSum) { + this.userSum = userSum; + } + + // TODO: Consolidate with SendFeedbackDialog.validateUserSum? + public void validateUserSum(FacesContext context, UIComponent component, Object value) throws ValidatorException { + if (op1 + op2 != (Long) value) { + // The FacesMessage text is on the xhtml side. 
+            FacesMessage msg = new FacesMessage("");
+            throw new ValidatorException(msg);
+        }
+    }
+
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
index d68a610bd1a..67d6e673438 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
@@ -31,6 +31,7 @@ public class SendFeedbackDialog implements java.io.Serializable {
     private String userMessage = "";
     private String messageSubject = "";
     private String messageTo = "";
+    // FIXME: Remove "support@thedata.org". There's no reason to email the Dataverse *project*. People should email the *installation* instead.
     private String defaultRecipientEmail = "support@thedata.org";
     Long op1, op2, userSum;
     // Either the dataverse or the dataset that the message is pertaining to
@@ -161,6 +162,7 @@ public void validateUserSum(FacesContext context, UIComponent component, Object
 
         if (op1 + op2 !=(Long)value) {
+            // TODO: Remove this English "Sum is incorrect" string. contactFormFragment.xhtml uses contact.sum.invalid instead.
             FacesMessage msg = new FacesMessage("Sum is incorrect, please try again.");
             msg.setSeverity(FacesMessage.SEVERITY_ERROR);
diff --git a/src/main/webapp/loginpage.xhtml b/src/main/webapp/loginpage.xhtml
index 7a0a7648cf9..6ded99c5eb6 100644
--- a/src/main/webapp/loginpage.xhtml
+++ b/src/main/webapp/loginpage.xhtml
@@ -74,22 +74,15 @@
[The body of this hunk was XHTML and was mostly lost in extraction; the surviving fragment shows the new input is wired with validatorMessage="#{bundle['contact.sum.invalid']}" and validator="#{LoginPage.validateUserSum}".]
From ef7a6121f362e70fa66ec3f7ee4ec4998ec7f65c Mon Sep 17 00:00:00 2001
From: Derek Murphy
Date: Thu, 12 Oct 2017 14:27:23 -0400
Subject: [PATCH 034/483] Added new message text in bundle #4188

Added new messaging for Submit for Review modal.
---
 src/main/java/Bundle.properties | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index dc0219f519c..e9633883150 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1117,7 +1117,7 @@ dataset.publish.header=Publish Dataset
 dataset.rejectBtn=Return to Author
 dataset.submitBtn=Submit for Review
 dataset.disabledSubmittedBtn=Submitted for Review
-dataset.submitMessage=Submit this dataset for review by the curator of this dataverse for possible publishing.
+dataset.submitMessage=You will not be able to make changes to this dataset while it is in review.
 dataset.submit.success=Your dataset has been submitted for review.
 dataset.inreview.infoMessage=This dataset has been submitted for review.
 dataset.submit.failure=Dataset Submission Failed - {0}
From 87e9ac307475cd79cdd4fa6a4d30778fd7eadf99 Mon Sep 17 00:00:00 2001
From: Michael Heppler
Date: Thu, 12 Oct 2017 14:32:52 -0400
Subject: [PATCH 035/483] Fixed render logic bug for download subset on dataset
 pg. Moved popup header text to bundle. [ref #4186]

---
 src/main/java/Bundle.properties | 1 +
 src/main/webapp/dataset.xhtml   | 4 ++--
 src/main/webapp/file.xhtml      | 2 +-
 3 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index dc0219f519c..fb92ea0f4c3 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1275,6 +1275,7 @@ file.restrict=Restrict
 file.unrestrict=Unrestrict
 file.restricted.success=Files "{0}" will be restricted once you click on the Save Changes button.
 file.download.header=Download
+file.download.subset.header=Download Data Subset
 file.preview=Preview:
 file.previewMap=Preview Map:
 file.fileName=File Name
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 78955d3ddae..cacb10cc7d0 100755
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -1206,9 +1206,9 @@
[The body of this hunk was XHTML and was lost in extraction.]
diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml
index b97bb965cd2..e8d2cb6dfeb 100644
--- a/src/main/webapp/file.xhtml
+++ b/src/main/webapp/file.xhtml
@@ -442,7 +442,7 @@
[The body of this hunk was XHTML and was lost in extraction.]
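Before moving on: the math challenge wired up in the LoginPage commit above is driven by numFailedLoginAttempts, op1, and op2. A hypothetical JUnit sketch of the intended three-strikes behavior follows; it is not part of the patch, and the names simply mirror the fields in LoginPage:

import java.util.Random;
import org.junit.Test;
import static org.junit.Assert.assertTrue;

public class LoginMathChallengeTest {

    @Test
    public void challengeAppearsAfterThreeFailedAttempts() {
        Random random = new Random();
        int numFailedLoginAttempts = 0;
        long op1 = 0, op2 = 0;

        // Mirrors the catch block in LoginPage.login(): each failure bumps the
        // counter and draws two fresh single-digit operands.
        for (int attempt = 0; attempt < 3; attempt++) {
            numFailedLoginAttempts++;
            op1 = random.nextInt(10);
            op2 = random.nextInt(10);
        }

        // isRequireExtraValidation() returns true once more than two attempts have failed.
        assertTrue(numFailedLoginAttempts > 2);
        // Both operands are 0..9, so the expected sum never exceeds 18.
        assertTrue(op1 + op2 <= 18);
    }
}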
From 449be20bfd6d1d65cf829874a53aa71d2bf140e9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 12 Oct 2017 15:54:55 -0400 Subject: [PATCH 036/483] don't auto fill "0" as an answer #3153 --- .../java/edu/harvard/iq/dataverse/LoginPage.java | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java index b119939f47d..1f5328f9b35 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java @@ -108,7 +108,7 @@ public enum EditMode {LOGIN, SUCCESS, FAILED}; Random random; long op1; long op2; - long userSum; + Long userSum; public void init() { Iterator credentialsIterator = authSvc.getAuthenticationProviderIdsOfType( CredentialsAuthenticationProvider.class ).iterator(); @@ -289,20 +289,24 @@ public long getOp2() { return op2; } - public long getUserSum() { + public Long getUserSum() { return userSum; } - public void setUserSum(long userSum) { + public void setUserSum(Long userSum) { this.userSum = userSum; } // TODO: Consolidate with SendFeedbackDialog.validateUserSum? public void validateUserSum(FacesContext context, UIComponent component, Object value) throws ValidatorException { + // The FacesMessage text is on the xhtml side. + FacesMessage msg = new FacesMessage(""); + ValidatorException validatorException = new ValidatorException(msg); + if (value == null) { + throw validatorException; + } if (op1 + op2 != (Long) value) { - // The FacesMessage text is on the xhtml side. - FacesMessage msg = new FacesMessage(""); - throw new ValidatorException(msg); + throw validatorException; } } From fd743e284c2dc870aaac5377d9e613c3e61c7c29 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 12 Oct 2017 16:32:13 -0400 Subject: [PATCH 037/483] disallow API token lookup via API by default #3153 --- doc/sphinx-guides/source/api/native-api.rst | 7 ------- .../source/installation/config.rst | 7 +++++++ .../iq/dataverse/api/BuiltinUsers.java | 9 +++++++++ .../settings/SettingsServiceBean.java | 1 + .../iq/dataverse/api/BuiltinUsersIT.java | 19 +++++++++++++++++++ 5 files changed, 36 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 7cce55b81db..756966a610c 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -437,13 +437,6 @@ Place this ``user-add.json`` file in your current directory and run the followin curl -d @user-add.json -H "Content-type:application/json" "$SERVER_URL/api/builtin-users?password=$NEWUSER_PASSWORD&key=$BUILTIN_USERS_KEY" -Retrieving the API Token of a Builtin User -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -To retrieve the API token of a builtin user, given that user's password, use the curl command below:: - - curl "$SERVER_URL/api/builtin-users/$DV_USER_NAME/api-token?password=$DV_USER_PASSWORD" - Roles ~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 31bbed45c07..d42bab467a4 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1204,3 +1204,10 @@ You can replace the default dataset metadata fields that are displayed above fil ``curl http://localhost:8080/api/admin/settings/:CustomDatasetSummaryFields -X PUT -d 'producer,subtitle,alternativeTitle'`` You have to put the datasetFieldType name 
attribute in the :CustomDatasetSummaryFields setting for this to work. + +:AllowApiTokenLookupViaApi +++++++++++++++++++++++++++ + +Dataverse 4.8.1 and below allowed API Token lookup via API but for better security this has been disabled by default. Set this to true if you really want the old behavior. + +``curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:AllowApiTokenLookupViaApi`` diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java index 233d57e1b45..633623719a4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java @@ -10,6 +10,7 @@ import edu.harvard.iq.dataverse.authorization.providers.builtin.PasswordEncryption; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import java.sql.Timestamp; import java.util.Calendar; import java.util.logging.Level; @@ -53,6 +54,14 @@ public class BuiltinUsers extends AbstractApiBean { @GET @Path("{username}/api-token") public Response getApiToken( @PathParam("username") String username, @QueryParam("password") String password ) { + boolean disabled = true; + boolean lookupAllowed = settingsSvc.isTrueForKey(SettingsServiceBean.Key.AllowApiTokenLookupViaApi, false); + if (lookupAllowed) { + disabled = false; + } + if (disabled) { + return error(Status.FORBIDDEN, "This API endpoint has been disabled."); + } BuiltinUser u = null; if (retrievingApiTokenViaEmailEnabled) { u = builtinUserSvc.findByUsernameOrEmail(username); diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 29376f04d97..94024bf5949 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -32,6 +32,7 @@ public class SettingsServiceBean { * So there. 
*/ public enum Key { + AllowApiTokenLookupViaApi, /** * Ordered, comma-separated list of custom fields to show above the fold * on dataset page such as "data_type,sample,pdb" diff --git a/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java index 43b5219195e..99a7b395379 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java @@ -17,6 +17,7 @@ import javax.json.Json; import javax.json.JsonObjectBuilder; import static javax.ws.rs.core.Response.Status.OK; +import static javax.ws.rs.core.Response.Status.FORBIDDEN; import static junit.framework.Assert.assertEquals; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.startsWith; @@ -37,6 +38,11 @@ public class BuiltinUsersIT { @BeforeClass public static void setUp() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + + Response removeIdentifierGenerationStyle = UtilIT.deleteSetting(SettingsServiceBean.Key.AllowApiTokenLookupViaApi); + removeIdentifierGenerationStyle.then().assertThat() + .statusCode(200); + } @Test @@ -171,6 +177,15 @@ public void testLogin() { String createdToken = createdUser.getString("data.apiToken"); logger.info(createdToken); + Response getApiTokenShouldFail = getApiTokenUsingUsername(usernameToCreate, usernameToCreate); + getApiTokenShouldFail.then().assertThat() + .body("message", equalTo("This API endpoint has been disabled.")) + .statusCode(FORBIDDEN.getStatusCode()); + + Response setAllowApiTokenLookupViaApi = UtilIT.setSetting(SettingsServiceBean.Key.AllowApiTokenLookupViaApi, "true"); + setAllowApiTokenLookupViaApi.then().assertThat() + .statusCode(OK.getStatusCode()); + Response getApiTokenUsingUsername = getApiTokenUsingUsername(usernameToCreate, usernameToCreate); getApiTokenUsingUsername.prettyPrint(); assertEquals(200, getApiTokenUsingUsername.getStatusCode()); @@ -189,6 +204,10 @@ public void testLogin() { assertEquals(createdToken, retrievedTokenUsingEmail); } + Response removeIdentifierGenerationStyle = UtilIT.deleteSetting(SettingsServiceBean.Key.AllowApiTokenLookupViaApi); + removeIdentifierGenerationStyle.then().assertThat() + .statusCode(200); + } @Test From 112a776bd1be1c664c5cea82249f17fb5ac82c2f Mon Sep 17 00:00:00 2001 From: Michael Heppler Date: Fri, 13 Oct 2017 08:56:11 -0400 Subject: [PATCH 038/483] Fixed inconsistent style colors and icons in warning popup msg's. [ref #4188] --- src/main/java/Bundle.properties | 6 +-- src/main/webapp/dashboard-users.xhtml | 4 +- src/main/webapp/dataset.xhtml | 60 ++++++++++++------------- src/main/webapp/dataverse.xhtml | 18 ++++---- src/main/webapp/editFilesFragment.xhtml | 4 +- src/main/webapp/file.xhtml | 12 ++--- 6 files changed, 52 insertions(+), 52 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index e9633883150..c09c9c9566a 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1519,10 +1519,10 @@ file.results.btn.sort.option.oldest=Oldest file.results.btn.sort.option.size=Size file.results.btn.sort.option.type=Type file.compute.fileRestricted=File Restricted -file.compute.fileAccessDenied=You cannot compute on this restricted file because you don't have permission to access it. +file.compute.fileAccessDenied=You cannot compute on this restricted file because you do not have permission to access it. 
dataset.compute.datasetCompute=Dataset Compute Not Supported -dataset.compute.datasetAccessDenied=You cannot compute on this dataset because you don't have permission to access all of the restricted files. -dataset.compute.datasetComputeDisabled=You cannot compute on this dataset because this functionality is not enabled yet. Please click on a file to access computing capalibities. +dataset.compute.datasetAccessDenied=You cannot compute on this dataset because you do not have permission to access all of the restricted files. +dataset.compute.datasetComputeDisabled=You cannot compute on this dataset because this functionality is not enabled yet. Please click on a file to access computing features. # dataset-widgets.xhtml dataset.widgets.title=Dataset Thumbnail + Widgets diff --git a/src/main/webapp/dashboard-users.xhtml b/src/main/webapp/dashboard-users.xhtml index 5a970b5c2ed..98e2b6abddf 100644 --- a/src/main/webapp/dashboard-users.xhtml +++ b/src/main/webapp/dashboard-users.xhtml @@ -90,7 +90,7 @@ -
[dashboard-users.xhtml hunk bodies lost in extraction; the surviving fragments show two confirmation dialogs being restyled, including the one around #{DashboardUsersPage.confirmRemoveRolesMessage}]
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 78955d3ddae..5d241f37acc 100755
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -641,7 +641,7 @@
[hunk bodies lost in extraction; the surviving fragments show the popup dialogs for dataset.noValidSelectedFilesForDownload, dataset.requestAccessToRestrictedFiles, dataset.mixedSelectedFilesForDownload, file.deleteDialog.tip, file.deleteDraftDialog.tip, dataset.privateurl.cannotCreate, dataset.privateurl.disableConfirmationText, file.deleteFileDialog.multiple.immediate, file.deleteFileDialog.failed.tip, dataset.compute.datasetComputeDisabled, the deaccession dialogs, the "no selected files" dialogs, dataset.submitMessage, dataset.publish.tip, dataset.republish.tip, and dataset.rejectMessage being restyled with consistent colors and icons]
diff --git a/src/main/webapp/dataverse.xhtml b/src/main/webapp/dataverse.xhtml
index 63df08816d7..55818bdd09c 100644
--- a/src/main/webapp/dataverse.xhtml
+++ b/src/main/webapp/dataverse.xhtml
@@ -587,9 +587,9 @@
[hunk bodies lost in extraction; the surviving fragments show restyled dialogs for dataverse.nopublished.tip, dataverse.publish.tip, dataverse.publish.failed.tip, dataverse.delete.tip, dataverse.link.no.linkable, and dataverse.resetModifications.text]
diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml
index 04877e591a4..9c2fd66383b 100644
--- a/src/main/webapp/editFilesFragment.xhtml
+++ b/src/main/webapp/editFilesFragment.xhtml
@@ -278,9 +278,9 @@
[hunk bodies lost in extraction; the surviving fragments show restyled dialogs for file.deleteFileDialog.tip, file.deleteFileDialog.failed.tip, file.deleteFileDialog.immediate, file.replaced.warning.draft.warningMessage, file.replaced.warning.previous.warningMessage, and file.alreadyDeleted.previous.warningMessage, followed by the file.xhtml hunks listed in this patch's diffstat, including the file.compute.fileAccessDenied dialog]
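For reference, the API token lockdown introduced in patch 037 can be exercised with curl. This is a sketch under stated assumptions: a local installation at localhost:8080 with the admin settings API reachable on localhost, and a hypothetical builtin user "jsmith" with password "notsecret".

    # Disabled by default as of patch 037: the lookup returns 403 Forbidden
    # with the message "This API endpoint has been disabled."
    curl "http://localhost:8080/api/builtin-users/jsmith/api-token?password=notsecret"

    # Opting back in to the pre-4.8.1 behavior via the database setting...
    curl -X PUT -d 'true' "http://localhost:8080/api/admin/settings/:AllowApiTokenLookupViaApi"

    # ...makes the same lookup return the user's API token again.
    curl "http://localhost:8080/api/builtin-users/jsmith/api-token?password=notsecret"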
From d3125d5c55e44b2742e8e547e74f53a5e96d7d7d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 13 Oct 2017 11:06:35 -0400 Subject: [PATCH 039/483] remove FIXME in test now that code is working #4139 I'm not sure when this was fixed. Probably 4.8. --- .../java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java index e60e168d114..46a37e41731 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java @@ -286,8 +286,7 @@ public void testCuratorSendsCommentsToAuthor() { Response authorsChecksForCommentsPostPublication = UtilIT.getNotifications(authorApiToken); authorsChecksForCommentsPostPublication.prettyPrint(); authorsChecksForCommentsPostPublication.then().assertThat() - // FIXME: Why is this ASSIGNROLE and not "your dataset has been published"? - .body("data.notifications[0].type", equalTo("ASSIGNROLE")) + .body("data.notifications[0].type", equalTo("PUBLISHEDDS")) .body("data.notifications[1].type", equalTo("RETURNEDDS")) // .body("data.notifications[1].reasonsForReturn[0].message", equalTo("You forgot to upload any files.")) // .body("data.notifications[1].reasonsForReturn[1].message", equalTo("A README is required.")) From 736a1ca56c444b29318e7bf2e4851c3ad464d9b4 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 13 Oct 2017 11:45:29 -0400 Subject: [PATCH 040/483] tests shouldn't depend on DCM running or being on Mac #4189 --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index d19b000a276..91aa921df67 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -956,6 +956,11 @@ public void testCreateDatasetWithDcmDependency() { .body("message", equalTo("User @" + noPermsUsername + " is not permitted to perform requested action.")) .statusCode(UNAUTHORIZED.getStatusCode()); + boolean stopEarlyBecauseYouDoNotHaveDcmInstalled = true; + if (stopEarlyBecauseYouDoNotHaveDcmInstalled) { + return; + } + boolean stopEarlyToVerifyTheScriptWasCreated = false; if (stopEarlyToVerifyTheScriptWasCreated) { logger.info("On the DCM, does /deposit/gen/upload-" + datasetId + ".bash exist? It should! Creating the dataset should be enough to create it."); @@ -1138,7 +1143,7 @@ public void testDcmChecksumValidationMessages() throws IOException, InterruptedE * "files.sha" and (if checksum validation succeeds) the DCM moves the * files and the "files.sha" file into the uploadFolder. */ - boolean doExtraTesting = true; + boolean doExtraTesting = false; if (doExtraTesting) { From fc94556adfdcb0acf7ad099d2cb2f9546ccc99c3 Mon Sep 17 00:00:00 2001 From: Derek Murphy Date: Fri, 13 Oct 2017 11:47:12 -0400 Subject: [PATCH 041/483] Grammar fix in "was published" notification #4188 Removed a comma that caused a grammatical error in a notification. 
--- src/main/java/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index c09c9c9566a..9ee3b5e489a 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -158,7 +158,7 @@ notification.createDataset={0} was created in {1}. To learn more about what you notification.dataset.management.title=Dataset Management - Dataset User Guide notification.wasSubmittedForReview={0} was submitted for review to be published in {1}. Don''t forget to publish it or send it back to the contributor\! notification.wasReturnedByReviewer={0} was returned by the curator of {1}. -notification.wasPublished={0}, was published in {1}. +notification.wasPublished={0} was published in {1}. notification.worldMap.added={0}, dataset had WorldMap layer data added to it. notification.maplayer.deletefailed=Failed to delete the map layer associated with the restricted file {0} from WorldMap. Please try again, or contact WorldMap and/or Dataverse support. (Dataset: {1}) notification.generic.objectDeleted=The dataverse, dataset, or file for this notification has been deleted. From e2341f36efb641d7b4e14b991fdb3811b831796d Mon Sep 17 00:00:00 2001 From: matthew-a-dunlap Date: Fri, 13 Oct 2017 15:30:39 -0400 Subject: [PATCH 042/483] Logging underly error in failing method #4185 --- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index ef580feb4af..f40bedc21eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -309,11 +309,11 @@ private static boolean generateWorldMapThumbnail(StorageIO storageIO, return false; } } catch (FileNotFoundException fnfe) { - logger.fine("No .img file for this worldmap file yet; giving up."); + logger.fine("No .img file for this worldmap file yet; giving up. Original Error: " + fnfe); return false; } catch (IOException ioex) { - logger.warning("caught IOException trying to open an input stream for worldmap .img file (" + storageIO.getDataFile().getStorageIdentifier() + ")"); + logger.warning("caught IOException trying to open an input stream for worldmap .img file (" + storageIO.getDataFile().getStorageIdentifier() + "). 
Original Error: " + ioex); return false; } From 1f1a79e1f6280d328fc5eafc02bd2f7478d7b6cc Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 13 Oct 2017 15:40:36 -0400 Subject: [PATCH 043/483] assert current behavior: curator can't edit while in review #4139 --- .../dataverse/api/datadeposit/SwordUtil.java | 1 + .../iq/dataverse/api/InReviewWorkflowIT.java | 25 +++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java index a35acfb200e..bbcbb3714a7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java @@ -44,6 +44,7 @@ public static SwordError throwRegularSwordErrorWithoutStackTrace(String error) { public static void datasetLockCheck(Dataset dataset) throws SwordError { DatasetLock datasetLock = dataset.getDatasetLock(); if (datasetLock != null) { + // FIXME: Why is datasetLock.getInfo() sometimes null? String message = "Please try again later. Unable to perform operation due to dataset lock: " + datasetLock.getInfo(); throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, message); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java index 46a37e41731..1152c2c112b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java @@ -2,6 +2,7 @@ import com.jayway.restassured.RestAssured; import com.jayway.restassured.path.json.JsonPath; +import com.jayway.restassured.path.xml.XmlPath; import com.jayway.restassured.response.Response; import edu.harvard.iq.dataverse.authorization.DataverseRole; import java.util.logging.Logger; @@ -12,7 +13,9 @@ import static javax.ws.rs.core.Response.Status.FORBIDDEN; import static javax.ws.rs.core.Response.Status.OK; import static javax.ws.rs.core.Response.Status.UNAUTHORIZED; +import org.hamcrest.CoreMatchers; import static org.hamcrest.CoreMatchers.equalTo; +import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; @@ -148,6 +151,28 @@ public void testCuratorSendsCommentsToAuthor() { .body("message", equalTo("User @" + joeRandomUsername + " is not permitted to perform requested action.")) .statusCode(UNAUTHORIZED.getStatusCode()); + // BEGIN https://github.com/IQSS/dataverse/issues/4139 + /** + * FIXME: We are capturing the current "unable to perform operation due + * to dataset lock" (odd null error and all) for curators trying to make + * changes but for issue 4139 we intend to let curators make edits while + * the dataset is in review, which was the pre-4.8 behavior. + */ + // The curator tries to update the title while the dataset is in review. + Response updateTitleResponse = UtilIT.updateDatasetTitleViaSword(datasetPersistentId, "A Better Title", curatorApiToken); + updateTitleResponse.prettyPrint(); + updateTitleResponse.then().assertThat() + .body("error.summary", equalTo("Please try again later. 
Unable to perform operation due to dataset lock: null")) + .statusCode(BAD_REQUEST.getStatusCode()); + Response atomEntry = UtilIT.getSwordAtomEntry(datasetPersistentId, curatorApiToken); + atomEntry.prettyPrint(); + atomEntry.then().assertThat() + .statusCode(OK.getStatusCode()); + String citation = XmlPath.from(atomEntry.body().asString()).getString("bibliographicCitation"); + System.out.println("citation: " + citation); + Assert.assertTrue(citation.contains("Darwin's Finches")); + // END https://github.com/IQSS/dataverse/issues/4139 + // TODO: test where curator neglecting to leave a comment. Should fail with "reason for return" required. String noComments = ""; JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); From e5df2a8e581730b43b1411052affacca561e889c Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 13 Oct 2017 16:29:46 -0400 Subject: [PATCH 044/483] permit metadata updates by curators via SWORD #4139 --- .../api/datadeposit/ContainerManagerImpl.java | 10 ++++++- .../dataverse/api/datadeposit/SwordUtil.java | 12 ++++++-- .../iq/dataverse/api/InReviewWorkflowIT.java | 28 +++++++++++-------- 3 files changed, 35 insertions(+), 15 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java index 3409f419969..ef4c4932d33 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; @@ -129,12 +130,19 @@ public DepositReceipt replaceMetadata(String uri, Deposit deposit, AuthCredentia String globalId = urlManager.getTargetIdentifier(); Dataset dataset = datasetService.findByGlobalId(globalId); if (dataset != null) { - SwordUtil.datasetLockCheck(dataset); Dataverse dvThatOwnsDataset = dataset.getOwner(); UpdateDatasetCommand updateDatasetCommand = new UpdateDatasetCommand(dataset, dvReq); if (!permissionService.isUserAllowedOn(user, updateDatasetCommand, dataset)) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataverse " + dvThatOwnsDataset.getAlias()); } + DatasetLock datasetLock = SwordUtil.getDatasetLock(dataset); + if (datasetLock != null) { + // TODO: This logic to allow only curators to edit datasets should be consolidated into a command. 
+ PublishDatasetCommand publishDatasetCommand = new PublishDatasetCommand(dataset, dvReq, true); + if (!permissionService.isUserAllowedOn(user, publishDatasetCommand, dataset)) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is unable to edit metadata due to dataset lock (" + datasetLock.getReason() + ")."); + } + } DatasetVersion datasetVersion = dataset.getEditVersion(); // erase all metadata before creating populating dataset version List emptyDatasetFields = new ArrayList<>(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java index bbcbb3714a7..f9d2e39203f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java @@ -41,13 +41,21 @@ public static SwordError throwRegularSwordErrorWithoutStackTrace(String error) { return swordError; } + // TODO: Review usages of this datasetLockCheck method to see if there's a better way. Should this check be in commands? public static void datasetLockCheck(Dataset dataset) throws SwordError { DatasetLock datasetLock = dataset.getDatasetLock(); if (datasetLock != null) { - // FIXME: Why is datasetLock.getInfo() sometimes null? - String message = "Please try again later. Unable to perform operation due to dataset lock: " + datasetLock.getInfo(); + String message = "Please try again later. Unable to perform operation due to dataset lock: " + datasetLock.getReason(); throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, message); } } + public static DatasetLock getDatasetLock(Dataset dataset) throws SwordError { + DatasetLock datasetLock = dataset.getDatasetLock(); + if (datasetLock != null) { + return datasetLock; + } + return null; + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java index 1152c2c112b..64887fa6f4a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java @@ -152,25 +152,29 @@ public void testCuratorSendsCommentsToAuthor() { .statusCode(UNAUTHORIZED.getStatusCode()); // BEGIN https://github.com/IQSS/dataverse/issues/4139 - /** - * FIXME: We are capturing the current "unable to perform operation due - * to dataset lock" (odd null error and all) for curators trying to make - * changes but for issue 4139 we intend to let curators make edits while - * the dataset is in review, which was the pre-4.8 behavior. - */ - // The curator tries to update the title while the dataset is in review. - Response updateTitleResponse = UtilIT.updateDatasetTitleViaSword(datasetPersistentId, "A Better Title", curatorApiToken); - updateTitleResponse.prettyPrint(); - updateTitleResponse.then().assertThat() - .body("error.summary", equalTo("Please try again later. Unable to perform operation due to dataset lock: null")) + // The author tries to edit the title after submitting the dataset for review. This is not allowed because the dataset is locked. 
+ Response updateTitleResponseAuthor = UtilIT.updateDatasetTitleViaSword(datasetPersistentId, "New Title from Author", authorApiToken); + updateTitleResponseAuthor.prettyPrint(); + updateTitleResponseAuthor.then().assertThat() + .body("error.summary", equalTo("User " + authorUsername + " " + authorUsername + " is unable to edit metadata due to dataset lock (InReview).")) .statusCode(BAD_REQUEST.getStatusCode()); + + // The curator tries to update the title while the dataset is in review. + Response updateTitleResponseCurator = UtilIT.updateDatasetTitleViaSword(datasetPersistentId, "A Better Title", curatorApiToken); + updateTitleResponseCurator.prettyPrint(); + updateTitleResponseCurator.then().assertThat() + .statusCode(OK.getStatusCode()); Response atomEntry = UtilIT.getSwordAtomEntry(datasetPersistentId, curatorApiToken); atomEntry.prettyPrint(); atomEntry.then().assertThat() .statusCode(OK.getStatusCode()); String citation = XmlPath.from(atomEntry.body().asString()).getString("bibliographicCitation"); System.out.println("citation: " + citation); - Assert.assertTrue(citation.contains("Darwin's Finches")); + Assert.assertTrue(citation.contains("A Better Title")); + // TODO: Can curator add files via SWORD? + // TODO: Can curator edit metadata via native API? + // TODO: Can curator add files via native API? + // TODO: Can curator make edits via GUI? // END https://github.com/IQSS/dataverse/issues/4139 // TODO: test where curator neglecting to leave a comment. Should fail with "reason for return" required. From c758569863faadde32ec2515b909113a90ceec51 Mon Sep 17 00:00:00 2001 From: matthew-a-dunlap Date: Fri, 13 Oct 2017 17:45:23 -0400 Subject: [PATCH 045/483] S3Access aux save bugfix (size == 0) #4185 --- .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 4729051e1ba..bb1cd38d195 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -345,7 +345,7 @@ public void savePathAsAux(Path fileSystemPath, String auxItemTag) throws IOExcep @Override public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) throws IOException { - if (filesize == null || filesize < 0) { + if (filesize == null || filesize <= 0) { saveInputStreamAsAux(inputStream, auxItemTag); } else { if (!this.canWrite()) { From 043752909653d094bb1ebafc7f8ce91b1ac412c8 Mon Sep 17 00:00:00 2001 From: matthew-a-dunlap Date: Fri, 13 Oct 2017 18:06:03 -0400 Subject: [PATCH 046/483] Input stream size test #4185 --- .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index bb1cd38d195..c28f5f6f6dc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -345,7 +345,7 @@ public void savePathAsAux(Path fileSystemPath, String auxItemTag) throws IOExcep @Override public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) throws IOException { - if (filesize == null || filesize <= 0) { + if (filesize == null || filesize < 0) { saveInputStreamAsAux(inputStream, 
auxItemTag); } else { if (!this.canWrite()) { @@ -379,7 +379,7 @@ public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) thr byte[] bytes = IOUtils.toByteArray(inputStream); long length = bytes.length; ObjectMetadata metadata = new ObjectMetadata(); - metadata.setContentLength(length); + //metadata.setContentLength(length); try { s3.putObject(bucketName, destinationKey, inputStream, metadata); } catch (SdkClientException ioex) { From 06a003ad1afc35a471293ef0f2ad8454e3d6ac9a Mon Sep 17 00:00:00 2001 From: Michael Bar-Sinai Date: Sun, 15 Oct 2017 16:12:24 +0300 Subject: [PATCH 047/483] Renamed method removeDatasetLock to removeDatasetLocks, based on @matthew-a-dunlap 's code review. --- .../iq/dataverse/DatasetServiceBean.java | 21 +++++++------------ .../AuthenticationServiceBean.java | 3 ++- .../command/impl/RemoveLockCommand.java | 2 +- .../dataverse/ingest/IngestMessageBean.java | 2 +- .../workflow/WorkflowServiceBean.java | 1 - .../ReturnDatasetToAuthorCommandTest.java | 2 +- 6 files changed, 12 insertions(+), 19 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 546fff79d73..d10bb7912a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -32,7 +32,6 @@ import javax.ejb.Stateless; import javax.ejb.TransactionAttribute; import javax.ejb.TransactionAttributeType; -import javax.inject.Inject; import javax.inject.Named; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; @@ -499,18 +498,6 @@ public boolean checkDatasetLock(Long datasetId) { return lock.size()>0; } - public String checkDatasetLockInfo(Long datasetId) { - String nativeQuery = "SELECT sl.info FROM DatasetLock sl WHERE sl.dataset_id = " + datasetId + " LIMIT 1;"; - String infoMessage; - try { - infoMessage = (String)em.createNativeQuery(nativeQuery).getSingleResult(); - } catch (Exception ex) { - infoMessage = null; - } - - return infoMessage; - } - @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public DatasetLock addDatasetLock(Dataset dataset, DatasetLock lock) { lock.setDataset(dataset); @@ -546,8 +533,14 @@ public DatasetLock addDatasetLock(Long datasetId, DatasetLock.Reason reason, Lon return addDatasetLock(dataset, lock); } + /** + * Removes all {@link DatasetLock}s for the dataset whose id is passed and reason + * is {@code aReason}. + * @param datasetId Id of the dataset whose locks will b removed. + * @param aReason The reason of the locks that will be removed. 
+ */ @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) - public void removeDatasetLock(Long datasetId, DatasetLock.Reason aReason) { + public void removeDatasetLocks(Long datasetId, DatasetLock.Reason aReason) { Dataset dataset = em.find(Dataset.class, datasetId); new HashSet<>(dataset.getLocks()).stream() .filter( l -> l.getReason() == aReason ) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index 401a5d0a932..9e3a438b11b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -114,7 +114,8 @@ public void startup() { registerProviderFactory( new BuiltinAuthenticationProviderFactory(builtinUserServiceBean, passwordValidatorService) ); registerProviderFactory( new ShibAuthenticationProviderFactory() ); registerProviderFactory( new OAuth2AuthenticationProviderFactory() ); - } catch (AuthorizationSetupException ex) { + + } catch (AuthorizationSetupException ex) { logger.log(Level.SEVERE, "Exception setting up the authentication provider factories: " + ex.getMessage(), ex); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RemoveLockCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RemoveLockCommand.java index 9ed38fc6493..b9c2f20f37c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RemoveLockCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RemoveLockCommand.java @@ -27,7 +27,7 @@ public RemoveLockCommand(DataverseRequest aRequest, Dataset aDataset, DatasetLoc @Override protected void executeImpl(CommandContext ctxt) throws CommandException { - ctxt.datasets().removeDatasetLock(dataset.getId(), reason); + ctxt.datasets().removeDatasetLocks(dataset.getId(), reason); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java index 48048fd0d74..c9886dcab13 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java @@ -128,7 +128,7 @@ public void onMessage(Message message) { if (datafile != null) { Dataset dataset = datafile.getOwner(); if (dataset != null && dataset.getId() != null) { - datasetService.removeDatasetLock(dataset.getId(), DatasetLock.Reason.Ingest); + datasetService.removeDatasetLocks(dataset.getId(), DatasetLock.Reason.Ingest); } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java index d55200ee038..4b581883274 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java @@ -5,7 +5,6 @@ import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.AddLockCommand; import edu.harvard.iq.dataverse.engine.command.impl.FinalizeDatasetPublicationCommand; import edu.harvard.iq.dataverse.engine.command.impl.RemoveLockCommand; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; diff --git 
a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java index 9abcea821af..abef20b0e53 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java @@ -89,7 +89,7 @@ public WorkflowComment addWorkflowComment(WorkflowComment comment){ } @Override - public void removeDatasetLock(Long datasetId, DatasetLock.Reason aReason) { + public void removeDatasetLocks(Long datasetId, DatasetLock.Reason aReason) { } }; From 445e1ed54ce9410995d475b184f89bb3ba236b85 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 16 Oct 2017 11:11:58 -0400 Subject: [PATCH 048/483] assert authors (!) and curators can edit metadata in review #4139 #3777 Authors should not be able to edit metadata while in review. The GUI disallows this as of Dataverse 4.8 but it was not fully implemented because authors can make edits via the native API. Also add a sample JSON file for making edits for progress toward closing #3777 some day. --- .../_static/api/dataset-update-metadata.json | 86 +++++++++++++++++++ .../iq/dataverse/api/InReviewWorkflowIT.java | 39 +++++++-- .../edu/harvard/iq/dataverse/api/UtilIT.java | 11 +++ 3 files changed, 131 insertions(+), 5 deletions(-) create mode 100644 doc/sphinx-guides/source/_static/api/dataset-update-metadata.json diff --git a/doc/sphinx-guides/source/_static/api/dataset-update-metadata.json b/doc/sphinx-guides/source/_static/api/dataset-update-metadata.json new file mode 100644 index 00000000000..6e499d4e164 --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/dataset-update-metadata.json @@ -0,0 +1,86 @@ +{ + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "newTitle" + }, + { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Spruce, Sabrina" + } + } + ] + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Spruce, Sabrina" + }, + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "spruce@mailinator.com" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "test" + } + } + ] + }, + { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Other" + ] + }, + { + "typeName": "depositor", + "multiple": false, + "typeClass": "primitive", + "value": "Spruce, Sabrina" + }, + { + "typeName": "dateOfDeposit", + "multiple": false, + "typeClass": "primitive", + "value": "2017-04-19" + } + ] + } + } +} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java index 64887fa6f4a..d7f0cb33191 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java @@ -159,10 +159,10 @@ public void testCuratorSendsCommentsToAuthor() { .body("error.summary", equalTo("User " + authorUsername + " " + authorUsername + " is unable to edit metadata due to dataset lock (InReview).")) .statusCode(BAD_REQUEST.getStatusCode()); - // The curator tries to update the title while the dataset is in review. - Response updateTitleResponseCurator = UtilIT.updateDatasetTitleViaSword(datasetPersistentId, "A Better Title", curatorApiToken); - updateTitleResponseCurator.prettyPrint(); - updateTitleResponseCurator.then().assertThat() + // The curator tries to update the title while the dataset is in review via SWORD. + Response updateTitleResponseCuratorViaSword = UtilIT.updateDatasetTitleViaSword(datasetPersistentId, "A Better Title", curatorApiToken); + updateTitleResponseCuratorViaSword.prettyPrint(); + updateTitleResponseCuratorViaSword.then().assertThat() .statusCode(OK.getStatusCode()); Response atomEntry = UtilIT.getSwordAtomEntry(datasetPersistentId, curatorApiToken); atomEntry.prettyPrint(); @@ -171,8 +171,37 @@ public void testCuratorSendsCommentsToAuthor() { String citation = XmlPath.from(atomEntry.body().asString()).getString("bibliographicCitation"); System.out.println("citation: " + citation); Assert.assertTrue(citation.contains("A Better Title")); + + // The author tries to update the title while the dataset is in review via native. + String pathToJsonFile = "doc/sphinx-guides/source/_static/api/dataset-update-metadata.json"; + Response updateTitleResponseAuthorViaNative = UtilIT.updateDatasetMetadataViaNative(datasetPersistentId, pathToJsonFile, authorApiToken); + updateTitleResponseAuthorViaNative.prettyPrint(); + updateTitleResponseAuthorViaNative.then().assertThat() + // FIXME: This should fail with "unauthorized" or whatever. + .statusCode(OK.getStatusCode()); + Response atomEntryAuthorNative = UtilIT.getSwordAtomEntry(datasetPersistentId, authorApiToken); + atomEntryAuthorNative.prettyPrint(); + atomEntryAuthorNative.then().assertThat() + .statusCode(OK.getStatusCode()); + String citationAuthorNative = XmlPath.from(atomEntryAuthorNative.body().asString()).getString("bibliographicCitation"); + System.out.println("citation: " + citationAuthorNative); + // The author was unable to change the title. + // FIXME: The author *is* able to change the title! This should say "A Better Title"; + Assert.assertTrue(citationAuthorNative.contains("newTitle")); + + // The curator tries to update the title while the dataset is in review via native. + Response updateTitleResponseCuratorViaNative = UtilIT.updateDatasetMetadataViaNative(datasetPersistentId, pathToJsonFile, curatorApiToken); + updateTitleResponseCuratorViaNative.prettyPrint(); + updateTitleResponseCuratorViaNative.then().assertThat() + .statusCode(OK.getStatusCode()); + Response atomEntryCuratorNative = UtilIT.getSwordAtomEntry(datasetPersistentId, curatorApiToken); + atomEntryCuratorNative.prettyPrint(); + atomEntryCuratorNative.then().assertThat() + .statusCode(OK.getStatusCode()); + String citationCuratorNative = XmlPath.from(atomEntryCuratorNative.body().asString()).getString("bibliographicCitation"); + System.out.println("citation: " + citationCuratorNative); + Assert.assertTrue(citationCuratorNative.contains("newTitle")); // TODO: Can curator add files via SWORD? - // TODO: Can curator edit metadata via native API? // TODO: Can curator add files via native API? // TODO: Can curator make edits via GUI? 
// END https://github.com/IQSS/dataverse/issues/4139 diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 0d28e3f86fa..102ea6ed5e1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -328,6 +328,17 @@ static Response updateDatasetTitleViaSword(String persistentId, String newTitle, return updateDatasetResponse; } + // https://github.com/IQSS/dataverse/issues/3777 + static Response updateDatasetMetadataViaNative(String persistentId, String pathToJsonFile, String apiToken) { + String jsonIn = getDatasetJson(pathToJsonFile); + Response response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonIn) + .contentType("application/json") + .put("/api/datasets/:persistentId/versions/:draft?persistentId=" + persistentId); + return response; + } + static private String getDatasetXml(String title, String author, String description) { String xmlIn = "\n" + "\n" From 830fd97c45ed91c41ecfcd0d1f1eb655716f36b6 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 16 Oct 2017 13:53:52 -0400 Subject: [PATCH 049/483] enable Edit button if you can publish #4139 --- .../java/edu/harvard/iq/dataverse/DatasetPage.java | 11 ++++++++++- src/main/webapp/dataset.xhtml | 2 +- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 030553916d3..fae72edb8c1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2661,7 +2661,16 @@ public boolean isLocked() { } return false; } - + + public boolean isLockedFromEdits() { + // Authors are not allowed to edit but curators are allowed. + if (permissionsWrapper.canIssuePublishDatasetCommand(dataset)) { + return false; + } else { + return true; + } + } + public void setLocked(boolean locked) { // empty method, so that we can use DatasetPage.locked in a hidden // input on the page. diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 78955d3ddae..798812cd9f3 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -136,7 +136,7 @@ -
[dataset.xhtml hunk bodies lost in extraction; per its diffstat and subject, patch 049 changes a single line wiring the Edit button to the new lockedFromEdits check, and the remaining fragments, whose patch headers were also lost, show Terms-tab panels keyed on termsOfUseAndAccess.conditions, termsOfUseAndAccess.disclaimer, and termsOfUseAndAccess.studyCompletion being commented out]
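For reference, the native-API edit path wrapped by patch 048's UtilIT.updateDatasetMetadataViaNative helper corresponds to a curl call along these lines. This is a sketch, assuming the standard X-Dataverse-key header; $API_TOKEN and $PERSISTENT_ID are placeholder values, and the JSON file is the sample added in that patch.

    # Update the metadata of the dataset's draft version in place.
    curl -H "X-Dataverse-key: $API_TOKEN" \
      -H "Content-type: application/json" \
      -X PUT --data-binary @dataset-update-metadata.json \
      "http://localhost:8080/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_ID"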
From e589c592737ea422b4ec4c272b20197201b2e7b2 Mon Sep 17 00:00:00 2001 From: Michael Heppler Date: Thu, 26 Oct 2017 16:10:25 -0400 Subject: [PATCH 114/483] Removed references in Dataset Management > User Guide to Additional Information panels in Terms tab for dataset pg (view and create). [ref #4149] --- doc/sphinx-guides/source/user/dataset-management.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index cf6465e3e55..f0e9bd8a6e5 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -225,14 +225,14 @@ your own custom Terms of Use for your Datasets. Custom Terms of Use for Datasets -------------------------------- -If you are unable to use a CC0 waiver for your datasets you are able to set your own custom terms of use. To do so, select "No, do not apply CC0 - "Public Domain Dedication" and a Terms of Use textbox will show up allowing you to enter your own custom terms of use for your dataset. To add more information about the Terms of Use, click on "Additional Information \[+]". +If you are unable to use a CC0 waiver for your datasets you are able to set your own custom terms of use. To do so, select "No, do not apply CC0 - "Public Domain Dedication" and a Terms of Use textbox will show up allowing you to enter your own custom terms of use for your dataset. To add more information about the Terms of Use, we have provided fields like Special Permissions, Restrictions, Citation Requirements, etc. Here is an `example of a Data Usage Agreement `_ for datasets that have de-identified human subject data. Restricted Files + Terms of Access ---------------------------------- -If you restrict any files in your dataset, you will be prompted by a pop-up to enter Terms of Access for the data. This can also be edited in the Terms tab or selecting Terms in the "Edit" dropdown button in the dataset. You may also allow users to request access for your restricted files by enabling "Request Access". To add more information about the Terms of Access, click on "Additional Information \[+]". +If you restrict any files in your dataset, you will be prompted by a pop-up to enter Terms of Access for the data. This can also be edited in the Terms tab or selecting Terms in the "Edit" dropdown button in the dataset. You may also allow users to request access for your restricted files by enabling "Request Access". To add more information about the Terms of Access, we have provided fields like Data Access Place, Availability Status, Contact for Access, etc. **Note:** Some Dataverse installations do not allow for file restriction. 
From 6fc769ccb4c2aca4b5e919ac99353977fb04e890 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 26 Oct 2017 16:34:02 -0400 Subject: [PATCH 115/483] stub out externaltool table #4230 --- .../files/root/external-tools/psi.json | 6 ++ .../source/installation/index.rst | 1 + doc/sphinx-guides/source/installation/psi.rst | 24 ++++++ src/main/java/Bundle.properties | 6 ++ .../edu/harvard/iq/dataverse/api/Admin.java | 35 +++++++- .../dataverse/externaltools/ExternalTool.java | 83 +++++++++++++++++++ 6 files changed, 154 insertions(+), 1 deletion(-) create mode 100644 doc/sphinx-guides/source/_static/installation/files/root/external-tools/psi.json create mode 100644 doc/sphinx-guides/source/installation/psi.rst create mode 100644 src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java diff --git a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/psi.json b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/psi.json new file mode 100644 index 00000000000..f4282199a2d --- /dev/null +++ b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/psi.json @@ -0,0 +1,6 @@ +{ + "displayNameBundleKey": "psi.displayName", + "descriptionBundleKey": "psi.description", + "toolUrl": "https://beta.dataverse.org/custom/DifferentialPrivacyPrototype/", + "toolParameters": {} +} diff --git a/doc/sphinx-guides/source/installation/index.rst b/doc/sphinx-guides/source/installation/index.rst index b8423e77ae5..b835e303c60 100755 --- a/doc/sphinx-guides/source/installation/index.rst +++ b/doc/sphinx-guides/source/installation/index.rst @@ -20,3 +20,4 @@ Installation Guide geoconnect shibboleth oauth2 + psi diff --git a/doc/sphinx-guides/source/installation/psi.rst b/doc/sphinx-guides/source/installation/psi.rst new file mode 100644 index 00000000000..fa7ddbd10a4 --- /dev/null +++ b/doc/sphinx-guides/source/installation/psi.rst @@ -0,0 +1,24 @@ +PSI +=== + +PSI (Ψ) is a private data sharing interface: http://privacytools.seas.harvard.edu/psi + +.. contents:: |toctitle| + :local: + +Introduction +------------ + +FIXME: Link to the User Guide once PSI has been added there. + +Installation +------------ + +To install PSI for use with Dataverse, follow the steps below. + +Add PSI as an External Tool +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Download :download:`psi.json <../_static/installation/files/root/external-tools/psi.json>` + +``curl -X POST -H 'Content-type: application/json' --upload-file psi.json http://localhost:8080/api/admin/externalTools`` diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index de637c3c455..76492a56420 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1697,3 +1697,9 @@ ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. ingest.csv.nullStream=Stream can't be null. + +# PSI +# Do not rename the "psi.displayName" key without changing the addExternalTool method in Admin.java. +psi.displayName=Privacy-Preserving Data Preview +# Do not rename the "psi.description" key without changing the addExternalTool method in Admin.java. 
+psi.description=The link below goes to a tool that helps data depositors release privacy preserving statistics describing a dataset.\n\nThis is done by distributing a privacy budget across different possible statistical calculations. This tool is called the budgeter. Users select which statistics they would like to calculate and are given estimates of how accurately each statistic can be computed. They can also redistribute their privacy budget according to which statistics they think are most valuable in their dataset. \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 92086575199..8f7ceae8e4e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -60,9 +60,11 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; import edu.harvard.iq.dataverse.dataset.DatasetUtil; +import edu.harvard.iq.dataverse.externaltools.ExternalTool; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.userdata.UserListMaker; import edu.harvard.iq.dataverse.userdata.UserListResult; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.StringUtil; import java.math.BigDecimal; import java.util.Date; @@ -246,7 +248,7 @@ public Response checkAuthenticationProviderEnabled(@PathParam("id")String id){ return ok(Boolean.toString(prvs.get(0).isEnabled())); } } - + @DELETE @Path("authenticationProviders/{id}/") public Response deleteAuthenticationProvider( @PathParam("id") String id ) { @@ -261,6 +263,37 @@ public Response deleteAuthenticationProvider( @PathParam("id") String id ) { ? "WARNING: no enabled authentication providers left." : "")); } + @Path("externalTools") + @POST + public Response addExternalTool(String userInput) { + ExternalTool externalTool = new ExternalTool(); + JsonReader jsonReader = Json.createReader(new StringReader(userInput)); + JsonObject jsonObject = jsonReader.readObject(); + // Get display name. + String displayNameBundleKey = jsonObject.getString("displayNameBundleKey"); + externalTool.setDisplayNameBundleKey(displayNameBundleKey); + String displayName = BundleUtil.getStringFromBundle(externalTool.getDisplayNameBundleKey()); + // Get description. 
+ String descriptionBundleKey = jsonObject.getString("descriptionBundleKey"); + externalTool.setDescriptionBundleKey(descriptionBundleKey); + String description = BundleUtil.getStringFromBundle(externalTool.getDescriptionBundleKey()); + // Get URL + String toolUrlString = "toolUrl"; + String toolUrl = jsonObject.getString(toolUrlString); + externalTool.setToolUrl(toolUrl); + // Get parameters + String toolParametersString = "toolParameters"; + JsonObject toolParameters = jsonObject.getJsonObject(toolParametersString); + externalTool.setToolParameters(toolParameters.toString()); + JsonObjectBuilder jab = Json.createObjectBuilder(); + jab.add("displayName", displayName); + jab.add("description", description); + jab.add(toolUrlString, toolUrl); + jab.add(toolParametersString, toolParameters); + em.persist(externalTool); + return ok(jab); + } + @GET @Path("authenticatedUsers/{identifier}/") public Response getAuthenticatedUser(@PathParam("identifier") String identifier) { diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java new file mode 100644 index 00000000000..6f04cf6d17f --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java @@ -0,0 +1,83 @@ +package edu.harvard.iq.dataverse.externaltools; + +import java.io.Serializable; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; + +@Entity +public class ExternalTool implements Serializable { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + /** + * This text appears on the button and refers to a bundle key that can be + * translated. + */ + @Column(nullable = false) + private String displayNameBundleKey; + + /** + * This popup text describes the tool and refers to a bundle key that can be + * translated. + */ + @Column(nullable = false) + private String descriptionBundleKey; + + /** + * The base URL of the tool without parameters. 
+ */ + @Column(nullable = false) + private String toolUrl; + + /** + * Parameters the tool requires such as DataFile DOI and API Token + */ + @Column(nullable = false) + private String toolParameters; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getDisplayNameBundleKey() { + return displayNameBundleKey; + } + + public void setDisplayNameBundleKey(String displayNameBundleKey) { + this.displayNameBundleKey = displayNameBundleKey; + } + + public String getDescriptionBundleKey() { + return descriptionBundleKey; + } + + public void setDescriptionBundleKey(String descriptionBundleKey) { + this.descriptionBundleKey = descriptionBundleKey; + } + + public String getToolUrl() { + return toolUrl; + } + + public void setToolUrl(String toolUrl) { + this.toolUrl = toolUrl; + } + + public String getToolParameters() { + return toolParameters; + } + + public void setToolParameters(String toolParameters) { + this.toolParameters = toolParameters; + } + +} From 9e25438ba6c1b1b0020fce95547234c99826d53a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 27 Oct 2017 11:18:15 -0400 Subject: [PATCH 116/483] make tools show up dynamically in GUI #4230 --- .../edu/harvard/iq/dataverse/DatasetPage.java | 15 +++++++++-- .../dataverse/externaltools/ExternalTool.java | 5 ++++ .../ExternalToolServiceBean.java | 25 +++++++++++++++++++ src/main/webapp/filesFragment.xhtml | 5 ++++ 4 files changed, 48 insertions(+), 2 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 030553916d3..58c582da43a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -84,6 +84,8 @@ import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.ReturnDatasetToAuthorCommand; import edu.harvard.iq.dataverse.engine.command.impl.SubmitDatasetForReviewCommand; +import edu.harvard.iq.dataverse.externaltools.ExternalTool; +import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import java.util.Collections; import javax.faces.event.AjaxBehaviorEvent; @@ -167,6 +169,8 @@ public enum DisplayMode { DataverseRoleServiceBean dataverseRoleService; @EJB PrivateUrlServiceBean privateUrlService; + @EJB + ExternalToolServiceBean externalToolService; @Inject DataverseRequestServiceBean dvRequestService; @Inject @@ -244,6 +248,7 @@ public enum DisplayMode { private boolean removeUnusedTags; private Boolean hasRsyncScript = false; + private List externalTools; public Boolean isHasRsyncScript() { return hasRsyncScript; @@ -1511,7 +1516,9 @@ private String init(boolean initFull) { JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.locked.message"), BundleUtil.getStringFromBundle("dataset.inreview.infoMessage")); } } - + + externalTools = externalToolService.findAll(); + return null; } @@ -3984,5 +3991,9 @@ public List getDatasetSummaryFields() { return DatasetUtil.getDatasetSummaryFields(workingVersion, customFields); } - + + public List getExternalTools() { + return externalTools; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java index 6f04cf6d17f..6067e752ce8 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.externaltools; +import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.Serializable; import javax.persistence.Column; import javax.persistence.Entity; @@ -80,4 +81,8 @@ public void setToolParameters(String toolParameters) { this.toolParameters = toolParameters; } + public String getButtonLabel() { + return BundleUtil.getStringFromBundle(displayNameBundleKey); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java new file mode 100644 index 00000000000..89ea8aa911e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java @@ -0,0 +1,25 @@ +package edu.harvard.iq.dataverse.externaltools; + +import java.util.List; +import java.util.logging.Logger; +import javax.ejb.Stateless; +import javax.inject.Named; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.TypedQuery; + +@Stateless +@Named +public class ExternalToolServiceBean { + + private static final Logger logger = Logger.getLogger(ExternalToolServiceBean.class.getCanonicalName()); + + @PersistenceContext(unitName = "VDCNet-ejbPU") + private EntityManager em; + + public List findAll() { + TypedQuery typedQuery = em.createQuery("SELECT OBJECT(o) FROM ExternalTool AS o ORDER BY o.id", ExternalTool.class); + return typedQuery.getResultList(); + } + +} diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 9f99475d3ea..6bbddacdd01 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -312,6 +312,11 @@ #{bundle['file.tags']} + +
  • + #{tool.buttonLabel} +
  • +
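A minimal sketch (not part of the patch series) of how a tool button gets its label once this commit is applied; the bundle key and value below are illustrative assumptions, not values taken from the patches:

    import edu.harvard.iq.dataverse.externaltools.ExternalTool;

    public class ButtonLabelSketch {
        public static void main(String[] args) {
            // Assumes a hypothetical Bundle.properties entry on the classpath, such as:
            //   externaltools.psi.displayname=Privacy-Preserving Data Preview
            ExternalTool tool = new ExternalTool();
            tool.setDisplayNameBundleKey("externaltools.psi.displayname");
            // getButtonLabel() delegates to BundleUtil.getStringFromBundle(displayNameBundleKey),
            // which is what #{tool.buttonLabel} evaluates in filesFragment.xhtml above.
            System.out.println(tool.getButtonLabel()); // Privacy-Preserving Data Preview
        }
    }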
From 0e326d3df09a87bd4c32231ed176f8607f7cd699 Mon Sep 17 00:00:00 2001 From: Derek Murphy Date: Fri, 27 Oct 2017 15:24:40 -0400 Subject: [PATCH 117/483] Expanded PSI documentation [#4230] Added a short introduction to the PSI tool for sysadmins. --- doc/sphinx-guides/source/installation/psi.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/installation/psi.rst b/doc/sphinx-guides/source/installation/psi.rst index fa7ddbd10a4..0b7b9d5296a 100644 --- a/doc/sphinx-guides/source/installation/psi.rst +++ b/doc/sphinx-guides/source/installation/psi.rst @@ -1,15 +1,14 @@ PSI === -PSI (Ψ) is a private data sharing interface: http://privacytools.seas.harvard.edu/psi - +`PSI (Ψ) <http://privacytools.seas.harvard.edu/psi>`_ is a Private data Sharing Interface. .. contents:: |toctitle| :local: Introduction ------------ -FIXME: Link to the User Guide once PSI has been added there. +The PSI tool can be integrated into Dataverse to allow researchers with sensitive or confidential datasets to make differentially private summary statistics about their data available. The PSI tool is used to introduce just enough noise to the summary statistics to ensure privacy while still allowing a useful (if blurry) window into the contents of the data. This way, Dataverse users who lack the permission to view the raw data can still learn something about that data without any sensitive or private information being leaked. The sensitive data remains safe, while interested parties can learn more about it before they decide to undergo the potentially difficult process of seeking approval to view it. Installation ------------ From 7dd088db43c45cffc36b9cb3f4be6b0a9203b092 Mon Sep 17 00:00:00 2001 From: Derek Murphy Date: Fri, 27 Oct 2017 15:28:04 -0400 Subject: [PATCH 118/483] Fixed sphinx indentation error [4230] Fixed a syntax error. --- doc/sphinx-guides/source/installation/psi.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/psi.rst b/doc/sphinx-guides/source/installation/psi.rst index 0b7b9d5296a..a4a9b50fef6 100644 --- a/doc/sphinx-guides/source/installation/psi.rst +++ b/doc/sphinx-guides/source/installation/psi.rst @@ -2,13 +2,14 @@ PSI === `PSI (Ψ) <http://privacytools.seas.harvard.edu/psi>`_ is a Private data Sharing Interface. + .. contents:: |toctitle| :local: - + Introduction ------------ -The PSI tool can be integrated into Dataverse to allow researchers with sensitive or confidential datasets to make differentially private summary statistics about their data available. The PSI tool is used to introduce just enough noise to the summary statistics to ensure privacy while still allowing a useful (if blurry) window into the contents of the data.
This way, Dataverse users who lack the permission to view the raw data can still learn something about that data without any sensitive or private information being leaked. The sensitive data remains safe, while interested parties can learn more about it before they decide to undergo the potentially difficult process of seeking approval to view it. Installation ------------ From 1b625965835ea1814e2e4534153142dc426f0f79 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 31 Oct 2017 14:57:01 -0400 Subject: [PATCH 119/483] stub out dataset in json-ld format #3793 --- .../edu/harvard/iq/dataverse/DatasetPage.java | 5 +- .../iq/dataverse/dataset/DatasetUtil.java | 89 +++++++++++++++++++ src/main/webapp/dataset.xhtml | 5 ++ src/main/webapp/dataverse_template.xhtml | 1 + 4 files changed, 99 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 030553916d3..fbdb53cbbcd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3984,5 +3984,8 @@ public List getDatasetSummaryFields() { return DatasetUtil.getDatasetSummaryFields(workingVersion, customFields); } - + + public String getJsonLd() { + return DatasetUtil.getJsonLd(workingVersion); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 101c5fb7804..156774172a7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -2,9 +2,13 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetAuthor; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; +import edu.harvard.iq.dataverse.authorization.providers.shib.ShibUserNameFields; +import edu.harvard.iq.dataverse.authorization.providers.shib.ShibUtil; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO; import edu.harvard.iq.dataverse.dataaccess.StorageIO; @@ -33,6 +37,9 @@ import java.util.HashSet; import java.util.Map; import javax.imageio.ImageIO; +import javax.json.Json; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObjectBuilder; import org.apache.commons.io.IOUtils; public class DatasetUtil { @@ -416,4 +423,86 @@ public static List getDatasetSummaryFields(DatasetVersion datasetV return datasetFields; } +/* +{ + "@context": "http://schema.org", + "@type": "Dataset", + "@id": "https://doi.org/10.7910/dvn/icfngt", + "additionalType": "Dataset", + "name": "Replication Data for: Parties, Legislators, and the Origins of Proportional Representation", + "author": [ + { + "@type": "Person", + "name": "Gary W. Cox", + "givenName": "Gary W.", + "familyName": "Cox" + }, + { + "@type": "Person", + "name": "Jon H. Fiva", + "givenName": "Jon H.", + "familyName": "Fiva" + }, + { + "@type": "Person", + "name": "Daniel M. 
Smith", + "givenName": "Daniel M.", + "familyName": "Smith" + } + ], + "datePublished": "2017", + "schemaVersion": "http://datacite.org/schema/kernel-4", + "publisher": { + "@type": "Organization", + "name": "Harvard Dataverse" + }, + "provider": { + "@type": "Organization", + "name": "DataCite" + } +} +*/ + public static String getJsonLd(DatasetVersion workingVersion) { + JsonObjectBuilder job = Json.createObjectBuilder(); + job.add("@context", "http://schema.org"); + job.add("@type", "Dataset"); + // FIXME + job.add("@id", "https://doi.org/10.7910/dvn/icfngt"); + job.add("additionalType", "Dataset"); + job.add("name", workingVersion.getTitle()); + JsonArrayBuilder authors = Json.createArrayBuilder(); + for (DatasetAuthor datasetAuthor : workingVersion.getDatasetAuthors()) { + JsonObjectBuilder author = Json.createObjectBuilder(); + author.add("@type", "Person"); + ShibUserNameFields shibUserNameFields = ShibUtil.findBestFirstAndLastName(null, null, datasetAuthor.getName().getValue().replaceAll(",", "")); + AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo( + shibUserNameFields.getFirstName(), + shibUserNameFields.getLastName(), + "", + "", + "" + ); + author.add("name", displayInfo.getFirstName() + " " + displayInfo.getLastName()); + author.add("givenName", displayInfo.getFirstName()); + author.add("familyName", displayInfo.getLastName()); + authors.add(author); + } + job.add("author", authors); + // FIXME + job.add("datePublished", "2017"); + job.add("schemaVersion", "http://datacite.org/schema/kernel-4"); + job.add("publisher", Json.createObjectBuilder() + .add("@type", "Organization") + // FIXME + .add("name", "Harvard Dataverse") + ); + job.add("provider", Json.createObjectBuilder() + .add("@type", "Organization") + // FIXME + .add("name", "DataCite") + ); + return job.build().toString(); + } + } diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 4a71e3b274f..7d1192140b2 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -34,6 +34,11 @@ + + + diff --git a/src/main/webapp/dataverse_template.xhtml b/src/main/webapp/dataverse_template.xhtml index 1d581b1f19d..5726173e328 100644 --- a/src/main/webapp/dataverse_template.xhtml +++ b/src/main/webapp/dataverse_template.xhtml @@ -14,6 +14,7 @@ <h:outputText value="#{pageTitle}"/> + From cd16df271cd0275ffa9ec93e10d385442d3963f2 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 1 Nov 2017 10:15:49 -0400 Subject: [PATCH 120/483] #4139 Allow author to download files while in review and other minor changes --- src/main/java/Bundle.properties | 2 ++ .../edu/harvard/iq/dataverse/DatasetPage.java | 11 ++++++ .../edu/harvard/iq/dataverse/FilePage.java | 13 +++++++ .../iq/dataverse/PermissionServiceBean.java | 23 ++++++++++++ .../file-download-button-fragment.xhtml | 36 +++++++++++++------ src/main/webapp/file.xhtml | 2 ++ src/main/webapp/filesFragment.xhtml | 11 +++--- 7 files changed, 82 insertions(+), 16 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 18c82c4f37a..a4e3fa63c58 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1185,6 +1185,8 @@ dataset.message.uploadFiles=Upload Dataset Files - You can drag and drop files f dataset.message.editMetadata=Edit Dataset Metadata - Add more metadata about this dataset to help others easily find it. dataset.message.editTerms=Edit Dataset Terms - Update this dataset's terms of use.
dataset.message.locked.editNotAllowedInReview=Dataset cannot be edited due to In Review dataset lock. +dataset.message.locked.downloadNotAllowedInReview=Dataset file(s) may not be downloaded due to In Review dataset lock. +dataset.message.locked.downloadNotAllowed=Dataset file(s) may not be downloaded due to dataset lock. dataset.message.locked.editNotAllowed=Dataset cannot be edited due to dataset lock. dataset.message.createSuccess=This dataset has been created. dataset.message.linkSuccess= {0} has been successfully linked to {1}. diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index dcc224bbf8c..17e9a5abb0c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2676,6 +2676,17 @@ public boolean isLockedFromEdits() { } return false; } + + public boolean isLockedFromDownload(){ + + try { + permissionService.checkDownloadFileLock(dataset, dvRequestService.getDataverseRequest(), new CreateDatasetCommand(dataset, dvRequestService.getDataverseRequest())); + } catch (IllegalCommandException ex) { + return true; + } + return false; + + } public void setLocked(boolean locked) { // empty method, so that we can use DatasetPage.locked in a hidden diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index fb1cb5207c8..1c902d33f92 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -15,6 +15,7 @@ import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand; import edu.harvard.iq.dataverse.export.ExportException; @@ -708,6 +709,18 @@ public boolean isLockedFromEdits() { } return false; } + + public boolean isLockedFromDownload(){ + Dataset testDataset = fileMetadata.getDataFile().getOwner(); + try { + permissionService.checkDownloadFileLock(testDataset, dvRequestService.getDataverseRequest(), new CreateDatasetCommand(testDataset, dvRequestService.getDataverseRequest())); + } catch (IllegalCommandException ex) { + return true; + } + return false; + + } public String getPublicDownloadUrl() { try { diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 6431b16488a..3a00be24b78 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -27,6 +27,7 @@ import static edu.harvard.iq.dataverse.engine.command.CommandHelper.CH; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand; import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.Arrays; @@ -563,5 +564,27 @@ public void checkEditDatasetLock(Dataset dataset, DataverseRequest
dataverseRequ } } } + + public void checkDownloadFileLock(Dataset dataset, DataverseRequest dataverseRequest, Command command) throws IllegalCommandException { + if (dataset.isLocked()) { + if (dataset.isLockedFor(DatasetLock.Reason.InReview)) { + // The "InReview" lock is not really a lock for curators or contributors. They can still download. + if (!isUserAllowedOn(dataverseRequest.getUser(), new CreateDatasetCommand(dataset, dataverseRequest, true), dataset)) { + throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.downloadNotAllowedInReview"), command); + } + } + if (dataset.isLockedFor(DatasetLock.Reason.Ingest)) { + throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.downloadNotAllowed"), command); + } + // TODO: Do we need to check for "Workflow"? Should the message be more specific? + if (dataset.isLockedFor(DatasetLock.Reason.Workflow)) { + throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.downloadNotAllowed"), command); + } + // TODO: Do we need to check for "DcmUpload"? Should the message be more specific? + if (dataset.isLockedFor(DatasetLock.Reason.DcmUpload)) { + throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.downloadNotAllowed"), command); + } + } + } } diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index db760786aab..072cb82a603 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -56,12 +56,13 @@
  • #{bundle['file.explore.twoRavens']} @@ -70,14 +71,15 @@
  • #{bundle['file.mapData.worldMap']} #{bundle.explore} @@ -102,7 +104,7 @@ process="@this" action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'explore' )}" update="@widgetVar(downloadPopup)" - disabled="#{fileMetadata.dataFile.ingestInProgress}" + disabled="#{fileMetadata.dataFile.ingestInProgress or lockedFromDownload}" oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');"> #{bundle.explore} @@ -113,7 +115,7 @@ #{bundle.explore} @@ -122,7 +124,7 @@ process="@this" action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'worldMap' )}" update="@widgetVar(downloadPopup)" - disabled="#{fileMetadata.dataFile.ingestInProgress}" + disabled="#{fileMetadata.dataFile.ingestInProgress or lockedFromDownload}" oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');"> #{bundle.explore} @@ -140,6 +142,7 @@ @@ -149,6 +152,7 @@ @@ -181,6 +185,7 @@
  • @@ -188,6 +193,7 @@ @@ -199,11 +205,13 @@
  • #{bundle['file.downloadBtn.format.tab']} #{bundle['file.downloadBtn.format.tab']} @@ -213,11 +221,13 @@
  • #{bundle['file.downloadBtn.format.rdata']} @@ -228,11 +238,13 @@
  • #{bundle['file.downloadBtn.format.var']} @@ -242,18 +254,20 @@
  • + /> + />
  • - -
  • - #{tool.buttonLabel} -
  • -
    + + +
    + + +
    + @@ -366,6 +378,8 @@ + +
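For orientation, a condensed sketch (not from the patch) of the download-lock pattern that DatasetPage and FilePage adopt above; permissionService and dvRequestService stand for the beans those pages already inject:

    public boolean isLockedFromDownload(Dataset dataset) {
        try {
            // checkDownloadFileLock() throws for Ingest, Workflow, and DcmUpload locks;
            // for an In Review lock it throws only when the requesting user is not a
            // curator or contributor, so authors can still download while in review.
            permissionService.checkDownloadFileLock(dataset,
                    dvRequestService.getDataverseRequest(),
                    new CreateDatasetCommand(dataset, dvRequestService.getDataverseRequest()));
        } catch (IllegalCommandException ex) {
            return true; // the xhtml fragments disable download buttons on this flag
        }
        return false;
    }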
From dfcfe01bc2f412b2cd7ce4ff59c510ba80ac1893 Mon Sep 17 00:00:00 2001 From: kcondon Date: Tue, 7 Nov 2017 18:33:56 -0500 Subject: [PATCH 158/483] Update pom.xml Increment version to 4.8.2 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 06da0203cf1..50782bab444 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ edu.harvard.iq dataverse - 4.8.1 + 4.8.2 war dataverse From 2fba915d5100dd92eac1877c672fd8c3de97e3eb Mon Sep 17 00:00:00 2001 From: kcondon Date: Tue, 7 Nov 2017 18:35:24 -0500 Subject: [PATCH 159/483] Update conf.py Increment version to 4.8.2 --- doc/sphinx-guides/source/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 0efeed88168..4f40290c5cf 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -64,9 +64,9 @@ # built documents. # # The short X.Y version. -version = '4.8.1' +version = '4.8.2' # The full version, including alpha/beta/rc tags. -release = '4.8.1' +release = '4.8.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. From b70397794786632539a2913c6dbd66223b3010b7 Mon Sep 17 00:00:00 2001 From: kcondon Date: Tue, 7 Nov 2017 18:36:33 -0500 Subject: [PATCH 160/483] Update index.rst Increment version to 4.8.2 --- doc/sphinx-guides/source/index.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/index.rst b/doc/sphinx-guides/source/index.rst index a06f75cb120..afb73458bc8 100755 --- a/doc/sphinx-guides/source/index.rst +++ b/doc/sphinx-guides/source/index.rst @@ -3,10 +3,10 @@ You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -Dataverse 4.8.1 Guides +Dataverse 4.8.2 Guides ====================== -These guides are for the most recent version of Dataverse. For the guides for **version 4.7.1** please go `here <http://guides.dataverse.org/en/4.7.1/>`_. +These guides are for the most recent version of Dataverse. For the guides for **version 4.8.1** please go `here <http://guides.dataverse.org/en/4.8.1/>`_. .. toctree:: :glob: From d82d9cee02095700fb89623945ab0040065ed994 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 8 Nov 2017 16:52:01 -0500 Subject: [PATCH 161/483] move parseAddExternalToolInput out of ExternalToolHandler #4230 Also add FIXMEs based on code review.
--- .../iq/dataverse/api/ExternalTools.java | 5 +-- .../dataverse/externaltools/ExternalTool.java | 7 ++++ .../externaltools/ExternalToolHandler.java | 28 ++++----------- .../ExternalToolServiceBean.java | 1 + .../externaltools/ExternalToolUtil.java | 34 +++++++++++++++++++ .../ExternalToolHandlerTest.java | 22 ------------ .../externaltools/ExternalToolUtilTest.java | 34 +++++++++++++++++++ 7 files changed, 86 insertions(+), 45 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java index d92697f6c7a..9235f17416b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java @@ -4,7 +4,7 @@ import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.externaltools.ExternalTool; -import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler; +import edu.harvard.iq.dataverse.externaltools.ExternalToolUtil; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.ws.rs.GET; @@ -47,7 +47,8 @@ public Response getExternalToolsByFile(@PathParam("id") Long fileIdFromUser) { @POST public Response addExternalTool(String userInput) { try { - ExternalTool externalTool = ExternalToolHandler.parseAddExternalToolInput(userInput); + ExternalTool externalTool = ExternalToolUtil.parseAddExternalToolInput(userInput); + // FIXME: Write to ActionLogRecord. ExternalTool saved = externalToolService.save(externalTool); return ok(saved.toJson()); } catch (Exception ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java index c24af7c6dcf..7f69a1b137e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java @@ -36,9 +36,11 @@ public class ExternalTool implements Serializable { @Column(nullable = false) private String toolParameters; + // FIXME: Remove this. @Transient private DataFile dataFile; + // FIXME: Remove this. @Transient private ApiToken apiToken; @@ -67,6 +69,10 @@ public void setDescription(String description) { } public String getToolUrl() { + return toolUrl; + } + + public String getToolUrlWithQueryParams() { // TODO: In addition to (or rather than) supporting API tokens as query parameters, support them as HTTP headers. return toolUrl + ExternalToolHandler.getQueryParametersForUrl(this, dataFile, apiToken); } @@ -99,6 +105,7 @@ public void setApiToken(ApiToken apiToken) { this.apiToken = apiToken; } + // FIXME: remove this. 
public String getButtonLabel() { return getDisplayName(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index 4f93e5bd72c..ce48dd6b2f0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -18,27 +18,11 @@ public class ExternalToolHandler { public static final String TOOL_URL = "toolUrl"; public static final String TOOL_PARAMETERS = "toolParameters"; - public static ExternalTool parseAddExternalToolInput(String userInput) { - try { - - ExternalTool externalTool = new ExternalTool(); - JsonReader jsonReader = Json.createReader(new StringReader(userInput)); - JsonObject jsonObject = jsonReader.readObject(); - externalTool.setDisplayName(jsonObject.getString(DISPLAY_NAME)); - externalTool.setDescription(jsonObject.getString(DESCRIPTION)); - externalTool.setToolUrl(jsonObject.getString(TOOL_URL)); - // Get parameters - JsonObject toolParameters = jsonObject.getJsonObject(TOOL_PARAMETERS); - String toolParametersAsString = toolParameters.toString(); - System.out.println("toolParametersAsString: " + toolParametersAsString); - externalTool.setToolParameters(toolParametersAsString); - return externalTool; - } catch (Exception ex) { - System.out.println("ex: " + ex); - return null; - } - } - + // FIXME: Have the entity be part of the handler. +// private ExternalTool externalTool; + // FIXME: Start using these. The are being removed from the entity. +// private DataFile dataFile; +// private ApiToken apiToken; public static String getQueryParametersForUrl(ExternalTool externalTool) { DataFile nullDataFile = null; ApiToken nullApiToken = null; @@ -46,6 +30,8 @@ public static String getQueryParametersForUrl(ExternalTool externalTool) { } // FIXME: Do we really need two methods? + // FIXME: rename to handleRequest() to someday handle sending headers as well as query parameters. + // FIXME: Stop using the arguments when you uncomment the fields above. public static String getQueryParametersForUrl(ExternalTool externalTool, DataFile dataFile, ApiToken apiToken) { String toolParameters = externalTool.getToolParameters(); JsonReader jsonReader = Json.createReader(new StringReader(toolParameters)); diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java index 0ff3f9401c9..9ef5ff4fbe6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java @@ -35,6 +35,7 @@ public List findAll(DataFile file, ApiToken apiToken) { public ExternalTool save(ExternalTool externalTool) { em.persist(externalTool); + // FIXME: Remove this flush. 
em.flush(); return em.merge(externalTool); } diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java new file mode 100644 index 00000000000..4fd2f23db2f --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java @@ -0,0 +1,34 @@ +package edu.harvard.iq.dataverse.externaltools; + +import static edu.harvard.iq.dataverse.externaltools.ExternalToolHandler.DESCRIPTION; +import static edu.harvard.iq.dataverse.externaltools.ExternalToolHandler.DISPLAY_NAME; +import static edu.harvard.iq.dataverse.externaltools.ExternalToolHandler.TOOL_PARAMETERS; +import static edu.harvard.iq.dataverse.externaltools.ExternalToolHandler.TOOL_URL; +import java.io.StringReader; +import javax.json.Json; +import javax.json.JsonObject; +import javax.json.JsonReader; + +public class ExternalToolUtil { + + public static ExternalTool parseAddExternalToolInput(String userInput) { + try { + ExternalTool externalTool = new ExternalTool(); + JsonReader jsonReader = Json.createReader(new StringReader(userInput)); + JsonObject jsonObject = jsonReader.readObject(); + externalTool.setDisplayName(jsonObject.getString(DISPLAY_NAME)); + externalTool.setDescription(jsonObject.getString(DESCRIPTION)); + externalTool.setToolUrl(jsonObject.getString(TOOL_URL)); + // Get parameters + JsonObject toolParameters = jsonObject.getJsonObject(TOOL_PARAMETERS); + String toolParametersAsString = toolParameters.toString(); +// System.out.println("toolParametersAsString: " + toolParametersAsString); + externalTool.setToolParameters(toolParametersAsString); + return externalTool; + } catch (Exception ex) { +// System.out.println("ex: " + ex); + return null; + } + } + +} diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java index d03c4f5a7c0..90cc38cb160 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java @@ -2,34 +2,12 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.authorization.users.ApiToken; -import java.nio.file.Files; -import java.nio.file.Paths; import javax.json.Json; import org.junit.Test; import static org.junit.Assert.assertEquals; public class ExternalToolHandlerTest { - @Test - public void testParseAddExternalToolInput() throws Exception { - assertEquals(null, ExternalToolHandler.parseAddExternalToolInput(null)); - assertEquals(null, ExternalToolHandler.parseAddExternalToolInput("")); - assertEquals(null, ExternalToolHandler.parseAddExternalToolInput(Json.createObjectBuilder().build().toString())); - String psiTool = new String(Files.readAllBytes(Paths.get("doc/sphinx-guides/source/_static/installation/files/root/external-tools/psi.json"))); - System.out.println("psiTool: " + psiTool); - ExternalTool externalTool = ExternalToolHandler.parseAddExternalToolInput(psiTool); - DataFile dataFile = new DataFile(); - dataFile.setId(42l); - externalTool.setDataFile(dataFile); - ApiToken apiToken = new ApiToken(); - apiToken.setTokenString("7196b5ce-f200-4286-8809-03ffdbc255d7"); - externalTool.setApiToken(apiToken); - String toolUrl = externalTool.getToolUrl(); - System.out.println("result: " + toolUrl); - 
assertEquals("https://beta.dataverse.org/custom/DifferentialPrivacyPrototype/UI/code/interface.html?fileid=42&key=7196b5ce-f200-4286-8809-03ffdbc255d7", toolUrl); - - } - @Test public void testGetToolUrlWithOptionalQueryParameters() { ExternalTool externalTool = new ExternalTool(); diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java new file mode 100644 index 00000000000..2386a7542ae --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java @@ -0,0 +1,34 @@ +package edu.harvard.iq.dataverse.externaltools; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.authorization.users.ApiToken; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import javax.json.Json; +import org.junit.Test; +import static org.junit.Assert.*; + +public class ExternalToolUtilTest { + + @Test + public void testParseAddExternalToolInput() throws IOException { + assertEquals(null, ExternalToolUtil.parseAddExternalToolInput(null)); + assertEquals(null, ExternalToolUtil.parseAddExternalToolInput("")); + assertEquals(null, ExternalToolUtil.parseAddExternalToolInput(Json.createObjectBuilder().build().toString())); + String psiTool = new String(Files.readAllBytes(Paths.get("doc/sphinx-guides/source/_static/installation/files/root/external-tools/psi.json"))); + System.out.println("psiTool: " + psiTool); + ExternalTool externalTool = ExternalToolUtil.parseAddExternalToolInput(psiTool); + DataFile dataFile = new DataFile(); + dataFile.setId(42l); + externalTool.setDataFile(dataFile); + ApiToken apiToken = new ApiToken(); + apiToken.setTokenString("7196b5ce-f200-4286-8809-03ffdbc255d7"); + externalTool.setApiToken(apiToken); + String toolUrl = externalTool.getToolUrl(); + System.out.println("result: " + toolUrl); + assertEquals("https://beta.dataverse.org/custom/DifferentialPrivacyPrototype/UI/code/interface.html?fileid=42&key=7196b5ce-f200-4286-8809-03ffdbc255d7", toolUrl); + + } + +} From 562cb3aa28f481209c777f48939f17b4d414c3fc Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 8 Nov 2017 17:15:17 -0500 Subject: [PATCH 162/483] remove external tools from UI and fix test #4230 --- .../edu/harvard/iq/dataverse/DatasetPage.java | 12 ------------ .../edu/harvard/iq/dataverse/FilePage.java | 19 ------------------- .../dataverse/externaltools/ExternalTool.java | 5 ----- src/main/webapp/file.xhtml | 5 ----- src/main/webapp/filesFragment.xhtml | 5 ----- .../externaltools/ExternalToolTest.java | 15 ++++++++++----- .../externaltools/ExternalToolUtilTest.java | 2 +- 7 files changed, 11 insertions(+), 52 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 782a51a173d..e3fd16d1a18 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -85,8 +85,6 @@ import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.ReturnDatasetToAuthorCommand; import edu.harvard.iq.dataverse.engine.command.impl.SubmitDatasetForReviewCommand; -import edu.harvard.iq.dataverse.externaltools.ExternalTool; -import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import java.util.Collections; import javax.faces.event.AjaxBehaviorEvent; @@ -170,8 +168,6 @@ public enum DisplayMode { 
DataverseRoleServiceBean dataverseRoleService; @EJB PrivateUrlServiceBean privateUrlService; - @EJB - ExternalToolServiceBean externalToolService; @Inject DataverseRequestServiceBean dvRequestService; @Inject @@ -249,7 +245,6 @@ public enum DisplayMode { private boolean removeUnusedTags; private Boolean hasRsyncScript = false; - private List externalTools; public Boolean isHasRsyncScript() { return hasRsyncScript; @@ -1523,8 +1518,6 @@ private String init(boolean initFull) { } } - externalTools = externalToolService.findAll(); - return null; } @@ -4073,9 +4066,4 @@ public List getDatasetSummaryFields() { return DatasetUtil.getDatasetSummaryFields(workingVersion, customFields); } - // FIXME: We need to return a list of tools per file. - public List getExternalTools() { - return externalTools; - } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index 774ababef78..71745a0d4e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -9,9 +9,6 @@ import edu.harvard.iq.dataverse.dataaccess.SwiftAccessIO; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.authorization.users.ApiToken; -import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.datasetutility.TwoRavensHelper; import edu.harvard.iq.dataverse.datasetutility.WorldMapPermissionHelper; @@ -24,8 +21,6 @@ import edu.harvard.iq.dataverse.export.ExportException; import edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.export.spi.Exporter; -import edu.harvard.iq.dataverse.externaltools.ExternalTool; -import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.JsfHelper; @@ -69,7 +64,6 @@ public class FilePage implements java.io.Serializable { private Dataset dataset; private List datasetVersionsForTab; private List fileMetadatasForTab; - private List externalTools; @EJB DataFileServiceBean datafileService; @@ -96,8 +90,6 @@ public class FilePage implements java.io.Serializable { DataverseSession session; @EJB EjbDataverseEngine commandEngine; - @EJB - ExternalToolServiceBean externalToolService; @Inject DataverseRequestServiceBean dvRequestService; @@ -164,13 +156,6 @@ public String init() { this.guestbookResponse = this.guestbookResponseService.initGuestbookResponseForFragment(fileMetadata, session); - User user = session.getUser(); - ApiToken apitoken = new ApiToken(); - if (user instanceof AuthenticatedUser) { - apitoken = authService.findApiTokenByUser((AuthenticatedUser) user); - } - externalTools = externalToolService.findAll(file, apitoken); - } else { return permissionsWrapper.notFound(); @@ -775,8 +760,4 @@ public String getPublicDownloadUrl() { return FileUtil.getPublicDownloadUrl(systemConfig.getDataverseSiteUrl(), fileId); } - public List getExternalTools() { - return externalTools; - } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java index 7f69a1b137e..d634ef3c2e5 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java @@ -105,11 +105,6 @@ public void setApiToken(ApiToken apiToken) { this.apiToken = apiToken; } - // FIXME: remove this. - public String getButtonLabel() { - return getDisplayName(); - } - public JsonObjectBuilder toJson() { JsonObjectBuilder jab = Json.createObjectBuilder(); jab.add("id", this.getId()); diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 5613ccf5305..ee0c674948e 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -139,11 +139,6 @@ - -
  • - #{tool.buttonLabel} -
  • -
    diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 4fc4564af09..19f36ab4fca 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -312,11 +312,6 @@ #{bundle['file.tags']} - -
  • - #{tool.buttonLabel} -
  • -
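Before the test changes below, a quick sketch (not from the patch) of the manifest-to-URL round trip as it stands at this commit; the manifest is shaped after the psi.json the tests read, but the tool URL and token value here are illustrative:

    import edu.harvard.iq.dataverse.DataFile;
    import edu.harvard.iq.dataverse.authorization.users.ApiToken;
    import edu.harvard.iq.dataverse.externaltools.ExternalTool;
    import edu.harvard.iq.dataverse.externaltools.ExternalToolUtil;

    public class RoundTripSketch {
        public static void main(String[] args) {
            String manifest = "{"
                    + "\"displayName\": \"Privacy-Preserving Data Preview\","
                    + "\"description\": \"Explore the file without seeing raw values.\","
                    + "\"toolUrl\": \"https://example.com/tool\","
                    + "\"toolParameters\": {\"queryParameters\": ["
                    + "{\"fileid\": \"{fileId}\"}, {\"key\": \"{apiToken}\"}]}"
                    + "}";
            ExternalTool tool = ExternalToolUtil.parseAddExternalToolInput(manifest);
            DataFile dataFile = new DataFile();
            dataFile.setId(42L);
            tool.setDataFile(dataFile); // still a @Transient field at this point in the series
            ApiToken apiToken = new ApiToken();
            apiToken.setTokenString("0000-example-token");
            tool.setApiToken(apiToken);
            // The {fileId} and {apiToken} placeholders resolve into the query string:
            // https://example.com/tool?fileid=42&key=0000-example-token
            System.out.println(tool.getToolUrlWithQueryParams());
        }
    }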
    diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java index bcabfb0d8ef..97d0b615e32 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java @@ -1,17 +1,22 @@ package edu.harvard.iq.dataverse.externaltools; +import javax.json.JsonObjectBuilder; import static org.junit.Assert.assertEquals; import org.junit.Test; public class ExternalToolTest { + // TODO: Write test for toJson. @Test - public void testGetButtonLabel() { - System.out.println("getButtonLabel"); + public void testToJson() { + System.out.println("toJson"); ExternalTool externalTool = new ExternalTool(); - externalTool.setDisplayName("Privacy-Preserving Data Preview"); - String buttonLabel = externalTool.getButtonLabel(); - assertEquals("Privacy-Preserving Data Preview", buttonLabel); + try { + JsonObjectBuilder json = externalTool.toJson(); + System.out.println("JSON: " + json); + } catch (Exception ex) { + assertEquals(null, ex.getMessage()); + } } } diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java index 2386a7542ae..beec571cd16 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java @@ -25,7 +25,7 @@ public void testParseAddExternalToolInput() throws IOException { ApiToken apiToken = new ApiToken(); apiToken.setTokenString("7196b5ce-f200-4286-8809-03ffdbc255d7"); externalTool.setApiToken(apiToken); - String toolUrl = externalTool.getToolUrl(); + String toolUrl = externalTool.getToolUrlWithQueryParams(); System.out.println("result: " + toolUrl); assertEquals("https://beta.dataverse.org/custom/DifferentialPrivacyPrototype/UI/code/interface.html?fileid=42&key=7196b5ce-f200-4286-8809-03ffdbc255d7", toolUrl); From 0e41588e8dcda2498bfab810195e55502d93da35 Mon Sep 17 00:00:00 2001 From: Derek Murphy Date: Thu, 9 Nov 2017 12:51:27 -0500 Subject: [PATCH 163/483] First draft of External Tools doc page [#4230] Created new Configuring External Tools page in the Installation Guide. Currently only includes PSI but will be expanded later --- .../source/installation/external-tools.rst | 25 +++++++++++++++++++ .../source/installation/index.rst | 2 +- doc/sphinx-guides/source/installation/psi.rst | 24 ------------------ 3 files changed, 26 insertions(+), 25 deletions(-) create mode 100644 doc/sphinx-guides/source/installation/external-tools.rst delete mode 100644 doc/sphinx-guides/source/installation/psi.rst diff --git a/doc/sphinx-guides/source/installation/external-tools.rst b/doc/sphinx-guides/source/installation/external-tools.rst new file mode 100644 index 00000000000..8c14114848e --- /dev/null +++ b/doc/sphinx-guides/source/installation/external-tools.rst @@ -0,0 +1,25 @@ +Configuring External Tools +========================== + +The Dataverse Team encourages the wider community to contribute to our software. Sometimes these contributions take the form of external tools that can be integrated into Dataverse as modular features. This page introduces some of these tools. For more information or technical support on these tools, it's recommended that you reach out to their creators. + +.. 
contents:: |toctitle| + :local: + +PSI +---- +`PSI (Ψ) <http://privacytools.seas.harvard.edu/psi>`_ is a Private data Sharing Interface created by the `Privacy Tools for Sharing Research Data project <http://privacytools.seas.harvard.edu>`_. + +When integrated into Dataverse, the PSI tool allows researchers with sensitive tabular data to create safe, non-privacy-leaking summary statistics about their data. The PSI tool protects data using `differential privacy <https://en.wikipedia.org/wiki/Differential_privacy>`_, a framework that provides a mathematical guarantee of privacy protection for any individual represented in the data. The PSI tool allows researchers depositing data to introduce just enough noise into their data's summary statistics to ensure privacy while still allowing a useful (if blurry) window into the contents of the data. + +In this way, Dataverse users who lack the permission to access the raw data can still learn something about that data through its summary statistics, without any sensitive or private information being leaked. The sensitive data remains safe, while interested parties can learn more about the data before they decide to undergo the potentially lengthy and effortful process of seeking approval to view it. + + +Installation +~~~~~~~~~~~~~ + +To install PSI for use with Dataverse, follow the steps below: + +Download :download:`psi.json <../_static/installation/files/root/external-tools/psi.json>` + +``curl -X POST -H 'Content-type: application/json' --upload-file psi.json http://localhost:8080/api/admin/externalTools`` diff --git a/doc/sphinx-guides/source/installation/index.rst b/doc/sphinx-guides/source/installation/index.rst index b835e303c60..94a0d1231d1 100755 --- a/doc/sphinx-guides/source/installation/index.rst +++ b/doc/sphinx-guides/source/installation/index.rst @@ -20,4 +20,4 @@ Installation Guide geoconnect shibboleth oauth2 - psi + external-tools diff --git a/doc/sphinx-guides/source/installation/psi.rst b/doc/sphinx-guides/source/installation/psi.rst deleted file mode 100644 index a4a9b50fef6..00000000000 --- a/doc/sphinx-guides/source/installation/psi.rst +++ /dev/null @@ -1,24 +0,0 @@ -PSI -=== - -`PSI (Ψ) <http://privacytools.seas.harvard.edu/psi>`_ is a Private data Sharing Interface. - -.. contents:: |toctitle| :local: - -Introduction ------------- - -The PSI tool can be integrated into Dataverse to allow researchers with sensitive or confidential datasets to make differentially private summary statistics about their data available. The PSI tool is used to introduce just enough noise to the summary statistics to ensure privacy while still allowing a useful (if blurry) window into the contents of the data. This way, Dataverse users who lack the permission to view the raw data can still learn something about that data without any sensitive or private information being leaked. The sensitive data remains safe, while interested parties can learn more about it before they decide to undergo the potentially difficult process of seeking approval to view it. - -Installation ------------ - -To install PSI for use with Dataverse, follow the steps below.
- -Add PSI as an External Tool -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Download :download:`psi.json <../_static/installation/files/root/external-tools/psi.json>` - -``curl -X POST -H 'Content-type: application/json' --upload-file psi.json http://localhost:8080/api/admin/externalTools`` From 61851ddf000da22a51987486c9b15170270490e4 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 9 Nov 2017 14:15:54 -0500 Subject: [PATCH 164/483] remove Transient fields from ExternalTool #4230 --- .../iq/dataverse/api/ExternalTools.java | 14 ++- .../dataverse/externaltools/ExternalTool.java | 74 ++++++-------- .../externaltools/ExternalToolHandler.java | 93 ++++++------------ .../ExternalToolServiceBean.java | 9 +- .../externaltools/ExternalToolUtil.java | 94 +++++++++++++++--- .../ExternalToolHandlerTest.java | 76 ++------------- .../externaltools/ExternalToolTest.java | 22 ----- .../externaltools/ExternalToolUtilTest.java | 97 ++++++++++++++++++- 8 files changed, 259 insertions(+), 220 deletions(-) delete mode 100644 src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java index 9235f17416b..454b442837d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java @@ -4,9 +4,11 @@ import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.externaltools.ExternalTool; +import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler; import edu.harvard.iq.dataverse.externaltools.ExternalToolUtil; import javax.json.Json; import javax.json.JsonArrayBuilder; +import javax.json.JsonObjectBuilder; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; @@ -21,7 +23,8 @@ public class ExternalTools extends AbstractApiBean { public Response getExternalTools() { JsonArrayBuilder jab = Json.createArrayBuilder(); externalToolService.findAll().forEach((externalTool) -> { - jab.add(externalTool.toJson()); + // FIXME: Show more than the ID in the output. + jab.add(externalTool.getId()); }); return ok(jab); } @@ -38,8 +41,8 @@ public Response getExternalToolsByFile(@PathParam("id") Long fileIdFromUser) { String apiTokenString = getRequestApiKey(); apiToken.setTokenString(apiTokenString); externalToolService.findAll(dataFile, apiToken) - .forEach((externalTool) -> { - tools.add(externalTool.toJson()); + .forEach((externalToolHandler) -> { + tools.add(externalToolHandler.toJson()); }); return ok(tools); } @@ -50,7 +53,10 @@ public Response addExternalTool(String userInput) { ExternalTool externalTool = ExternalToolUtil.parseAddExternalToolInput(userInput); // FIXME: Write to ActionLogRecord. 
ExternalTool saved = externalToolService.save(externalTool); - return ok(saved.toJson()); + JsonObjectBuilder tool = Json.createObjectBuilder(); + tool.add("id", saved.getId()); + tool.add(ExternalToolHandler.DISPLAY_NAME, saved.getDisplayName()); + return ok(tool); } catch (Exception ex) { return error(BAD_REQUEST, ex.getMessage()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java index d634ef3c2e5..ad1fd6ac7d6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java @@ -1,16 +1,11 @@ package edu.harvard.iq.dataverse.externaltools; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.authorization.users.ApiToken; import java.io.Serializable; -import javax.json.Json; -import javax.json.JsonObjectBuilder; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; -import javax.persistence.Transient; @Entity public class ExternalTool implements Serializable { @@ -19,11 +14,17 @@ public class ExternalTool implements Serializable { @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; - // TODO: How are we going to internationize the display name? + /** + * The display name (on the button, for example) of the tool in English. + */ + // TODO: How are we going to internationalize the display name? @Column(nullable = false) private String displayName; - // TODO: How are we going to internationize the description? + /** + * The description of the tool in English. + */ + // TODO: How are we going to internationalize the description? @Column(nullable = false, columnDefinition = "TEXT") private String description; @@ -31,18 +32,32 @@ public class ExternalTool implements Serializable { private String toolUrl; /** - * Parameters the tool requires such as DataFile id and API Token + * Parameters the tool requires such as DataFile id and API Token as a JSON + * object, persisted as a String. */ @Column(nullable = false) private String toolParameters; - // FIXME: Remove this. - @Transient - private DataFile dataFile; + /** + * This default constructor is only here to prevent this error at + * deployment: + * + * Exception Description: The instance creation method + * [...ExternalTool.], with no parameters, does not + * exist, or is not accessible + * + * Don't use it. + */ + @Deprecated + public ExternalTool() { + } - // FIXME: Remove this. - @Transient - private ApiToken apiToken; + public ExternalTool(String displayName, String description, String toolUrl, String toolParameters) { + this.displayName = displayName; + this.description = description; + this.toolUrl = toolUrl; + this.toolParameters = toolParameters; + } public Long getId() { return id; @@ -72,11 +87,6 @@ public String getToolUrl() { return toolUrl; } - public String getToolUrlWithQueryParams() { - // TODO: In addition to (or rather than) supporting API tokens as query parameters, support them as HTTP headers. 
- return toolUrl + ExternalToolHandler.getQueryParametersForUrl(this, dataFile, apiToken); - } - public void setToolUrl(String toolUrl) { this.toolUrl = toolUrl; } @@ -89,30 +99,4 @@ public void setToolParameters(String toolParameters) { this.toolParameters = toolParameters; } - public DataFile getDataFile() { - return dataFile; - } - - public void setDataFile(DataFile dataFile) { - this.dataFile = dataFile; - } - - public ApiToken getApiToken() { - return apiToken; - } - - public void setApiToken(ApiToken apiToken) { - this.apiToken = apiToken; - } - - public JsonObjectBuilder toJson() { - JsonObjectBuilder jab = Json.createObjectBuilder(); - jab.add("id", this.getId()); - jab.add(ExternalToolHandler.DISPLAY_NAME, this.getDisplayName()); - jab.add(ExternalToolHandler.DESCRIPTION, this.getDescription()); - jab.add(ExternalToolHandler.TOOL_URL, this.getToolUrl()); - jab.add(ExternalToolHandler.TOOL_PARAMETERS, this.getToolParameters()); - return jab; - } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index ce48dd6b2f0..527b0df7687 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -2,81 +2,52 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.authorization.users.ApiToken; -import java.io.StringReader; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; +import java.util.logging.Logger; import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonReader; +import javax.json.JsonObjectBuilder; public class ExternalToolHandler { + private static final Logger logger = Logger.getLogger(ExternalToolHandler.class.getCanonicalName()); + public static final String DISPLAY_NAME = "displayName"; public static final String DESCRIPTION = "description"; public static final String TOOL_URL = "toolUrl"; public static final String TOOL_PARAMETERS = "toolParameters"; - // FIXME: Have the entity be part of the handler. -// private ExternalTool externalTool; - // FIXME: Start using these. The are being removed from the entity. -// private DataFile dataFile; -// private ApiToken apiToken; - public static String getQueryParametersForUrl(ExternalTool externalTool) { - DataFile nullDataFile = null; - ApiToken nullApiToken = null; - return getQueryParametersForUrl(externalTool, nullDataFile, nullApiToken); + private final ExternalTool externalTool; + private final DataFile dataFile; + + private final ApiToken apiToken; + + /** + * @param externalTool The database entity. + * @param dataFile Required. + * @param apiToken The apiToken can be null because in the future, "explore" + * tools can be used anonymously. + */ + public ExternalToolHandler(ExternalTool externalTool, DataFile dataFile, ApiToken apiToken) { + this.externalTool = externalTool; + this.dataFile = dataFile; + this.apiToken = apiToken; } - // FIXME: Do we really need two methods? - // FIXME: rename to handleRequest() to someday handle sending headers as well as query parameters. - // FIXME: Stop using the arguments when you uncomment the fields above. 
- public static String getQueryParametersForUrl(ExternalTool externalTool, DataFile dataFile, ApiToken apiToken) { - String toolParameters = externalTool.getToolParameters(); - JsonReader jsonReader = Json.createReader(new StringReader(toolParameters)); - JsonObject obj = jsonReader.readObject(); - JsonArray queryParams = obj.getJsonArray("queryParameters"); - if (queryParams == null || queryParams.isEmpty()) { - return ""; - } - int numQueryParam = queryParams.size(); - if (numQueryParam == 1) { - JsonObject jsonObject = queryParams.getJsonObject(0); - Set firstPair = jsonObject.keySet(); - String key = firstPair.iterator().next(); - String value = jsonObject.getString(key); - return "?" + getQueryParam(key, value, dataFile, apiToken); - } else { - List params = new ArrayList<>(); - queryParams.getValuesAs(JsonObject.class).forEach((queryParam) -> { - queryParam.keySet().forEach((key) -> { - String value = queryParam.getString(key); - params.add(getQueryParam(key, value, dataFile, apiToken)); - }); - }); - return "?" + String.join("&", params); + public DataFile getDataFile() { + return dataFile; + } - } + public ApiToken getApiToken() { + return apiToken; } - private static String getQueryParam(String key, String value, DataFile dataFile, ApiToken apiToken) { - if (dataFile == null) { - return key + "=" + value; - } - String apiTokenString = null; - if (apiToken != null) { - apiTokenString = apiToken.getTokenString(); - } - // TODO: Put reserved words like "{fileId}" and "{apiToken}" into an enum. - switch (value) { - case "{fileId}": - return key + "=" + dataFile.getId(); - case "{apiToken}": - return key + "=" + apiTokenString; - default: - return key + "=" + value; - } + public JsonObjectBuilder toJson() { + JsonObjectBuilder jab = Json.createObjectBuilder(); + jab.add("id", externalTool.getId()); + jab.add(ExternalToolHandler.DISPLAY_NAME, externalTool.getDisplayName()); + jab.add(ExternalToolHandler.DESCRIPTION, externalTool.getDescription()); + jab.add(ExternalToolHandler.TOOL_URL, ExternalToolUtil.getToolUrlWithQueryParams(this, externalTool)); + jab.add(ExternalToolHandler.TOOL_PARAMETERS, externalTool.getToolParameters()); + return jab; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java index 9ef5ff4fbe6..e4276a1c6c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java @@ -24,13 +24,8 @@ public List findAll() { return typedQuery.getResultList(); } - public List findAll(DataFile file, ApiToken apiToken) { - List externalTools = findAll(); - externalTools.forEach((externalTool) -> { - externalTool.setDataFile(file); - externalTool.setApiToken(apiToken); - }); - return externalTools; + public List findAll(DataFile file, ApiToken apiToken) { + return ExternalToolUtil.findAll(findAll(), file, apiToken); } public ExternalTool save(ExternalTool externalTool) { diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java index 4fd2f23db2f..838a4e99132 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java @@ -1,34 +1,106 @@ package edu.harvard.iq.dataverse.externaltools; +import edu.harvard.iq.dataverse.DataFile; 
diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java
index 4fd2f23db2f..838a4e99132 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtil.java
@@ -1,34 +1,106 @@
 package edu.harvard.iq.dataverse.externaltools;
 
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import static edu.harvard.iq.dataverse.externaltools.ExternalToolHandler.DESCRIPTION;
 import static edu.harvard.iq.dataverse.externaltools.ExternalToolHandler.DISPLAY_NAME;
 import static edu.harvard.iq.dataverse.externaltools.ExternalToolHandler.TOOL_PARAMETERS;
 import static edu.harvard.iq.dataverse.externaltools.ExternalToolHandler.TOOL_URL;
 import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.logging.Logger;
 import javax.json.Json;
+import javax.json.JsonArray;
 import javax.json.JsonObject;
 import javax.json.JsonReader;
 
 public class ExternalToolUtil {
 
+    private static final Logger logger = Logger.getLogger(ExternalToolUtil.class.getCanonicalName());
+
+    // Perhaps this could use a better name than "findAll". This method takes a list of tools from the database and
+    // returns "handlers" that have inserted parameters in the right places.
+    public static List<ExternalToolHandler> findAll(List<ExternalTool> externalTools, DataFile file, ApiToken apiToken) {
+        List<ExternalToolHandler> externalToolHandlers = new ArrayList<>();
+        externalTools.forEach((externalTool) -> {
+            ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, file, apiToken);
+            externalToolHandlers.add(externalToolHandler);
+        });
+        return externalToolHandlers;
+
+    }
+
     public static ExternalTool parseAddExternalToolInput(String userInput) {
         try {
-            ExternalTool externalTool = new ExternalTool();
             JsonReader jsonReader = Json.createReader(new StringReader(userInput));
             JsonObject jsonObject = jsonReader.readObject();
-            externalTool.setDisplayName(jsonObject.getString(DISPLAY_NAME));
-            externalTool.setDescription(jsonObject.getString(DESCRIPTION));
-            externalTool.setToolUrl(jsonObject.getString(TOOL_URL));
-            // Get parameters
-            JsonObject toolParameters = jsonObject.getJsonObject(TOOL_PARAMETERS);
-            String toolParametersAsString = toolParameters.toString();
-//            System.out.println("toolParametersAsString: " + toolParametersAsString);
-            externalTool.setToolParameters(toolParametersAsString);
-            return externalTool;
+            String displayName = jsonObject.getString(DISPLAY_NAME);
+            String description = jsonObject.getString(DESCRIPTION);
+            String toolUrl = jsonObject.getString(TOOL_URL);
+            JsonObject toolParametersObj = jsonObject.getJsonObject(TOOL_PARAMETERS);
+            String toolParameters = toolParametersObj.toString();
+            return new ExternalTool(displayName, description, toolUrl, toolParameters);
         } catch (Exception ex) {
-//            System.out.println("ex: " + ex);
+            System.out.println("ex: " + ex);
             return null;
         }
     }
 
+    // FIXME: rename to handleRequest() to someday handle sending headers as well as query parameters.
+    public static String getQueryParametersForUrl(ExternalToolHandler externalToolHandler, ExternalTool externalTool) {
+        DataFile dataFile = externalToolHandler.getDataFile();
+        ApiToken apiToken = externalToolHandler.getApiToken();
+        String toolParameters = externalTool.getToolParameters();
+        JsonReader jsonReader = Json.createReader(new StringReader(toolParameters));
+        JsonObject obj = jsonReader.readObject();
+        JsonArray queryParams = obj.getJsonArray("queryParameters");
+        if (queryParams == null || queryParams.isEmpty()) {
+            return "";
+        }
+        int numQueryParam = queryParams.size();
+        if (numQueryParam == 1) {
+            JsonObject jsonObject = queryParams.getJsonObject(0);
+            Set<String> firstPair = jsonObject.keySet();
+            String key = firstPair.iterator().next();
+            String value = jsonObject.getString(key);
+            return "?" + getQueryParam(key, value, dataFile, apiToken);
+        } else {
+            List<String> params = new ArrayList<>();
+            queryParams.getValuesAs(JsonObject.class).forEach((queryParam) -> {
+                queryParam.keySet().forEach((key) -> {
+                    String value = queryParam.getString(key);
+                    params.add(getQueryParam(key, value, dataFile, apiToken));
+                });
+            });
+            return "?" + String.join("&", params);
+
+        }
+    }
+
+    private static String getQueryParam(String key, String value, DataFile dataFile, ApiToken apiToken) {
+        if (dataFile == null) {
+            logger.info("DataFile was null!");
+            return key + "=" + value;
+        }
+        String apiTokenString = null;
+        if (apiToken != null) {
+            apiTokenString = apiToken.getTokenString();
+        }
+        // TODO: Put reserved words like "{fileId}" and "{apiToken}" into an enum.
+        switch (value) {
+            case "{fileId}":
+                return key + "=" + dataFile.getId();
+            case "{apiToken}":
+                return key + "=" + apiTokenString;
+            default:
+                return key + "=" + value;
+        }
+    }
+
+    public static String getToolUrlWithQueryParams(ExternalToolHandler externalToolHandler, ExternalTool externalTool) {
+        return externalTool.getToolUrl() + getQueryParametersForUrl(externalToolHandler, externalTool);
+    }
+
 }
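// To make the substitution above concrete, a sketch (not part of the patch;
// the tool definition is hypothetical but mirrors the psi.json example the
// tests below rely on):
//
//     String params = "{\"queryParameters\":[{\"fileid\":\"{fileId}\"},{\"key\":\"{apiToken}\"}]}";
//     ExternalTool tool = new ExternalTool("Preview", "A previewer", "http://example.com", params);
//     ExternalToolHandler handler = new ExternalToolHandler(tool, dataFile, apiToken);
//     String url = ExternalToolUtil.getToolUrlWithQueryParams(handler, tool);
//     // => http://example.com?fileid=42&key=<the token string>, given a file with id 42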
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
index 90cc38cb160..bae6c00f375 100644
--- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
@@ -2,79 +2,23 @@
 
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
-import javax.json.Json;
-import org.junit.Test;
+import javax.json.JsonObject;
 import static org.junit.Assert.assertEquals;
+import org.junit.Test;
 
 public class ExternalToolHandlerTest {
 
     @Test
-    public void testGetToolUrlWithOptionalQueryParameters() {
-        ExternalTool externalTool = new ExternalTool();
-        externalTool.setToolUrl("http://example.com");
-
-        // One query parameter.
-        externalTool.setToolParameters(Json.createObjectBuilder()
-                .add("queryParameters", Json.createArrayBuilder()
-                        .add(Json.createObjectBuilder()
-                                .add("key1", "value1")
-                        )
-                )
-                .build().toString());
-        String result1 = ExternalToolHandler.getQueryParametersForUrl(externalTool);
-        System.out.println("result1: " + result1);
-        assertEquals("?key1=value1", ExternalToolHandler.getQueryParametersForUrl(externalTool));
-
-        // Two query parameters.
-        externalTool.setToolParameters(Json.createObjectBuilder()
-                .add("queryParameters", Json.createArrayBuilder()
-                        .add(Json.createObjectBuilder()
-                                .add("key1", "value1")
-                        )
-                        .add(Json.createObjectBuilder()
-                                .add("key2", "value2")
-                        )
-                )
-                .build().toString());
-        String result2 = ExternalToolHandler.getQueryParametersForUrl(externalTool);
-        System.out.println("result2: " + result2);
-        assertEquals("?key1=value1&key2=value2", result2);
-
-        // Two query parameters, both reserved words
-        externalTool.setToolParameters(Json.createObjectBuilder()
-                .add("queryParameters", Json.createArrayBuilder()
-                        .add(Json.createObjectBuilder()
-                                .add("key1", "{fileId}")
-                        )
-                        .add(Json.createObjectBuilder()
-                                .add("key2", "{apiToken}")
-                        )
-                )
-                .build().toString());
+    public void testToJson() {
+        System.out.println("toJson");
+        ExternalTool externalTool = new ExternalTool("displayName", "description", "toolUrl", "{}");
+        externalTool.setId(42l);
         DataFile dataFile = new DataFile();
-        dataFile.setId(42l);
         ApiToken apiToken = new ApiToken();
-        apiToken.setTokenString("7196b5ce-f200-4286-8809-03ffdbc255d7");
-        String result3 = ExternalToolHandler.getQueryParametersForUrl(externalTool, dataFile, apiToken);
-        System.out.println("result3: " + result3);
-        assertEquals("?key1=42&key2=7196b5ce-f200-4286-8809-03ffdbc255d7", result3);
-
-        // Two query parameters, both reserved words, no apiToken
-        externalTool.setToolParameters(Json.createObjectBuilder()
-                .add("queryParameters", Json.createArrayBuilder()
-                        .add(Json.createObjectBuilder()
-                                .add("key1", "{fileId}")
-                        )
-                        .add(Json.createObjectBuilder()
-                                .add("key2", "{apiToken}")
-                        )
-                )
-                .build().toString());
-        ApiToken nullApiToken = null;
-        String result4 = ExternalToolHandler.getQueryParametersForUrl(externalTool, dataFile, nullApiToken);
-        System.out.println("result4: " + result4);
-        assertEquals("?key1=42&key2=null", result4);
+        ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataFile, apiToken);
+        JsonObject json = externalToolHandler.toJson().build();
+        System.out.println("JSON: " + json);
+        assertEquals("displayName", json.getString("displayName"));
     }
-
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java
deleted file mode 100644
index 97d0b615e32..00000000000
--- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package edu.harvard.iq.dataverse.externaltools;
-
-import javax.json.JsonObjectBuilder;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
-
-public class ExternalToolTest {
-
-    // TODO: Write test for toJson.
-    @Test
-    public void testToJson() {
-        System.out.println("toJson");
-        ExternalTool externalTool = new ExternalTool();
-        try {
-            JsonObjectBuilder json = externalTool.toJson();
-            System.out.println("JSON: " + json);
-        } catch (Exception ex) {
-            assertEquals(null, ex.getMessage());
-        }
-    }
-
-}
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java
index beec571cd16..966de13f6b3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolUtilTest.java
@@ -5,9 +5,11 @@
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
 import javax.json.Json;
+import static org.junit.Assert.assertEquals;
 import org.junit.Test;
-import static org.junit.Assert.*;
 
 public class ExternalToolUtilTest {
 
@@ -19,16 +21,103 @@ public void testParseAddExternalToolInput() throws IOException {
         String psiTool = new String(Files.readAllBytes(Paths.get("doc/sphinx-guides/source/_static/installation/files/root/external-tools/psi.json")));
         System.out.println("psiTool: " + psiTool);
         ExternalTool externalTool = ExternalToolUtil.parseAddExternalToolInput(psiTool);
+        assertEquals("Privacy-Preserving Data Preview", externalTool.getDisplayName());
         DataFile dataFile = new DataFile();
         dataFile.setId(42l);
-        externalTool.setDataFile(dataFile);
         ApiToken apiToken = new ApiToken();
         apiToken.setTokenString("7196b5ce-f200-4286-8809-03ffdbc255d7");
-        externalTool.setApiToken(apiToken);
-        String toolUrl = externalTool.getToolUrlWithQueryParams();
+        ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataFile, apiToken);
+        String toolUrl = ExternalToolUtil.getToolUrlWithQueryParams(externalToolHandler, externalTool);
         System.out.println("result: " + toolUrl);
         assertEquals("https://beta.dataverse.org/custom/DifferentialPrivacyPrototype/UI/code/interface.html?fileid=42&key=7196b5ce-f200-4286-8809-03ffdbc255d7", toolUrl);
     }
 
+    @Test
+    public void testGetToolUrlWithOptionalQueryParameters() {
+        String toolUrl = "http://example.com";
+        ExternalTool externalTool = new ExternalTool("displayName", "description", toolUrl, "{}");
+
+        // One query parameter.
+        externalTool.setToolParameters(Json.createObjectBuilder()
+                .add("queryParameters", Json.createArrayBuilder()
+                        .add(Json.createObjectBuilder()
+                                .add("key1", "value1")
+                        )
+                )
+                .build().toString());
+        DataFile nullDataFile = null;
+        ApiToken nullApiToken = null;
+        ExternalToolHandler externalToolHandler1 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken);
+        String result1 = ExternalToolUtil.getQueryParametersForUrl(externalToolHandler1, externalTool);
+        System.out.println("result1: " + result1);
+        assertEquals("?key1=value1", result1);
+
+        // Two query parameters.
+        externalTool.setToolParameters(Json.createObjectBuilder()
+                .add("queryParameters", Json.createArrayBuilder()
+                        .add(Json.createObjectBuilder()
+                                .add("key1", "value1")
+                        )
+                        .add(Json.createObjectBuilder()
+                                .add("key2", "value2")
+                        )
+                )
+                .build().toString());
+        ExternalToolHandler externalToolHandler2 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken);
+        String result2 = ExternalToolUtil.getQueryParametersForUrl(externalToolHandler2, externalTool);
+        System.out.println("result2: " + result2);
+        assertEquals("?key1=value1&key2=value2", result2);
+
+        // Two query parameters, both reserved words
+        externalTool.setToolParameters(Json.createObjectBuilder()
+                .add("queryParameters", Json.createArrayBuilder()
+                        .add(Json.createObjectBuilder()
+                                .add("key1", "{fileId}")
+                        )
+                        .add(Json.createObjectBuilder()
+                                .add("key2", "{apiToken}")
+                        )
+                )
+                .build().toString());
+        DataFile dataFile = new DataFile();
+        dataFile.setId(42l);
+        ApiToken apiToken = new ApiToken();
+        apiToken.setTokenString("7196b5ce-f200-4286-8809-03ffdbc255d7");
+        ExternalToolHandler externalToolHandler3 = new ExternalToolHandler(externalTool, dataFile, apiToken);
+        String result3 = ExternalToolUtil.getQueryParametersForUrl(externalToolHandler3, externalTool);
+        System.out.println("result3: " + result3);
+        assertEquals("?key1=42&key2=7196b5ce-f200-4286-8809-03ffdbc255d7", result3);
+
+        // Two query parameters, both reserved words, no apiToken
+        externalTool.setToolParameters(Json.createObjectBuilder()
+                .add("queryParameters", Json.createArrayBuilder()
+                        .add(Json.createObjectBuilder()
+                                .add("key1", "{fileId}")
+                        )
+                        .add(Json.createObjectBuilder()
+                                .add("key2", "{apiToken}")
+                        )
+                )
+                .build().toString());
+        ExternalToolHandler externalToolHandler4 = new ExternalToolHandler(externalTool, dataFile, nullApiToken);
+        String result4 = ExternalToolUtil.getQueryParametersForUrl(externalToolHandler4, externalTool);
+        System.out.println("result4: " + result4);
+        assertEquals("?key1=42&key2=null", result4);
+    }
+
+    @Test
+    public void testfindAll() {
+        DataFile dataFile = new DataFile();
+        dataFile.setId(42l);
+        ApiToken apiToken = new ApiToken();
+        apiToken.setTokenString("7196b5ce-f200-4286-8809-03ffdbc255d7");
+        ExternalTool externalTool = new ExternalTool("displayName", "description", "http://foo.com", "{}");
+        ExternalToolHandler externalToolHandler4 = new ExternalToolHandler(externalTool, dataFile, apiToken);
+        List<ExternalTool> externalTools = new ArrayList<>();
+        externalTools.add(externalTool);
+        List<ExternalToolHandler> externalToolHandlers = ExternalToolUtil.findAll(externalTools, dataFile, apiToken);
+        assertEquals(dataFile.getId(), externalToolHandlers.get(0).getDataFile().getId());
+    }
+
 }

From f3f66a5944e9fccec50264ac0752810d2255fa9c Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Thu, 9 Nov 2017 14:18:04 -0500
Subject: [PATCH 165/483] remove superfluous em.flush() #4230

---
 .../iq/dataverse/externaltools/ExternalToolServiceBean.java | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java
index e4276a1c6c6..107727d0e60 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java
@@ -30,8 +30,6 @@ public List<ExternalToolHandler> findAll(DataFile file, ApiToken apiToken) {
 
     public ExternalTool save(ExternalTool externalTool) {
         em.persist(externalTool);
-        // FIXME: Remove this flush.
-        em.flush();
         return em.merge(externalTool);
     }
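A note on why the flush was superfluous: under container-managed transactions,
persist() only schedules the INSERT, and the provider flushes pending changes
automatically at transaction commit (or before any query that needs to see
them), so the explicit em.flush() bought nothing here. For reference, the save
method as it stands after this patch:

    public ExternalTool save(ExternalTool externalTool) {
        em.persist(externalTool);
        return em.merge(externalTool); // flushed automatically at commit
    }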
From 6abb931e19e388025fa8d634764bc8e28eebdc4c Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Thu, 9 Nov 2017 15:23:35 -0500
Subject: [PATCH 166/483] write to ActionLogRecord #4230

Also make it possible to read entries via API.
---
 .../actionlogging/ActionLogRecord.java        |  7 +++++
 .../actionlogging/ActionLogServiceBean.java   | 14 +++++++++
 .../edu/harvard/iq/dataverse/api/Admin.java   | 31 +++++++++++++++++++
 .../iq/dataverse/api/ExternalTools.java       |  6 ++--
 .../iq/dataverse/api/ExternalToolsIT.java     | 14 +++++++++
 .../edu/harvard/iq/dataverse/api/UtilIT.java  | 11 +++++++
 6 files changed, 81 insertions(+), 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java
index fd11dbdd0af..6b3ca20a016 100644
--- a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java
+++ b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java
@@ -41,6 +41,8 @@ public enum ActionType {
 
         Auth,
         Admin,
+
+        ExternalTool,
 
         GlobalGroups
     }
@@ -70,6 +72,11 @@ public enum ActionType {
 
     public ActionLogRecord(){}
 
+    /**
+     * @param anActionType the type of action being logged.
+     * @param anActionSubType a free-form string that further describes the action.
+     */
+    // TODO: Add ability to set `info` in constructor.
     public ActionLogRecord( ActionType anActionType, String anActionSubType ) {
         actionType = anActionType;
         actionSubType = anActionSubType;
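// The TODO above points at the fluent style this patch already relies on
// (setInfo returns the record, so it chains); for illustration, the call site
// added later in this same patch reads:
//
//     actionLogSvc.log(new ActionLogRecord(ActionLogRecord.ActionType.ExternalTool, "addExternalTool")
//             .setInfo("External tool added with id " + toolId + "."));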
diff --git a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java
index 0273dcd77a9..bd4bb868d8a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java
@@ -1,11 +1,13 @@
 package edu.harvard.iq.dataverse.actionlogging;
 
 import java.util.Date;
+import java.util.List;
 import javax.ejb.Stateless;
 import javax.ejb.TransactionAttribute;
 import javax.ejb.TransactionAttributeType;
 import javax.persistence.EntityManager;
 import javax.persistence.PersistenceContext;
+import javax.persistence.TypedQuery;
 
 /**
  * A service bean that persists {@link ActionLogRecord}s to the DB.
@@ -32,4 +34,16 @@ public void log( ActionLogRecord rec ) {
         }
         em.persist(rec);
     }
+
+    public List<ActionLogRecord> findAll() {
+        TypedQuery<ActionLogRecord> typedQuery = em.createQuery("SELECT OBJECT(o) FROM ActionLogRecord AS o ORDER BY o.id", ActionLogRecord.class);
+        return typedQuery.getResultList();
+    }
+
+    public List<ActionLogRecord> findByActionType(ActionLogRecord.ActionType actionType) {
+        TypedQuery<ActionLogRecord> typedQuery = em.createQuery("SELECT OBJECT(o) FROM ActionLogRecord AS o WHERE o.actionType = :actionType ORDER BY o.id", ActionLogRecord.class);
+        typedQuery.setParameter("actionType", actionType);
+        return typedQuery.getResultList();
+    }
+
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index 61bd018c13e..2d636fa11c3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -1002,4 +1002,35 @@ public Response validatePassword(String password) {
                 .add("errors", errorArray)
         );
     }
+
+    @Path("actionLogRecord")
+    @GET
+    public Response getActionLogRecordAll() {
+        final JsonArrayBuilder jab = Json.createArrayBuilder();
+        actionLogSvc.findAll().forEach((entry) -> {
+            jab.add(entry.getId());
+        });
+        return ok(jab);
+    }
+
+    @Path("actionLogRecord/type/{type}")
+    @GET
+    public Response getActionLogRecordByType(@PathParam("type") String userInputType) {
+        ActionLogRecord.ActionType type = ActionLogRecord.ActionType.valueOf(userInputType);
+        final JsonArrayBuilder jab = Json.createArrayBuilder();
+        actionLogSvc.findByActionType(type).forEach((entry) -> {
+            JsonObjectBuilder job = Json.createObjectBuilder();
+            job.add("id", entry.getId());
+            job.add("actionType", entry.getActionType().toString());
+            job.add("actionSubType", entry.getActionSubType());
+            String info = entry.getInfo();
+            if (info != null) {
+                job.add("info", info);
+            }
+            job.add("startTime", entry.getStartTime().toString());
+            jab.add(job);
+        });
+        return ok(jab);
+    }
+
 }
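// One hedge on the endpoint above: ActionType.valueOf throws
// IllegalArgumentException for an unrecognized {type}, which would surface as
// a server error. A guard along these lines (hypothetical, not part of this
// patch) would report it as a client error instead:
//
//     ActionLogRecord.ActionType type;
//     try {
//         type = ActionLogRecord.ActionType.valueOf(userInputType);
//     } catch (IllegalArgumentException ex) {
//         return error(Response.Status.BAD_REQUEST, "Invalid action type: " + userInputType);
//     }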
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java
index 454b442837d..eec5c34255e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java
@@ -1,6 +1,7 @@
 package edu.harvard.iq.dataverse.api;
 
 import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
 import static edu.harvard.iq.dataverse.api.AbstractApiBean.error;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.externaltools.ExternalTool;
@@ -51,10 +52,11 @@ public Response getExternalToolsByFile(@PathParam("id") Long fileIdFromUser) {
     public Response addExternalTool(String userInput) {
         try {
             ExternalTool externalTool = ExternalToolUtil.parseAddExternalToolInput(userInput);
-            // FIXME: Write to ActionLogRecord.
             ExternalTool saved = externalToolService.save(externalTool);
+            Long toolId = saved.getId();
+            actionLogSvc.log(new ActionLogRecord(ActionLogRecord.ActionType.ExternalTool, "addExternalTool").setInfo("External tool added with id " + toolId + "."));
             JsonObjectBuilder tool = Json.createObjectBuilder();
-            tool.add("id", saved.getId());
+            tool.add("id", toolId);
             tool.add(ExternalToolHandler.DISPLAY_NAME, saved.getDisplayName());
             return ok(tool);
         } catch (Exception ex) {
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java
index a16065608d5..6e9869d8022 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java
@@ -2,6 +2,7 @@
 
 import com.jayway.restassured.RestAssured;
 import com.jayway.restassured.response.Response;
+import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
 import java.io.IOException;
 import java.io.StringReader;
 import java.nio.file.Paths;
@@ -49,4 +50,17 @@ public void testAddExternalTool() throws IOException {
                 .statusCode(OK.getStatusCode());
     }
 
+    @Test
+    public void testGetActionLogRecordAll() {
+        Response getActionLogRecordAll = UtilIT.getActionLogRecordAll();
+        getActionLogRecordAll.prettyPrint();
+    }
+
+    @Test
+    public void testGetActionLogRecordsForExternalTools() {
+        ActionLogRecord.ActionType type = ActionLogRecord.ActionType.ExternalTool;
+        Response getActionLogRecordAll = UtilIT.getActionLogRecordByType(type);
+        getActionLogRecordAll.prettyPrint();
+    }
+
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 0a2a330c8d6..3bd8b03cdce 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -29,6 +29,7 @@
 import org.apache.commons.io.IOUtils;
 import static com.jayway.restassured.RestAssured.given;
 import static com.jayway.restassured.path.xml.XmlPath.from;
+import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -1157,6 +1158,16 @@ static Response addExternalTool(JsonObject jsonObject) {
         return requestSpecification.post("/api/admin/externalTools");
     }
 
+    static Response getActionLogRecordAll() {
+        return given()
+                .get("/api/admin/actionLogRecord");
+    }
+
+    static Response getActionLogRecordByType(ActionLogRecord.ActionType type) {
+        return given()
+                .get("/api/admin/actionLogRecord/type/" + type);
+    }
+
     @Test
     public void testGetFileIdFromSwordStatementWithNoFiles() {
         String swordStatementWithNoFiles = "\n"
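The two new integration tests above only pretty-print the responses. A sketch
of the kind of assertion they could grow into (REST Assured syntax as used
elsewhere in these tests, with hamcrest's equalTo statically imported; the
field name comes from the JSON built in Admin.getActionLogRecordByType, and the
"data" path assumes the standard ok() envelope):

    getActionLogRecordAll.then().assertThat()
            .statusCode(OK.getStatusCode())
            .body("data[0].actionSubType", equalTo("addExternalTool"));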
From 5d262708a3e1ddb39b32136c829dde97c1e9ff29 Mon Sep 17 00:00:00 2001
From: matthew-a-dunlap
Date: Thu, 9 Nov 2017 15:58:49 -0500
Subject: [PATCH 167/483] Configure dropdown in dataset page #4233

---
 .../edu/harvard/iq/dataverse/DatasetPage.java | 57 ++++++++++++++-----
 src/main/webapp/file.xhtml                    |  2 +-
 src/main/webapp/filesFragment.xhtml           |  5 +-
 3 files changed, 47 insertions(+), 17 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 90ae5156890..42af127b8c9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -251,7 +251,7 @@ public enum DisplayMode {
     private boolean removeUnusedTags;
     private Boolean hasRsyncScript = false;
 
-    private List<ExternalToolHandler> externalTools;
+    private Map<Long, List<ExternalToolHandler>> externalTools = new HashMap<>(); // <fileId, tools for that file>
 
     public Boolean isHasRsyncScript() {
         return hasRsyncScript;
@@ -1532,17 +1532,8 @@ private String init(boolean initFull) {
         //I need to generate a list of lists of external tools, one for each file. I don't understand how this data will be consumed in fileFragments...
         //In the jsf it looks like I can reference #{fileMetadata.dataFile.id} and the like... so as long as I can create a list that can be referenced?
         //Alternatively, maybe this info should be in the fileMetadata?
-
-        User user = session.getUser();
-        ApiToken apitoken = new ApiToken();
-        if (user instanceof AuthenticatedUser) {
-            apitoken = authService.findApiTokenByUser((AuthenticatedUser) user);
-        }
-
-        //for(FileMetadata mData : fileMetadatasSearch)
-        //{
-        externalTools = externalToolService.findAll(null, apitoken);
-        //}
+
+        generateExternalTools();
@@ -4096,9 +4087,47 @@ public List<DatasetField> getDatasetSummaryFields() {
         return DatasetUtil.getDatasetSummaryFields(workingVersion, customFields);
     }
 
+    //MAD: probably not the right way to do this
+    private void generateExternalTools() {
+        User user = session.getUser(); //redundant?
+        ApiToken apitoken = new ApiToken();
+        if (user instanceof AuthenticatedUser) {
+            apitoken = authService.findApiTokenByUser((AuthenticatedUser) user);
+        }
+
+        //List<List<ExternalToolHandler>> dsTools = new ArrayList<>(); //not convinced this structure will be processed in the correct order by jsf or that I've put things in the right place on that side
+
+        for (FileMetadata fm : fileMetadatasSearch) { //why does normal fileMetadatas not exist at this point? should I trigger it? how is search different?
+            DataFile fmFile = fm.getDataFile();
+            List<ExternalToolHandler> fileTools = externalToolService.findAll(fmFile, apitoken); //MAD: rename these
+            externalTools.put(fmFile.getId(), fileTools);
+        }
+    }
+
     // FIXME: We need to return a list of tools per file.
-    public List<ExternalToolHandler> getExternalTools() {
-        return externalTools;
-    }
+    //public List<ExternalToolHandler> getExternalTools() {
+    //    return externalTools;
+    //}
+
+    //I need this to return one list of tools based upon an id
+    //??? How is externalTools being populated? should it be attached to an object?
+    //eh there is a service bean I can query... maybe I'm doing this wrong and still need that double list...
+    public Map<Long, List<ExternalToolHandler>> getExternalTools() {
+        return externalTools;
+    }
+
+    public List<ExternalToolHandler> getExternalToolsForDataFile(Long fileId) {
+        return externalTools.get(fileId);
+    }
+
+    //MAD : need to do this for each file and populate... ugh. Also clean up the double "findAll"
+
+    //fileMetadatasSearch is this what I'm suppose to be using???? private List<FileMetadata> fileMetadatasSearch;
+
+    //I need to generate a list of lists of external tools, one for each file. I don't understand how this data will be consumed in fileFragments...
+    //In the jsf it looks like I can reference #{fileMetadata.dataFile.id} and the like... so as long as I can create a list that can be referenced?
+    //Alternatively, maybe this info should be in the fileMetadata?
+
+    // fileMetadatasSearch
 }
diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml
index 5613ccf5305..f34dde0da1f 100644
--- a/src/main/webapp/file.xhtml
+++ b/src/main/webapp/file.xhtml
@@ -139,7 +139,7 @@
[The body of this one-line hunk was lost in extraction (the XHTML tags were stripped); all that survives is the EL expression #{tool.buttonLabel}.]
diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml
index 207fb3291ce..018374f7991 100644
--- a/src/main/webapp/filesFragment.xhtml
+++ b/src/main/webapp/filesFragment.xhtml
@@ -358,12 +358,13 @@
[The body of this hunk was likewise lost in extraction; it carried the filesFragment.xhtml markup for the per-file external tools dropdown.]
@@ -533,6 +540,14 @@
[Orphaned hunk: its diff header and markup were lost in extraction, along with what appear to be one or more intervening patches in the series (the numbering jumps from 167 to 185).]
diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml
index 018374f7991..a76a68fc5cd 100644
--- a/src/main/webapp/filesFragment.xhtml
+++ b/src/main/webapp/filesFragment.xhtml
@@ -359,6 +359,7 @@
[The body of this hunk was lost in extraction.]
From 26eb11d2ed1c3e8c3f5f983bfed6401b9a2c48c9 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Mon, 13 Nov 2017 14:18:57 -0500
Subject: [PATCH 185/483] move `describe` from EjbDataverseEngine to Command
 interface #4262

---
 .../harvard/iq/dataverse/EjbDataverseEngine.java   | 15 ++-------------
 .../dataverse/engine/command/AbstractCommand.java  | 13 +++++++++++++
 .../iq/dataverse/engine/command/Command.java       |  3 ++-
 3 files changed, 17 insertions(+), 14 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
index 411c72ac8b7..ee44e7b3459 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
@@ -182,7 +182,7 @@ public <R> R submit(Command<R> aCommand) throws CommandException {
 
             DataverseRequest dvReq = aCommand.getRequest();
 
             Map<String, DvObject> affectedDvObjects = aCommand.getAffectedDvObjects();
-            logRec.setInfo( describe(affectedDvObjects) );
+            logRec.setInfo( aCommand.describe(affectedDvObjects) );
             for (Map.Entry<String, Set<Permission>> pair : requiredMap.entrySet()) {
                 String dvName = pair.getKey();
                 if (!affectedDvObjects.containsKey(dvName)) {
@@ -442,16 +442,5 @@ public DataCaptureModuleServiceBean dataCaptureModule() {
 
         return ctxt;
     }
-
-
-    private String describe( Map<String, DvObject> dvObjMap ) {
-        StringBuilder sb = new StringBuilder();
-        for ( Map.Entry<String, DvObject> ent : dvObjMap.entrySet() ) {
-            DvObject value = ent.getValue();
-            sb.append(ent.getKey()).append(":");
-            sb.append( (value!=null) ? value.accept(DvObject.NameIdPrinter) : "");
-            sb.append(" ");
-        }
-        return sb.toString();
-    }
+
 }
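// For orientation, a sketch of what the relocated describe() produces (the map
// key and the command are stand-ins; the exact rendering of each DvObject
// comes from DvObject.NameIdPrinter):
//
//     Map<String, DvObject> affected = new HashMap<>();
//     affected.put("dataset", someDataset);
//     String info = aCommand.describe(affected);
//     // => "dataset:" followed by the printer's name/id rendering and a trailing space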
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractCommand.java
index e4d0593835b..042585501c8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractCommand.java
@@ -81,4 +81,17 @@ public Map<String, Set<Permission>> getRequiredPermissions() {
     protected User getUser() {
         return getRequest().getUser();
     }
+
+    @Override
+    public String describe(Map<String, DvObject> dvObjMap) {
+        StringBuilder sb = new StringBuilder();
+        for (Map.Entry<String, DvObject> ent : dvObjMap.entrySet()) {
+            DvObject value = ent.getValue();
+            sb.append(ent.getKey()).append(":");
+            sb.append((value != null) ? value.accept(DvObject.NameIdPrinter) : "");
+            sb.append(" ");
+        }
+        return sb.toString();
+    }
+
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/Command.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/Command.java
index 32a8a3cb282..f78fc80bb41 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/Command.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/Command.java
@@ -41,5 +41,6 @@ public interface Command<R> {
      * @return A map of the permissions required for this command
      */
     Map<String, Set<Permission>> getRequiredPermissions();
-
+
+    public String describe(Map<String, DvObject> dvObjMap);
 }

From e434dd0a2b3f5b4c22c35a33030f5a62109bd011 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Mon, 13 Nov 2017 14:28:23 -0500
Subject: [PATCH 186/483] make it clear that file upload is complete #4250

---
 src/main/java/Bundle.properties | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index a4e3fa63c58..8c3394ad6f5 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1338,7 +1338,7 @@ file.spss-savEncoding.current=Current Selection:
 file.spss-porExtraLabels=Variable Labels
 file.spss-porExtraLabels.title=Upload an additional text file with extra variable labels.
 file.spss-porExtraLabels.selectToAddBtn=Select File to Add
-file.ingestFailed=Tabular Data Ingest Failed
+file.ingestFailed=Upload Complete. Tabular Data Ingest Failed
 file.explore.twoRavens=TwoRavens
 file.map=Map
 file.mapData=Map Data

From 7795e7008640d4b7c491ed613e4b944349ae3fe3 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Mon, 13 Nov 2017 15:22:08 -0500
Subject: [PATCH 187/483] change header background from gray to white #4197

---
 src/main/webapp/dataverse_header.xhtml | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/main/webapp/dataverse_header.xhtml b/src/main/webapp/dataverse_header.xhtml
index 8ec2ced172f..cf8c4ad6610 100644
--- a/src/main/webapp/dataverse_header.xhtml
+++ b/src/main/webapp/dataverse_header.xhtml
@@ -154,11 +154,13 @@
[The body of this hunk was lost in extraction (the XHTML tags were stripped); per the subject line, it changed the header background styling from gray to white.]