diff --git a/.gitignore b/.gitignore index 87c2918d902..e1915ef7ddc 100644 --- a/.gitignore +++ b/.gitignore @@ -44,3 +44,7 @@ conf/docker-aio/dv/install/dvinstall.zip conf/docker-aio/testdata/ scripts/installer/default.config *.pem + +# do not track IntelliJ IDEA files +.idea +**/*.iml diff --git a/conf/docker-dcm/0prep.sh b/conf/docker-dcm/0prep.sh index 01abf8f4c77..a77f8775495 100755 --- a/conf/docker-dcm/0prep.sh +++ b/conf/docker-dcm/0prep.sh @@ -1,4 +1,4 @@ #!/bin/sh -wget https://github.com/sbgrid/data-capture-module/releases/download/0.2/dcm-0.2-0.noarch.rpm +wget https://github.com/sbgrid/data-capture-module/releases/download/0.3/dcm-0.3-0.noarch.rpm diff --git a/conf/docker-dcm/dcmsrv.dockerfile b/conf/docker-dcm/dcmsrv.dockerfile index 04fe2e24769..02d88b6c3a2 100644 --- a/conf/docker-dcm/dcmsrv.dockerfile +++ b/conf/docker-dcm/dcmsrv.dockerfile @@ -1,7 +1,7 @@ # build from repo root FROM centos:6 RUN yum install -y epel-release -ARG RPMFILE=dcm-0.2-0.noarch.rpm +ARG RPMFILE=dcm-0.3-0.noarch.rpm COPY ${RPMFILE} /tmp/ COPY bashrc /root/.bashrc COPY test_install.sh /root/ diff --git a/conf/docker/solr/solrconfig_master.xml b/conf/docker/solr/solrconfig_master.xml index ee348b9d9f7..3c2460fa6fb 100644 --- a/conf/docker/solr/solrconfig_master.xml +++ b/conf/docker/solr/solrconfig_master.xml @@ -686,13 +686,55 @@ of SearchComponents (see below) and supports distributed queries across multiple shards --> - + explicit 10 + edismax + 0.075 + + dvName^400 + authorName^180 + dvSubject^190 + dvDescription^180 + dvAffiliation^170 + title^130 + subject^120 + keyword^110 + topicClassValue^100 + dsDescriptionValue^90 + authorAffiliation^80 + publicationCitation^60 + producerName^50 + fileName^30 + fileDescription^30 + variableLabel^20 + variableName^10 + text^1.0 + + + dvName^200 + authorName^100 + dvSubject^100 + dvDescription^100 + dvAffiliation^100 + title^75 + subject^75 + keyword^75 + topicClassValue^75 + dsDescriptionValue^75 + authorAffiliation^75 + publicationCitation^75 + producerName^75 + + + + isHarvested:false^25000 + + @@ -924,27 +966,6 @@ true 10 5 - edismax - -dvName^170 -dvSubject^160 -dvDescription^150 -dvAffiliation^140 -title^130 -subject^120 -keyword^110 -topicClassValue^100 -dsDescriptionValue^90 -authorName^80 -authorAffiliation^70 -publicationCitation^60 -producerName^50 -fileName^40 -fileDescription^30 -variableLabel^20 -variableName^10 -text^1.0 - spellcheck diff --git a/conf/docker/solr/solrconfig_slave.xml b/conf/docker/solr/solrconfig_slave.xml index ac5e5124efb..ac10cc995a3 100644 --- a/conf/docker/solr/solrconfig_slave.xml +++ b/conf/docker/solr/solrconfig_slave.xml @@ -686,13 +686,55 @@ of SearchComponents (see below) and supports distributed queries across multiple shards --> - + explicit 10 + edismax + 0.075 + + dvName^400 + authorName^180 + dvSubject^190 + dvDescription^180 + dvAffiliation^170 + title^130 + subject^120 + keyword^110 + topicClassValue^100 + dsDescriptionValue^90 + authorAffiliation^80 + publicationCitation^60 + producerName^50 + fileName^30 + fileDescription^30 + variableLabel^20 + variableName^10 + text^1.0 + + + dvName^200 + authorName^100 + dvSubject^100 + dvDescription^100 + dvAffiliation^100 + title^75 + subject^75 + keyword^75 + topicClassValue^75 + dsDescriptionValue^75 + authorAffiliation^75 + publicationCitation^75 + producerName^75 + + + + isHarvested:false^25000 + + @@ -924,27 +966,6 @@ true 10 5 - edismax - -dvName^170 -dvSubject^160 -dvDescription^150 -dvAffiliation^140 -title^130 -subject^120 -keyword^110 
-topicClassValue^100
-dsDescriptionValue^90
-authorName^80
-authorAffiliation^70
-publicationCitation^60
-producerName^50
-fileName^40
-fileDescription^30
-variableLabel^20
-variableName^10
-text^1.0
-
 spellcheck
diff --git a/conf/solr/7.3.0/solrconfig.xml b/conf/solr/7.3.0/solrconfig.xml
index 599506fd454..161dddca8cb 100644
--- a/conf/solr/7.3.0/solrconfig.xml
+++ b/conf/solr/7.3.0/solrconfig.xml
@@ -693,6 +693,48 @@
 explicit
 10
+ edismax
+ 0.075
+
+ dvName^400
+ authorName^180
+ dvSubject^190
+ dvDescription^180
+ dvAffiliation^170
+ title^130
+ subject^120
+ keyword^110
+ topicClassValue^100
+ dsDescriptionValue^90
+ authorAffiliation^80
+ publicationCitation^60
+ producerName^50
+ fileName^30
+ fileDescription^30
+ variableLabel^20
+ variableName^10
+ text^1.0
+
+
+ dvName^200
+ authorName^100
+ dvSubject^100
+ dvDescription^100
+ dvAffiliation^100
+ title^75
+ subject^75
+ keyword^75
+ topicClassValue^75
+ dsDescriptionValue^75
+ authorAffiliation^75
+ publicationCitation^75
+ producerName^75
+
+
+
+ isHarvested:false^25000
+
+
@@ -924,27 +966,6 @@
 true
 10
 5
- edismax
-
-dvName^170
-dvSubject^160
-dvDescription^150
-dvAffiliation^140
-title^130
-subject^120
-keyword^110
-topicClassValue^100
-dsDescriptionValue^90
-authorName^80
-authorAffiliation^70
-publicationCitation^60
-producerName^50
-fileName^40
-fileDescription^30
-variableLabel^20
-variableName^10
-text^1.0
-
 spellcheck
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index c9cb01ce869..5d4c6bf8db3 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -513,11 +513,11 @@ Dataset Locks

 To check if a dataset is locked::

-  curl -H "$SERVER_URL/api/datasets/{database_id}/locks
+  curl "$SERVER_URL/api/datasets/{database_id}/locks"

 Optionally, you can check if there's a lock of a specific type on the dataset::

-  curl -H "$SERVER_URL/api/datasets/{database_id}/locks?type={lock_type}
+  curl "$SERVER_URL/api/datasets/{database_id}/locks?type={lock_type}"

 Currently implemented lock types are ``Ingest, Workflow, InReview, DcmUpload and pidRegister``.

diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst
index 90c49f5dd77..567fb99ec7a 100644
--- a/doc/sphinx-guides/source/developers/big-data-support.rst
+++ b/doc/sphinx-guides/source/developers/big-data-support.rst
@@ -25,7 +25,7 @@ Please note that S3 support for DCM is highly experimental. Files can be uploade
 Once you have installed a DCM, you will need to configure two database settings on the Dataverse side. These settings are documented in the :doc:`/installation/config` section of the Installation Guide:

 - ``:DataCaptureModuleUrl`` should be set to the URL of a DCM you installed.
-- ``:UploadMethods`` should be set to ``dcm/rsync+ssh``.
+- ``:UploadMethods`` should include ``dcm/rsync+ssh``.

 This will allow your Dataverse installation to communicate with your DCM, so that Dataverse can download rsync scripts for your users.

@@ -155,7 +155,7 @@ In order to see the rsync URLs, you must run this command:

 ``curl -X PUT -d 'rsal/rsync' http://localhost:8080/api/admin/settings/:DownloadMethods``

-TODO: Document these in the Installation Guide once they're final.
+.. TODO: Document these in the Installation Guide once they're final.
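
If you want to confirm that the setting took hold, the admin settings endpoint can be read back as well; dumping all database settings is the simplest check (this assumes the same localhost API endpoint used in the commands above):

``curl http://localhost:8080/api/admin/settings``
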
To specify replication sites that appear in rsync URLs: diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 8c0ed4e9b2d..a19536228b1 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -26,6 +26,8 @@ The :doc:`/api/native-api` contains a useful but potentially dangerous API endpo By default, all APIs can be operated on remotely and a number of endpoints do not require authentication. https://github.com/IQSS/dataverse/issues/1886 was opened to explore changing these defaults, but until then it is very important to block both the "admin" endpoint (and at least consider blocking ``builtin-users``). For details please see also the section on ``:BlockedApiPolicy`` below. +It's also possible to prevent file uploads via API by adjusting the ``:UploadMethods`` database setting. + Forcing HTTPS +++++++++++++ @@ -260,59 +262,142 @@ if your installation's :ref:`:PublicInstall` setting is true, or: You can configure this redirect properly in your cloud environment to generate a temporary URL for access to the Swift objects for computing. -Amazon S3 Storage -+++++++++++++++++ +Amazon S3 Storage (or Compatible) ++++++++++++++++++++++++++++++++++ -For institutions and organizations looking to use Amazon's S3 cloud storage for their installation, this can be set up manually through creation of the credentials and config files or automatically via the AWS console commands. +For institutions and organizations looking to use some kind of S3-based object storage for files uploaded to Dataverse, +this is entirely possible. You can either use Amazon Web Services or use some other, even on-site S3-compatible +storage (like Minio, Ceph RADOS S3 Gateway and many more). -You'll need an AWS account with an associated S3 bucket for your installation to use. From the S3 management console (e.g. ``_), you can poke around and get familiar with your bucket. We recommend using IAM (Identity and Access Management) to create a user with full S3 access and nothing more, for security reasons. See ``_ for more info on this process. +**Note:** The Dataverse Team is most familiar with AWS S3, and can provide support on its usage with Dataverse. Thanks to community contributions, the application's architecture also allows non-AWS S3 providers. The Dataverse Team can provide very limited support on these other providers. We recommend reaching out to the wider Dataverse community if you have questions. -Make note of the bucket's name and the region its data is hosted in. Dataverse and the AWS SDK make use of "AWS credentials profile file" and "AWS config profile file" located in ``~/.aws/`` where ``~`` is the home directory of the user you run Glassfish as. This file can be generated via either of two methods described below. It's also possible to use IAM Roles rather than the credentials file. Please note that in this case you will need anyway the config file to specify the region. +First: Set Up Accounts and Access Credentials +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Set Up credentials File Manually -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Dataverse and the AWS SDK make use of the "AWS credentials profile file" and "AWS config profile file" located in +``~/.aws/`` where ``~`` is the home directory of the user you run Glassfish as. This file can be generated via either +of two methods described below: -To create the ``credentials`` file manually, you will need to generate a key/secret key. 
The first step is to log onto your AWS web console (e.g. ``_). If you have created a user in AWS IAM, you can click on that user and generate the keys needed for Dataverse. +1. Manually through creation of the credentials and config files or +2. Automatically via the AWS console commands. -Once you have acquired the keys, they need to be added to the ``credentials`` file. The format for credentials is as follows: +Preparation When Using Amazon's S3 Service +########################################## -| ``[default]`` -| ``aws_access_key_id = `` -| ``aws_secret_access_key = `` +You'll need an AWS account with an associated S3 bucket for your installation to use. From the S3 management console +(e.g. ``_), you can poke around and get familiar with your bucket. -You must also specify the AWS region in the ``config`` file, for example: +**Make note** of the **bucket's name** and the **region** its data is hosted in. -| ``[default]`` -| ``region = us-east-1`` +To **create a user** with full S3 access and nothing more for security reasons, we recommend using IAM +(Identity and Access Management). See `IAM User Guide `_ +for more info on this process. -Place these two files in a folder named ``.aws`` under the home directory for the user running your Dataverse Glassfish instance. (From the `AWS Command Line Interface Documentation `_: "In order to separate credentials from less sensitive options, region and output format are stored in a separate file named config in the same folder") +**Generate the user keys** needed for Dataverse afterwards by clicking on the created user. +(You can skip this step when running on EC2, see below.) -Set Up Access Configuration Via Command Line Tools -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. TIP:: + If you are hosting Dataverse on an AWS EC2 instance alongside storage in S3, it is possible to use IAM Roles instead + of the credentials file (the file at ``~/.aws/credentials`` mentioned below). Please note that you will still need the + ``~/.aws/config`` file to specify the region. For more information on this option, see + http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html -Begin by installing the CLI tool `pip `_ to install the `AWS command line interface `_ if you don't have it. +Preparation When Using Custom S3-Compatible Service +################################################### -First, we'll get our access keys set up. If you already have your access keys configured, skip this step. From the command line, run: +We assume you have your S3-compatible custom storage in place, up and running, ready for service. -``pip install awscli`` +Please make note of the following details: -``aws configure`` +- **Endpoint URL** - consult the documentation of your service on how to find it. -You'll be prompted to enter your Access Key ID and secret key, which should be issued to your AWS account. The subsequent config steps after the access keys are up to you. For reference, the keys will be stored in ``~/.aws/credentials``, and your AWS access region in ``~/.aws/config``. + * Example: https://play.minio.io:9000 + +- **Region:** Optional, but some services might use it. Consult your service documentation. -Using an IAM Role with EC2 -^^^^^^^^^^^^^^^^^^^^^^^^^^ + * Example: *us-east-1* + +- **Access key ID and secret access key:** Usually you can generate access keys within the user profile of your service. 
-If you are hosting Dataverse on an AWS EC2 instance alongside storage in S3, it is possible to use IAM Roles instead of the credentials file (the file at ``~/.aws/credentials`` mentioned above). Please note that you will still need the ``~/.aws/config`` file to specify the region. For more information on this option, see http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
+  * Example:
+
+    - ID: *Q3AM3UQ867SPQQA43P2F*
+
+    - Key: *zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG*
+
+- **Bucket name:** Dataverse will fail opening and uploading files on S3 if you don't create one.
-Configure Dataverse to Use AWS/S3
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  * Example: *dataverse*
-With your access to your bucket in place, we'll want to navigate to ``/usr/local/glassfish4/glassfish/bin/`` and execute the following ``asadmin`` commands to set up the proper JVM options. Recall that out of the box, Dataverse is configured to use local file storage. You'll need to delete the existing storage driver before setting the new one.
-``./asadmin $ASADMIN_OPTS delete-jvm-options "\-Ddataverse.files.storage-driver-id=file"``
+
+Reported Working S3-Compatible Storage
+######################################
+
+`Minio v2018-09-12 <https://www.minio.io>`_
+  Set ``dataverse.files.s3-path-style-access=true``, as Minio works path-based. Works pretty smoothly and is easy to set up.
+  **Can be used for quick testing, too:** just use the example values above. Uses the public (read: unsecure and
+  possibly slow) https://play.minio.io:9000 service.
+
+
+**HINT:** If you are successfully using an S3 storage implementation not yet listed above, please feel free to
+`open an issue at Github <https://github.com/IQSS/dataverse/issues/new>`_ and describe your setup.
+We will be glad to add it here.
+
+
+Manually Set Up Credentials File
+################################
+
+To create the ``~/.aws/credentials`` file manually, you will need to generate a key/secret key (see above). Once you have
+acquired the keys, they need to be added to the ``credentials`` file. The format for credentials is as follows:
+
+::
+
+  [default]
+  aws_access_key_id = <insert key, no brackets>
+  aws_secret_access_key = <insert secret key, no brackets>
+
+While using Amazon's service, you must also specify the AWS region in the ``~/.aws/config`` file, for example:
+
+::
+
+  [default]
+  region = us-east-1
+
+Place these two files in a folder named ``.aws`` under the home directory for the user running your Dataverse Glassfish
+instance. (From the `AWS Command Line Interface Documentation <https://docs.aws.amazon.com/cli/latest/userguide/>`_:
+"In order to separate credentials from less sensitive options, region and output format are stored in a separate file
+named config in the same folder")
-``./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files.storage-driver-id=s3"``
+
+Console Commands to Set Up Access Configuration
+###############################################
+
+Begin by installing the CLI tool `pip <https://pypi.org/project/pip/>`_ to install the
+`AWS command line interface <https://aws.amazon.com/cli/>`_ if you don't have it.
+
+First, we'll get our access keys set up. If you already have your access keys configured, skip this step.
+From the command line, run:
+
+- ``pip install awscli``
+- ``aws configure``
+
+You'll be prompted to enter your Access Key ID and secret key, which should be issued to your AWS account.
+The subsequent config steps after the access keys are up to you. For reference, the keys will be stored in
+``~/.aws/credentials``, and your AWS access region in ``~/.aws/config``.
+
+**TIP:** When using a custom S3 URL endpoint, you need to add it to every ``aws`` call: ``aws --endpoint-url <URL> s3 ...``
+(you may omit it while configuring).
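
As a quick sketch using the example values from this section (the public Minio playground endpoint and a bucket named *dataverse*; substitute your own endpoint and bucket), listing a bucket's contents would look like:

``aws --endpoint-url https://play.minio.io:9000 s3 ls s3://dataverse``
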
+ +Second: Configure Dataverse to use S3 Storage +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +With access to your bucket in place, we'll want to navigate to ``/usr/local/glassfish4/glassfish/bin/`` +and execute the following ``asadmin`` commands to set up the proper JVM options. Recall that out of the box, Dataverse +is configured to use local file storage. You'll need to delete the existing storage driver before setting the new one. + +:: + + ./asadmin $ASADMIN_OPTS delete-jvm-options "-Ddataverse.files.storage-driver-id=file" + ./asadmin $ASADMIN_OPTS create-jvm-options "-Ddataverse.files.storage-driver-id=s3" Then, we'll need to identify which S3 bucket we're using. Replace ``your_bucket_name`` with, of course, your bucket: @@ -324,10 +409,29 @@ Optionally, you can have users download files from S3 directly rather than havin If you enable ``dataverse.files.s3-download-redirect`` as described above, note that the S3 URLs expire after an hour by default but you can configure the expiration time using the ``dataverse.files.s3-url-expiration-minutes`` JVM option. Here's an example of setting the expiration time to 120 minutes: -``./asadmin create-jvm-options "-D dataverse.files.s3-url-expiration-minutes=120"`` +``./asadmin create-jvm-options "-Ddataverse.files.s3-url-expiration-minutes=120"`` + +In case you would like to configure Dataverse to use a custom S3 service instead of Amazon S3 services, please +add the options for the custom URL and region as documented below. Please read above if your desired combination has +been tested already and what other options have been set for a successful integration. Lastly, go ahead and restart your glassfish server. With Dataverse deployed and the site online, you should be able to upload datasets and data files and see the corresponding files in your S3 bucket. Within a bucket, the folder structure emulates that found in local file storage. +S3 Storage Options +################## + +========================================= ================== ================================================================== ============= +JVM Option Value Description Default value +========================================= ================== ================================================================== ============= +dataverse.files.storage-driver-id s3 Enable S3 storage driver. ``file`` +dataverse.files.s3-bucket-name The bucket name. See above. (none) +dataverse.files.s3-download-redirect ``true``/``false`` Enable direct download or proxy through Dataverse. ``false`` +dataverse.files.s3-url-expiration-minutes If direct downloads: time until links expire. Optional. 60 +dataverse.files.s3-custom-endpoint-url Use custom S3 endpoint. Needs URL either with or without protocol. (none) +dataverse.files.s3-custom-endpoint-region Only used when using custom endpoint. Optional. ``dataverse`` +dataverse.files.s3-path-style-access ``true``/``false`` Use path style buckets instead of subdomains. Optional. ``false`` +========================================= ================== ================================================================== ============= + .. _Branding Your Installation: Branding Your Installation @@ -547,7 +651,7 @@ Configuration for :doc:`r-rapache-tworavens`. dataverse.dropbox.key +++++++++++++++++++++ -Dropbox provides a Chooser app, which is a Javascript component that allows you to upload files to Dataverse from Dropbox. It is an optional configuration setting, which requires you to pass it an app key. 
For more information on setting up your Chooser app, visit https://www.dropbox.com/developers/chooser.
+Dropbox provides a Chooser app, which is a Javascript component that allows you to upload files to Dataverse from Dropbox. It is an optional configuration setting, which requires you to pass it an app key and configure the ``:UploadMethods`` database setting. For more information on setting up your Chooser app, visit https://www.dropbox.com/developers/chooser.

 ``./asadmin create-jvm-options "-Ddataverse.dropbox.key={{YOUR_APP_KEY}}"``

@@ -1297,9 +1401,18 @@ The URL for your Repository Storage Abstraction Layer (RSAL) installation. This

 :UploadMethods
 ++++++++++++++

-This setting is experimental and to be used with the Data Capture Module (DCM). For now, if you set the upload methods to ``dcm/rsync+ssh`` it will allow your users to download rsync scripts from the DCM.
+This setting controls which upload methods are available to users of your installation of Dataverse. The following upload methods are available:
+
+- ``native/http``: Corresponds to "Upload with HTTP via your browser" and APIs that use HTTP (SWORD and native).
+- ``dcm/rsync+ssh``: Corresponds to "Upload with rsync+ssh via Data Capture Module (DCM)". A lot of setup is required, as explained in the :doc:`/developers/big-data-support` section of the Dev Guide.
+
+Out of the box only ``native/http`` is enabled and will work without further configuration. To add multiple upload methods, separate them using a comma like this:
+
+``curl -X PUT -d 'native/http,dcm/rsync+ssh' http://localhost:8080/api/admin/settings/:UploadMethods``
+
+You'll always want at least one upload method, so the easiest way to remove one of them is to simply ``PUT`` just the one you want, like this:

-``curl -X PUT -d 'dcm/rsync+ssh' http://localhost:8080/api/admin/settings/:UploadMethods``
+``curl -X PUT -d 'native/http' http://localhost:8080/api/admin/settings/:UploadMethods``

 :DownloadMethods
 ++++++++++++++++

diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst
index eda0f293e85..06b41e9e7d3 100644
--- a/doc/sphinx-guides/source/installation/prerequisites.rst
+++ b/doc/sphinx-guides/source/installation/prerequisites.rst
@@ -114,7 +114,7 @@ It is not necessary for Glassfish to be running before you execute the Dataverse

 Please note that you must run Glassfish in an English locale. If you are using something like ``LANG=de_DE.UTF-8``, ingest of tabular data will fail with the message "RoundRoutines:decimal separator no in right place".

-Also note that Glassfish may utilize more than the default number of file descriptors, especially when running batch jobs such as harvesting. We have increased ours by adding ulimit -n 32768 to our glassfish init script. On operating systems which use systemd such as RHEL or CentOS 7, file descriptor limits may be increased by adding a line like LimitNOFILE=32768 to the systemd unit file. You may adjust the file descriptor limits on running processes by using the prlimit utility:
+Also note that Glassfish may utilize more than the default number of file descriptors, especially when running batch jobs such as harvesting. We have increased ours by adding ulimit -n 32768 to our glassfish init script. On operating systems which use systemd such as RHEL or CentOS 7, file descriptor limits may be increased by adding a line like LimitNOFILE=32768 to the systemd unit file.
You may adjust the file descriptor limits on running processes by using the prlimit utility:: # sudo prlimit --pid pid --nofile=32768:32768 @@ -212,20 +212,29 @@ You should already have a "dvinstall.zip" file that you downloaded from https:// cp /tmp/dvinstall/schema.xml /usr/local/solr/solr-7.3.0/server/solr/collection1/conf cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-7.3.0/server/solr/collection1/conf -Note: Dataverse has customized Solr to boost results that come from certain indexed elements inside Dataverse, for example results matching on the name of a dataset. If you would like to remove this, edit your ``solrconfig.xml`` and remove the ```` element and its contents. +Note: Dataverse has customized Solr to boost results that come from certain indexed elements inside Dataverse, for example prioritizing results from Dataverses over Datasets. If you would like to remove this, edit your ``solrconfig.xml`` and remove the ```` element and its contents. If you have ideas about how this boosting could be improved, feel free to contact us through our Google Group https://groups.google.com/forum/#!forum/dataverse-dev . Dataverse requires a change to the ``jetty.xml`` file that ships with Solr. Edit ``/usr/local/solr/solr-7.3.0/server/etc/jetty.xml`` , increasing ``requestHeaderSize`` from ``8192`` to ``102400`` -With the Dataverse-specific config in place, you can now start Solr and create the core that will be used to manage search information:: +Solr will warn about needing to increase the number of file descriptors and max processes in a production environment but will still run with defaults. We have increased these values to the recommended levels by adding ulimit -n 65000 to the init script, and the following to ``/etc/security/limits.conf``:: + + solr soft nproc 65000 + solr hard nproc 65000 + solr soft nofile 65000 + solr hard nofile 65000 + +On operating systems which use systemd such as RHEL or CentOS 7, you may then add a line like LimitNOFILE=65000 to the systemd unit file, or adjust the limits on a running process using the prlimit tool:: + + # sudo prlimit --pid pid --nofile=65000:65000 + +Solr launches asynchronously and attempts to use the ``lsof`` binary to watch for its own availability. Installation of this package isn't required but will prevent a warning in the log at startup. + +Finally, you may start Solr and create the core that will be used to manage search information:: cd /usr/local/solr/solr-7.3.0 bin/solr start bin/solr create_core -c collection1 -d server/solr/collection1/conf/ -Please note: Solr will warn about needing to increase the number of file descriptors and max processes in a production environment but will still run with defaults. We have increased these values to the recommended levels by adding ulimit -n 65000 to the init script and adding solr soft nproc 65000 to /etc/security/limits.conf. 
On operating systems which use systemd such as RHEL or CentOS 7, you may add a line like LimitNOFILE=65000 to the systemd unit file, or adjust the limits on a running process using the prlimit tool:
-
- # sudo prlimit --pid pid --nofile=65000:65000
-
 Solr Init Script
 ================

diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst
index 337063e1caa..e913e9e97bb 100755
--- a/doc/sphinx-guides/source/user/dataset-management.rst
+++ b/doc/sphinx-guides/source/user/dataset-management.rst
@@ -40,23 +40,77 @@ Note: You can add additional metadata once you have completed the initial datase

 Supported HTML Fields
 ---------------------

-We currently only support the following HTML tags for any of our textbox meatdata fields (i.e., Description) : <a>, <b>, <blockquote>, <br>, <code>, <del>, <dd>, <dl>, <dt>, <em>, <hr>, <h1>-<h3>, <i>, <img>, <kbd>, <li>, <ol>, <p>, <pre>, <s>, <sup>, <sub>, <strong>, <strike>, <u>, <ul>.
+We currently only support the following HTML tags for any of our textbox metadata fields (i.e., Description) : <a>, <b>, <blockquote>, <br>, <code>, <del>, <dd>, <dl>, <dt>, <em>, <hr>, <h1>-<h3>, <i>, <img>, <kbd>, <li>, <ol>, <p>, <pre>, <s>, <sup>, <sub>, <strong>, <strike>, <u>, <ul>
        . -File Handling and Uploading -=========================== +File Upload +============== -To upload new files to a dataset, click the "Edit" button at the top of the dataset page and from the dropdown list select "Files (Upload)" or click the "Upload Files" button above the files table in the Files tab. From either option you will be brought to the Upload Files page for that dataset. +The Dataverse software offers multiple methods of uploading files to a dataset. These upload methods are configurable by the administrator of a Dataverse installation, so you might not see some of these options on the Dataverse site you're using. -Once you have uploaded files, you will be able to edit file metadata, restrict access to files [#f1]_ , and/or add tags. Click "Save Changes" to complete the upload. If you uploaded a file by mistake, you can delete it before saving by clicking the checkbox to select the file, and then clicking the "Delete" button above the Files Table. +If there are multiple upload options available, then you must choose which one to use for your dataset. A dataset may only use one upload method. Once you upload a file using one of the available upload methods, that method is locked in for that dataset. If you need to switch upload methods for a dataset that already contains files, then please contact Support by clicking on the Support link at the top of the application. + +You can upload files to a dataset while first creating that dataset. You can also upload files after creating a dataset by clicking the "Edit" button at the top of the dataset page and from the dropdown list selecting "Files (Upload)" or clicking the "Upload Files" button above the files table in the Files tab. From either option you will be brought to the Upload Files page for that dataset. + +Certain file types in Dataverse are supported by additional functionality, which can include downloading in different formats, subsets, file-level metadata preservation, file-level data citation; and exploration through data visualization and analysis. See the File Handling section of this page for more information. + + +HTTP Upload +----------- + +HTTP Upload is a common browser-based file upload tool you may be familiar with from other web applications. You can upload files via HTTP by selecting them from your browser or dragging and dropping them into the upload widget. -File upload limit size varies based on Dataverse installation. The file upload size limit can be found in the text above where files are uploaded in the application. If you have further questions, contact support for that installation by clicking on the Support link at the top of the application. +Once you have uploaded files, you will be able to edit file metadata, restrict access to files [#f1]_ , and/or add tags. Click "Save Changes" to complete the upload. If you uploaded a file by mistake, you can delete it before saving by clicking the checkbox to select the file, and then clicking the "Delete" button above the Files Table. -The file types listed in the following sections are supported by additional functionality, which can include downloading in different formats, subsets, file-level metadata preservation, file-level data citation; and exploration through data visualization and analysis. +File upload limit size varies based on Dataverse installation. The file upload size limit can be found in the text above the HTTP upload widget. 
If you need to upload a very large file or a very large *number* of files, consider using rsync + SSH upload if your installation of Dataverse offers it. .. [#f1] Some Dataverse installations do not allow this feature. +Dropbox Upload +-------------- + +Some Dataverse installations support the ability to upload files directly from Dropbox. To do so, click the "Upload from Dropbox" button, log in to Dropbox in the pop-up window, and select the files you'd like to transfer over. + +.. _rsync_upload: + +rsync + SSH Upload +------------------ + +rsync is typically used for synchronizing files and directories between two different systems, using SSH to connect rather than HTTP. Some Dataverse installations allow uploads using rsync, to facilitate large file transfers in a reliable and secure manner. + +File Upload Script +~~~~~~~~~~~~~~~~~~ + +An rsync-enabled Dataverse installation has a file upload process that differs from the traditional browser-based upload process you may be used to. In order to transfer your data to Dataverse's storage, you will need to complete the following steps: + +1. Create your dataset. In rsync-enabled Dataverse installations, you cannot upload files until the dataset creation process is complete. After you hit "Save Dataset" on the Dataset Creation page, you will be taken to the page for your dataset. + +2. On the dataset page, click the "+ Upload Files" button. This will open a box with instructions and a link to the file upload script. + +3. Make sure your files are ready for upload. You will need to have one directory that you can point the upload script to. All files in this directory and in any subdirectories will be uploaded. The directory structure will be preserved, and will be reproduced when your dataset is downloaded from Dataverse. Note that your data will be uploaded in the form of a data package, and each dataset can only host one such package. Be sure that all files you want to include are present before you upload. + +4. Download the rsync file upload script by clicking the "Download Script" button in the Upload Files instruction box. There are no requirements for where you save the script; put it somewhere you can find it. Downloading the upload script will put a temporary lock on your dataset to prepare it for upload. While your dataset is locked, you will not be able to delete or publish your dataset, or edit its metadata. Once you upload your files and Dataverse processes them, your dataset will be automatically unlocked and these disabled functions will be enabled again. If you have downloaded the script and locked your dataset, but you have then changed your mind and decided *not* to upload files, please contact Support about unlocking your dataset. + +5. To begin the upload process, you will need to run the script you downloaded. For this, you will have to go outside your browser and open a terminal (AKA command line) window on your computer. Use the terminal to navigate to the directory where you saved the upload script, and run the command that the Upload Files instruction box provides. This will begin the upload script. Please note that this upload script will expire 7 days after you downloaded it. If it expires and you still need to use it, simply download the script from Dataverse again. + +**Note:** Unlike other operating systems, Windows does not come with rsync supported by default. We have not optimized this feature for Windows users, but you may be able to get it working if you install the right Unix utilities. 
(If you have found a way to get this feature working for you on Windows, you can contribute it to our project. Please reference our `Contributing to Dataverse `_ document in the root of the source tree.) + +6. Follow the instructions provided by the upload script running in your terminal. It will direct you to enter the full path of the directory where your dataset files are located, and then it will start the upload process. Once you've initiated the upload, if you need to cancel it then you can do so by canceling the script running in your terminal window. If your upload gets interrupted, you can resume it from the same point later. + +7. Once the upload script completes its job, Dataverse will begin processing your data upload and running a checksum validation. This may take some time depending on the file size of your upload. During processing, you will see a blue bar at the bottom of the dataset page that reads "Upload in progress..." + +8. Once processing is complete, you will be notified. At this point you can publish your dataset and your data will be available for download on the dataset page. + +**Note:** A dataset can only hold one data package. If you need to replace the data package in your dataset, contact Support. + + +File Handling +============= + +Certain file types in Dataverse are supported by additional functionality, which can include downloading in different formats, subsets, file-level metadata preservation, file-level data citation; and exploration through data visualization and analysis. See the sections below for information about special functionality for specific file types. + + Tabular Data Files ------------------ @@ -113,7 +167,7 @@ Compressed files in zip format are unpacked automatically. If it fails to unpack Support for unpacking tar files will be added when this ticket is closed: https://github.com/IQSS/dataverse/issues/2195. -Advanced Options +Other File Types ---------------- There are several advanced options available for certain file types. @@ -121,94 +175,6 @@ There are several advanced options available for certain file types. - Image files: .jpg, .png, and .tif files are able to be selected as the default thumbnail for a dataset. The selected thumbnail will appear on the search result card for that dataset. - SPSS files: SPSS files can be tagged with the language they were originally coded in. This is found by clicking on Advanced Options and selecting the language from the list provided. -.. _provenance: - -Data Provenance ---------------- - -Data Provenance is a record of where your data came from and how it reached its current form. It describes the origin of a data file, any transformations that have been made to that file, and any persons or organizations associated with that file. A data file's provenance can aid in reproducibility and compliance with legal regulations. Dataverse can help you keep track of your data's provenance. Currently, Dataverse only makes provenance information available to those who have edit permissions on your dataset, but in the near future we plan to expand this feature to make provenance information available to the public. You can track our progress in `this issue `_ on the Dataverse GitHub repository. - -.. COMMENTED OUT UNTIL PROV FILE DOWNLOAD IS ADDED: , and make it available to those who need it. - -Dataverse accepts provenance information in two forms: a *Provenance File* or a free-text *Provenance Description*. 
You can attach this provenance information to your data files in Dataverse as part of the file upload process, by clicking Edit -> Provenance: - -|file-upload-prov-button| - -This will open a window where you can add your Provenance File and/or Provenance Description: - -|file-upload-prov-window| - -A **Provenance File** is the preferred way of submitting provenance information to Dataverse because it provides a detailed and trustworthy record. Provenance files are typically generated during the process of data analysis, using provenance capture tools like provR, RDataTracker, NoWorkFlow, recordr, or CamFlow. - -Once you upload a provenance file, Dataverse will need some additional information in order to accurately connect it to your data file. Once provenance file upload finishes, an input box labeled "Connect entity" will appear under the file. Provenance files contain a list of "entities", which include your data file as well as any objects associated with it (e.g. a chart, a spellchecker, etc.). You will need to tell Dataverse which entity within the provenance file represents your data file. You may type the name of the entity into the box, or click the arrow next to the box and select the entity from a list of all entities in the provenance file. - -For more information on entities and the contents of provenance files, see `the W3C PROV Model Primer `_. - -Once you've uploaded your Provenance File and connected the proper entity, you can hit the Preview button to view the raw JSON of the Provenance File. This can help you confirm that you've uploaded the right file. Be sure to double-check it, because the Provenance File will made *permanent* once it's finalized. At that point you will not be able to *replace*, *remove*, or otherwise *edit* the Provenance File. This ensures that the Provenance File maintains a stable, immutable record of the data file's history. This finalization of the Provenance File happens at different points depending on the status of your data file. If this is a brand new data file that has never been published before, then its associated Provenance File will be made permanent once you publish the dataset. If this data file *has* been published in a previous version of your dataset, then its associated Provenance File will be made permanent as soon as you upload the Provenance File and click "Save Changes" on the warning popup. - -.. COMMENTED OUT UNTIL PROV GRAPH IS ADDED: A **Provenance File** is the preferred way of submitting provenance information to Dataverse, as it allows Dataverse to automatically generate a detailed graph of the data file's provenance. Provenance files are typically generated during the process of data analysis, using provenance capture tools like provR, RDataTracker, NoWorkFlow, recordr, or CamFlow. Each data file in Dataverse can have one provenance file attached to it. Dataverse uses this provenance file to generate a provenance graph that can be viewed under the Provenance tab of the file page. Once you've added your provenance file, you can click the Preview button to make sure it's accurate. - -A **Provenance Description** allows you to add more provenance information in addition to or in place of a provenance file. This is a free-text field that allows you to enter any information you feel might be relevant to those interested in learning about the provenance of your data. 
This might be a good place to describe provenance factors like what operating system you used when working with the data file, what functions or libraries you used, how data was merged into the file, what version of the file you used, etc. The Provenance Description is not as useful or trustworthy as a provenance file, but it can still provide value. Unlike the Provenance File, the Provenance Description is never made permanent: you can always edit, remove, or replace it at any time. - -You can return to attach provenance to your data file later on by clicking the "Add + Edit Metadata" button on the file page, and then clicking the "Edit -> Provenance" button. - -.. COMMENTED OUT UNTIL PROV TAB IS ADDED: -.. You can also attach provenance to your data file later on by clicking the "Add Provenance" button on the file page, under the Provenance tab: -.. -.. **(Insert screenshot of Provenance Tab's "Add Provenance button" here, once that functionality is developed)** - -.. COMMENTED OUT UNTIL PROV GRAPH IS ADDED: -.. Once a data file with an attached provenance file is published, you can see a graph of that file's provenance under the Provenance tab on the file page. - -.. _rsync_upload: - -rsync Upload ------------- - -rsync is typically used for synchronizing files and directories between two different systems, using SSH to connect rather than HTTP. Some Dataverse installations allow uploads using rsync, to facilitate large file transfers in a reliable and secure manner. - -File Upload Script -~~~~~~~~~~~~~~~~~~ - -An rsync-enabled Dataverse installation has a file upload process that differs from the traditional browser-based upload process you may be used to. In order to transfer your data to Dataverse's storage, you will need to complete the following steps: - -1. Create your dataset. In rsync-enabled Dataverse installations, you cannot upload files until the dataset creation process is complete. After you hit "Save Dataset" on the Dataset Creation page, you will be taken to the page for your dataset. - -2. On the dataset page, click the "+ Upload Files" button. This will open a box with instructions and a link to the file upload script. - -3. Make sure your files are ready for upload. You will need to have one directory that you can point the upload script to. All files in this directory and in any subdirectories will be uploaded. The directory structure will be preserved, and will be reproduced when your dataset is downloaded from Dataverse. Note that your data will be uploaded in the form of a data package, and each dataset can only host one such package. Be sure that all files you want to include are present before you upload. - -4. Download the rsync file upload script by clicking the "Download Script" button in the Upload Files instruction box. There are no requirements for where you save the script; put it somewhere you can find it. Downloading the upload script will put a temporary lock on your dataset to prepare it for upload. While your dataset is locked, you will not be able to delete or publish your dataset, or edit its metadata. Once you upload your files and Dataverse processes them, your dataset will be automatically unlocked and these disabled functions will be enabled again. If you have downloaded the script and locked your dataset, but you have then changed your mind and decided *not* to upload files, please contact Support about unlocking your dataset. - -5. To begin the upload process, you will need to run the script you downloaded. 
For this, you will have to go outside your browser and open a terminal (AKA command line) window on your computer. Use the terminal to navigate to the directory where you saved the upload script, and run the command that the Upload Files instruction box provides. This will begin the upload script. Please note that this upload script will expire 7 days after you downloaded it. If it expires and you still need to use it, simply download the script from Dataverse again. - -**Note:** Unlike other operating systems, Windows does not come with rsync supported by default. We have not optimized this feature for Windows users, but you may be able to get it working if you install the right Unix utilities. (If you have found a way to get this feature working for you on Windows, you can contribute it to our project. Please reference our `Contributing to Dataverse `_ document in the root of the source tree.) - -6. Follow the instructions provided by the upload script running in your terminal. It will direct you to enter the full path of the directory where your dataset files are located, and then it will start the upload process. Once you've initiated the upload, if you need to cancel it then you can do so by canceling the script running in your terminal window. If your upload gets interrupted, you can resume it from the same point later. - -7. Once the upload script completes its job, Dataverse will begin processing your data upload and running a checksum validation. This may take some time depending on the file size of your upload. During processing, you will see a blue bar at the bottom of the dataset page that reads "Upload in progress..." - -8. Once processing is complete, you will be notified. At this point you can publish your dataset and your data will be available for download on the dataset page. **Note:** A dataset can only hold one data package. If you need to replace the data package in your dataset, contact Support. - -**Note:** A dataset can only hold one data package. If you need to replace the data package in your dataset, contact Support. - -.. _cloud-storage: - -Cloud Storage + Computing -------------------------- - -Dataverse installations can be configured to facilitate cloud-based storage and/or computing (this feature is considered experimental at this time, and some of the kinks are still being worked out). While the default configuration for Dataverse uses a local file system for storing data, a cloud-enabled Dataverse installation can use a Swift object storage database for its data. This allows users to perform computations on data using an integrated cloud computing environment. - -Cloud Computing -~~~~~~~~~~~~~~~ - -The "Compute" button on dataset and file pages will allow you to compute on a single dataset, multiple datasets, or a single file. You can use it to build a compute batch and go directly to the cloud computing environment that is integrated with Dataverse. - -Cloud Storage Access -~~~~~~~~~~~~~~~~~~~~ - -If you need to access a dataset in a more flexible way than the Compute button provides, then you can use the Cloud Storage Access box on the dataset page to copy the dataset's container name. This unique identifer can then be used to allow direct access to the dataset. - Edit Files ========== @@ -314,6 +280,45 @@ When you access a dataset's file-level permissions page, you will see two sectio **Restricted Files:** In this section, you can see the same information, but broken down by each individual file in your dataset. 
For each file, you can click the "Assign Access" button to see a box where you can grant access to that file to specific users or groups.

+.. _provenance:
+
+Data Provenance
+===============
+
+Data Provenance is a record of where your data came from and how it reached its current form. It describes the origin of a data file, any transformations that have been made to that file, and any persons or organizations associated with that file. A data file's provenance can aid in reproducibility and compliance with legal regulations. Dataverse can help you keep track of your data's provenance. Currently, Dataverse only makes provenance information available to those who have edit permissions on your dataset, but in the near future we plan to expand this feature to make provenance information available to the public. You can track our progress in `this issue `_ on the Dataverse GitHub repository.
+
+.. COMMENTED OUT UNTIL PROV FILE DOWNLOAD IS ADDED: , and make it available to those who need it.
+
+Dataverse accepts provenance information in two forms: a *Provenance File* or a free-text *Provenance Description*. You can attach this provenance information to your data files in Dataverse as part of the file upload process, by clicking Edit -> Provenance:
+
+|file-upload-prov-button|
+
+This will open a window where you can add your Provenance File and/or Provenance Description:
+
+|file-upload-prov-window|
+
+A **Provenance File** is the preferred way of submitting provenance information to Dataverse because it provides a detailed and trustworthy record. Provenance files are typically generated during the process of data analysis, using provenance capture tools like provR, RDataTracker, NoWorkFlow, recordr, or CamFlow.
+
+Once you upload a provenance file, Dataverse will need some additional information in order to accurately connect it to your data file. Once provenance file upload finishes, an input box labeled "Connect entity" will appear under the file. Provenance files contain a list of "entities", which include your data file as well as any objects associated with it (e.g. a chart, a spellchecker, etc.). You will need to tell Dataverse which entity within the provenance file represents your data file. You may type the name of the entity into the box, or click the arrow next to the box and select the entity from a list of all entities in the provenance file.
+
+For more information on entities and the contents of provenance files, see `the W3C PROV Model Primer <https://www.w3.org/TR/prov-primer/>`_.
+
+Once you've uploaded your Provenance File and connected the proper entity, you can hit the Preview button to view the raw JSON of the Provenance File. This can help you confirm that you've uploaded the right file. Be sure to double-check it, because the Provenance File will be made *permanent* once it's finalized. At that point you will not be able to *replace*, *remove*, or otherwise *edit* the Provenance File. This ensures that the Provenance File maintains a stable, immutable record of the data file's history. This finalization of the Provenance File happens at different points depending on the status of your data file. If this is a brand new data file that has never been published before, then its associated Provenance File will be made permanent once you publish the dataset. If this data file *has* been published in a previous version of your dataset, then its associated Provenance File will be made permanent as soon as you upload the Provenance File and click "Save Changes" on the warning popup.
+
+..
COMMENTED OUT UNTIL PROV GRAPH IS ADDED: A **Provenance File** is the preferred way of submitting provenance information to Dataverse, as it allows Dataverse to automatically generate a detailed graph of the data file's provenance. Provenance files are typically generated during the process of data analysis, using provenance capture tools like provR, RDataTracker, NoWorkFlow, recordr, or CamFlow. Each data file in Dataverse can have one provenance file attached to it. Dataverse uses this provenance file to generate a provenance graph that can be viewed under the Provenance tab of the file page. Once you've added your provenance file, you can click the Preview button to make sure it's accurate. + +A **Provenance Description** allows you to add more provenance information in addition to or in place of a provenance file. This is a free-text field that allows you to enter any information you feel might be relevant to those interested in learning about the provenance of your data. This might be a good place to describe provenance factors like what operating system you used when working with the data file, what functions or libraries you used, how data was merged into the file, what version of the file you used, etc. The Provenance Description is not as useful or trustworthy as a provenance file, but it can still provide value. Unlike the Provenance File, the Provenance Description is never made permanent: you can always edit, remove, or replace it at any time. + +You can return to attach provenance to your data file later on by clicking the "Add + Edit Metadata" button on the file page, and then clicking the "Edit -> Provenance" button. + +.. COMMENTED OUT UNTIL PROV TAB IS ADDED: +.. You can also attach provenance to your data file later on by clicking the "Add Provenance" button on the file page, under the Provenance tab: +.. +.. **(Insert screenshot of Provenance Tab's "Add Provenance button" here, once that functionality is developed)** + +.. COMMENTED OUT UNTIL PROV GRAPH IS ADDED: +.. Once a data file with an attached provenance file is published, you can see a graph of that file's provenance under the Provenance tab on the file page. + .. _thumbnails-widgets: Thumbnails + Widgets @@ -417,6 +422,23 @@ To view exactly what has changed, starting from the originally published version Once you have more than one version (this can simply be version 1 and a draft), you can click the "View Details" link next to each summary to learn more about the metadata fields and files that were either added or edited. You can also click the checkboxes to select any two dataset versions, then click the "View Differences" button to open the Version Differences Details popup and compare the differences between them. +.. _cloud-storage: + +Cloud Storage + Computing +========================= + +Dataverse installations can be configured to facilitate cloud-based storage and/or computing (this feature is considered experimental at this time, and some of the kinks are still being worked out). While the default configuration for Dataverse uses a local file system for storing data, a cloud-enabled Dataverse installation can use a Swift object storage database for its data. This allows users to perform computations on data using an integrated cloud computing environment. + +Cloud Computing +--------------- + +The "Compute" button on dataset and file pages will allow you to compute on a single dataset, multiple datasets, or a single file. 
You can use it to build a compute batch and go directly to the cloud computing environment that is integrated with Dataverse.
+
+Cloud Storage Access
+--------------------
+
+If you need to access a dataset in a more flexible way than the Compute button provides, then you can use the Cloud Storage Access box on the dataset page to copy the dataset's container name. This unique identifier can then be used to allow direct access to the dataset.
+
 .. _deaccession:

 Dataset Deaccession

diff --git a/doc/sphinx-guides/source/user/find-use-data.rst b/doc/sphinx-guides/source/user/find-use-data.rst
index 846f2a4eb8f..4d57afa11ec 100755
--- a/doc/sphinx-guides/source/user/find-use-data.rst
+++ b/doc/sphinx-guides/source/user/find-use-data.rst
@@ -80,9 +80,8 @@ You may also download a file from its file page by clicking the Download button

 Tabular data files offer additional options: You can explore using the TwoRavens data visualization tool (or other :doc:`/installation/external-tools` if they have been enabled) by clicking the Explore button, or choose from a number of tabular-data-specific download options available as a dropdown under the Download button.

-
 Tabular Data
-------------
+~~~~~~~~~~~~

 Ingested files can be downloaded in several different ways.

@@ -96,19 +95,21 @@ Ingested files can be downloaded in several different ways.

 - A subset of the columns of the data

+
 .. _rsync_download:

 Downloading a Dataverse Package via rsync
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-rsync is typically used for synchronizing files and directories between two different systems, using SSH to connect rather than HTTP. Some Dataverse installations allow downloads using rsync, to facilitate large file transfers in a reliable and secure manner.
+rsync is typically used for synchronizing files and directories between two different systems. Some Dataverse installations allow downloads using rsync, to facilitate large file transfers in a reliable and secure manner.

-rsync-enabled Dataverse installations have a new file download process that differs from traditional browser-based downloading. Instead of multiple files, each dataset contains a single "Dataverse Package". When you download this package you will receive a folder that contains all files from the dataset, arranged in the exact folder structure in which they were originally uploaded.
+rsync-enabled Dataverse installations offer a new file download process that differs from traditional browser-based downloading. Instead of multiple files, each dataset uploaded via rsync contains a single "Dataverse Package". When you download this package you will receive a folder that contains all files from the dataset, arranged in the exact folder structure in which they were originally uploaded.

-At the bottom of the dataset page, under the **Data Access** tab, instead of a download button you will find the information you need in order to download a Dataverse Package using rsync. If the data is locally available to you (on a shared drive, for example) then you can find it at the folder path under **Local Access**. Otherwise, to download the Dataverse Package you will have to use one of the rsync commands under **Download Access**. There may be multiple commands listed, each corresponding to a different mirror that hosts the Dataverse Package. Go outside your browser and open a terminal (AKA command line) window on your computer. Use the terminal to run the command that corresponds with the mirror of your choice.
It's usually best to choose the mirror that is geographically closest to you. Running this command will initiate the download process. +In a dataset containing a Dataverse Package, at the bottom of the dataset page, under the **Data Access** tab, instead of a download button you will find the information you need in order to download the Dataverse Package using rsync. If the data is locally available to you (on a shared drive, for example) then you can find it at the folder path under **Local Access**. Otherwise, to download the Dataverse Package you will have to use one of the rsync commands under **Download Access**. There may be multiple commands listed, each corresponding to a different mirror that hosts the Dataverse Package. Go outside your browser and open a terminal (AKA command line) window on your computer. Use the terminal to run the command that corresponds with the mirror of your choice. It's usually best to choose the mirror that is geographically closest to you. Running this command will initiate the download process. After you've downloaded the Dataverse Package, you may want to double-check that your download went perfectly. Under **Verify Data**, you'll find a command that you can run in your terminal that will initiate a checksum to ensure that the data you downloaded matches the data in Dataverse precisely. This way, you can ensure the integrity of the data you're working with. + Explore Data ------------ diff --git a/pom.xml b/pom.xml index c2e4f8a82a2..0dccecd0708 100644 --- a/pom.xml +++ b/pom.xml @@ -13,6 +13,8 @@ ${project.build.directory}/endorsed UTF-8 -Xdoclint:none + + 1.11.172 UTC en @@ -27,6 +29,7 @@ 5.3.1 5.3.1 1.3.1 + 2.22.0 @@ -383,7 +386,13 @@ org.mockito mockito-core - 2.22.0 + ${mockito.version} + test + + + org.mockito + mockito-junit-jupiter + ${mockito.version} test diff --git a/scripts/api/setup-all.sh b/scripts/api/setup-all.sh index b5b36516806..eec2dba0a72 100755 --- a/scripts/api/setup-all.sh +++ b/scripts/api/setup-all.sh @@ -54,6 +54,7 @@ curl -X PUT -d "FK2/" "$SERVER/admin/settings/:Shoulder" curl -X PUT -d DataCite "$SERVER/admin/settings/:DoiProvider" curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy +curl -X PUT -d 'native/http' $SERVER/admin/settings/:UploadMethods echo echo "Setting up the admin user (and as superuser)" diff --git a/scripts/database/upgrades/upgrade_v4.9.1_to_v4.9.2.sql b/scripts/database/upgrades/upgrade_v4.9.1_to_v4.9.2.sql index 7230c16e90f..0108298df68 100644 --- a/scripts/database/upgrades/upgrade_v4.9.1_to_v4.9.2.sql +++ b/scripts/database/upgrades/upgrade_v4.9.1_to_v4.9.2.sql @@ -1,2 +1,3 @@ +INSERT INTO setting(name, content) VALUES (':UploadMethods', 'native/http'); ALTER TABLE datavariable ADD COLUMN factor BOOLEAN; ALTER TABLE ingestrequest ADD COLUMN forceTypeCheck BOOLEAN; diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 3fbf30317d8..2ba9743f70f 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1247,7 +1247,9 @@ dataset.keywordDisplay.title=Keyword dataset.subjectDisplay.title=Subject dataset.contact.tip=Use email button above to contact. dataset.asterisk.tip=Asterisks indicate required fields -dataset.message.uploadFiles=Upload Dataset Files - You can drag and drop files from your desktop, directly into the upload widget. 
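The mockito-junit-jupiter artifact added to pom.xml above supplies Mockito's JUnit 5 (Jupiter) extension. A minimal sketch of the test style it enables is below; the SettingsLookup type is illustrative only, not a class from this changeset:

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.mockito.Mockito.when;

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.ExtendWith;
    import org.mockito.Mock;
    import org.mockito.junit.jupiter.MockitoExtension;

    // MockitoExtension initializes @Mock fields before each test, replacing the
    // JUnit 4 MockitoJUnitRunner / MockitoAnnotations.initMocks boilerplate.
    @ExtendWith(MockitoExtension.class)
    class MockitoJupiterSketchTest {

        interface SettingsLookup { // hypothetical collaborator, for this sketch only
            String get(String name);
        }

        @Mock
        private SettingsLookup settings;

        @Test
        void stubbedMockReturnsConfiguredValue() {
            when(settings.get(":UploadMethods")).thenReturn("native/http");
            assertEquals("native/http", settings.get(":UploadMethods"));
        }
    }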
+dataset.message.uploadFiles.label=Upload Dataset Files +dataset.message.uploadFilesSingle.message=For more information about supported file formats, please refer to the User Guide. +dataset.message.uploadFilesMultiple.message=Multiple file upload/download methods are available for this dataset. Once you upload a file using one of these methods, your choice will be locked in for this dataset. dataset.message.editMetadata=Edit Dataset Metadata - Add more metadata about this dataset to help others easily find it. dataset.message.editTerms=Edit Dataset Terms - Update this dataset's terms of use. dataset.message.locked.editNotAllowedInReview=Dataset cannot be edited due to In Review dataset lock. @@ -1323,25 +1325,24 @@ dataset.privateurl.roleassigeeTitle=Private URL Enabled dataset.privateurl.createdSuccess=Success! dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset. dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}. - - +file.count.one=1 File file.count={0} to {1} of {2} {2, choice, 0#Files|1#File|2#Files} file.count.shown={0} {0, choice, 0#Files Selected|1#File|2#Files} - - file.clearSelection=Clear selection. file.numFilesSelected={0} {0, choice, 0#files are|1#file is|2#files are} currently selected. file.selectAllFiles=Select all {0} files in this dataset. file.dynamicCounter.filesPerPage=Files Per Page - - file.selectToAddBtn=Select Files to Add file.selectToAdd.tipLimit=File upload limit is {0} per file. -file.selectToAdd.tipMoreInformation=For more information about supported file formats, please refer to the User Guide. +file.selectToAdd.tipMoreInformation=Select files or drag and drop into the upload widget. file.selectToAdd.dragdropMsg=Drag and drop files here. -file.createUploadDisabled=Once you have saved your dataset, you can upload your data using the "Upload Files" button on the dataset page. For more information about supported file formats, please refer to the User Guide. +file.createUploadDisabled=Upload files using rsync via SSH. This method is recommended for large file transfers. The upload script will be available on the Upload Files page once you save this dataset. +file.fromHTTP=Upload with HTTP via your browser file.fromDropbox=Upload from Dropbox -file.fromDropbox.tip=Files can also be uploaded directly from Dropbox. +file.fromDropbox.tip=Select files from Dropbox. +file.fromRsync=Upload with rsync + SSH via Data Capture Module (DCM) +file.api.httpDisabled=File upload via HTTP is not available for this installation of Dataverse. +file.api.alreadyHasPackageFile=File upload via HTTP disabled since this dataset already contains a package file. file.replace.original=Original File file.editFiles=Edit Files file.editFilesSelected=Edit @@ -1380,16 +1381,21 @@ file.cloudStorageAccess.tip=The container name for this dataset needed to access file.cloudStorageAccess.help=To directly access this data in the {2} cloud environment, use the container name in the Cloud Storage Access box below. To learn more about the cloud environment, visit the Cloud Storage Access section of the User Guide. file.copy=Copy file.compute=Compute -file.rsyncUpload.info=Follow these steps to upload your data. To learn more about the upload process and how to prepare your data, please refer to the User Guide. -file.rsyncUpload.noScriptAvailable=Rsync script not available! -file.rsyncUpload.filesExist=You can not upload additional files to this dataset. 
+file.rsyncUpload.info=Upload files using rsync + SSH. This method is recommended for large file transfers. Follow the steps below to upload your data. (User Guide - rsync Upload). +file.rsyncUpload.filesExist=You cannot upload additional files to this dataset. A dataset can only hold one data package. If you need to replace the data package in this dataset, please contact {0}. +file.rsyncUpload.noScriptBroken=The Data Capture Module failed to generate the rsync script. Please contact {0}. +file.rsyncUpload.noScriptBusy=Currently generating rsync script. If the script takes longer than ten minutes to generate, please contact {0}. file.rsyncUpload.step1=Make sure your data is stored under a single directory. All files within this directory and its subdirectories will be uploaded to your dataset. file.rsyncUpload.step2=Download this file upload script: -file.rsyncUpload.step2.downloadScriptButton=Download Script +file.rsyncUpload.step2.downloadScriptButton=Download DCM Script file.rsyncUpload.step3=Open a terminal window in the same directory you saved the script and run this command: bash ./{0} file.rsyncUpload.step4=Follow the instructions in the script. It will ask for a full path (beginning with "/") to the directory containing your data. Note: this script will expire after 7 days. file.rsyncUpload.inProgressMessage.summary=DCM File Upload file.rsyncUpload.inProgressMessage.details=This dataset is locked until the data files have been transferred and verified. +file.rsyncUpload.httpUploadDisabledDueToRsyncFileExisting=HTTP upload is disabled for this dataset because you have already uploaded files via rsync. If you would like to switch to HTTP upload, please contact {0}. +file.rsyncUpload.httpUploadDisabledDueToRsyncFileExistingAndPublished=HTTP upload is disabled for this dataset because you have already uploaded files via rsync and published the dataset. +file.rsyncUpload.rsyncUploadDisabledDueFileUploadedViaHttp=Upload with rsync + SSH is disabled for this dataset because you have already uploaded files via HTTP. If you would like to switch to rsync upload, then you must first remove all uploaded files from this dataset. Once this dataset is published, the chosen upload method is permanently locked in. +file.rsyncUpload.rsyncUploadDisabledDueFileUploadedViaHttpAndPublished=Upload with rsync + SSH is disabled for this dataset because you have already uploaded files via HTTP and published the dataset. 
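The new keys above carry MessageFormat placeholders such as {0} (the support contact). A minimal sketch of the lookup-and-format pattern that consumes them, in the spirit of the BundleUtil.getStringFromBundle calls introduced later in this changeset (this mirrors, but is not, the project's actual implementation; the email address is illustrative):

    import java.text.MessageFormat;
    import java.util.Locale;
    import java.util.ResourceBundle;

    public class BundleLookupSketch {
        // Resolve a key from Bundle.properties, then substitute the {0}, {1}, ... arguments.
        static String getStringFromBundle(String key, Object... args) {
            ResourceBundle bundle = ResourceBundle.getBundle("Bundle", Locale.getDefault());
            return MessageFormat.format(bundle.getString(key), args);
        }

        public static void main(String[] args) {
            // file.rsyncUpload.filesExist ends with "...please contact {0}."
            System.out.println(getStringFromBundle("file.rsyncUpload.filesExist", "support@example.edu"));
        }
    }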
file.metaData.dataFile.dataTab.variables=Variables file.metaData.dataFile.dataTab.observations=Observations diff --git a/src/main/java/Bundle_fr.properties b/src/main/java/Bundle_fr.properties index 8b6ec5df332..8d37eb21f23 100644 --- a/src/main/java/Bundle_fr.properties +++ b/src/main/java/Bundle_fr.properties @@ -12,6 +12,7 @@ restricted=En acc restrictedaccess=Accès réservé find=Chercher search=Recherche +language=Langue unpublished=Non publié cancel=Annuler ok=ok @@ -39,9 +40,11 @@ remove=Supprimer done=Terminé editor=Collaborateur manager=Gestionnaire -curator=Intendant des données +curator=Curateur explore=Explorer download=Télécharger +downloadOriginal=Format original +downloadArchival=Format d'archivage (.tab) deaccession=Retrait share=Partager link=Lier @@ -141,7 +144,7 @@ contact.context.dataset.intro={0}\n\nVous venez de recevoir le message suivant d contact.context.dataset.ending=\n\n---\n\n{0}\n{1}\n\nAccéder à l''ensemble de données {2}/dataset.xhtml?persistentId={3}\n\nVous avez reçu ce courriel car vous avez été enregistré en tant que personne-ressource pour l''ensemble de données. Si vous pensez qu''il s''agit d''une erreur, veuillez contacter {4} à {5}. Pour répondre directement à la personne qui a envoyé le message, répondez simplement à ce courriel. contact.context.dataset.noContact=Il n'y a pas d'adresse de contact enregistrée pour ce ensemble de données. Par conséquent ce message est envoyé à l'adresse du système.\n\n---\n\n contact.context.file.intro={0}\n\nVous venez de recevoir le message suivant de {1} via le fichier hébergé {2} nommé «\u00A0{3}\u00A0» provenant de l''ensemble de données nommé «\u00A0{4}\u00A0» ({5})\u00A0:\n\n---\n\n -contact.context.file.ending=\n\n---\n\n{0}\n{1}\n\nAccéder au fichier {2}/file.xhtml?fileId={3}\n\nVous avez reçu ce courriel car vous avez été enregistré en tant que personne-ressource pour l''ensemble de données. Si vous pensez qu''il s''agit d'une erreur, veuillez contacter {4} à {5}. Pour répondre directement à la personne qui a envoyé le message, répondez simplement à ce courriel. +contact.context.file.ending=\n\n---\n\n{0}\n{1}\n\nAccéder au fichier {2}/file.xhtml?fileId={3}\n\nVous avez reçu ce courriel car vous avez été enregistré en tant que personne-ressource pour l''ensemble de données. Si vous pensez qu''il s''agit d''une erreur, veuillez contacter {4} à {5}. Pour répondre directement à la personne qui a envoyé le message, répondez simplement à ce courriel. contact.context.support.intro={0},\n\nLe message suivant a été envoyé depuis {1}.\n\n---\n\n contact.context.support.ending=\n\n---\n\nMessage envoyé depuis le formulaire de demande de soutien. @@ -188,7 +191,7 @@ notification.access.granted.fileDownloader.additionalDataset={0} Vous avez maint notification.access.revoked.dataverse=Votre rôle dans {0} a été retiré. notification.access.revoked.dataset=Votre rôle dans {0} a été retiré. notification.access.revoked.datafile=Votre rôle dans {0} a été retiré. -notification.checksumfail=La validation de la somme de contrôle pour l''ensemble de données {0} a échoué pour un ou plus d''un fichier(s) téléversé(s). Veuillez relancer le script de téléversement. Si le problème persiste, prière de consulter le service de soutien. +notification.checksumfail=La validation de la somme de contrôle pour l''ensemble de données {1} a échoué pour un ou plus d''un fichier(s) téléversé(s). Veuillez relancer le script de téléversement. Si le problème persiste, prière de contacter le service de soutien. 
notification.mail.import.filesystem=L''ensemble de données {2} ({0}/dataset.xhtml?persistentId={1}) a bien été téléversé et vérifié. notification.import.filesystem=L''ensemble de données {1} a bien été téléversé et vérifié. notification.import.checksum={1}, l''ensemble de données a ajouté les sommes de contrôle des fichiers par l''entremise d''un traitement en lot. @@ -229,7 +232,7 @@ user.newPassword=Nouveau mot de passe authenticationProvidersAvailable.tip={0}Il n''y a aucun système d''authentification actif{1}Si vous êtes administrateur système, veuillez en autoriser un au moyen de l''API.{2}Si vous n''êtes pas administrateur système, veuillez communiquer avec celui de votre établissement. passwdVal.passwdReq.title=Votre mot de passe doit contenir\u00A0: -passwdVal.passwdReq.goodStrength =Les mots de passe d'un minimum de {0} caractères sont exempts de tout autre exigence +passwdVal.passwdReq.goodStrength =Les mots de passe d''un minimum de {0} caractères sont exempts de toute autre exigence passwdVal.passwdReq.lengthReq =Au minimum {0} caractères passwdVal.passwdReq.characteristicsReq =Au moins un caractère provenant de {0} des types suivants\u00A0: passwdVal.passwdReq.notInclude =Il ne doit pas contenir\u00A0: @@ -369,13 +372,13 @@ apitoken.regenerateBtn=Cr dashboard.title=Tableau de bord dashboard.card.harvestingclients.header=Clients de moissonnage dashboard.card.harvestingclients.btn.manage=Gestion des clients -dashboard.card.harvestingclients.clients={0, choice, 0#Clients|1#Client|2#Clients} -dashboard.card.harvestingclients.datasets={0, choice, 0#Ensembles de données|1#Ensemble de donnéest|2#Ensembles de données} +dashboard.card.harvestingclients.clients={0, choice, 0#Client|1#Client|2#Clients} +dashboard.card.harvestingclients.datasets={0, choice, 0#Ensemble de données|1#Ensemble de données|2#Ensembles de données} dashboard.card.harvestingserver.header=Serveur de moissonnage dashboard.card.harvestingserver.enabled=Serveur OAI activé dashboard.card.harvestingserver.disabled=Serveur OAI désactivé dashboard.card.harvestingserver.status=Statut -dashboard.card.harvestingserver.sets={0, choice, 0#Ensembles|1#Ensemble|2#Ensembles} +dashboard.card.harvestingserver.sets={0, choice, 0#Ensemble|1#Ensemble|2#Ensembles} dashboard.card.harvestingserver.btn.manage=Gestion du serveur dashboard.card.metadataexport.header=Exportation des métadonnées dashboard.card.metadataexport.message=L''exportation des métadonnées de l''ensemble de données n''est disponible que via l''API de {0}. Pour en apprendre davantage, consultez le {1}Guide API{2} du {0}. @@ -486,11 +489,13 @@ harvestserver.noSets.how.tip2=Une fois le service activ harvestserver.noSets.getStarted=Pour commencer, activez le serveur OAI et cliquez sur le bouton «\u00A0Ajouter un ensemble (set)\u00A0». Pour en apprendre plus sur le moissonnage, consultez la section moissonnage du guide d''utilisation.
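The doubled apostrophes introduced throughout this file (d''un rather than d'un) are not cosmetic: these strings are rendered with java.text.MessageFormat, in which a lone single quote starts a quoted literal and silently disables placeholder substitution. A short self-contained demonstration:

    import java.text.MessageFormat;

    public class ApostropheEscapeDemo {
        public static void main(String[] args) {
            // A lone quote is consumed as a quoting delimiter; the rest of the pattern
            // is treated literally, so the placeholder survives unformatted:
            System.out.println(MessageFormat.format("l'identifiant {0} est introuvable", 42));
            // -> lidentifiant {0} est introuvable

            // A doubled quote is an escaped literal apostrophe; substitution works:
            System.out.println(MessageFormat.format("l''identifiant {0} est introuvable", 42));
            // -> l'identifiant 42 est introuvable
        }
    }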
harvestserver.btn.add=Ajouter un ensemble (set) harvestserver.tab.header.spec=setSpec OAI (identifiant OAI de l'ensemble) +harvestserver.tab.col.spec.default=DÉFAUT harvestserver.tab.header.description=Description harvestserver.tab.header.definition=Définition de la requête +harvestserver.tab.col.definition.default=Tous les ensembles de données locaux publiés harvestserver.tab.header.stats=Ensembles de données harvestserver.tab.col.stats.empty=Aucun enregistrement (ensemble vide) -harvestserver.tab.col.stats.results={0} {0, choice, 0#Ensembles de données|1#Ensemble de données|2#Ensembles de données} ({1} {1, choice, 0#enregistrements|1#enregistrement|2#enregistrements} exporté(s), {2} marqué(s) comme supprimé(s)) +harvestserver.tab.col.stats.results={0} {0, choice, 0#Ensemble de données|1#Ensemble de données|2#Ensembles de données} ({1} {1, choice, 0#enregistrement|1#enregistrement|2#enregistrements} exporté(s), {2} marqué(s) comme supprimé(s)) harvestserver.tab.header.action=Opérations harvestserver.tab.header.action.btn.export=Lancer l'exportation harvestserver.actions.runreexport.success=La tâche asynchrone de réexportation de l''ensemble OAI «\u00A0{0}\u00A0» a bien été lancée (veuillez recharger la page pour suivre la progression de l''exportation). @@ -505,6 +510,7 @@ harvestserver.newSetDialog.setspec=Nom/setSpec OAI harvestserver.newSetDialog.setspec.tip=Un nom unique (OAI setSpec) identifiant cet ensemble. harvestserver.newSetDialog.setspec.helptext=Se compose de lettres, de chiffres, de traits de soulignement (_) et de tirets (-). harvestserver.editSetDialog.setspec.helptext=Le nom ne peut pas être modifié une fois l'ensemble créé. +harvestserver.editSetDialog.setspec.helptext.default=Ceci est l'ensemble par défaut, sans nom harvestserver.newSetDialog.setspec.required=Le nom (setSpec OAI) ne peut être vide! harvestserver.newSetDialog.setspec.invalid=Le nom (setSpec OAI) ne peut contenir que des lettres, des chiffres, des traits de soulignement (_) et des tirets (-). harvestserver.newSetDialog.setspec.alreadyused=Ce nom d'ensemble (setSpec OAI) est déjà utilisé. @@ -514,6 +520,7 @@ harvestserver.newSetDialog.setspec.superUser.required=Seuls les super-utilisateu harvestserver.newSetDialog.setdescription=Description harvestserver.newSetDialog.setdescription.tip=Fournir une brève description de cet ensemble OAI. harvestserver.newSetDialog.setdescription.required=La description de l'ensemble ne peut être vide! +harvestserver.newSetDialog.setdescription.default=L'ensemble par défaut, sans nom. Le serveur OAI retournera les enregistrements de cet ensemble quand aucun argument "setspec" n'est spécifié par le client. harvestserver.newSetDialog.setquery=Requête de recherche harvestserver.newSetDialog.setquery.tip=Requête de recherche qui définit le contenu de l'ensemble de données. 
harvestserver.newSetDialog.setquery.helptext=Exemple de requête\u00A0: authorName:king @@ -664,7 +671,7 @@ dataverse.edit=Modifier dataverse.option.generalInfo=Renseignements généraux dataverse.option.themeAndWidgets=Thème + widgets dataverse.option.featuredDataverse=Dataverses en vedette -dataverse.option.permissions=Permissions +dataverse.option.permissions=Autorisations dataverse.option.dataverseGroups=Groupes dataverse.option.datasetTemplates=Modèles d'ensembles de données dataverse.option.datasetGuestbooks=Registre des visiteurs de l'ensemble de données @@ -677,11 +684,16 @@ dataverse.contact=Communiquer par courriel avec Dataverse dataset.link=Lier l'ensemble de données dataverse.link=Lier le dataverse dataverse.link.btn.tip=Lier à votre dataverse -dataverse.link.yourDataverses={0, choice, 1#Votre dataverse|2#Vos dataverses} +dataverse.link.yourDataverses=Votre dataverse +dataverse.link.yourDataverses.inputPlaceholder=Entrer le nom du dataverse dataverse.link.save=Enregistrer la liaison du dataverse dataset.link.save=Enregistrer la liaison de l'ensemble de données +dataset.link.not.to.owner=Impossible de lier un ensemble de données à son dataverse +dataset.link.not.to.parent.dataverse=Impossible de lier un ensemble de données à ses dataverses parents +dataset.link.not.published=Impossible de lier un ensemble de données qui n'a pas été publié dataverse.link.dataverse.choose=Déterminer avec lequel de vos dataverses vous souhaitez lier ce dataverse. -dataverse.link.dataset.choose=Déterminer avec lequel de vos dataverses vous souhaitez lier cet ensemble de données. +dataverse.link.dataset.choose=Déterminer avec lequel de vos dataverses vous souhaitez lier cet ensemble de données. Si vous devez supprimer ce lien à l''avenir, veuillez contacter {0}. +dataverse.link.dataset.none=Il n'y a pas de dataverses disponibles pour créer une liaison. dataverse.link.no.choice=Vous avez un dataverse dans lequel vous pouvez ajouter des ensembles de données et des dataverses liés. dataverse.link.no.linkable=Vous devez posséder votre propre dataverse pour pouvoir lier un dataverse ou un ensemble de données. Cliquer sur le bouton «\u00A0Ajouter des données\u00A0» à la page d'accueil pour commencer. dataverse.link.no.linkable.remaining=Vous avez déjà lié tous vos dataverses admissibles. @@ -766,12 +778,12 @@ dataverse.results.empty.browse.guest.zero=Ce dataverse ne contient actuellement dataverse.results.empty.browse.guest.hidden=Ce dataverse ne contient aucun dataverse. Veuillez vous authentifier pour voir si vous pouvez y ajouter du contenu. dataverse.results.empty.browse.loggedin.noperms.zero=Ce dataverse ne contient actuellement aucun dataverse, ensemble de données ou fichier. Vous pouvez utiliser le bouton «\u00A0Envoyer un courriel à la personne-ressource du dataverse\u00A0» ci-dessus pour toute question sur ce dataverse ou pour effectuer une demande d'accès à ce dataverse. dataverse.results.empty.browse.loggedin.noperms.hidden=Il n'y a aucun dataverse dans ce dataverse. -dataverse.results.empty.browse.loggedin.perms.zero=Ce dataverse ne contient actuellement aucun dataverse, ensemble de données ou fichier. Vous pouvez en ajouter à l'aide du bouton «\u00A0Ajouter des données\u00A0» se trouvant sur cette page. +dataverse.results.empty.browse.loggedin.perms.zero=Ce dataverse ne contient actuellement aucun dataverse, ensemble de données ou fichier. Vous pouvez en ajouter à l''aide du bouton «\u00A0Ajouter des données\u00A0» se trouvant sur cette page. 
account.results.empty.browse.loggedin.perms.zero=Il n''y a aucun dataverse, ensemble de données ou fichier associé à votre compte. Vous pouvez ajouter un dataverse ou un ensemble de données en cliquant sur le bouton «\u00A0Ajouter des données\u00A0» ci-dessus. Pour en apprendre davantage sur l''ajout de données, consultez le guide d''utilisation. dataverse.results.empty.browse.loggedin.perms.hidden=Il n'y a aucun dataverse dans ce dataverse. Vous pouvez en ajouter à l'aide du bouton «\u00A0Ajouter des données\u00A0» qui se trouve sur cette page. dataverse.results.empty.link.technicalDetails=Plus de détails techniques dataverse.search.facet.error=Une erreur s''est produite avec vos paramètres de recherche. Veuillez supprimer votre recherche et essayer de nouveau. -dataverse.results.count.toofresults={0} à {1} de {2} {2, choice, 0#résultats|1#résultat|2#résultats} +dataverse.results.count.toofresults={0} à {1} de {2} {2, choice, 0#résultat|1#résultat|2#résultats} dataverse.results.paginator.current=(Actuel) dataverse.results.btn.sort=Tri dataverse.results.btn.sort.option.nameAZ=Nom (A-Z) @@ -852,8 +864,8 @@ dataverse.widgets.advanced.success.message=Mise dataverse.widgets.advanced.failure.message=L'URL du site web personnel associé à ce dataverse n'a pas été mis à jour. # permissions-manage.xhtml -dataverse.permissions.title=Permissions -dataverse.permissions.dataset.title=Permissions pour l'ensemble de données +dataverse.permissions.title=Autorisations +dataverse.permissions.dataset.title=Autorisations sur l'ensemble de données dataverse.permissions.access.accessBtn=Modifier l'accès dataverse.permissions.usersOrGroups=Utilisateurs/Groupes dataverse.permissions.requests=Requêtes @@ -874,7 +886,7 @@ dataverse.permissions.roles.edit=Modifier le r dataverse.permissions.roles.copy=Copier le rôle # permissions-manage-files.xhtml -dataverse.permissionsFiles.title=Permissions des fichiers à accès réservé +dataverse.permissionsFiles.title=Autorisations sur les fichiers à accès réservé dataverse.permissionsFiles.usersOrGroups=Utilisateurs/Groupes dataverse.permissionsFiles.usersOrGroups.assignBtn=Accorder l'accès aux utilisateurs/groupes dataverse.permissionsFiles.usersOrGroups.description=Tous les utilisateurs et les groupes qui ont accès aux fichiers à accès réservé de cet ensemble de données. @@ -887,7 +899,7 @@ dataverse.permissionsFiles.usersOrGroups.file=Fichier dataverse.permissionsFiles.usersOrGroups.files=Fichiers dataverse.permissionsFiles.usersOrGroups.invalidMsg=Aucun utilisateur ou groupe n'a accès aux fichiers réservés de cet ensemble de données. dataverse.permissionsFiles.files=Fichiers à accès réservé -dataverse.permissionsFiles.files.label={0, choice, 0#Fichiers à accès réservé|1#Fichier à accès réservé|2#Fichiers à accès réservé} +dataverse.permissionsFiles.files.label={0, choice, 0#Fichier à accès réservé|1#Fichier à accès réservé|2#Fichiers à accès réservé} dataverse.permissionsFiles.files.description=Tous les fichiers en accès réservé dans cet ensemble de données. 
dataverse.permissionsFiles.files.tabHeader.fileName=Nom du fichier dataverse.permissionsFiles.files.tabHeader.roleAssignees=Utilisateurs/Groupes @@ -899,7 +911,7 @@ dataverse.permissionsFiles.files.public=Public dataverse.permissionsFiles.files.restricted=Accès réservé dataverse.permissionsFiles.files.roleAssignee=Utilisateur/Groupe dataverse.permissionsFiles.files.roleAssignees=Utilisateurs/Groupes -dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0#Utilisateurs/Groupes|1#Utilisateur/Groupe|2#Utilisateurs/Groupes} +dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0#Utilisateur/Groupe|1#Utilisateur/Groupe|2#Utilisateurs/Groupes} dataverse.permissionsFiles.files.assignBtn=Accorder l'accès dataverse.permissionsFiles.files.invalidMsg=Cet ensemble de données ne contient aucun fichier en accès réservé. dataverse.permissionsFiles.files.requested=Fichiers demandés @@ -929,17 +941,17 @@ dataverse.permissions.Q1.answer4=Toute personne poss dataverse.permissions.Q2=Lorsqu'un utilisateur ajoute un nouvel ensemble de données à ce dataverse, quel rôle doit-il lui être attribué automatiquement sur cet ensemble de données? dataverse.permissions.Q2.answer.editor.description=\u2014 Modifier les métadonnées, téléverser les fichiers et modifier les fichiers, modifier les conditions, le registre des visiteurs, soumettre les ensembles de données aux fins d'examen. dataverse.permissions.Q2.answer.manager.description=\u2014 Modifier les métadonnées, téléverser les fichiers et modifier les fichiers, modifier les conditions, le registre des visiteurs, les restrictions relatives aux fichiers (accès aux fichiers + utilisation) -dataverse.permissions.Q2.answer.curator.description=\u2014 Modifier les métadonnées, téléverser les fichiers et modifier les fichiers, modifier les conditions, le registre des visiteurs, les restrictions relatives aux fichiers (accès aux fichiers + utilisation), modifier les permissions/assigner les rôles + publier +dataverse.permissions.Q2.answer.curator.description=\u2014 Modifier les métadonnées, téléverser les fichiers et modifier les fichiers, modifier les conditions, le registre des visiteurs, les restrictions relatives aux fichiers (accès aux fichiers + utilisation), modifier les autorisations/assigner les rôles + publier permission.anyoneWithAccount=Toute personne possédant un compte Dataverse # roles-assign.xhtml dataverse.permissions.usersOrGroups.assignDialog.header=Assigner le rôle -dataverse.permissions.usersOrGroups.assignDialog.description=Accorder les permissions aux utilisateurs et aux groupes en leur attribuant un rôle. +dataverse.permissions.usersOrGroups.assignDialog.description=Accorder des autorisations aux utilisateurs et aux groupes en leur attribuant un rôle. dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=Utilisateurs/groupes dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=Indiquer le nom de l'utilisateur ou du groupe. dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=Aucun résultat dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=Veuillez sélectionner au moins un utilisateur ou un groupe. -dataverse.permissions.usersOrGroups.assignDialog.role.description=Voici les permissions associées au rôle sélectionné. +dataverse.permissions.usersOrGroups.assignDialog.role.description=Voici les autorisations associées au rôle sélectionné. 
dataverse.permissions.usersOrGroups.assignDialog.role.warning=L''attribution du rôle {0} signifie que le ou les utilisateurs auront également le rôle {0} qui s''applique à tous les {1} dans ce {2}. dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=Veuillez sélectionner un rôle à attribuer. @@ -951,8 +963,8 @@ dataverse.permissions.roles.id=Identifiant dataverse.permissions.roles.id.title=Indiquer un nom pour l'alias. dataverse.permissions.roles.description.title=Décrire le rôle (1000 caractères maximum). dataverse.permissions.roles.description.counter={0} caractère(s) restant(s) -dataverse.permissions.roles.roleList.header=Permissions du rôle -dataverse.permissions.roles.roleList.authorizedUserOnly=Les permissions comportant l'icône Information indiquent que les actions peuvent être faites par des utilisateurs non authentifiés dans Dataverse. +dataverse.permissions.roles.roleList.header=Autorisations du rôle +dataverse.permissions.roles.roleList.authorizedUserOnly=Les autorisations comportant l'icône Information indiquent que les actions peuvent être faites par des utilisateurs non authentifiés dans Dataverse. # explicitGroup-new-dialog.xhtml dataverse.permissions.explicitGroupEditDialog.title.new=Créer un groupe @@ -1019,11 +1031,11 @@ dataset.create.add.terms=Sauvegarder et ajouter des conditions d'utilisation dataverse.manageGroups.pageTitle=Gérer des groupes Dataverse dataverse.manageGroups.createBtn=Créer un groupe dataverse.manageGroups.noGroups.why.header=Pourquoi faire appel aux groupes? -dataverse.manageGroups.noGroups.why.reason1=Les groupes vous permettent d'attribuer des rôles et permissions à plusieurs personnes à la fois. +dataverse.manageGroups.noGroups.why.reason1=Les groupes vous permettent d'attribuer des rôles et autorisations à plusieurs personnes à la fois. dataverse.manageGroups.noGroups.why.reason2=Vous pouvez faire appel aux groupes pour la gestion de différents types d'utilisateurs (étudiants, collaborateurs, etc.). dataverse.manageGroups.noGroups.how.header=Comment utiliser les groupes dataverse.manageGroups.noGroups.how.tip1=Un groupe peut comprendre à la fois des individus et d'autres groupes. -dataverse.manageGroups.noGroups.how.tip2=Vous pouvez attribuer des permissions à un groupe dans le volet «\u00A0Permissions\u00A0». +dataverse.manageGroups.noGroups.how.tip2=Vous pouvez attribuer des autorisations à un groupe dans le volet «\u00A0Autorisations\u00A0». dataverse.manageGroups.noGroups.getStarted=Pour débuter, cliquez sur le bouton «\u00A0Créer un groupe\u00A0» ci-dessus. dataverse.manageGroups.tab.header.name=Nom du groupe dataverse.manageGroups.tab.header.id=Identifiant du groupe @@ -1059,7 +1071,7 @@ dataset.manageGuestbooks.noGuestbooks.why.reason1=Les registres de visiteurs vou dataset.manageGuestbooks.noGuestbooks.why.reason2=Vous pouvez télécharger les données recueillies dans les registres de visiteurs activés afin de pouvoir les enregistrer en dehors de Dataverse. dataset.manageGuestbooks.noGuestbooks.how.header=Comment utiliser les registres de visiteurs dataset.manageGuestbooks.noGuestbooks.how.tip1=Un registre des visiteurs peut être utilisé pour plusieurs ensembles de données, mais un seul registre des visiteurs peut être utilisé pour un ensemble de données. -dataset.manageGuestbooks.noGuestbooks.how.tip2=Les questions personnalisées peuvent comprendre des réponses en texte libre ou des questions à choice de réponses. 
+dataset.manageGuestbooks.noGuestbooks.how.tip2=Les questions personnalisées peuvent comprendre des réponses en texte libre ou des questions à choix de réponses. dataset.manageGuestbooks.noGuestbooks.getStarted=Pour commencer, cliquez ci-dessus sur le bouton «\u00A0Créer un registre des visiteurs pour l''ensemble de données\u00A0». Pour en apprendre davantage sur les registres de visiteurs, visitez la section registre des visiteurs du guide d''utilisation. dataset.manageGuestbooks.tab.header.name=Nom du registre des visiteurs dataset.manageGuestbooks.tab.header.date=Date de création @@ -1098,8 +1110,8 @@ dataset.guestbooksResponses.date=Date dataset.guestbooksResponses.type=Type dataset.guestbooksResponses.file=Fichier dataset.guestbooksResponses.tip.title=Entrées du registre de visiteur -dataset.guestbooksResponses.count.responses={0} {0, choice, 0#Entrées|1#Entrée|2#Entrées} -dataset.guestbooksResponses.count.toofresults={0} à {1} de {2} {2, choice, 0#Entrées|1#Entrée|2#Entrées} +dataset.guestbooksResponses.count.responses={0} {0, choice, 0#Entrée|1#Entrée|2#Entrées} +dataset.guestbooksResponses.count.toofresults={0} à {1} de {2} {2, choice, 0#Entrée|1#Entrée|2#Entrées} dataset.guestbooksResponses.tip.downloadascsv=Cliquer sur «\u00A0Télécharger les entrées\u00A0» pour télécharger dans un fichier CSV les entrées recueillies dans le registre de visiteurs de ce dataverse. Pour naviguer et analyser les données ainsi collectées, nous vous recommandons d'importer ce fichier CSV dans Excel, Google Sheets ou un logiciel similaire. dataset.guestbooksResponses.tooManyResponses.message=Note\u00A0: ce registre de visiteurs contient trop d''entrées pour qu''elles puissent être affichées entièrement sur cette page. Seules les {0} entrées les plus récentes sont affichées ci-dessous. Cliquez sur «\u00A0Télécharger les entrées\u00A0» pour télécharger toutes les entrées recueillies ({1} au total) sous forme de fichier CSV. @@ -1133,7 +1145,7 @@ dataset.editBtn=Modifier dataset.editBtn.itemLabel.upload=Fichiers (téléverser) dataset.editBtn.itemLabel.metadata=Métadonnées dataset.editBtn.itemLabel.terms=Conditions d'utilisation -dataset.editBtn.itemLabel.permissions=Permissions +dataset.editBtn.itemLabel.permissions=Autorisations dataset.editBtn.itemLabel.thumbnailsAndWidgets=Vignettes + Widgets dataset.editBtn.itemLabel.privateUrl=URL privé dataset.editBtn.itemLabel.permissionsDataset=Ensemble de données @@ -1150,7 +1162,7 @@ metrics.title=Statistiques metrics.title.tip=Afficher plus d'informations sur les statistiques d'utilisation metrics.comingsoon=Bientôt disponible\u2026 metrics.views=Pages consultées -metrics.downloads={0, choice, 0#téléchargements|1#téléchargement|2#téléchargements} +metrics.downloads={0, choice, 0#téléchargement|1#téléchargement|2#téléchargements} metrics.citations=Citations metrics.shares=Partages dataset.publish.btn=Publier @@ -1190,7 +1202,7 @@ dataset.viewVersion.unpublished=Voir la version non publi dataset.viewVersion.published=Voir la version publiée dataset.email.datasetContactBtn=Envoyer un courriel à la personne-ressource de l'ensemble de données dataset.email.hiddenMessage= dataset.email.messageSubject=Objet du message dataset.email.datasetLinkBtn.tip=Lier l'ensemble de données à votre dataverse dataset.share.datasetShare=Partager l'ensemble de données dataset.share.datasetShare.tip=Partager cet ensemble de données sur vos médias sociaux préférés.
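Several of the {0, choice, ...} patterns corrected in the hunks above now use a singular form for the zero case (0 téléchargement, 0 entrée), as French pluralization requires. A choice subformat selects the entry with the largest limit that does not exceed the value, so the 2# entry covers every count of two or more; a quick demonstration:

    import java.text.MessageFormat;

    public class ChoiceFormatDemo {
        public static void main(String[] args) {
            String pattern = "{0} {0, choice, 0#téléchargement|1#téléchargement|2#téléchargements}";
            for (int n : new int[] {0, 1, 2, 15}) {
                // choice picks the last limit <= n, so 15 falls under the 2# entry
                System.out.println(MessageFormat.format(pattern, n));
            }
            // -> 0 téléchargement / 1 téléchargement / 2 téléchargements / 15 téléchargements
        }
    }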
@@ -1244,7 +1256,7 @@ dataset.message.locked.editNotAllowed=L'ensemble de donn dataset.message.createSuccess=Cet ensemble de données a été créé dataset.message.createSuccess.failedToSaveFiles=Succès partiel\u00A0: l'ensemble de données a été créé mais le(s) fichier(s) n'a(ont) pas pu être sauvegardé(s). Veuillez réessayer de téléverser le(s) fichier(s) de nouveau. dataset.message.createSuccess.partialSuccessSavingFiles=Succès partiel\u00A0: l''ensemble de données a été créé mais seul(s) {0} sur {1} fichier(s) a(ont) été enregistré(s). Veuillez réessayer de téléverser le(s) fichier(s) manquant(s) de nouveau. -dataset.message.linkSuccess=Cet ensemble de données est maintenant lié à {1}. +dataset.message.linkSuccess={0} est maintenant lié à {1}. Si vous devez supprimer ce lien à l''avenir, veuillez contacter {2}. dataset.message.metadataSuccess=Les métadonnées de cet ensemble de données ont été mises à jour. dataset.message.termsSuccess=Les conditions de cet ensemble de données ont été mises à jour. dataset.message.filesSuccess=Les fichiers de cet ensemble de données ont été mis à jour. @@ -1271,10 +1283,14 @@ dataset.message.termsFailure=Les conditions de cet ensemble de donn dataset.message.publicInstall=Accès aux fichiers \u2014 Les fichiers sont stockés sur un serveur de stockage accessible publiquement. dataset.metadata.publicationDate=Date de publication dataset.metadata.publicationDate.tip=La date de publication d'un ensemble de données. +dataset.metadata.publicationYear=Année de publication +dataset.metadata.publicationYear.tip=L'année de publication d'un ensemble de données. dataset.metadata.persistentId=Identifiant pérenne de l'ensemble de données dataset.metadata.persistentId.tip=L'identifiant unique permanent pour un ensemble de données, lequel peut être dans Dataverse un Handle ou un DOI. +dataset.metadata.alternativePersistentId=Identifiant permanent précédent de l'ensemble de données +dataset.metadata.alternativePersistentId.tip=Un identifiant permanent précédemment utilisé pour un ensemble de données. Dans Dataverse cela peut être un Handle ou un DOI. file.metadata.persistentId=Identifiant permanent du fichier -file.metadata.persistentId.tip=L'identifiant unique permanent pour un fichier, lequel peut être dans Dataverse un Handle ou un DOI. +file.metadata.persistentId.tip=L'identifiant unique permanent pour un fichier, lequel peut être, dans Dataverse, un Handle ou un DOI. dataset.versionDifferences.termsOfUseAccess=Conditions d'utilisation et d'accès dataset.versionDifferences.termsOfUseAccessChanged=Conditions d'utilisation et d'accès modifiées file.viewDiffDialog.restricted=Accès réservé @@ -1308,12 +1324,12 @@ dataset.privateurl.disabledSuccess=Vous avez bien d dataset.privateurl.noPermToCreate=Pour créer une adresse URL privé, vous devez disposer des autorisations suivantes\u00A0: {0}. -file.count={0} {0, choice, 0#Fichiers|1#Fichiers|2#Fichiers} -file.count.shown={0} {0, choice, 0#Fichiers sélectionnés|1#Fichier|2#Fichiers} +file.count={0} {0, choice, 0#Fichier|1#Fichier|2#Fichiers} +file.count.shown={0} {0, choice, 0#Fichier sélectionné|1#Fichier|2#Fichiers} file.clearSelection=Effacer la sélection. -file.numFilesSelected={0} {0, choice, 0#fichiers sont|1#fichier est|2#fichiers sont} actuellement sélectionné(s). +file.numFilesSelected={0} {0, choice, 0#fichier est|1#fichier est|2#fichiers sont} actuellement sélectionné(s). file.selectAllFiles=Sélectionner tous les {0} fichiers de cet ensemble de données.
file.dynamicCounter.filesPerPage=Fichiers par page @@ -1416,8 +1432,8 @@ file.mapData.worldMap=WorldMap file.mapData.unpublished.header=Données non publiées file.mapData.unpublished.message=Pour géolocaliser vos données avec WorldMap, vos données doivent être publiées. Veuillez publier cet ensemble de données et essayer à nouveau. file.downloadBtn.format.all=Tous les formats de fichier + renseignements -file.downloadBtn.format.tab=Séparé par des tabulateurs -file.downloadBtn.format.original=Format du fichier original ({0}) +file.downloadBtn.format.tab=Valeurs séparées par tabulations +file.downloadBtn.format.original=Format original du fichier ({0}) file.downloadBtn.format.rdata=Format RData file.downloadBtn.format.var=Métadonnées des variables file.downloadBtn.format.citation=Référence bibliographique du fichier de données @@ -1451,8 +1467,8 @@ file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=Si vous n' file.dataFilesTab.terms.list.termsOfUse.addInfo=Renseignements supplémentaires file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration=Déclaration de confidentialité file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title=Indique s'il faut signer une déclaration de confidentialité pour avoir accès à une ressource. -file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions=Permissions spéciales -file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title=Déterminer si des permissions spéciales sont requises pour avoir accès à une ressource (p.\u00A0ex. si un formulaire est nécessaire et où obtenir le formulaire). +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions=Autorisations spéciales +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title=Déterminer si des autorisations spéciales sont requises pour avoir accès à une ressource (p.\u00A0ex. si un formulaire est nécessaire et où obtenir le formulaire). file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions=Restrictions file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions.title=Toute restriction s'appliquant à l'accès à l'ensemble de données et à son utilisation, comme la certification relative à la vie privée ou les restrictions concernant la diffusion, doit être indiquée à cet endroit. Il peut s'agir de restrictions établies selon l'auteur, le producteur ou le diffuseur des données. Si l'accès aux données est limité à une certaine catégorie d'utilisateurs, veuillez le préciser. file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements=Exigences de citation @@ -1466,7 +1482,7 @@ file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=Renseignements file.dataFilesTab.terms.list.termsOfAccess.header=Fichiers en accès réservé + Conditions d'accès file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Fichiers en accès réservé file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=Nombre de fichiers en accès réservé dans cet ensemble de données -file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=Il y a {0} {0, choice, 0#fichiers|1#fichier|2#fichiers} en accès réservé dans cet ensemble de données. +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=Il y a {0} {0, choice, 0#fichier|1#fichier|2#fichiers} en accès réservé dans cet ensemble de données. file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Conditions d'accès file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Renseignements sur la façon dont les utilisateurs peuvent avoir accès aux fichiers en accès réservé de cet ensemble de données. 
file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Demander l'accès @@ -1606,7 +1622,7 @@ dataset.widgets.notPublished.why.reason2=Permet aux autres de parcourir votre da dataset.widgets.notPublished.how.header=Comment utiliser les widgets dataset.widgets.notPublished.how.tip1=Pour pouvoir utiliser des widgets, votre dataverse et vos ensembles de données doivent être publiés. dataset.widgets.notPublished.how.tip2=Suite à la publication, le code sera disponible sur cette page pour que vous puissiez le copier et l'ajouter à votre site web personnel ou de projet. -dataset.widgets.notPublished.how.tip3=Avez-vous un site web OpenScholar? Si oui, apprenez-en davantage sur l'ajout de widgets Dataverse dans votre site web ici. +dataset.widgets.notPublished.how.tip3=Avez-vous un site web OpenScholar? Si oui, apprenez-en davantage sur l''ajout de widgets Dataverse dans votre site web ici. dataset.widgets.notPublished.getStarted=Pour débuter, publiez votre dataverse. Pour en apprendre davantage sur les widgets, consultez la section thème et widgets du guide d''utilisation. dataset.widgets.editAdvanced=Modifier les options avancées dataset.widgets.editAdvanced.tip=Options avancées – Options supplémentaires pour configurer votre widget sur votre site personnel ou de projet. @@ -1648,8 +1664,11 @@ file.share.fileShare.tip=Partager ce fichier sur vos m file.share.fileShare.shareText=Afficher ce fichier. file.title.label=Titre file.citation.label=Référence bibliographique -file.citation.notice=Ce fichier fait partie de «\u00A0{0}\u00A0». Si vous utilisez ce fichier, prière de citer l''ensemble de données\u00A0: +file.citation.notice=Ce fichier fait partie de «\u00A0{0}\u00A0». +file.citation.dataset=Référence bibliographique de l'ensemble de données +file.citation.datafile=Référence bibliographique du fichier file.cite.downloadBtn=Citer l'ensemble de données +file.cite.file.downloadBtn=Citer le fichier de l'ensemble de données file.pid.label=Identifiant permanent du fichier\u00A0: file.unf.lable= Fichier UNF\u00A0: file.general.metadata.label=Métadonnées générales @@ -1710,7 +1729,7 @@ file.addreplace.error.file_exceeds_limit=La taille de ce fichier ({0}) d file.addreplace.error.dataset_is_null=L'ensemble de données ne peut être nul. file.addreplace.error.dataset_id_is_null=L'identifiant de l'ensemble de données ne peut être nul. find.dataset.error.dataset_id_is_null=L''accès à un ensemble de données basé sur un identifiant pérenne requiert qu''un paramètre de requête {0} soit présent. -find.dataset.error.dataset.not.found.persistentId=L''ensemble de données basé sur l'identifiant pérenne {0} est introuvable. +find.dataset.error.dataset.not.found.persistentId=L''ensemble de données basé sur l''identifiant pérenne {0} est introuvable. find.dataset.error.dataset.not.found.id=L''ensemble de données ayant l''identifiant {0} est introuvable. find.dataset.error.dataset.not.found.bad.id=Identifiant de l''ensemble de données erroné\u00A0: {0}. find.datasetlinking.error.not.found.ids=L''ensemble de données du dataverse lié ayant l''identifiant d''ensemble de données {0} et l''ensemble de données du dataverse lié ayant l''identifiant {1} sont introuvables. @@ -1721,13 +1740,13 @@ find.datafile.error.datafile.not.found.id=Le fichier ayant l''identifiant {0} es find.datafile.error.datafile.not.found.bad.id=Identifiant de fichier erroné\u00A0: {0}. find.datafile.error.dataset.not.found.persistentId=Le fichier de données ayant l''identifiant pérenne {0} est introuvable. 
file.addreplace.error.dataset_id_not_found=Aucun ensemble de données n'a été trouvé pour l'identifiant\u00A0: -file.addreplace.error.no_edit_dataset_permission=Vous n'avez pas la permission de modifier cet ensemble de données. +file.addreplace.error.no_edit_dataset_permission=Vous n'avez pas l'autorisation de modifier cet ensemble de données. file.addreplace.error.filename_undetermined=Le nom du fichier ne peut être établi. file.addreplace.error.file_content_type_undetermined=Le type de contenu du fichier ne peut être établi. file.addreplace.error.file_upload_failed=Le téléversement du fichier a échoué. file.addreplace.error.duplicate_file=Ce fichier existe déjà dans l'ensemble de données. file.addreplace.error.existing_file_to_replace_id_is_null=L'identifiant du fichier existant à remplacer doit être fourni. -file.addreplace.error.existing_file_to_replace_not_found_by_id=Fichier de remplacement non trouvé. Aucun fichier n''a été trouvé pour l'identifiant\u00A0: {0} +file.addreplace.error.existing_file_to_replace_not_found_by_id=Fichier de remplacement non trouvé. Aucun fichier n''a été trouvé pour l''identifiant\u00A0: {0} file.addreplace.error.existing_file_to_replace_is_null=Le fichier à remplacer ne peut être nul. file.addreplace.error.existing_file_to_replace_not_in_dataset=Le fichier à remplacer n'appartient pas à cet ensemble de données. file.addreplace.error.existing_file_not_in_latest_published_version=Vous ne pouvez pas remplacer un fichier qui n'est pas dans le dernier ensemble de données publié. (Le fichier est non publié ou a été supprimé d'une version précédente.) @@ -1897,6 +1916,7 @@ dataverse.edit.msg=Modifier le dataverse dataverse.edit.detailmsg= \u2014 Modifier votre dataverse puis cliquer sur Enregistrer. Les astérisques indiquent les champs obligatoires dataverse.feature.update=Les dataverses en vedette pour ce dataverse ont été mis à jour. dataverse.link.select=Vous devez sélectionner un dataverse lié. +dataset.noSelectedDataverse.header=Sélectionner le(s) dataverse(s) dataverse.link.user=Seuls les utilisateurs authentifiés peuvent lier un dataverse. dataverse.link.error=Impossible de lier {0} à {1}. Une erreur interne est survenue. dataverse.search.user=Seuls les utilisateurs authentifiés peuvent enregistrer une recherche. @@ -1931,6 +1951,7 @@ harvest.save.failure2= #HarvestingSetsPage.java harvest.oaicreate.fail=Échec de la création de l'ensemble OAI +harvest.oaicreate.defaultset.fail=Échec de la création de l'ensemble OAI par défaut harvest.oaiupdate.fail=Échec de la mise à jour de l'ensemble OAI. harvest.oaiupdate.success=Mise à jour de l''ensemble OAI «\u00A0{0}\u00A0» réussie. harvest.delete.fail=Échec de la suppression de l'ensemble moissonné; exception inconnue\u00A0: @@ -1938,7 +1959,7 @@ harvest.reexport.fail=D harvest.search.failed=La recherche a échoué pour la requête fournie. Message du serveur de recherche Dataverse\u00A0: #LoginPage.java -login.UserName/Email=Veuillez entrer un nom d'utilisateur. +login.Username/Email=Veuillez entrer un nom d'utilisateur. login.Password=Veuillez entrer un mot de passe. #SystemConfig.java @@ -1993,7 +2014,7 @@ permission.updated=mis permission.created=créé permission.roleWas=Le rôle était {0}. Pour l''attribuer à un utilisateur et/ou un groupe, cliquer sur le bouton «\u00A0Assigner des rôles aux utilisateurs/groupes\u00A0» dans la section Utilisateurs/Groupes de cette page. permission.roleNotSaved=Le rôle n'a pu être sauvegardé. -permission.permissionsMissing= Les permissions {0} sont manquantes. 
+permission.permissionsMissing= Les autorisations {0} sont manquantes. permission.CannotAssigntDefaultPermissions=Impossible d'attribuer des autorisations par défaut. #ManageFilePermissionsPage.java @@ -2027,10 +2048,18 @@ page.copy=Copie de permission.roleAssignedToOn=Rôle {0} assigné à {1} pour {2} permission.cannotAssignRole=Le rôle n''a pu être assigné\u00A0: {0} permission.roleRevoked=Attribution de rôle révoquée avec succès -permission.cannotRevokeRole1=Impossible de révoquer l''attribution de rôle \u2014 il vous manque la permission {0} +permission.cannotRevokeRole1=Impossible de révoquer l''attribution de rôle \u2014 il vous manque l''autorisation {0} permission.cannotRevokeRole2=Impossible de révoquer l''attribution de rôle\u00A0: {0} permission.roleSave=Le rôle «\u00A0{0}\u00A0» a été sauvegardé permission.cannotSaveRole=Impossible de sauvegarder le rôle {0} #GlobalId.java -pid.allowedCharacters=^[A-Za-z0-9._/:\\-]* \ No newline at end of file +pid.allowedCharacters=^[A-Za-z0-9._/:\\-]* + +#Admin-API +admin.api.auth.mustBeSuperUser=Interdit. Vous devez être un super-utilisateur. +admin.api.migrateHDL.failure.must.be.set.for.doi=Ne peut pas être migré lorsque le protocole d'installation est défini avec "hdl". Le protocole doit être "doi". +admin.api.migrateHDL.failure.must.be.hdl.dataset=L'ensemble de données n'a pas été enregistré en tant que HDL. Il ne peut pas être migré. +admin.api.migrateHDL.success=La migration de l'ensemble de données est terminée. Ensemble de données ré-enregistré avec succès. +admin.api.migrateHDL.failure=Échec de la migration de l''ensemble de données ayant l''identifiant Handle\u00A0: {0} +admin.api.migrateHDL.failureWithException=Échec de la migration de l''ensemble de données ayant l''identifiant Handle\u00A0: {0}. Exception inattendue\u00A0: {1} diff --git a/src/main/java/MimeTypeDisplay_fr.properties b/src/main/java/MimeTypeDisplay_fr.properties new file mode 100644 index 00000000000..7fb56a48304 --- /dev/null +++ b/src/main/java/MimeTypeDisplay_fr.properties @@ -0,0 +1,53 @@ +# MimeTypeDisplay properties file +# User friendly names for displaying mime types.
+# Documentation, Data, Archive files: +application/pdf=Adobe PDF +application/msword=MS Word +application/vnd.ms-excel=MS Excel +application/vnd.openxmlformats-officedocument.spreadsheetml.sheet=MS Excel (XLSX) +application/vnd.openxmlformats-officedocument.wordprocessingml.document=MS Word (docx) +application/zip=Archive compressée ZIP +text/plain=Texte +text/xml=XML +text/tab-separated-values=Valeurs séparées par tabulations +text/tsv=Valeurs séparées par tabulations +text/csv=Valeurs séparées par des virgules +text/x-fixed-field=Données textuelles à champ fixe +application/x-rlang-transport=Données R +type/x-r-syntax=Syntaxe R +application/x-R-2=Format binaire R +application/x-stata=Format binaire Stata +application/x-stata-6=Format binaire Stata +application/x-stata-13=Format binaire Stata 13 +application/x-stata-14=Format binaire Stata 14 +application/x-stata-15=Format binaire Stata 15 +text/x-stata-syntax=Syntaxe Stata +application/x-spss-por=SPSS Portable +application/x-spss-sav=SPSS SAV +text/x-spss-syntax=Syntaxe SPSS +application/x-sas-transport=SAS Transport +application/x-sas-system=SAS System +text/x-sas-syntax=Syntaxe SAS +application/x-dvn-csvspss-zip=CSV (avec carte SPSS) +application/x-dvn-tabddi-zip=TAB (avec DDI) +application/fits=FITS +#Images files +image/gif=Image GIF +image/jpeg=Image JPEG +image/x-portable-bitmap=Image Bitmap +image/x-portable-graymap=Image Graymap +image/png=Image PNG +image/x-portable-anymap=Image Anymap +image/x-portable-pixmap=Image Pixmap +image/cmu-raster=Image CMU Raster +image/x-rgb=Image RGB +image/tiff=Image TIFF +image/x-xbitmap=Image XBitmap +image/x-xpixmap=Image XPixmap +image/x-xwindowdump=Image XWD +# Network Data files +text/xml-graphml=Données en réseau GraphML +# Other +application/octet-stream=Inconnu +# Dataverse-specific +application/vnd.dataverse.file-package=Ensemble Dataverse \ No newline at end of file diff --git a/src/main/java/MimeTypeFacets_fr.properties b/src/main/java/MimeTypeFacets_fr.properties new file mode 100644 index 00000000000..30a4e09b979 --- /dev/null +++ b/src/main/java/MimeTypeFacets_fr.properties @@ -0,0 +1,62 @@ +# MimeTypeFacets properties file +# Defines "facetable" groups of files by mime type; +# For example, all image formats will be grouped under "image", etc. +# +# Documentation: +application/pdf=Document +application/msword=Document +application/vnd.ms-excel=Document +application/vnd.openxmlformats-officedocument.spreadsheetml.sheet=Document +application/vnd.openxmlformats-officedocument.wordprocessingml.document=Document +# Text: +text/plain=Texte +text/xml=Texte +# Ingested +text/tab-separated-values=Données tabulaires + +# Data files: +text/tsv=Données +text/csv=Données +text/x-fixed-field=Données +application/x-rlang-transport=Données +type/x-r-syntax=Données +application/x-R-2=Données +application/x-stata=Données +application/x-stata-6=Données +application/x-stata-13=Données +application/x-stata-14=Données +application/x-stata-15=Données +text/x-stata-syntax=Données +application/x-spss-por=Données +application/x-spss-sav=Données +text/x-spss-syntax=Données +application/x-sas-transport=Données +application/x-sas-system=Données +text/x-sas-syntax=Données +application/x-dvn-csvspss-zip=Données +application/x-dvn-tabddi-zip=Données +application/fits=FITS +application/zipped-shapefile=Formes +# Archive files: +application/zip=ZIP +# Images files +# (should be safe to just split the mime type on "/" in "image/*" though...)
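+# e.g. "image/png".split("/")[0] yields "image" in Java, which would cover all of the image/* keys below.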
+image/gif=Image +image/jpeg=Image +image/x-portable-bitmap=Image +image/x-portable-graymap=Image +image/png=Image +image/x-portable-anymap=Image +image/x-portable-pixmap=Image +image/cmu-raster=Image +image/x-rgb=Image +image/tiff=Image +image/x-xbitmap=Image +image/x-xpixmap=Image +image/x-xwindowdump=Image +# Network Data files +text/xml-graphml=Données en réseau +# Other +application/octet-stream=Inconnu +# Dataverse-specific +application/vnd.dataverse.file-package=Données \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java index 98d8079146e..86bb270bcae 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java @@ -5,6 +5,7 @@ */ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.MarkupChecker; import java.io.Serializable; import java.util.ArrayList; @@ -153,7 +154,7 @@ public Map getDisplayValueMap() { .replace("#NAME", childDatasetField.getDatasetFieldType().getTitle()) //todo: this should be handled in more generic way for any other text that can then be internationalized // if we need to use replaceAll for regexp, then make sure to use: java.util.regex.Matcher.quoteReplacement() - .replace("#EMAIL", ResourceBundle.getBundle("Bundle").getString("dataset.email.hiddenMessage")) + .replace("#EMAIL", BundleUtil.getStringFromBundle("dataset.email.hiddenMessage")) .replace("#VALUE", sanitizedValue ); fieldMap.put(childDatasetField,displayValue); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java index c10be2e9163..cf6c762f3ed 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java @@ -6,6 +6,7 @@ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.MarkupChecker; import java.io.Serializable; import java.util.Comparator; @@ -106,7 +107,7 @@ public String getDisplayValue() { // want any issues if the value itself has #NAME in it) String displayValue = format .replace("#NAME", this.datasetField.getDatasetFieldType().getTitle() == null ? 
"" : this.datasetField.getDatasetFieldType().getTitle()) - .replace("#EMAIL", ResourceBundle.getBundle("Bundle").getString("dataset.email.hiddenMessage")) + .replace("#EMAIL", BundleUtil.getStringFromBundle("dataset.email.hiddenMessage")) .replace("#VALUE", sanitizedValue); retVal = displayValue; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index ad23b569fb9..e5ca1c63a51 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -1561,6 +1561,7 @@ private String init(boolean initFull) { if (dataset.isLockedFor(DatasetLock.Reason.DcmUpload)) { JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.rsyncUpload.inProgressMessage.summary"), BundleUtil.getStringFromBundle("file.rsyncUpload.inProgressMessage.details")); + lockedDueToDcmUpload = true; } //This is a hack to remove dataset locks for File PID registration if //the dataset is released @@ -2934,6 +2935,7 @@ public boolean isLockedForAnyReason() { private Boolean lockedFromEditsVar; private Boolean lockedFromDownloadVar; + private boolean lockedDueToDcmUpload; /** * Authors are not allowed to edit but curators are allowed - when Dataset is inReview * For all other locks edit should be locked for all editors. @@ -2976,6 +2978,10 @@ public boolean isLockedFromDownload(){ return lockedFromDownloadVar; } + public boolean isLockedDueToDcmUpload() { + return lockedDueToDcmUpload; + } + public void setLocked(boolean locked) { // empty method, so that we can use DatasetPage.locked in a hidden // input on the page. diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 7e4f4ecde39..371d36c6ce4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -481,6 +481,24 @@ public boolean isMinorUpdate() { return true; } + public boolean isHasPackageFile(){ + if (this.fileMetadatas.isEmpty()){ + return false; + } + if(this.fileMetadatas.size() > 1){ + return false; + } + return this.fileMetadatas.get(0).getDataFile().getContentType().equals(DataFileServiceBean.MIME_TYPE_PACKAGE_FILE); + } + + public boolean isHasNonPackageFile(){ + if (this.fileMetadatas.isEmpty()){ + return false; + } + // The presence of any non-package file means that HTTP Upload was used (no mixing allowed) so we just check the first file. 
+ return !this.fileMetadatas.get(0).getDataFile().getContentType().equals(DataFileServiceBean.MIME_TYPE_PACKAGE_FILE); + } + public void updateDefaultValuesFromTemplate(Template template) { if (!template.getDatasetFields().isEmpty()) { this.setDatasetFields(this.copyDatasetFields(template.getDatasetFields())); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java index 04eca2ff629..a157ae38a90 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -196,77 +196,77 @@ private void getTermsDifferences() { changedTermsAccess = new ArrayList<>(); if (newVersion.getTermsOfUseAndAccess() != null && originalVersion.getTermsOfUseAndAccess() != null) { if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.header"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions())); } 
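            // (Each block below repeats the same pattern: normalize both versions' values with
            // StringUtil.nullToEmpty and, when they differ, record the BundleUtil-resolved diff
            // label together with the old and new text via addToTermsChangedList.)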
if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess())); } if 
(!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection())); } if 
(!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()))) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion())); } @@ -274,75 +274,75 @@ private void getTermsDifferences() { if (newVersion.getTermsOfUseAndAccess() != null && originalVersion.getTermsOfUseAndAccess() == null) { if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse()).isEmpty()) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.header"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements()).isEmpty()){ - String diffLabel = 
ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus())); } if 
(!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection())); } if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion())); } @@ -350,75 +350,75 @@ private void getTermsDifferences() { if (newVersion.getTermsOfUseAndAccess() == null && originalVersion.getTermsOfUseAndAccess() != null) { if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()).isEmpty()) { - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.header"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); + String diffLabel = 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()).isEmpty()){ - String diffLabel = 
ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()), ""); } if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()).isEmpty()){ - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()), ""); } @@ -780,7 +780,7 @@ private void initDatasetFilesDifferencesList() { fileMetadatasOriginal.remove(replacedFile); datasetFileDifferenceItem fdi = selectFileMetadataDiffs(replacedFile, newFile); datasetReplaceFileItem fdr = new datasetReplaceFileItem(); - String diffLabel = ResourceBundle.getBundle("Bundle").getString("file.dataFilesTab.versions.replaced"); + String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.versions.replaced"); fdr.setLeftColumn(diffLabel); fdr.setFdi(fdi); fdr.setFile1Id(replacedFile.getDataFile().getId().toString()); diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 61a6519b0fd..397343162a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -9,24 +9,29 @@ import edu.harvard.iq.dataverse.datasetutility.FileReplaceException; import 
edu.harvard.iq.dataverse.datasetutility.FileReplacePageHelper;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
+import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
+import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse;
 import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
 import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataFileCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
 import edu.harvard.iq.dataverse.ingest.IngestRequest;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.ingest.IngestUtil;
 import edu.harvard.iq.dataverse.search.FileView;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.JsfHelper;
 import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.util.EjbUtil;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -67,6 +72,8 @@
 import java.util.logging.Level;
 import javax.faces.event.AjaxBehaviorEvent;
 import javax.faces.event.FacesEvent;
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.HttpServletResponse;
 import org.apache.commons.lang.StringUtils;
 import org.primefaces.context.RequestContext;
@@ -118,7 +125,8 @@ public enum FileEditMode {
     @Inject PermissionsWrapper permissionsWrapper;
     @Inject FileDownloadHelper fileDownloadHelper;
     @Inject ProvPopupFragmentBean provPopupFragmentBean;
-
+    @Inject
+    SettingsWrapper settingsWrapper;
     private final DateFormat displayDateFormat = DateFormat.getDateInstance(DateFormat.MEDIUM);

     private Dataset dataset = new Dataset();
@@ -476,7 +484,10 @@ public String init() {
         if (!permissionService.on(dataset).has(Permission.EditDataset)) {
             return permissionsWrapper.notAuthorized();
         }
-
+
+        // TODO: Think about why this call to populateFileMetadatas was added. It seems like it isn't needed after all.
+//        populateFileMetadatas();
+
         // -------------------------------------------
         // Is this a file replacement operation?
         // -------------------------------------------
@@ -535,7 +546,7 @@ public String init() {
             logger.fine("The page is called with " + selectedFileIdsList.size() + " file ids.");

             populateFileMetadatas();
-
+            setUpRsync();
             // and if no filemetadatas can be found for the specified file ids
             // and version id - same deal, send them to the "not found" page.
// (at least for now; ideally, we probably want to show them a page
@@ -555,9 +566,17 @@
         }
         saveEnabled = true;
+        if (mode == FileEditMode.UPLOAD && workingVersion.getFileMetadatas().isEmpty() && settingsWrapper.isRsyncUpload()) {
+            setUpRsync();
+        }
         if (mode == FileEditMode.UPLOAD) {
-            JH.addMessage(FacesMessage.SEVERITY_INFO, getBundleString("dataset.message.uploadFiles"));
+            if (settingsWrapper.getUploadMethodsCount() == 1){
+                JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.message.uploadFiles.label"), BundleUtil.getStringFromBundle("dataset.message.uploadFilesSingle.message", Arrays.asList(systemConfig.getGuidesBaseUrl(), systemConfig.getGuidesVersion())));
+            } else if (settingsWrapper.getUploadMethodsCount() > 1) {
+                JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.message.uploadFiles.label"), BundleUtil.getStringFromBundle("dataset.message.uploadFilesMultiple.message", Arrays.asList(systemConfig.getGuidesBaseUrl(), systemConfig.getGuidesVersion())));
+            }
+
+        }
         if (settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, false)){
@@ -826,10 +845,14 @@ public void deleteReplacementFile() throws FileReplaceException{
     */
    private String getBundleString(String msgName){
-        return ResourceBundle.getBundle("Bundle").getString(msgName);
+        return BundleUtil.getStringFromBundle(msgName);
    }
-
+    // This deleteFilesCompleted method is used in editFilesFragment.xhtml
+    public void deleteFilesCompleted(){
+
+    }
+
    public void deleteFiles() {
        logger.fine("entering bulk file delete (EditDataFilesPage)");
        if (isFileReplaceOperation()){
@@ -1031,7 +1054,7 @@ public String saveReplacementFile() throws FileReplaceException{

    public String save() {
-
+
        /*
        // Validate
        Set<ConstraintViolation> constraintViolations = workingVersion.validate();
@@ -1071,6 +1094,28 @@ public String save() {
        int nExpectedFilesTotal = nOldFiles + nNewFiles;

        if (nNewFiles > 0) {
+            Dataset lockTest = datasetService.find(dataset.getId());
+            // SEK 09/19/18 Get Dataset again to test for lock just in case the user downloads the rsync script via the api while the
+            // edit files page is open and has already loaded a file in http upload for Dual Mode
+            if (dataset.isLockedFor(DatasetLock.Reason.DcmUpload) || lockTest.isLockedFor(DatasetLock.Reason.DcmUpload)) {
+                logger.log(Level.INFO, "Couldn''t save dataset: {0}", "DCM script has been downloaded for this dataset. Additional files are not permitted.");
+                populateDatasetUpdateFailureMessage();
+                return null;
+            }
+
+            for (DatasetVersion dv : lockTest.getVersions()) {
+                if (dv.isHasPackageFile()) {
+                    logger.log(Level.INFO, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile"));
+                    populateDatasetUpdateFailureMessage();
+                    return null;
+                }
+            }
+
            // Try to save the NEW files permanently:
            List<DataFile> filesAdded = ingestService.saveAndAddFilesToDataset(workingVersion, newFiles);
@@ -1349,12 +1394,12 @@ private void populateDatasetUpdateFailureMessage(){

    private String returnToDraftVersion(){
-        return "/dataset.xhtml?persistentId=" + dataset.getGlobalIdString() + "&version=DRAFT&faces-redirect=true";
+        return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&version=DRAFT&faces-redirect=true";
    }

    private String returnToDatasetOnly(){
        dataset = datasetService.find(dataset.getId());
-        return "/dataset.xhtml?persistentId=" + dataset.getGlobalIdString() + "&faces-redirect=true";
+        return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&faces-redirect=true";
    }

    private String returnToFileLandingPage() {
@@ -1663,12 +1708,97 @@ public void uploadStarted() { // uploadStarted() is triggered by PrimeFaces 0){
-            updateDifferenceSummary(ResourceBundle.getBundle("Bundle").getString("file.versionDifferences.fileTagsGroupTitle"), "", added, 0, 0, 0, true);
+            updateDifferenceSummary(BundleUtil.getStringFromBundle("file.versionDifferences.fileTagsGroupTitle"), "", added, 0, 0, 0, true);
        }
        if (deleted > 0){
-            updateDifferenceSummary(ResourceBundle.getBundle("Bundle").getString("file.versionDifferences.fileTagsGroupTitle"), "", 0, 0, deleted, 0, true);
+            updateDifferenceSummary(BundleUtil.getStringFromBundle("file.versionDifferences.fileTagsGroupTitle"), "", 0, 0, deleted, 0, true);
        }
    }
@@ -188,11 +190,11 @@ private void compareMetadata(FileMetadata newFileMetadata, FileMetadata original
        /*
        Get Restriction Differences
        */
-        value1 = originalFileMetadata.isRestricted() ? ResourceBundle.getBundle("Bundle").getString("file.versionDifferences.fileRestricted") : ResourceBundle.getBundle("Bundle").getString("file.versionDifferences.fileUnrestricted");
-        value2 = newFileMetadata.isRestricted() ? ResourceBundle.getBundle("Bundle").getString("file.versionDifferences.fileRestricted") : ResourceBundle.getBundle("Bundle").getString("file.versionDifferences.fileUnrestricted");
+        value1 = originalFileMetadata.isRestricted() ? BundleUtil.getStringFromBundle("file.versionDifferences.fileRestricted") : BundleUtil.getStringFromBundle("file.versionDifferences.fileUnrestricted");
+        value2 = newFileMetadata.isRestricted() ?
BundleUtil.getStringFromBundle("file.versionDifferences.fileRestricted") : BundleUtil.getStringFromBundle("file.versionDifferences.fileUnrestricted"); if (!value1.equals(value2)) { if (!value1.equals(value2)) { - updateDifferenceSummary(ResourceBundle.getBundle("Bundle").getString("file.versionDifferences.fileAccessTitle"), value2, 0, 0, 0, 0); + updateDifferenceSummary(BundleUtil.getStringFromBundle("file.versionDifferences.fileAccessTitle"), value2, 0, 0, 0, 0); } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index b8b25d67bb2..dea22784600 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -330,7 +330,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio public String getMessageTextBasedOnNotification(UserNotification userNotification, Object targetObject, String comment, AuthenticatedUser requestor) { - String messageText = ResourceBundle.getBundle("Bundle").getString("notification.email.greeting"); + String messageText = BundleUtil.getStringFromBundle("notification.email.greeting"); DatasetVersion version; Dataset dataset; DvObject dvObj; @@ -348,17 +348,17 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio dvObjURL = getDvObjectLink(dvObj); dvObjTypeStr = getDvObjectTypeString(dvObj); - pattern = ResourceBundle.getBundle("Bundle").getString("notification.email.assignRole"); + pattern = BundleUtil.getStringFromBundle("notification.email.assignRole"); String[] paramArrayAssignRole = {joinedRoleNames, dvObjTypeStr, dvObj.getDisplayName(), dvObjURL}; messageText += MessageFormat.format(pattern, paramArrayAssignRole); if (joinedRoleNames.contains("File Downloader")){ if (dvObjTypeStr.equals("dataset")){ - pattern = ResourceBundle.getBundle("Bundle").getString("notification.access.granted.fileDownloader.additionalDataset"); + pattern = BundleUtil.getStringFromBundle("notification.access.granted.fileDownloader.additionalDataset"); String[] paramArrayAssignRoleDS = {" "}; messageText += MessageFormat.format(pattern, paramArrayAssignRoleDS); } if (dvObjTypeStr.equals("dataverse")){ - pattern = ResourceBundle.getBundle("Bundle").getString("notification.access.granted.fileDownloader.additionalDataverse"); + pattern = BundleUtil.getStringFromBundle("notification.access.granted.fileDownloader.additionalDataverse"); String[] paramArrayAssignRoleDV = {" "}; messageText += MessageFormat.format(pattern, paramArrayAssignRoleDV); } @@ -370,7 +370,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio dvObjURL = getDvObjectLink(dvObj); dvObjTypeStr = getDvObjectTypeString(dvObj); - pattern = ResourceBundle.getBundle("Bundle").getString("notification.email.revokeRole"); + pattern = BundleUtil.getStringFromBundle("notification.email.revokeRole"); String[] paramArrayRevokeRole = {dvObjTypeStr, dvObj.getDisplayName(), dvObjURL}; messageText += MessageFormat.format(pattern, paramArrayRevokeRole); return messageText; @@ -395,19 +395,19 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio return messageText += dataverseCreatedMessage; case REQUESTFILEACCESS: DataFile datafile = (DataFile) targetObject; - pattern = ResourceBundle.getBundle("Bundle").getString("notification.email.requestFileAccess"); + pattern = BundleUtil.getStringFromBundle("notification.email.requestFileAccess"); String[] 
paramArrayRequestFileAccess = {datafile.getOwner().getDisplayName(), getDatasetManageFileAccessLink(datafile)};
            messageText += MessageFormat.format(pattern, paramArrayRequestFileAccess);
            return messageText;
        case GRANTFILEACCESS:
            dataset = (Dataset) targetObject;
-            pattern = ResourceBundle.getBundle("Bundle").getString("notification.email.grantFileAccess");
+            pattern = BundleUtil.getStringFromBundle("notification.email.grantFileAccess");
            String[] paramArrayGrantFileAccess = {dataset.getDisplayName(), getDatasetLink(dataset)};
            messageText += MessageFormat.format(pattern, paramArrayGrantFileAccess);
            return messageText;
        case REJECTFILEACCESS:
            dataset = (Dataset) targetObject;
-            pattern = ResourceBundle.getBundle("Bundle").getString("notification.email.rejectFileAccess");
+            pattern = BundleUtil.getStringFromBundle("notification.email.rejectFileAccess");
            String[] paramArrayRejectFileAccess = {dataset.getDisplayName(), getDatasetLink(dataset)};
            messageText += MessageFormat.format(pattern, paramArrayRejectFileAccess);
            return messageText;
@@ -425,14 +425,14 @@ public String getMessageTextBasedOnNotification(UserNotificatio
        case MAPLAYERUPDATED:
            version = (DatasetVersion) targetObject;
-            pattern = ResourceBundle.getBundle("Bundle").getString("notification.email.worldMap.added");
+            pattern = BundleUtil.getStringFromBundle("notification.email.worldMap.added");
            String[] paramArrayMapLayer = {version.getDataset().getDisplayName(), getDatasetLink(version.getDataset())};
            messageText += MessageFormat.format(pattern, paramArrayMapLayer);
            return messageText;
        case MAPLAYERDELETEFAILED:
            FileMetadata targetFileMetadata = (FileMetadata) targetObject;
            version = targetFileMetadata.getDatasetVersion();
-            pattern = ResourceBundle.getBundle("Bundle").getString("notification.email.maplayer.deletefailed.text");
+            pattern = BundleUtil.getStringFromBundle("notification.email.maplayer.deletefailed.text");
            String[] paramArrayMapLayerDelete = {targetFileMetadata.getLabel(), getDatasetLink(version.getDataset())};
            messageText += MessageFormat.format(pattern, paramArrayMapLayerDelete);
            return messageText;
@@ -447,12 +447,11 @@ public String getMessageTextBasedOnNotification(UserNotificatio
            if (comment != null && !comment.isEmpty()) {
                mightHaveSubmissionComment = ".\n\n" + BundleUtil.getStringFromBundle("submissionComment") + "\n\n" + comment;
            }
-            */
-            pattern = ResourceBundle.getBundle("Bundle").getString("notification.email.wasSubmittedForReview");
-
+            */
            String requestorName = (requestor.getFirstName() != null && requestor.getLastName() != null) ? requestor.getFirstName() + " " + requestor.getLastName() : BundleUtil.getStringFromBundle("notification.email.info.unavailable");
-
            String requestorEmail = requestor.getEmail() != null ?
requestor.getEmail() : BundleUtil.getStringFromBundle("notification.email.info.unavailable"); + pattern = BundleUtil.getStringFromBundle("notification.email.wasSubmittedForReview"); + String[] paramArraySubmittedDataset = {version.getDataset().getDisplayName(), getDatasetDraftLink(version.getDataset()), version.getDataset().getOwner().getDisplayName(), getDataverseLink(version.getDataset().getOwner()), requestorName, requestorEmail }; @@ -460,14 +459,14 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio return messageText; case PUBLISHEDDS: version = (DatasetVersion) targetObject; - pattern = ResourceBundle.getBundle("Bundle").getString("notification.email.wasPublished"); + pattern = BundleUtil.getStringFromBundle("notification.email.wasPublished"); String[] paramArrayPublishedDataset = {version.getDataset().getDisplayName(), getDatasetLink(version.getDataset()), version.getDataset().getOwner().getDisplayName(), getDataverseLink(version.getDataset().getOwner())}; messageText += MessageFormat.format(pattern, paramArrayPublishedDataset); return messageText; case RETURNEDDS: version = (DatasetVersion) targetObject; - pattern = ResourceBundle.getBundle("Bundle").getString("notification.email.wasReturnedByReviewer"); + pattern = BundleUtil.getStringFromBundle("notification.email.wasReturnedByReviewer"); String optionalReturnReason = ""; /* diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index 5ad4afc4d75..c089ae1b67d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -394,12 +394,12 @@ public String getAssignedRoleObjectTypes(){ because permissions are not inherited if (dataverseRolePermissionHelper.hasDataversePermissions(selectedRoleId) && dvObject instanceof Dataverse){ - String dvLabel = ResourceBundle.getBundle("Bundle").getString("dataverses"); + String dvLabel = BundleUtil.getStringFromBundle("dataverses"); retString = dvLabel; } */ if (dataverseRolePermissionHelper.hasDatasetPermissions(selectedRoleId) && dvObject instanceof Dataverse){ - String dsLabel = ResourceBundle.getBundle("Bundle").getString("datasets"); + String dsLabel = BundleUtil.getStringFromBundle("datasets"); if(!retString.isEmpty()) { retString +=", " + dsLabel; } else { @@ -408,7 +408,7 @@ public String getAssignedRoleObjectTypes(){ } if (dataverseRolePermissionHelper.hasFilePermissions(selectedRoleId)){ - String filesLabel = ResourceBundle.getBundle("Bundle").getString("files"); + String filesLabel = BundleUtil.getStringFromBundle("files"); if(!retString.isEmpty()) { retString +=", " + filesLabel; } else { @@ -422,8 +422,8 @@ public String getAssignedRoleObjectTypes(){ public String getDefinitionLevelString(){ if (dvObject != null){ - if (dvObject instanceof Dataverse) return ResourceBundle.getBundle("Bundle").getString("dataverse"); - if (dvObject instanceof Dataset) return ResourceBundle.getBundle("Bundle").getString("dataset"); + if (dvObject instanceof Dataverse) return BundleUtil.getStringFromBundle("dataverse"); + if (dvObject instanceof Dataset) return BundleUtil.getStringFromBundle("dataset"); } return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index 0c4432073b7..f3efc0a1e7b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -150,6 +150,14 @@ public boolean isRsyncDownload() { return systemConfig.isRsyncDownload(); } + public boolean isRsyncOnly() { + return systemConfig.isRsyncOnly(); + } + + public boolean isHTTPUpload(){ + return systemConfig.isHTTPUpload(); + } + public boolean isDataFilePIDSequentialDependent(){ return systemConfig.isDataFilePIDSequentialDependent(); } @@ -165,9 +173,28 @@ public String getSupportTeamEmail() { InternetAddress systemAddress = MailUtil.parseSystemAddress(systemEmail); return BrandingUtil.getSupportTeamEmailAddress(systemAddress) != null ? BrandingUtil.getSupportTeamEmailAddress(systemAddress) : BrandingUtil.getSupportTeamName(systemAddress, dataverseService.findRootDataverse().getName()); } + + public Integer getUploadMethodsCount() { + return systemConfig.getUploadMethodCount(); + } public boolean isRootDataverseThemeDisabled() { return isTrueForKey(Key.DisableRootDataverseTheme, false); } + + public String getDropBoxKey() { + + String configuredDropBoxKey = System.getProperty("dataverse.dropbox.key"); + if (configuredDropBoxKey != null) { + return configuredDropBoxKey; + } + return ""; + } + + public Boolean isHasDropBoxKey() { + + return !getDropBoxKey().isEmpty(); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java index 634cc0868e0..e2da5f9af0a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java @@ -47,15 +47,14 @@ public Template findByDeafultTemplateOwnerId(Long ownerId) { return query.getSingleResult(); } - public List findDataversesByDefaultTemplateId(Long defaultTemplateId) { TypedQuery query = em.createQuery("select object(o) from Dataverse as o where o.defaultTemplate.id =:defaultTemplateId order by o.name", Dataverse.class); query.setParameter("defaultTemplateId", defaultTemplateId); return query.getResultList(); } - + public void incrementUsageCount(Long templateId) { - + Template toUpdate = em.find(Template.class, templateId); Long usage = toUpdate.getUsageCount(); usage++; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 89fe8a85213..8235240929b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -340,12 +340,12 @@ public Response filterAuthenticatedUsers(@QueryParam("searchTerm") String search authUser = this.findUserOrDie(); } catch (AbstractApiBean.WrappedResponse ex) { return error(Response.Status.FORBIDDEN, - ResourceBundle.getBundle("Bundle").getString("dashboard.list_users.api.auth.invalid_apikey")); + BundleUtil.getStringFromBundle("dashboard.list_users.api.auth.invalid_apikey")); } if (!authUser.isSuperuser()) { return error(Response.Status.FORBIDDEN, - ResourceBundle.getBundle("Bundle").getString("dashboard.list_users.api.auth.not_superuser")); + BundleUtil.getStringFromBundle("dashboard.list_users.api.auth.not_superuser")); } UserListMaker userListMaker = new UserListMaker(userService); @@ -1048,7 +1048,7 @@ public Response reregisterHdlToPID(@PathParam("id") String id) { try { if (settingsSvc.get(SettingsServiceBean.Key.Protocol.toString()).equals(GlobalId.HDL_PROTOCOL)) { logger.info("Bad Request protocol set to handle " ); - return error(Status.BAD_REQUEST, 
ResourceBundle.getBundle("Bundle").getString("admin.api.migrateHDL.failure.must.be.set.for.doi")); + return error(Status.BAD_REQUEST, BundleUtil.getStringFromBundle("admin.api.migrateHDL.failure.must.be.set.for.doi")); } User u = findUserOrDie(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 69a346080bd..0baf05b740d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -76,6 +76,7 @@ import edu.harvard.iq.dataverse.S3PackageImporter; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.EjbUtil; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -1211,7 +1212,10 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, @FormDataParam("file") final FormDataBodyPart formDataBodyPart ){ - + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } + // ------------------------------------- // (1) Get the user from the API key // ------------------------------------- @@ -1220,16 +1224,9 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, authUser = findUserOrDie(); } catch (WrappedResponse ex) { return error(Response.Status.FORBIDDEN, - ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth") + BundleUtil.getStringFromBundle("file.addreplace.error.auth") ); } - //--------------------------------------- - // (1A) Make sure that the upload type is not rsync - // ------------------------------------- - - if (DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) { - return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " contains " + SystemConfig.FileUploadMethods.RSYNC + ". 
Please use rsync file upload.");
-        }
 // -------------------------------------
@@ -1245,7 +1242,20 @@
             return wr.getResponse();
         }
-
+        //------------------------------------
+        // (2a) Make sure dataset does not have package file
+        //
+        // --------------------------------------
+
+        for (DatasetVersion dv : dataset.getVersions()) {
+            if (dv.isHasPackageFile()) {
+                return error(Response.Status.FORBIDDEN,
+                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
+                );
+            }
+        }
+
         // -------------------------------------
         // (3) Get the file name and content type
         // -------------------------------------
@@ -1293,7 +1303,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
         if (addFileHelper.hasError()){
             return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
         }else{
-            String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
+            String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add");
             try {
                 //msgt("as String: " + addFileHelper.getSuccessResult());
                 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 8f77b6a21fc..d226e03145d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -157,7 +157,10 @@ public Response replaceFileInDataset(
                     @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
                     @FormDataParam("file") final FormDataBodyPart formDataBodyPart
                     ){
-
+
+        if (!systemConfig.isHTTPUpload()) {
+            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
+        }
         // -------------------------------------
         // (1) Get the user from the API key
         // -------------------------------------
@@ -166,7 +169,7 @@ public Response replaceFileInDataset(
             authUser = findUserOrDie();
         } catch (AbstractApiBean.WrappedResponse ex) {
             return error(Response.Status.FORBIDDEN,
-                    ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth")
+                    BundleUtil.getStringFromBundle("file.addreplace.error.auth")
             );
         }
@@ -228,6 +231,10 @@ public Response replaceFileInDataset(
             try {
                 DataFile dataFile = findDataFileOrDie(fileIdOrPersistentId);
                 fileToReplaceId = dataFile.getId();
+
+                if (dataFile.isFilePackage()) {
+                    return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile"));
+                }
             } catch (WrappedResponse ex) {
                 String error = BundleUtil.getStringFromBundle("file.addreplace.error.existing_file_to_replace_not_found_by_id", Arrays.asList(fileIdOrPersistentId));
                 // TODO: Some day, return ex.getResponse() instead. Also run FilesIT and update expected status code and message.
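(A minimal usage sketch rather than part of the patch, assuming a hypothetical file id of 24 and the $SERVER_URL/$API_TOKEN conventions used in the API guide: once ":UploadMethods" no longer includes HTTP upload, a replace call such as

    curl -H "X-Dataverse-key: $API_TOKEN" -X POST -F 'file=@data.tsv' "$SERVER_URL/api/files/24/replace"

returns 503 SERVICE_UNAVAILABLE with the "file.api.httpDisabled" message added above; if the target is a package file, the new isFilePackage() guard answers with "file.api.alreadyHasPackageFile" before any upload work happens.)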
@@ -254,7 +261,7 @@ public Response replaceFileInDataset( }else{ msg("no error"); - String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.replace"); + String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.replace"); try { msgt("as String: " + addFileHelper.getSuccessResult()); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java index bf5964d44ef..cb28d1fae49 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java @@ -19,6 +19,7 @@ import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean; import edu.harvard.iq.dataverse.harvest.server.OAISet; import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.authorization.users.User; import static edu.harvard.iq.dataverse.util.JsfHelper.JH; @@ -130,7 +131,7 @@ public Response createOaiSet(String jsonBody, @PathParam("specname") String spec return wr.getResponse(); } if (!dvUser.isSuperuser()) { - return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.superUser.required")); + return badRequest(BundleUtil.getStringFromBundle("harvestserver.newSetDialog.setspec.superUser.required")); } StringReader rdr = new StringReader(jsonBody); @@ -143,18 +144,18 @@ public Response createOaiSet(String jsonBody, @PathParam("specname") String spec //Validating spec if (!StringUtils.isEmpty(spec)) { if (spec.length() > 30) { - return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.sizelimit")); + return badRequest(BundleUtil.getStringFromBundle("harvestserver.newSetDialog.setspec.sizelimit")); } if (!Pattern.matches("^[a-zA-Z0-9\\_\\-]+$", spec)) { - return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.invalid")); + return badRequest(BundleUtil.getStringFromBundle("harvestserver.newSetDialog.setspec.invalid")); // If it passes the regex test, check } if (oaiSetService.findBySpec(spec) != null) { - return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.alreadyused")); + return badRequest(BundleUtil.getStringFromBundle("harvestserver.newSetDialog.setspec.alreadyused")); } } else { - return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.required")); + return badRequest(BundleUtil.getStringFromBundle("harvestserver.newSetDialog.setspec.required")); } set.setSpec(spec); String name, desc, defn; @@ -162,7 +163,7 @@ public Response createOaiSet(String jsonBody, @PathParam("specname") String spec try { name = json.getString("name"); } catch (NullPointerException npe_name) { - return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.required")); + return badRequest(BundleUtil.getStringFromBundle("harvestserver.newSetDialog.setspec.required")); } try { defn = json.getString("definition"); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java index dd49d7badb9..bb40c53c1ca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java @@ -79,7 +79,6 @@ public Response addProvJson(String 
body, @PathParam("id") String idSupplied, @Qu @DELETE @Path("{id}/prov-json") -//MAD: SHOULD NOT WORK ON PUBLISHED public Response deleteProvJson(String body, @PathParam("id") String idSupplied) { if(!systemConfig.isProvCollectionEnabled()) { return error(FORBIDDEN, BundleUtil.getStringFromBundle("api.prov.error.provDisabled")); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index c88e54bdb88..c5fa1d79fce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.ByteArrayInputStream; @@ -200,6 +201,9 @@ public DepositReceipt addResource(String uri, Deposit deposit, AuthCredentials a } DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials authCredentials, SwordConfiguration swordConfiguration, boolean shouldReplace) throws SwordError, SwordAuthException, SwordServerException { + if (!systemConfig.isHTTPUpload()) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } AuthenticatedUser user = swordAuth.auth(authCredentials); DataverseRequest dvReq = new DataverseRequest(user, httpRequest); @@ -217,12 +221,14 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au } //--------------------------------------- - // Make sure that the upload type is not rsync + // Make sure that the upload type is not rsync - handled above for dual mode // ------------------------------------- - if (DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, SettingsServiceBean.Key.UploadMethods + " contains " + SystemConfig.FileUploadMethods.RSYNC + ". Please use rsync file upload."); - } + if (dataset.getEditVersion().isHasPackageFile()) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")); + } + + // Right now we are only supporting UriRegistry.PACKAGE_SIMPLE_ZIP but // in the future maybe we'll support other formats? Rdata files? Stata files? 
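The S3AccessIO hunk that follows teaches the client constructor about custom, S3-compatible endpoints (MinIO, Ceph, and the like), driven by three JVM system properties. Below is a minimal, self-contained sketch of the same construction pattern, runnable outside Dataverse. The property names are the ones the hunk introduces; the localhost endpoint in the comment is a hypothetical example, and without a custom endpoint the SDK falls back to its usual region and credentials resolution:

    import com.amazonaws.client.builder.AwsClientBuilder;
    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.AmazonS3ClientBuilder;

    public class CustomEndpointS3ClientSketch {
        public static void main(String[] args) {
            // Hypothetical example: on Glassfish these arrive as JVM options, e.g.
            //   asadmin create-jvm-options "-Ddataverse.files.s3-custom-endpoint-url=http\://localhost\:9000"
            String endpointUrl = System.getProperty("dataverse.files.s3-custom-endpoint-url", "");
            String signingRegion = System.getProperty("dataverse.files.s3-custom-endpoint-region", "dataverse");
            boolean pathStyle = Boolean.parseBoolean(System.getProperty("dataverse.files.s3-path-style-access", "false"));

            AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
            if (!endpointUrl.isEmpty()) {
                // An explicit endpoint replaces the SDK's own AWS endpoint resolution;
                // the region string is then only used for SigV4 request signing.
                builder.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpointUrl, signingRegion));
            }
            // Some S3 clones address buckets by path (http://host/bucket) rather than
            // by subdomain (http://bucket.host), hence the path-style switch.
            AmazonS3 s3 = builder.withPathStyleAccessEnabled(pathStyle).build();
            System.out.println("S3 client ready: " + s3);
        }
    }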
/** diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 530f7ee4a17..90af53d7a0e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -3,6 +3,7 @@ import com.amazonaws.AmazonClientException; import com.amazonaws.HttpMethod; import com.amazonaws.SdkClientException; +import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.model.ObjectMetadata; @@ -43,6 +44,8 @@ import java.util.logging.Logger; import org.apache.commons.io.IOUtils; +import javax.validation.constraints.NotNull; + /** * * @author Matthew A Dunlap @@ -69,18 +72,49 @@ public S3AccessIO(T dvObject) { public S3AccessIO(T dvObject, DataAccessRequest req) { super(dvObject, req); this.setIsLocalFile(false); + try { - s3 = AmazonS3ClientBuilder.standard().defaultClient(); + // get a standard client, using the standard way of configuring the credentials, etc. + AmazonS3ClientBuilder s3CB = AmazonS3ClientBuilder.standard(); + // if the admin has set a system property (see below) we use this endpoint URL instead of the standard ones. + if (!s3CEUrl.isEmpty()) { + s3CB.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(s3CEUrl, s3CERegion)); + } + // some custom S3 implementations require "PathStyleAccess" as they use a path, not a subdomain. default = false + s3CB.withPathStyleAccessEnabled(s3pathStyleAccess); + // let's build the client :-) + this.s3 = s3CB.build(); } catch (Exception e) { throw new AmazonClientException( - "Cannot instantiate a S3 client using; check your AWS credentials and region", - e); + "Cannot instantiate a S3 client using; check your AWS credentials and region", + e); } } + + public S3AccessIO(T dvObject, DataAccessRequest req, @NotNull AmazonS3 s3client) { + super(dvObject, req); + this.setIsLocalFile(false); + this.s3 = s3client; + } public static String S3_IDENTIFIER_PREFIX = "s3"; private AmazonS3 s3 = null; + /** + * Pass in a URL pointing to your S3 compatible storage. + * For possible values see https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/client/builder/AwsClientBuilder.EndpointConfiguration.html + */ + private String s3CEUrl = System.getProperty("dataverse.files.s3-custom-endpoint-url", ""); + /** + * Pass in a region to use for SigV4 signing of requests. + * Defaults to "dataverse" as it is not relevant for custom S3 implementations. + */ + private String s3CERegion = System.getProperty("dataverse.files.s3-custom-endpoint-region", "dataverse"); + /** + * Pass in a boolean value to decide whether path style access should be used within the S3 client. + * Anything but a case-insensitive "true" results in false, which is also the default. + */ + private boolean s3pathStyleAccess = Boolean.parseBoolean(System.getProperty("dataverse.files.s3-path-style-access", "false")); private String bucketName = System.getProperty("dataverse.files.s3-bucket-name"); private String key; @@ -630,7 +664,7 @@ public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException } } - private String getDestinationKey(String auxItemTag) throws IOException { + String getDestinationKey(String auxItemTag) throws IOException { if (dvObject instanceof DataFile) { return getMainFileKey() + "."
+ auxItemTag; } else if (dvObject instanceof Dataset) { @@ -643,7 +677,16 @@ private String getDestinationKey(String auxItemTag) throws IOException { } } - private String getMainFileKey() throws IOException { + /** + * TODO: this function is not side effect free (sets instance variables key and bucketName). + * Is this good or bad? Need to ask @landreev + * + * Extract the file key from a file stored on S3. + * Follows template: "owner authority name"/"owner identifier"/"storage identifier without bucketname and protocol" + * @return Main File Key + * @throws IOException + */ + String getMainFileKey() throws IOException { if (key == null) { String baseKey = this.getDataFile().getOwner().getAuthorityForFileStorage() + "/" + this.getDataFile().getOwner().getIdentifierForFileStorage(); String storageIdentifier = dvObject.getStorageIdentifier(); @@ -723,7 +766,7 @@ public String generateTemporaryS3Url() throws IOException { } } - private int getUrlExpirationMinutes() { + int getUrlExpirationMinutes() { String optionValue = System.getProperty("dataverse.files.s3-url-expiration-minutes"); if (optionValue != null) { Integer num; diff --git a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java index d5a883a15a7..92f8252f45c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java @@ -3,8 +3,10 @@ import com.mashape.unirest.http.HttpResponse; import com.mashape.unirest.http.JsonNode; import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.util.SystemConfig; +import java.util.Arrays; import java.util.logging.Logger; import javax.json.Json; import javax.json.JsonObject; @@ -15,11 +17,11 @@ public class DataCaptureModuleUtil { private static final Logger logger = Logger.getLogger(DataCaptureModuleUtil.class.getCanonicalName()); public static boolean rsyncSupportEnabled(String uploadMethodsSettings) { - logger.fine("uploadMethodsSettings: " + uploadMethodsSettings); - if (uploadMethodsSettings != null && SystemConfig.FileUploadMethods.RSYNC.toString().equals(uploadMethodsSettings)) { - return true; - } else { + logger.fine("uploadMethodsSettings: " + uploadMethodsSettings); + if (uploadMethodsSettings==null){ return false; + } else { + return Arrays.asList(uploadMethodsSettings.toLowerCase().split("\\s*,\\s*")).contains(SystemConfig.FileUploadMethods.RSYNC.toString()); } } @@ -74,4 +76,8 @@ public static String getMessageFromException(DataCaptureModuleException ex) { return message + " was caused by " + cause.getMessage(); } + public static String getScriptName(DatasetVersion datasetVersion) { + return "upload-" + datasetVersion.getDataset().getIdentifier() + ".bash"; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 9d9a8486675..acbb9211ca1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -829,9 +829,9 @@ private String getBundleMsg(String msgName, boolean isErr){ throw new NullPointerException("msgName cannot be null"); } if (isErr){ - return
ResourceBundle.getBundle("Bundle").getString("file.addreplace.error." + msgName); + return BundleUtil.getStringFromBundle("file.addreplace.error." + msgName); }else{ - return ResourceBundle.getBundle("Bundle").getString("file.addreplace.success." + msgName); + return BundleUtil.getStringFromBundle("file.addreplace.success." + msgName); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java index 6b28c6441e8..5c0631d95d6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java @@ -12,6 +12,8 @@ import edu.harvard.iq.dataverse.DataFileTag; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.api.Util; +import edu.harvard.iq.dataverse.util.BundleUtil; + import java.lang.reflect.Type; import java.util.ArrayList; import java.util.List; @@ -255,7 +257,7 @@ private void addFileDataTags(List potentialTags) throws DataFileTagExcep if (DataFileTag.isDataFileTag(tagToCheck)){ this.dataFileTags.add(tagToCheck); }else{ - String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.invalid_datafile_tag"); + String errMsg = BundleUtil.getStringFromBundle("file.addreplace.error.invalid_datafile_tag"); throw new DataFileTagException(errMsg + " [" + tagToCheck + "]. Please use one of the following: " + DataFileTag.getListofLabelsAsString()); } } @@ -361,7 +363,7 @@ private void addFileDataTagsToFile(DataFile df) throws DataFileTagException{ // Is this a tabular file? // -------------------------------------------------- if (!df.isTabularData()){ - String errMsg = ResourceBundle.getBundle("Bundle").getString("file.metadata.datafiletag.not_tabular"); + String errMsg = BundleUtil.getStringFromBundle("file.metadata.datafiletag.not_tabular"); throw new DataFileTagException(errMsg); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java index 9c521625b48..9ebc816a9cf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java @@ -133,6 +133,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { ctxt.index().indexDataset(theDataset, true); ctxt.solrIndex().indexPermissionsOnSelfAndChildren(theDataset.getId()); + /* if (DataCaptureModuleUtil.rsyncSupportEnabled(ctxt.settings().getValueForKey(SettingsServiceBean.Key.UploadMethods))) { logger.fine("Requesting rsync support."); try { @@ -142,8 +143,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { logger.log(Level.WARNING, "Problem getting rsync script: {0}", ex.getLocalizedMessage()); } logger.fine("Done with rsync request."); - } - + }*/ return theDataset; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java index 2e4bead7709..f08312658bb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java @@ -247,8 +247,8 @@ public void executeImpl(CommandContext ctxt) throws CommandException { // its 
destination's owners, remove the link logger.info("Checking linked datasets..."); for (DatasetLinkingDataverse dsld : linkingDatasets) { - for (Dataverse owner : ownersToCheck){ - if ((dsld.getLinkingDataverse()).equals(owner)){ + for (Dataverse owner : ownersToCheck) { + if ((dsld.getLinkingDataverse()).equals(owner)) { if (force == null || !force) { removeLinkDs = true; break; @@ -259,13 +259,12 @@ } } } - - + if (removeGuestbook || removeTemplate || removeFeatDv || removeMetadataBlock || removeLinkDv || removeLinkDs) { StringBuilder errorString = new StringBuilder(); if (removeGuestbook) { errorString.append("Dataset guestbook is not in target dataverse. "); - } + } if (removeTemplate) { errorString.append("Dataverse template is not in target dataverse. "); } @@ -292,6 +291,13 @@ logger.info("Dataverse move took " + (moveDvEnd - moveDvStart) + " milliseconds"); ctxt.indexBatch().indexDataverseRecursively(moved); - + + //Reindex datasets linked to the moved dataverse + if (moved.getDatasetLinkingDataverses() != null && !moved.getDatasetLinkingDataverses().isEmpty()) { + for (DatasetLinkingDataverse dld : moved.getDatasetLinkingDataverses()) { + Dataset linkedDS = ctxt.datasets().find(dld.getDataset().getId()); + ctxt.index().indexDataset(linkedDS, true); + } + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommand.java index 93cdcb21893..2a6d7216aa5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommand.java @@ -42,10 +42,13 @@ public RequestRsyncScriptCommand(DataverseRequest requestArg, Dataset datasetArg } @Override - public ScriptRequestResponse execute(CommandContext ctxt) throws CommandException { + public ScriptRequestResponse execute(CommandContext ctxt) throws CommandException { if (request == null) { throw new IllegalCommandException("DataverseRequest cannot be null.", this); } + if(!dataset.getFiles().isEmpty()){ + throw new IllegalCommandException("Cannot get script for a dataset that already has a file", this); + } String dcmBaseUrl = ctxt.settings().getValueForKey(DataCaptureModuleUrl); if (dcmBaseUrl == null) { throw new RuntimeException(DataCaptureModuleUrl + " is null!"); diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java index 468fdea9d14..a17e77f2a9e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java +++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java @@ -131,14 +131,139 @@ public boolean isProvValid(String jsonInput) { //Pulled from https://www.w3.org/Submission/2013/SUBM-prov-json-20130424/schema //Not the prettiest way of accessing the schema, but loading the .json file as an external resource //turned out to be very painful, especially when also trying to exercise it via unit tests + // + //To solve https://github.com/IQSS/dataverse/issues/5154 , the provenance schema + //here was updated to include the "core schema" values being downloaded by the "id" tag.
+ //If this schema needs to be updated (as of 2018, it hadn't been since 2013) this will need + //to be done manually again or we'll need to pull both files and store them on disk. + //The later option was not done previously because we couldn't get the same files to be + //referenced by the code and our junit tests. private static final String provSchema = "{\n" + - " \"id\": \"http://provenance.ecs.soton.ac.uk/prov-json/schema#\",\n" + + " \"id\": \"\",\n" + " \"$schema\": \"http://json-schema.org/draft-04/schema#\",\n" + " \"description\": \"Schema for a PROV-JSON document\",\n" + " \"type\": \"object\",\n" + " \"additionalProperties\": false,\n" + + " \"dependencies\": {\n" + + " \"exclusiveMaximum\": [ \"maximum\" ],\n" + + " \"exclusiveMinimum\": [ \"minimum\" ]\n" + + " },"+ + " \"default\": {},\n" + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"$schema\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"title\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"description\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"default\": {},\n" + + " \"multipleOf\": {\n" + + " \"type\": \"number\",\n" + + " \"minimum\": 0,\n" + + " \"exclusiveMinimum\": true\n" + + " },\n" + + " \"maximum\": {\n" + + " \"type\": \"number\"\n" + + " },\n" + + " \"exclusiveMaximum\": {\n" + + " \"type\": \"boolean\",\n" + + " \"default\": false\n" + + " },\n" + + " \"minimum\": {\n" + + " \"type\": \"number\"\n" + + " },\n" + + " \"exclusiveMinimum\": {\n" + + " \"type\": \"boolean\",\n" + + " \"default\": false\n" + + " },\n" + + " \"maxLength\": { \"$ref\": \"#/definitions/positiveInteger\" },\n" + + " \"minLength\": { \"$ref\": \"#/definitions/positiveIntegerDefault0\" },\n" + + " \"pattern\": {\n" + + " \"type\": \"string\",\n" + + " \"format\": \"regex\"\n" + + " },\n" + + " \"additionalItems\": {\n" + + " \"anyOf\": [\n" + + " { \"type\": \"boolean\" },\n" + + " { \"$ref\": \"#\" }\n" + + " ],\n" + + " \"default\": {}\n" + + " },\n" + + " \"items\": {\n" + + " \"anyOf\": [\n" + + " { \"$ref\": \"#\" },\n" + + " { \"$ref\": \"#/definitions/schemaArray\" }\n" + + " ],\n" + + " \"default\": {}\n" + + " },\n" + + " \"maxItems\": { \"$ref\": \"#/definitions/positiveInteger\" },\n" + + " \"minItems\": { \"$ref\": \"#/definitions/positiveIntegerDefault0\" },\n" + + " \"uniqueItems\": {\n" + + " \"type\": \"boolean\",\n" + + " \"default\": false\n" + + " },\n" + + " \"maxProperties\": { \"$ref\": \"#/definitions/positiveInteger\" },\n" + + " \"minProperties\": { \"$ref\": \"#/definitions/positiveIntegerDefault0\" },\n" + + " \"required\": { \"$ref\": \"#/definitions/stringArray\" },\n" + + " \"additionalProperties\": {\n" + + " \"anyOf\": [\n" + + " { \"type\": \"boolean\" },\n" + + " { \"$ref\": \"#\" }\n" + + " ],\n" + + " \"default\": {}\n" + + " },\n" + + " \"definitions\": {\n" + + " \"type\": \"object\",\n" + + " \"additionalProperties\": { \"$ref\": \"#\" },\n" + + " \"default\": {}\n" + + " },\n" + + " \"properties\": {\n" + + " \"type\": \"object\",\n" + + " \"additionalProperties\": { \"$ref\": \"#\" },\n" + + " \"default\": {}\n" + + " },\n" + + " \"patternProperties\": {\n" + + " \"type\": \"object\",\n" + + " \"additionalProperties\": { \"$ref\": \"#\" },\n" + + " \"default\": {}\n" + + " },\n" + + " \"dependencies\": {\n" + + " \"type\": \"object\",\n" + + " \"additionalProperties\": {\n" + + " \"anyOf\": [\n" + + " { \"$ref\": \"#\" },\n" + + " { \"$ref\": \"#/definitions/stringArray\" }\n" + + " ]\n" + + " }\n" + + " },\n" + + 
" \"enum\": {\n" + + " \"type\": \"array\",\n" + + " \"minItems\": 1,\n" + + " \"uniqueItems\": true\n" + + " },\n" + + " \"type\": {\n" + + " \"anyOf\": [\n" + + " { \"$ref\": \"#/definitions/simpleTypes\" },\n" + + " {\n" + + " \"type\": \"array\",\n" + + " \"items\": { \"$ref\": \"#/definitions/simpleTypes\" },\n" + + " \"minItems\": 1,\n" + + " \"uniqueItems\": true\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"format\": { \"type\": \"string\" },\n" + + " \"allOf\": { \"$ref\": \"#/definitions/schemaArray\" },\n" + + " \"anyOf\": { \"$ref\": \"#/definitions/schemaArray\" },\n" + + " \"oneOf\": { \"$ref\": \"#/definitions/schemaArray\" },\n" + + " \"not\": { \"$ref\": \"#\" },\n" + " \"prefix\": {\n" + " \"type\": \"object\",\n" + " \"patternProperties\": {\n" + @@ -219,6 +344,27 @@ public boolean isProvValid(String jsonInput) { " }\n" + " },\n" + " \"definitions\": {\n" + + " \"schemaArray\": {\n" + + " \"type\": \"array\",\n" + + " \"minItems\": 1,\n" + + " \"items\": { \"$ref\": \"#\" }\n" + + " },\n" + + " \"positiveInteger\": {\n" + + " \"type\": \"integer\",\n" + + " \"minimum\": 0\n" + + " },\n" + + " \"positiveIntegerDefault0\": {\n" + + " \"allOf\": [ { \"$ref\": \"#/definitions/positiveInteger\" }, { \"default\": 0 } ]\n" + + " },\n" + + " \"simpleTypes\": {\n" + + " \"enum\": [ \"array\", \"boolean\", \"integer\", \"null\", \"number\", \"object\", \"string\" ]\n" + + " },\n" + + " \"stringArray\": {\n" + + " \"type\": \"array\",\n" + + " \"items\": { \"type\": \"string\" },\n" + + " \"minItems\": 1,\n" + + " \"uniqueItems\": true\n" + + " },\n"+ " \"typedLiteral\": {\n" + " \"title\": \"PROV-JSON Typed Literal\",\n" + " \"type\": \"object\",\n" + diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java index 11003389f1d..21a0b68d6b4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java @@ -97,7 +97,7 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { if(!provUtil.isProvValid(provJsonState)) { //if uploaded prov-json does not comply with schema Logger.getLogger(ProvPopupFragmentBean.class.getName()) - .log(Level.SEVERE, BundleUtil.getStringFromBundle("file.editProvenanceDialog.invalidSchemaError")); + .log(Level.INFO, BundleUtil.getStringFromBundle("file.editProvenanceDialog.invalidSchemaError")); removeJsonAndRelatedData(); JH.addMessage(FacesMessage.SEVERITY_ERROR, JH.localize("file.editProvenanceDialog.invalidSchemaError")); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java index 839c0022ee2..721fe2fde39 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java @@ -6,7 +6,10 @@ import java.net.URL; import java.net.URLClassLoader; import java.text.MessageFormat; -import java.util.*; +import java.util.List; +import java.util.Locale; +import java.util.MissingResourceException; +import java.util.ResourceBundle; import java.util.logging.Logger; public class BundleUtil { @@ -21,27 +24,7 @@ public static String getStringFromBundle(String key) { } public static String getStringFromBundle(String key, List arguments) { - - DataverseLocaleBean d = new DataverseLocaleBean(); - ResourceBundle bundle; - bundle_locale = new Locale(d.getLocaleCode()); - - String 
filesRootDirectory = System.getProperty("dataverse.lang.directory"); - - if (filesRootDirectory == null || filesRootDirectory.isEmpty()) { - bundle = ResourceBundle.getBundle(defaultBundleFile, bundle_locale); - } else { - File bundleFileDir = new File(filesRootDirectory); - URL[] urls = null; - try { - urls = new URL[]{bundleFileDir.toURI().toURL()}; - } catch (Exception e) { - e.printStackTrace(); - } - - ClassLoader loader = new URLClassLoader(urls); - bundle = ResourceBundle.getBundle(defaultBundleFile, bundle_locale, loader); - } + ResourceBundle bundle = getResourceBundle(defaultBundleFile ); return getStringFromBundle(key, arguments, bundle); } @@ -66,4 +49,34 @@ public static String getStringFromBundle(String key, List arguments, Res } } + public static String getStringFromPropertyFile(String key, String propertyFileName ) { + ResourceBundle bundle = getResourceBundle(propertyFileName); + return getStringFromBundle(key, null, bundle); + } + + public static ResourceBundle getResourceBundle(String propertyFileName) + { + DataverseLocaleBean d = new DataverseLocaleBean(); + ResourceBundle bundle; + bundle_locale = new Locale(d.getLocaleCode()); + + String filesRootDirectory = System.getProperty("dataverse.lang.directory"); + + if (filesRootDirectory == null || filesRootDirectory.isEmpty()) { + bundle = ResourceBundle.getBundle(propertyFileName, bundle_locale); + } else { + File bundleFileDir = new File(filesRootDirectory); + URL[] urls = null; + try { + urls = new URL[]{bundleFileDir.toURI().toURL()}; + } catch (Exception e) { + e.printStackTrace(); + } + + ClassLoader loader = new URLClassLoader(urls); + bundle = ResourceBundle.getBundle(propertyFileName, bundle_locale, loader); + } + + return bundle ; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index dcf6584fb51..b6658b7dace 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -20,6 +20,7 @@ package edu.harvard.iq.dataverse.util; + import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFile.ChecksumType; import edu.harvard.iq.dataverse.DataFileServiceBean; @@ -43,8 +44,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.util.ResourceBundle; -import java.util.MissingResourceException; import java.nio.channels.FileChannel; import java.nio.channels.WritableByteChannel; import java.nio.charset.Charset; @@ -57,11 +56,12 @@ import java.sql.Timestamp; import java.text.MessageFormat; import java.text.SimpleDateFormat; +import java.util.Map; +import java.util.MissingResourceException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; @@ -209,7 +209,7 @@ public static String getUserFriendlyFileType(DataFile dataFile) { fileType = fileType.substring(0, fileType.indexOf(";")); } try { - return ResourceBundle.getBundle("MimeTypeDisplay").getString(fileType); + return BundleUtil.getStringFromPropertyFile(fileType,"MimeTypeDisplay" ); } catch (MissingResourceException e) { return fileType; } @@ -227,7 +227,7 @@ public static String getFacetFileType(DataFile dataFile) { } try { - return ResourceBundle.getBundle("MimeTypeFacets").getString(fileType); + return BundleUtil.getStringFromPropertyFile(fileType,"MimeTypeFacets" ); } catch 
(MissingResourceException e) { // if there's no defined "facet-friendly" form of this mime type // we'll truncate the available type by "/", e.g., all the @@ -247,7 +247,7 @@ public static String getFacetFileType(DataFile dataFile) { } } - return ResourceBundle.getBundle("MimeTypeFacets").getString("application/octet-stream"); + return BundleUtil.getStringFromPropertyFile("application/octet-stream","MimeTypeFacets" ); } public static String getUserFriendlyOriginalType(DataFile dataFile) { @@ -258,7 +258,7 @@ public static String getUserFriendlyOriginalType(DataFile dataFile) { fileType = fileType.substring(0, fileType.indexOf(";")); } try { - return ResourceBundle.getBundle("MimeTypeDisplay").getString(fileType); + return BundleUtil.getStringFromPropertyFile(fileType,"MimeTypeDisplay" ); } catch (MissingResourceException e) { return fileType; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 40a85c4ff59..8328854f481 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -834,15 +834,21 @@ public int getPVNumberOfConsecutiveDigitsAllowed() { * * - TransferProtocols * - * There is a good chance these will be consolidated in the future. The word - * "NATIVE" is a bit of placeholder term to mean how Dataverse has - * traditionally handled files, which tends to involve users uploading and - * downloading files using a browser or APIs. + * There is a good chance these will be consolidated in the future. */ public enum FileUploadMethods { + /** + * DCM stands for Data Capture Module. Right now it supports upload over + * rsync+ssh but DCM may support additional methods in the future. + */ RSYNC("dcm/rsync+ssh"), - NATIVE("NATIVE"); + /** + * Traditional Dataverse file handling, which tends to involve users + * uploading and downloading files using a browser or APIs. + */ + NATIVE("native/http"); + private final String text; @@ -969,16 +975,53 @@ public boolean isPublicInstall(){ } public boolean isRsyncUpload(){ + return getUploadMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString()); + } + + // Controls if HTTP upload is enabled for both GUI and API. 
+ public boolean isHTTPUpload(){ + return getUploadMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString()); + } + + public boolean isRsyncOnly(){ + String downloadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.DownloadMethods); + if(downloadMethods == null){ + return false; + } + if (!downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString())){ + return false; + } String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - return uploadMethods != null && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); + if (uploadMethods==null){ + return false; + } else { + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); + } } public boolean isRsyncDownload() { String downloadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.DownloadMethods); - return downloadMethods !=null && downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString()); + return downloadMethods !=null && downloadMethods.toLowerCase().contains(SystemConfig.FileDownloadMethods.RSYNC.toString()); + } + + private Boolean getUploadMethodAvailable(String method){ + String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); + if (uploadMethods==null){ + return false; + } else { + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).contains(method); + } } + public Integer getUploadMethodCount(){ + String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); + if (uploadMethods==null){ + return 0; + } else { + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size(); + } + } public boolean isDataFilePIDSequentialDependent(){ String doiIdentifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString"); String doiDataFileFormat = settingsService.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT"); diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index c7ac9ba2403..6ad515f23c4 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -442,7 +442,7 @@ -
        [remainder of the dataset-license-terms.xhtml hunk: XHTML markup lost in extraction; the surviving fragment is #{bundle['file.dataFilesTab.terms.list.guestbook']}]
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index bdef7416845..9eb3f0421fe 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml
        [dataset.xhtml hunks @@ -199,13 +199,11 @@ and @@ -1522,7 +1456,6 @@: XHTML markup lost in extraction; surviving fragments include #{bundle['dataset.editBtn']} and the numbered rsync upload-script steps 1.-8.]
diff --git a/src/main/webapp/dataverse.xhtml b/src/main/webapp/dataverse.xhtml index 2a2abb1b08b..315474259e8 100644 --- a/src/main/webapp/dataverse.xhtml +++ b/src/main/webapp/dataverse.xhtml

        [dataverse.xhtml hunks @@ -131,7 +131,8 @@ and @@ -181,16 +222,15 @@, plus the following upload/edit-files hunks (@@ -271,7 +452,9 @@, @@ -284,7 +467,6 @@, @@ -295,7 +477,9 @@, @@ -315,7 +499,7 @@, @@ -422,13 +606,15 @@, @@ -441,13 +627,21 @@): XHTML markup lost in extraction; surviving fragments include #{bundle['file.createUploadDisabled']}, #{bundle['file.rsyncUpload.rsyncUploadDisabledDueFileUploadedViaHttp']}, #{bundle['dataset.noSelectedFilesForDelete']}, #{bundle['file.deleteFileDialog.tip']}, #{bundle['file.deleteFileDialog.failed.tip']}, and another copy of the numbered rsync upload-script steps 1.-8.]
@@ -791,10 +985,16 @@ $('button[id$="compareVersions"]').trigger('click'); } } + function deleteFinished() { + $('button[id$="updateEditDataFilesButtonsForDelete"]').trigger('click'); + $('button[id$="allDeletesFinished"]').trigger('click'); + } function checkFilesSelected() { var count = PF('filesTable').getSelectedRowsCount(); if (count > 0) { PF('deleteFileConfirmation').show(); + } else { + PF('selectFilesForDeleteFragment').show(); } } function checkNewlyRestricted() { @@ -824,4 +1024,4 @@ Dropbox.choose(options); } - \ No newline at end of file +
diff --git a/src/main/webapp/editdatafiles.xhtml b/src/main/webapp/editdatafiles.xhtml index 61ce0ed5c1e..0b49ff8f62f 100755 --- a/src/main/webapp/editdatafiles.xhtml +++ b/src/main/webapp/editdatafiles.xhtml
        [editdatafiles.xhtml hunks @@ -20,6 +20,7 @@, @@ -31,7 +32,7 @@, @@ -44,7 +45,7 @@ and @@ -95,21 +96,35 @@: XHTML markup lost in extraction]
diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 7a473546c30..fbfbe4adb96 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml
        [file.xhtml hunks @@ -30,7 +30,7 @@, @@ -125,14 +125,14 @@, @@ -172,7 +173,7 @@, @@ -361,7 +362,7 @@ and @@ -424,7 +425,7 @@: XHTML markup lost in extraction; surviving fragments include #{bundle['metrics.title']} and rendered-expressions on FilePage.fileMetadata.dataFile.filePackage and settingsWrapper.rsyncDownload]
diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index ec6694d5df5..6e959fb4bf5 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml
        [filesFragment.xhtml hunks @@ -22,11 +22,7 @@, @@ -97,41 +93,40 @@, @@ -151,8 +146,9 @@, @@ -206,7 +202,7 @@, @@ -238,7 +234,7 @@ and @@ -392,7 +389,7 @@: XHTML markup lost in extraction; surviving fragments include the filesTable attributes (rowKey="#{fileMetadata.dataFile.storageIdentifier}", selection="#{DatasetPage.selectedFiles}", emptyMessage="#{bundle['file.notFound.tip']}") and #{bundle['file.editFiles']}]
diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml index a9f1a157c1c..e83eab6565b 100755 --- a/src/main/webapp/metadataFragment.xhtml +++ b/src/main/webapp/metadataFragment.xhtml
        [metadataFragment.xhtml hunk @@ -116,7 +116,8 @@: XHTML markup lost in extraction]
diff --git a/src/main/webapp/resources/css/structure.css b/src/main/webapp/resources/css/structure.css index f3a16db494d..b15bcceb181 100644 --- a/src/main/webapp/resources/css/structure.css +++ b/src/main/webapp/resources/css/structure.css @@ -680,13 +680,15 @@ span.text-info.popoverHTML {display:inline-block;} /*div[id$="versionsTable"] .ui-datatable-tablewrapper thead {display:none;}*/ div[id$="versionsTable"] th.col-select-width * {display:none;} +div[id$="filesTable"].ui-datatable.ui-widget div.ui-datatable-tablewrapper {overflow:visible;} + div[id$="filesTable"].ui-datatable.ui-widget {margin-bottom:20px;} div[id$="filesTable"].ui-datatable.ui-widget table {table-layout: fixed;} div[id$="filesTable"].ui-datatable.ui-widget table th, div[id$="filesTable"].ui-datatable.ui-widget table td {box-sizing: content-box;} div[id$="filesTable"] div.ui-datatable-header.ui-widget-header {padding:0; background:none; border:0;} -div[id$="filesTable"] #filesHeaderCount {padding:10px 10px 4px 10px;} -div[id$="filesTable"] #filesHeaderCount span.highlightBold {display:block; padding:0; line-height:1.65;} +div[id$="filesHeaderCount"] {padding:6px 12px;} +div[id$="filesHeaderCount"] span.highlightBold {display:block; padding:0; line-height:1.65;} div[id$="filesTable"] .col-file-thumb {border-right:0;} div[id$="filesTable"] .col-file-thumb div.thumbnail-block {position:relative;width:64px;height:64px;margin:0 auto;} div[id$="filesTable"] .col-file-thumb div.thumbnail-block span.file-thumbnail-icon {font-size:62px;line-height:1.05;} diff --git a/src/main/webapp/themeAndWidgetsFragment.xhtml b/src/main/webapp/themeAndWidgetsFragment.xhtml index 315ae30376e..b1931ae7e5a 100644 --- a/src/main/webapp/themeAndWidgetsFragment.xhtml +++ b/src/main/webapp/themeAndWidgetsFragment.xhtml
        [themeAndWidgetsFragment.xhtml hunk @@ -7,7 +7,8 @@: XHTML markup lost in extraction]
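The DatasetsIT hunk below re-enables the DCM "extra testing" path and derives the rsync upload folder from the dataset identifier instead of using the identifier verbatim. A tiny standalone sketch of that derivation, with a hypothetical dev-style identifier:

    public class UploadFolderSketch {
        public static void main(String[] args) {
            // Dev installs mint identifiers with the "FK2/" shoulder; this value is hypothetical.
            String identifier = "FK2/ABCDEF";
            // The test keeps only the suffix, because the directories it creates are
            // <files dir>/10.5072/<identifier>/<uploadFolder>.
            String uploadFolder = identifier.split("FK2/")[1];
            System.out.println(uploadFolder); // prints "ABCDEF"
        }
    }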
      diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 94b207969b6..b604685f314 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -60,7 +60,8 @@ public static void setUpClass() { Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport); removeExcludeEmail.then().assertThat() .statusCode(200); - + /* With Dual mode, we can no longer mess with upload methods since native is now required for anything to work + Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); removeDcmUrl.then().assertThat() .statusCode(200); @@ -68,6 +69,7 @@ public static void setUpClass() { Response removeUploadMethods = UtilIT.deleteSetting(SettingsServiceBean.Key.UploadMethods); removeUploadMethods.then().assertThat() .statusCode(200); + */ } @AfterClass @@ -80,7 +82,7 @@ public static void afterClass() { Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport); removeExcludeEmail.then().assertThat() .statusCode(200); - + /* See above Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); removeDcmUrl.then().assertThat() .statusCode(200); @@ -88,6 +90,7 @@ public static void afterClass() { Response removeUploadMethods = UtilIT.deleteSetting(SettingsServiceBean.Key.UploadMethods); removeUploadMethods.then().assertThat() .statusCode(200); + */ } @Test @@ -1124,9 +1127,9 @@ public void testCreateDatasetWithDcmDependency() { Response getRsyncScriptPermErrorGuest = UtilIT.getRsyncScript(datasetPersistentId, nullTokenToIndicateGuest); getRsyncScriptPermErrorGuest.prettyPrint(); getRsyncScriptPermErrorGuest.then().assertThat() + .statusCode(UNAUTHORIZED.getStatusCode()) .contentType(ContentType.JSON) - .body("message", equalTo("User :guest is not permitted to perform requested action.")) - .statusCode(UNAUTHORIZED.getStatusCode()); + .body("message", equalTo("Please provide a key query parameter (?key=XXX) or via the HTTP header X-Dataverse-key")); Response createNoPermsUser = UtilIT.createRandomUser(); String noPermsUsername = UtilIT.getUsernameFromResponse(createNoPermsUser); @@ -1273,7 +1276,9 @@ public void testDcmChecksumValidationMessages() throws IOException, InterruptedE String protocol = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.protocol"); String authority = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.authority"); String identifier = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.identifier"); + logger.info("identifier: " + identifier); String datasetPersistentId = protocol + ":" + authority + "/" + identifier; + logger.info("datasetPersistentId: " + datasetPersistentId); /** * Here we are pretending to be the Data Capture Module reporting on if @@ -1334,23 +1339,32 @@ public void testDcmChecksumValidationMessages() throws IOException, InterruptedE removeUploadMethods.then().assertThat() .statusCode(200); - String uploadFolder = identifier; + String uploadFolder = identifier.split("FK2/")[1]; + logger.info("uploadFolder: " + uploadFolder); /** * The "extra testing" involves having this REST Assured test do two * jobs done by the rsync script and the DCM. 
The rsync script creates * "files.sha" and (if checksum validation succeeds) the DCM moves the * files and the "files.sha" file into the uploadFolder. + * + * The whole test was disabled in ae6b0a7 so we are changing + * doExtraTesting to true. */ - boolean doExtraTesting = false; + boolean doExtraTesting = true; if (doExtraTesting) { String SEP = java.io.File.separator; // Set this to where you keep your files in dev. It might be nice to have an API to query to get this location from Dataverse. - String dsDir = "/Users/pdurbin/dataverse/files/10.5072/FK2"; - java.nio.file.Files.createDirectories(java.nio.file.Paths.get(dsDir + SEP + identifier)); - java.nio.file.Files.createDirectories(java.nio.file.Paths.get(dsDir + SEP + identifier + SEP + uploadFolder)); + // TODO: Think more about if dsDir should end with "/FK2" or not. + String dsDir = "/usr/local/glassfish4/glassfish/domains/domain1/files/10.5072"; + String dsDirPlusIdentifier = dsDir + SEP + identifier; + logger.info("dsDirPlusIdentifier: " + dsDirPlusIdentifier); + java.nio.file.Files.createDirectories(java.nio.file.Paths.get(dsDirPlusIdentifier)); + String dsDirPlusIdentifierPlusUploadFolder = dsDir + SEP + identifier + SEP + uploadFolder; + logger.info("dsDirPlusIdentifierPlusUploadFolder: " + dsDirPlusIdentifierPlusUploadFolder); + java.nio.file.Files.createDirectories(java.nio.file.Paths.get(dsDirPlusIdentifierPlusUploadFolder)); String checksumFilename = "files.sha"; String filename1 = "file1.txt"; String fileContent1 = "big data!"; @@ -1387,8 +1401,8 @@ public void testDcmChecksumValidationMessages() throws IOException, InterruptedE if (doExtraTesting) { uploadSuccessful.then().assertThat() - .body("data.message", equalTo("FileSystemImportJob in progress")) - .statusCode(200); + .statusCode(200) + .body("data.message", equalTo("FileSystemImportJob in progress")); if (doExtraTesting) { @@ -1397,11 +1411,11 @@ public void testDcmChecksumValidationMessages() throws IOException, InterruptedE Response datasetAsJson2 = UtilIT.nativeGet(datasetId, apiToken); datasetAsJson2.prettyPrint(); datasetAsJson2.then().assertThat() - .body("data.latestVersion.files[0].dataFile.filename", equalTo(identifier)) + .statusCode(OK.getStatusCode()) + .body("data.latestVersion.files[0].dataFile.filename", equalTo(uploadFolder)) .body("data.latestVersion.files[0].dataFile.contentType", equalTo("application/vnd.dataverse.file-package")) .body("data.latestVersion.files[0].dataFile.filesize", equalTo(totalSize)) - .body("data.latestVersion.files[0].dataFile.checksum.type", equalTo("SHA-1")) - .statusCode(OK.getStatusCode()); + .body("data.latestVersion.files[0].dataFile.checksum.type", equalTo("SHA-1")); } } logger.info("username/password: " + username); diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIOTest.java new file mode 100644 index 00000000000..4ce821a5fee --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIOTest.java @@ -0,0 +1,123 @@ +/* + * Copyright 2018 Forschungszentrum Jülich GmbH + * SPDX-License-Identifier: Apache 2.0 + */ +package edu.harvard.iq.dataverse.dataaccess; + +import com.amazonaws.services.s3.AmazonS3; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.api.UtilIT; +import edu.harvard.iq.dataverse.mocks.MocksFactory; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.extension.ExtendWith; +import static org.junit.jupiter.api.Assertions.*; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import static org.mockito.Mockito.*; +import static org.mockito.BDDMockito.*; + +import java.io.FileNotFoundException; +import java.io.IOException; + +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.STRICT_STUBS) +public class S3AccessIOTest { + + @Mock + private AmazonS3 s3client; + + private S3AccessIO dataSetAccess; + private S3AccessIO dataFileAccess; + private Dataset dataSet; + private DataFile dataFile; + private String dataFileId; + + @BeforeEach + public void setup() throws IOException { + dataFile = MocksFactory.makeDataFile(); + dataSet = MocksFactory.makeDataset(); + dataFile.setOwner(dataSet); + dataFileId = UtilIT.getRandomIdentifier(); + dataFile.setStorageIdentifier("s3://bucket:"+dataFileId); + dataSetAccess = new S3AccessIO<>(dataSet, null, s3client); + dataFileAccess = new S3AccessIO<>(dataFile, null, s3client); + } + + /* + createTempFile + getStorageLocation + getFileSystemPath + exists? + getWriteChannel + getOutputStream + getDestinationKey + + DONE + --------------------- + getMainFileKey + getUrlExpirationMinutes + */ + + @Test + void keyNull_getMainFileKey() throws IOException { + // given + String authOwner = dataSet.getAuthority(); + String idOwner = dataSet.getIdentifier(); + + // when + String key = dataFileAccess.getMainFileKey(); + + // then + assertEquals(authOwner+"/"+idOwner+"/"+dataFileId, key); + } + + @Test + void keyNullstorageIdNullOrEmpty_getMainFileKey() throws IOException { + // given + dataFile.setStorageIdentifier(null); + // when & then + assertThrows(FileNotFoundException.class, () -> {dataFileAccess.getMainFileKey(); }); + + // given + dataFile.setStorageIdentifier(""); + // when & then + assertThrows(FileNotFoundException.class, () -> {dataFileAccess.getMainFileKey(); }); + } + + @Test + void keyNullstorageIdNull_getMainFileKey() throws IOException { + // given + dataFile.setStorageIdentifier("invalid://abcd"); + // when & then + assertThrows(IOException.class, () -> {dataFileAccess.getMainFileKey(); }); + } + + @Test + void default_getUrlExpirationMinutes() { + // given + System.clearProperty("dataverse.files.s3-url-expiration-minutes"); + // when & then + assertEquals(60, dataFileAccess.getUrlExpirationMinutes()); + } + + @Test + void validSetting_getUrlExpirationMinutes() { + // given + System.setProperty("dataverse.files.s3-url-expiration-minutes", "120"); + // when & then + assertEquals(120, dataFileAccess.getUrlExpirationMinutes()); + } + + @Test + void invalidSetting_getUrlExpirationMinutes() { + // given + System.setProperty("dataverse.files.s3-url-expiration-minutes", "NaN"); + // when & then + assertEquals(60, dataFileAccess.getUrlExpirationMinutes()); + } + +} diff --git a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java index 08b020ec2ca..17f67c31b7f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java @@ -3,6 +3,7 @@ import com.mashape.unirest.http.HttpResponse; import com.mashape.unirest.http.JsonNode; import edu.harvard.iq.dataverse.Dataset; +import 
edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import static edu.harvard.iq.dataverse.mocks.MocksFactory.makeAuthenticatedUser; import java.io.UnsupportedEncodingException; @@ -26,9 +27,9 @@ public void testRsyncSupportEnabled() { System.out.println("rsyncSupportEnabled"); assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled(null)); assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("dcm/rsync+ssh")); - // We haven't finalized what the separator will be yet. + // Comma separated lists of upload methods are supported. assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("NATIVE:dcm/rsync+ssh")); - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("NATIVE,dcm/rsync+ssh")); + assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("NATIVE,dcm/rsync+ssh")); assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("NATIVE")); assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("junk")); } @@ -105,4 +106,14 @@ public void testGetMessageFromException() { assertEquals("edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleException", DataCaptureModuleUtil.getMessageFromException(new DataCaptureModuleException(null, null))); assertEquals("edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleException: message1", DataCaptureModuleUtil.getMessageFromException(new DataCaptureModuleException("message1", null))); } + + @Test + public void testScriptName() { + DatasetVersion datasetVersion = new DatasetVersion(); + Dataset dataset = new Dataset(); + dataset.setIdentifier("KYHURW"); + datasetVersion.setDataset(dataset); + assertEquals("upload-KYHURW.bash", DataCaptureModuleUtil.getScriptName(datasetVersion)); + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java b/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java index a2ecfa77cc5..efa83fbb950 100644 --- a/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java @@ -120,9 +120,6 @@ public void testProvNamesNotInsideEntity() throws IOException { assertFalse(entities.size() > 0); } - //MAD: write a simple entity test as well, also ensure logging works after getting a real tostring together - //also write a test of parsing different cases, we don't want to catch "fakename" but we do want to catch "rdt:name" and "name" - @Category(NonEssentialTests.class) @Test public void testProvNameJsonParserEmptyEntities() throws IOException { diff --git a/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java index 2d9c31305d5..af0c657a1a4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java @@ -16,7 +16,8 @@ public class XmlValidatorTest { private static final Logger logger = Logger.getLogger(XmlValidatorTest.class.getCanonicalName()); - // FIXME: Remove @Ignore after figuring out why `mvn` (but not NetBeans) shows "javax.xml.transform.TransformerException: org.xml.sax.SAXParseException; Premature end of file" + //Ignored as this relies on an external resource that has been down occasionally. + //May be a good test for our full vs. every-time test classifications (#4896) -MAD 4.9.1 @Ignore @Category(NonEssentialTests.class) @Test
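The DataCaptureModuleUtilTest change above pins down the new comma-separated semantics of :UploadMethods. Below is a standalone sketch of the parsing rule the diff introduces in both DataCaptureModuleUtil.rsyncSupportEnabled and SystemConfig.getUploadMethodAvailable; the setting values are the real ones, while the harness around them is illustrative:

    import java.util.Arrays;
    import java.util.List;

    public class UploadMethodsParsingSketch {

        // Mirrors the split-and-contains check from the diff.
        static boolean methodEnabled(String uploadMethodsSetting, String method) {
            if (uploadMethodsSetting == null) {
                return false;
            }
            // case-insensitive, comma-separated, tolerating whitespace around commas
            List<String> methods = Arrays.asList(uploadMethodsSetting.toLowerCase().split("\\s*,\\s*"));
            return methods.contains(method);
        }

        public static void main(String[] args) {
            // dual mode: both upload methods enabled at once
            System.out.println(methodEnabled("native/http, dcm/rsync+ssh", "native/http"));   // true
            System.out.println(methodEnabled("NATIVE,dcm/rsync+ssh", "dcm/rsync+ssh"));       // true
            // rsync-only install: the HTTP upload endpoints answer 503
            System.out.println(methodEnabled("dcm/rsync+ssh", "native/http"));                // false
            // a colon is not a supported separator, matching the test above
            System.out.println(methodEnabled("NATIVE:dcm/rsync+ssh", "dcm/rsync+ssh"));       // false
        }
    }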