From 0133410adbfa3afdec957238028706db2856537a Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 12 Sep 2024 11:25:17 +0200
Subject: [PATCH] patch v6.3+10797+10820

https://github.com/IQSS/dataverse/pull/10797
https://github.com/IQSS/dataverse/pull/10820
---
 .../10797-update-current-version-bug-fix.md   |  11 ++
 .../10819-publish-thumbnail-bug.md            |   5 +
 .../edu/harvard/iq/dataverse/Dataset.java     |   7 +-
 .../dataverse/DatasetVersionServiceBean.java  | 159 ++++++++++--------
 .../iq/dataverse/TermsOfUseAndAccess.java     |  24 +++
 .../iq/dataverse/dataset/DatasetUtil.java     |   9 +-
 .../CuratePublishedDatasetVersionCommand.java | 127 +++++++++-----
 .../privateurl/PrivateUrlServiceBean.java     |   4 +-
 .../iq/dataverse/settings/FeatureFlags.java   |  10 ++
 src/main/java/propertyFiles/Bundle.properties |   4 +-
 src/main/webapp/dataset.xhtml                 |   2 +-
 .../harvard/iq/dataverse/api/DatasetsIT.java  | 120 ++++++++++++-
 .../dataaccess/RemoteOverlayAccessIOTest.java |   4 +-
 13 files changed, 359 insertions(+), 127 deletions(-)
 create mode 100644 doc/release-notes/10797-update-current-version-bug-fix.md
 create mode 100644 doc/release-notes/10819-publish-thumbnail-bug.md

diff --git a/doc/release-notes/10797-update-current-version-bug-fix.md b/doc/release-notes/10797-update-current-version-bug-fix.md
new file mode 100644
index 00000000000..9150e41c6d0
--- /dev/null
+++ b/doc/release-notes/10797-update-current-version-bug-fix.md
@@ -0,0 +1,11 @@
+A significant bug in the superuser-only "Update-Current-Version" publication option was found and fixed in this release. If Update-Current-Version was used when changes had been made to the dataset Terms (rather than to dataset metadata), or if the PID provider service was down or returned an error, the update would fail, leaving the dataset unusable and requiring restoration from a backup. The fix in this release allows the update to succeed in both of these cases and redesigns the functionality so that any unknown issue should not leave the dataset unusable (i.e., the error is reported and the dataset remains in its current state, with the last-published version unchanged and the changes still in the draft version).
+
+Users of earlier Dataverse releases are encouraged to alert their superusers to this issue. Those who wish to disable this functionality have two options:
+* Change the dataset.updateRelease entry in the Bundle.properties file (or local language version) to "Do Not Use" or similar (this doesn't disable the option but warns superusers about the issue), or
+* Edit the dataset.xhtml file to remove the lines
+
+
+
+
+
+then delete the contents of the generated and osgi-cache directories in the Dataverse Payara domain, and restart the Payara server.
\ No newline at end of file
diff --git a/doc/release-notes/10819-publish-thumbnail-bug.md b/doc/release-notes/10819-publish-thumbnail-bug.md
new file mode 100644
index 00000000000..f6dc4ac67df
--- /dev/null
+++ b/doc/release-notes/10819-publish-thumbnail-bug.md
@@ -0,0 +1,5 @@
+The initial release of Dataverse v6.3 introduced a bug where publishing would break the dataset thumbnail, which in turn broke the rendering of the parent Collection ("dataverse") page. This problem was fixed in PR 10820.
+
+This bug fix will prevent the problem from happening in the future, but it does not fix any existing broken links.
To restore any broken thumbnails caused by this bug, you can call the http://localhost:8080/api/admin/clearThumbnailFailureFlag API, which will attempt to clear the flag on all files (regardless of whether caused by this bug or some other problem with the file) or the http://localhost:8080/api/admin/clearThumbnailFailureFlag/id to clear the flag for individual files. Calling the former, batch API is recommended. + +Additionally, the same PR made it possible to turn off the feature that automatically selects of one of the image datafiles to serve as the thumbnail of the parent dataset. An admin can turn it off by raising the feature flag `-Ddataverse.feature.disable-dataset-thumbnail-autoselect=true`. When the feature is disabled, a user can still manually pick a thumbnail image, or upload a dedicated thumbnail image. diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index eaf406d01bf..066b0b46dc8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.license.License; import edu.harvard.iq.dataverse.makedatacount.DatasetExternalCitations; import edu.harvard.iq.dataverse.makedatacount.DatasetMetrics; +import edu.harvard.iq.dataverse.settings.FeatureFlags; import java.nio.file.Path; import java.nio.file.Paths; import java.sql.Timestamp; @@ -206,6 +207,10 @@ public Dataset(boolean isHarvested) { StorageUse storageUse = new StorageUse(this); this.setStorageUse(storageUse); } + + if (FeatureFlags.DISABLE_DATASET_THUMBNAIL_AUTOSELECT.enabled()) { + this.setUseGenericThumbnail(true); + } } /** @@ -969,4 +974,4 @@ public DatasetThumbnail getDatasetThumbnail(DatasetVersion datasetVersion, int s public String getTargetUrl() { return Dataset.TARGET_URL; } -} +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index ab23fa779d5..3b5795b17ed 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -9,6 +9,7 @@ import static edu.harvard.iq.dataverse.batch.jobs.importer.filesystem.FileRecordJobListener.SEP; import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.search.SolrSearchResult; +import edu.harvard.iq.dataverse.settings.FeatureFlags; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.MarkupChecker; @@ -315,6 +316,23 @@ private void msg(String s){ //logger.fine(s); } + public boolean isVersionDefaultCustomTerms(DatasetVersion datasetVersion) { + //SEK - belt and suspenders here, but this is where the bug 10719 first manifested + if (datasetVersion != null && datasetVersion.getId() != null) { + try { + TermsOfUseAndAccess toua = (TermsOfUseAndAccess) em.createNamedQuery("TermsOfUseAndAccess.findByDatasetVersionIdAndDefaultTerms") + .setParameter("id", datasetVersion.getId()).setParameter("defaultTerms", TermsOfUseAndAccess.DEFAULT_NOTERMS).getSingleResult(); + if (toua != null && datasetVersion.getTermsOfUseAndAccess().getLicense() == null) { + return true; + } + + } catch (NoResultException e) { + return false; + } + } + return false; + } + /** * Does the version identifier in the URL ask for a "DRAFT"? 
* @@ -790,36 +808,11 @@ public Long getThumbnailByVersionId(Long versionId) { return null; } - Long thumbnailFileId; - - // First, let's see if there are thumbnails that have already been - // generated: - try { - thumbnailFileId = (Long) em.createNativeQuery("SELECT df.id " - + "FROM datafile df, filemetadata fm, datasetversion dv, dvobject o " - + "WHERE dv.id = " + versionId + " " - + "AND df.id = o.id " - + "AND fm.datasetversion_id = dv.id " - + "AND fm.datafile_id = df.id " - + "AND df.restricted = false " - + "AND df.embargo_id is null " - + "AND df.retention_id is null " - + "AND o.previewImageAvailable = true " - + "ORDER BY df.id LIMIT 1;").getSingleResult(); - } catch (Exception ex) { - thumbnailFileId = null; - } - - if (thumbnailFileId != null) { - logger.fine("DatasetVersionService,getThumbnailByVersionid(): found already generated thumbnail for version " + versionId + ": " + thumbnailFileId); - assignDatasetThumbnailByNativeQuery(versionId, thumbnailFileId); - return thumbnailFileId; - } - - if (!systemConfig.isThumbnailGenerationDisabledForImages()) { - // OK, let's try and generate an image thumbnail! - long imageThumbnailSizeLimit = systemConfig.getThumbnailSizeLimitImage(); + if (!FeatureFlags.DISABLE_DATASET_THUMBNAIL_AUTOSELECT.enabled()) { + Long thumbnailFileId; + // First, let's see if there are thumbnails that have already been + // generated: try { thumbnailFileId = (Long) em.createNativeQuery("SELECT df.id " + "FROM datafile df, filemetadata fm, datasetversion dv, dvobject o " @@ -827,63 +820,89 @@ public Long getThumbnailByVersionId(Long versionId) { + "AND df.id = o.id " + "AND fm.datasetversion_id = dv.id " + "AND fm.datafile_id = df.id " - + "AND o.previewimagefail = false " + "AND df.restricted = false " + "AND df.embargo_id is null " + "AND df.retention_id is null " - + "AND df.contenttype LIKE 'image/%' " - + "AND NOT df.contenttype = 'image/fits' " - + "AND df.filesize < " + imageThumbnailSizeLimit + " " - + "ORDER BY df.filesize ASC LIMIT 1;").getSingleResult(); + + "AND o.previewImageAvailable = true " + + "ORDER BY df.id LIMIT 1;").getSingleResult(); } catch (Exception ex) { thumbnailFileId = null; } if (thumbnailFileId != null) { - logger.fine("obtained file id: " + thumbnailFileId); - DataFile thumbnailFile = datafileService.find(thumbnailFileId); - if (thumbnailFile != null) { - if (datafileService.isThumbnailAvailable(thumbnailFile)) { - assignDatasetThumbnailByNativeQuery(versionId, thumbnailFileId); - return thumbnailFileId; + logger.fine("DatasetVersionService,getThumbnailByVersionid(): found already generated thumbnail for version " + versionId + ": " + thumbnailFileId); + assignDatasetThumbnailByNativeQuery(versionId, thumbnailFileId); + return thumbnailFileId; + } + + if (!systemConfig.isThumbnailGenerationDisabledForImages()) { + // OK, let's try and generate an image thumbnail! 
+ long imageThumbnailSizeLimit = systemConfig.getThumbnailSizeLimitImage(); + + try { + thumbnailFileId = (Long) em.createNativeQuery("SELECT df.id " + + "FROM datafile df, filemetadata fm, datasetversion dv, dvobject o " + + "WHERE dv.id = " + versionId + " " + + "AND df.id = o.id " + + "AND fm.datasetversion_id = dv.id " + + "AND fm.datafile_id = df.id " + + "AND o.previewimagefail = false " + + "AND df.restricted = false " + + "AND df.embargo_id is null " + + "AND df.retention_id is null " + + "AND df.contenttype LIKE 'image/%' " + + "AND NOT df.contenttype = 'image/fits' " + + "AND df.filesize < " + imageThumbnailSizeLimit + " " + + "ORDER BY df.filesize ASC LIMIT 1;").getSingleResult(); + } catch (Exception ex) { + thumbnailFileId = null; + } + + if (thumbnailFileId != null) { + logger.fine("obtained file id: " + thumbnailFileId); + DataFile thumbnailFile = datafileService.find(thumbnailFileId); + if (thumbnailFile != null) { + if (datafileService.isThumbnailAvailable(thumbnailFile)) { + assignDatasetThumbnailByNativeQuery(versionId, thumbnailFileId); + return thumbnailFileId; + } } } } - } - // And if that didn't work, try the same thing for PDFs: - if (!systemConfig.isThumbnailGenerationDisabledForPDF()) { - // OK, let's try and generate an image thumbnail! - long imageThumbnailSizeLimit = systemConfig.getThumbnailSizeLimitPDF(); - try { - thumbnailFileId = (Long) em.createNativeQuery("SELECT df.id " - + "FROM datafile df, filemetadata fm, datasetversion dv, dvobject o " - + "WHERE dv.id = " + versionId + " " - + "AND df.id = o.id " - + "AND fm.datasetversion_id = dv.id " - + "AND fm.datafile_id = df.id " - + "AND o.previewimagefail = false " - + "AND df.restricted = false " - + "AND df.embargo_id is null " - + "AND df.retention_id is null " - + "AND df.contenttype = 'application/pdf' " - + "AND df.filesize < " + imageThumbnailSizeLimit + " " - + "ORDER BY df.filesize ASC LIMIT 1;").getSingleResult(); - } catch (Exception ex) { - thumbnailFileId = null; - } + // And if that didn't work, try the same thing for PDFs: + if (!systemConfig.isThumbnailGenerationDisabledForPDF()) { + // OK, let's try and generate an image thumbnail! 
+ long imageThumbnailSizeLimit = systemConfig.getThumbnailSizeLimitPDF(); + try { + thumbnailFileId = (Long) em.createNativeQuery("SELECT df.id " + + "FROM datafile df, filemetadata fm, datasetversion dv, dvobject o " + + "WHERE dv.id = " + versionId + " " + + "AND df.id = o.id " + + "AND fm.datasetversion_id = dv.id " + + "AND fm.datafile_id = df.id " + + "AND o.previewimagefail = false " + + "AND df.restricted = false " + + "AND df.embargo_id is null " + + "AND df.retention_id is null " + + "AND df.contenttype = 'application/pdf' " + + "AND df.filesize < " + imageThumbnailSizeLimit + " " + + "ORDER BY df.filesize ASC LIMIT 1;").getSingleResult(); + } catch (Exception ex) { + thumbnailFileId = null; + } - if (thumbnailFileId != null) { - DataFile thumbnailFile = datafileService.find(thumbnailFileId); - if (thumbnailFile != null) { - if (datafileService.isThumbnailAvailable(thumbnailFile)) { - assignDatasetThumbnailByNativeQuery(versionId, thumbnailFileId); - return thumbnailFileId; + if (thumbnailFileId != null) { + DataFile thumbnailFile = datafileService.find(thumbnailFileId); + if (thumbnailFile != null) { + if (datafileService.isThumbnailAvailable(thumbnailFile)) { + assignDatasetThumbnailByNativeQuery(versionId, thumbnailFileId); + return thumbnailFileId; + } } } } } - return null; } @@ -1277,4 +1296,4 @@ public List getUnarchivedDatasetVersions(){ return null; } } // end getUnarchivedDatasetVersions -} // end class +} // end class \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java index ee865770dbe..e7781969507 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java @@ -17,6 +17,28 @@ import jakarta.persistence.Transient; import edu.harvard.iq.dataverse.license.License; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; + +@NamedQueries({ + // TermsOfUseAndAccess.findByDatasetVersionIdAndDefaultTerms + // is used to determine if the dataset terms were set by the multi license support update + // as part of the 5.10 release. + + @NamedQuery(name = "TermsOfUseAndAccess.findByDatasetVersionIdAndDefaultTerms", + query = "SELECT o FROM TermsOfUseAndAccess o, DatasetVersion dv WHERE " + + "dv.id =:id " + + "AND dv.termsOfUseAndAccess.id = o.id " + + "AND o.termsOfUse =:defaultTerms " + + "AND o.confidentialityDeclaration IS null " + + "AND o.specialPermissions IS null " + + "AND o.restrictions IS null " + + "AND o.citationRequirements IS null " + + "AND o.depositorRequirements IS null " + + "AND o.conditions IS null " + + "AND o.disclaimer IS null " + ) +}) /** * @@ -27,6 +49,8 @@ @ValidateTermsOfUseAndAccess public class TermsOfUseAndAccess implements Serializable { + public static final String DEFAULT_NOTERMS = "This dataset is made available without information on how it can be used. 
You should communicate with the Contact(s) specified before use."; + @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 98bd26b51d6..55a14990252 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -114,13 +114,20 @@ public static List getThumbnailCandidates(Dataset dataset, boo * * @param dataset * @param datasetVersion - * @return + * @param size of the requested thumbnail + * @return DatasetThumbnail object, or null if not available */ public static DatasetThumbnail getThumbnail(Dataset dataset, DatasetVersion datasetVersion, int size) { if (dataset == null) { return null; } + if (size == 0) { + // Size 0 will fail (and set the failure flag) and should never be sent + logger.warning("getThumbnail called with size 0"); + return null; + } + StorageIO dataAccess = null; try{ diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index f83041d87bd..27c75c63b05 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.datavariable.VarGroup; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; @@ -13,14 +12,17 @@ import edu.harvard.iq.dataverse.util.DatasetFieldUtil; import edu.harvard.iq.dataverse.workflows.WorkflowComment; import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.DataFileCategory; import edu.harvard.iq.dataverse.DatasetVersionDifference; -import java.util.Collection; +import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; @@ -50,6 +52,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException { if (!getUser().isSuperuser()) { throw new IllegalCommandException("Only superusers can curate published dataset versions", this); } + Dataset savedDataset = null; + // Merge the dataset into our JPA context + setDataset(ctxt.em().merge(getDataset())); ctxt.permissions().checkEditDatasetLock(getDataset(), getRequest(), this); // Invariant: Dataset has no locks preventing the update @@ -58,23 +63,23 @@ public Dataset execute(CommandContext ctxt) throws CommandException { DatasetVersion newVersion = getDataset().getOrCreateEditVersion(); // Copy metadata from draft version to latest published version updateVersion.setDatasetFields(newVersion.initDatasetFields()); - - + newVersion.setDatasetFields(new ArrayList()); // final DatasetVersion editVersion = getDataset().getEditVersion(); 
DatasetFieldUtil.tidyUpFields(updateVersion.getDatasetFields(), true); - // Merge the new version into our JPA context - ctxt.em().merge(updateVersion); - TermsOfUseAndAccess oldTerms = updateVersion.getTermsOfUseAndAccess(); TermsOfUseAndAccess newTerms = newVersion.getTermsOfUseAndAccess(); newTerms.setDatasetVersion(updateVersion); updateVersion.setTermsOfUseAndAccess(newTerms); - //Put old terms on version that will be deleted.... - newVersion.setTermsOfUseAndAccess(oldTerms); - - //Validate metadata and TofA conditions + // Clear unnecessary terms relationships .... + newVersion.setTermsOfUseAndAccess(null); + oldTerms.setDatasetVersion(null); + // Without this there's a db exception related to the oldTerms being referenced + // by the datasetversion table at the flush around line 212 + ctxt.em().flush(); + + // Validate metadata and TofA conditions validateOrDie(updateVersion, isValidateLenient()); //Also set the fileaccessrequest boolean on the dataset to match the new terms @@ -87,19 +92,20 @@ public Dataset execute(CommandContext ctxt) throws CommandException { updateVersion.getWorkflowComments().addAll(newComments); } - // we have to merge to update the database but not flush because // we don't want to create two draft versions! - Dataset tempDataset = ctxt.em().merge(getDataset()); - + Dataset tempDataset = getDataset(); updateVersion = tempDataset.getLatestVersionForCopy(); // Look for file metadata changes and update published metadata if needed List pubFmds = updateVersion.getFileMetadatas(); int pubFileCount = pubFmds.size(); int newFileCount = tempDataset.getOrCreateEditVersion().getFileMetadatas().size(); - /* The policy for this command is that it should only be used when the change is a 'minor update' with no file changes. - * Nominally we could call .isMinorUpdate() for that but we're making the same checks as we go through the update here. + /* + * The policy for this command is that it should only be used when the change is + * a 'minor update' with no file changes. Nominally we could call + * .isMinorUpdate() for that but we're making the same checks as we go through + * the update here. */ if (pubFileCount != newFileCount) { logger.severe("Draft version of dataset: " + tempDataset.getId() + " has: " + newFileCount + " while last published version has " + pubFileCount); @@ -108,7 +114,10 @@ public Dataset execute(CommandContext ctxt) throws CommandException { Long thumbId = null; if(tempDataset.getThumbnailFile()!=null) { thumbId = tempDataset.getThumbnailFile().getId(); - }; + } + + // Note - Curate allows file metadata changes but not adding/deleting files. If + // that ever changes, this command needs to be updated. 
for (FileMetadata publishedFmd : pubFmds) { DataFile dataFile = publishedFmd.getDataFile(); FileMetadata draftFmd = dataFile.getLatestFileMetadata(); @@ -155,45 +164,73 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // Update modification time on the published version and the dataset updateVersion.setLastUpdateTime(getTimestamp()); tempDataset.setModificationTime(getTimestamp()); - ctxt.em().merge(updateVersion); - Dataset savedDataset = ctxt.em().merge(tempDataset); - - // Flush before calling DeleteDatasetVersion which calls - // PrivateUrlServiceBean.getPrivateUrlFromDatasetId() that will query the DB and - // fail if our changes aren't there - ctxt.em().flush(); + newVersion = ctxt.em().merge(newVersion); + savedDataset = ctxt.em().merge(tempDataset); // Now delete draft version - DeleteDatasetVersionCommand cmd; - cmd = new DeleteDatasetVersionCommand(getRequest(), savedDataset); - ctxt.engine().submit(cmd); - // Running the command above reindexes the dataset, so we don't need to do it - // again in here. + ctxt.em().remove(newVersion); + + Iterator dvIt = savedDataset.getVersions().iterator(); + while (dvIt.hasNext()) { + DatasetVersion dv = dvIt.next(); + if (dv.isDraft()) { + dvIt.remove(); + break; // We've removed the draft version, no need to continue iterating + } + } + + savedDataset = ctxt.em().merge(savedDataset); + ctxt.em().flush(); + + RoleAssignment ra = ctxt.privateUrl().getPrivateUrlRoleAssignmentFromDataset(savedDataset); + if (ra != null) { + ctxt.roles().revoke(ra); + } // And update metadata at PID provider - ctxt.engine().submit( - new UpdateDvObjectPIDMetadataCommand(savedDataset, getRequest())); - - //And the exported metadata files try { - ExportService instance = ExportService.getInstance(); - instance.exportAllFormats(getDataset()); - } catch (ExportException ex) { - // Just like with indexing, a failure to export is not a fatal condition. - logger.log(Level.WARNING, "Curate Published DatasetVersion: exception while exporting metadata files:{0}", ex.getMessage()); + ctxt.engine().submit( + new UpdateDvObjectPIDMetadataCommand(savedDataset, getRequest())); + } catch (CommandException ex) { + // The try/catch makes this non-fatal. Should it be non-fatal - it's different from what we do in publish? + // This can be corrected by running the update PID API later, but who will look in the log? + // With the change to not use the DeleteDatasetVersionCommand above and other + // fixes, this error may now cleanly restore the initial state + // with the draft and last published versions unchanged, but this has not yet bee tested. + // (Alternately this could move to onSuccess if we intend it to stay non-fatal.) + logger.log(Level.WARNING, "Curate Published DatasetVersion: exception while updating PID metadata:{0}", ex.getMessage()); } - - - // Update so that getDataset() in updateDatasetUser will get the up-to-date copy - // (with no draft version) + // Update so that getDataset() in updateDatasetUser() will get the up-to-date + // copy (with no draft version) setDataset(savedDataset); + updateDatasetUser(ctxt); - - + // ToDo - see if there are other DatasetVersionUser entries unique to the draft + // version that should be moved to the last published version + // As this command is intended for minor fixes, often done by the person pushing + // the update-current-version button, this is probably a minor issue. 
return savedDataset; } -} + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + boolean retVal = true; + Dataset d = (Dataset) r; + + ctxt.index().asyncIndexDataset(d, true); + + // And the exported metadata files + try { + ExportService instance = ExportService.getInstance(); + instance.exportAllFormats(d); + } catch (ExportException ex) { + // Just like with indexing, a failure to export is not a fatal condition. + retVal = false; + logger.log(Level.WARNING, "Curate Published DatasetVersion: exception while exporting metadata files:{0}", ex.getMessage()); + } + return retVal; + } +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java index 9e5879106e4..79348147a36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java @@ -96,7 +96,7 @@ private RoleAssignment getRoleAssignmentFromPrivateUrlToken(String privateUrlTok * * @todo This might be a good place for Optional. */ - private RoleAssignment getPrivateUrlRoleAssignmentFromDataset(Dataset dataset) { + public RoleAssignment getPrivateUrlRoleAssignmentFromDataset(Dataset dataset) { if (dataset == null) { return null; } @@ -113,4 +113,4 @@ private RoleAssignment getPrivateUrlRoleAssignmentFromDataset(Dataset dataset) { } } -} +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java index 021977ff8c6..2bfda69247a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java @@ -91,6 +91,16 @@ public enum FeatureFlags { * @since Dataverse 6.3 */ DISABLE_RETURN_TO_AUTHOR_REASON("disable-return-to-author-reason"), + /** + * This flag disables the feature that automatically selects one of the + * DataFile thumbnails in the dataset/version as the dedicated thumbnail + * for the dataset. + * + * @apiNote Raise flag by setting + * "dataverse.feature.enable-dataset-thumbnail-autoselect" + * @since Dataverse 6.4 + */ + DISABLE_DATASET_THUMBNAIL_AUTOSELECT("disable-dataset-thumbnail-autoselect"), ; final String flag; diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 0325a47f626..94e1c922a4f 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1055,7 +1055,7 @@ dataverse.theme.logo.imageFooter=Footer Image dataverse.theme.logo.image.title=The logo or image file you wish to display in the header of this dataverse. dataverse.theme.logo.image.footer=The logo or image file you wish to display in the footer of this dataverse. dataverse.theme.logo.image.uploadNewFile=Upload New File -dataverse.theme.logo.image.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file. +dataverse.theme.logo.image.invalidMsg=The image could not be uploaded. Please try again with a JPG, or PNG file. dataverse.theme.logo.image.uploadImgFile=Upload Image File dataverse.theme.logo.format.title=The shape for the logo or image file you upload for this dataverse. dataverse.theme.logo.alignment.title=Where the logo or image should display in the header or footer. 
@@ -2151,7 +2151,7 @@ dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable.title=Select a thumbn dataset.thumbnailsAndWidget.thumbnailImage.uploadNew=Upload New File dataset.thumbnailsAndWidget.thumbnailImage.uploadNew.title=Upload an image file as your dataset thumbnail, which will be stored separately from the data files that belong to your dataset. dataset.thumbnailsAndWidget.thumbnailImage.upload=Upload Image -dataset.thumbnailsAndWidget.thumbnailImage.upload.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file. +dataset.thumbnailsAndWidget.thumbnailImage.upload.invalidMsg=The image could not be uploaded. Please try again with a JPG, or PNG file. dataset.thumbnailsAndWidget.thumbnailImage.alt=Thumbnail image selected for dataset dataset.thumbnailsAndWidget.success=Dataset thumbnail updated. dataset.thumbnailsAndWidget.removeThumbnail=Remove Thumbnail diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 936d354e9d7..b6e01debe8f 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -86,7 +86,7 @@ - + diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index cb9481d3491..9060c8c5fbf 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -182,6 +182,90 @@ public void testCollectionSchema(){ } + @Test + public void testDatasetSchemaValidation() { + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response getCollectionSchemaResponse = UtilIT.getCollectionSchema(dataverseAlias, apiToken); + getCollectionSchemaResponse.prettyPrint(); + getCollectionSchemaResponse.then().assertThat() + .statusCode(200); + + JsonObject expectedSchema = null; + try { + expectedSchema = JsonUtil.getJsonObjectFromFile("doc/sphinx-guides/source/_static/api/dataset-schema.json"); + } catch (IOException ex) { + } + + assertEquals(JsonUtil.prettyPrint(expectedSchema), JsonUtil.prettyPrint(getCollectionSchemaResponse.body().asString())); + + // add a language that is not in the Controlled vocabulary + testDatasetSchemaValidationHelper(dataverseAlias, apiToken, + "\"aar\"", + "\"aar\",\"badlang\"", + BundleUtil.getStringFromBundle("schema.validation.exception.dataset.cvv.missing", List.of("fields", "language", "badlang")) + ); + + // change multiple to true on value that is a not a List + testDatasetSchemaValidationHelper(dataverseAlias, apiToken, + "multiple\": false,\n" + + " \"typeName\": \"title", + "multiple\": true,\n" + + " \"typeName\": \"title", + BundleUtil.getStringFromBundle("schema.validation.exception.notlist.multiple", List.of("fields", "title")) + ); + + // change multiple to false on value that is a List + testDatasetSchemaValidationHelper(dataverseAlias, apiToken, + "typeName\": \"language\",\n" + + " \"multiple\": true", + "typeName\": \"language\",\n" + + " \"multiple\": false", + BundleUtil.getStringFromBundle("schema.validation.exception.list.notmultiple", List.of("fields", "language")) + ); + + // add a mismatched typeName + testDatasetSchemaValidationHelper(dataverseAlias, 
apiToken, + "\"typeName\": \"datasetContactName\",", + "\"typeName\": \"datasetContactNme\",", + BundleUtil.getStringFromBundle("schema.validation.exception.compound.mismatch", List.of("datasetContactName", "datasetContactNme")) + ); + + // add a typeName which is not allowed + testDatasetSchemaValidationHelper(dataverseAlias, apiToken, + "\"datasetContactEmail\": {\n" + + " \"typeClass\": \"primitive\",\n" + + " \"multiple\": false,\n" + + " \"typeName\": \"datasetContactEmail\",", + "\"datasetContactNotAllowed\": {\n" + + " \"typeClass\": \"primitive\",\n" + + " \"multiple\": false,\n" + + " \"typeName\": \"datasetContactNotAllowed\",", + BundleUtil.getStringFromBundle("schema.validation.exception.dataset.invalidType", List.of("datasetContact", "datasetContactNotAllowed", "datasetContactName, datasetContactAffiliation, datasetContactEmail")) + ); + + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); + deleteDataverseResponse.prettyPrint(); + assertEquals(200, deleteDataverseResponse.getStatusCode()); + } + private void testDatasetSchemaValidationHelper(String dataverseAlias, String apiToken, String origString, String replacementString, String expectedError) { + String json = UtilIT.getDatasetJson("scripts/search/tests/data/dataset-finch3.json"); + json = json.replace(origString, replacementString); + Response validateDatasetJsonResponse = UtilIT.validateDatasetJson(dataverseAlias, json, apiToken); + validateDatasetJsonResponse.prettyPrint(); + validateDatasetJsonResponse.then().assertThat() + .statusCode(200) + .body(containsString(expectedError)); + } + @Test public void testCreateDataset() { @@ -3542,17 +3626,47 @@ public void testCuratePublishedDatasetVersionCommand() throws IOException { UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(FORBIDDEN.getStatusCode()); - Response makeSuperUser = UtilIT.makeSuperUser(username); + Response makeSuperUser = UtilIT.setSuperuserStatus(username, true); //should work after making super user UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + //Check that the dataset contains the updated metadata (which includes the name Spruce) Response getDatasetJsonAfterUpdate = UtilIT.nativeGet(datasetId, apiToken); - getDatasetJsonAfterUpdate.prettyPrint(); + assertTrue(getDatasetJsonAfterUpdate.prettyPrint().contains("Spruce")); getDatasetJsonAfterUpdate.then().assertThat() .statusCode(OK.getStatusCode()); + //Check that the draft version is gone + Response getDraft1 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); + getDraft1.then().assertThat() + .statusCode(NOT_FOUND.getStatusCode()); + + + //Also test a terms change + String jsonLDTerms = "{\"https://dataverse.org/schema/core#fileTermsOfAccess\":{\"https://dataverse.org/schema/core#dataAccessPlace\":\"Somewhere\"}}"; + Response updateTerms = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, jsonLDTerms, true); + updateTerms.then().assertThat() + .statusCode(OK.getStatusCode()); + + //Run Update-Current Version again + + UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + + //Verify the new term is there + Response jsonLDResponse = UtilIT.getDatasetJsonLDMetadata(datasetId, apiToken); + assertTrue(jsonLDResponse.prettyPrint().contains("Somewhere")); + jsonLDResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + //And that the draft is 
gone + Response getDraft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); + getDraft2.then().assertThat() + .statusCode(NOT_FOUND.getStatusCode()); + + } /** @@ -4857,4 +4971,4 @@ public void testGetCanDownloadAtLeastOneFile() { Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getCanDownloadAtLeastOneFile("testInvalidId", DS_VERSION_LATEST, secondUserApiToken); getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } -} +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java index 2c0e0a5c6b7..5e4e37209d8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java @@ -41,7 +41,7 @@ public class RemoteOverlayAccessIOTest { public void setUp() { System.setProperty("dataverse.files.test.type", "remote"); System.setProperty("dataverse.files.test.label", "testOverlay"); - System.setProperty("dataverse.files.test.base-url", "https://demo.dataverse.org/resources"); + System.setProperty("dataverse.files.test.base-url", "https://data.qdr.syr.edu/resources"); System.setProperty("dataverse.files.test.base-store", "file"); System.setProperty("dataverse.files.test.download-redirect", "true"); System.setProperty("dataverse.files.test.remote-store-name", "DemoDataCorp"); @@ -123,4 +123,4 @@ void testRemoteOverlayIdentifierFormats() throws IOException { assertFalse(DataAccess.isValidDirectStorageIdentifier(datafile.getStorageIdentifier().replace("test", "bad"))); } -} +} \ No newline at end of file
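
For reference, the admin cleanup described in the 10819 release note above can be scripted. The sketch below is illustrative only and is not part of this patch: it assumes a local installation at http://localhost:8080 with the admin API reachable, the `ThumbnailFlagCleaner` class name and the file id `42` are made up, and the HTTP verb (DELETE) is an assumption that should be verified against the Admin API guide for your release.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

/**
 * Illustrative sketch (not part of this patch): clears the thumbnail failure
 * flag via the admin endpoints named in the 10819 release note. Assumes the
 * admin API is reachable on localhost; the DELETE verb is an assumption.
 */
public class ThumbnailFlagCleaner {

    private static final String BASE = "http://localhost:8080/api/admin/clearThumbnailFailureFlag";

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();

        // Batch call: clear the failure flag on all files (the release note recommends this form).
        HttpRequest clearAll = HttpRequest.newBuilder(URI.create(BASE))
                .method("DELETE", HttpRequest.BodyPublishers.noBody())
                .build();
        HttpResponse<String> allResponse = client.send(clearAll, HttpResponse.BodyHandlers.ofString());
        System.out.println("clear all -> " + allResponse.statusCode() + ": " + allResponse.body());

        // Per-file call: clear the flag for a single file id (42 is a placeholder).
        long exampleFileId = 42L;
        HttpRequest clearOne = HttpRequest.newBuilder(URI.create(BASE + "/" + exampleFileId))
                .method("DELETE", HttpRequest.BodyPublishers.noBody())
                .build();
        HttpResponse<String> oneResponse = client.send(clearOne, HttpResponse.BodyHandlers.ofString());
        System.out.println("clear file " + exampleFileId + " -> " + oneResponse.statusCode());
    }
}
```

To also stop thumbnails from being auto-selected going forward, the same release note describes raising the feature flag by starting the server with `-Ddataverse.feature.disable-dataset-thumbnail-autoselect=true`.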
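
The new `DatasetVersionServiceBean.isVersionDefaultCustomTerms()` helper, backed by the `TermsOfUseAndAccess.findByDatasetVersionIdAndDefaultTerms` named query, recognizes versions whose terms are still the generic "no terms" text left by the 5.10 multi-license migration. A hypothetical caller might use it as sketched below; the `TermsDisplayBean` wrapper is an illustration, not code from this patch.

```java
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
import jakarta.ejb.EJB;
import jakarta.enterprise.context.RequestScoped;
import jakarta.inject.Named;

/**
 * Hypothetical backing-bean fragment (illustration only): decides whether a
 * version is still carrying the migrated default "no terms" text.
 */
@Named
@RequestScoped
public class TermsDisplayBean {

    @EJB
    DatasetVersionServiceBean datasetVersionService;

    public boolean showsDefaultNoTermsMessage(DatasetVersion version) {
        // Only relevant when no standard license is attached to the version.
        if (version.getTermsOfUseAndAccess() == null
                || version.getTermsOfUseAndAccess().getLicense() != null) {
            return false;
        }
        // True when the terms match TermsOfUseAndAccess.DEFAULT_NOTERMS and all
        // other terms fields are empty, per the new named query.
        return datasetVersionService.isVersionDefaultCustomTerms(version);
    }
}
```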