findPermittedCollections(DataverseRequest request, Authen
}
return null;
}
+
+ /**
+ * Calculates the complete list of role assignments for a given user on a DvObject.
+ * This includes roles assigned directly to the user and roles inherited from any groups
+ * the user is a member of.
+ *
+ * This method's logic is based on the private method {@code getRoleStringFromUser}
+ * in the {@code DataverseUserPage} class, which produces a concatenated string of
+ * effective user role names required for displaying role-related user notifications.
+ * The common logic from these two methods may be centralized in the future to
+ * avoid code duplication.
+ *
+ * @param user The authenticated user whose roles are being checked.
+ * @param dvObject The dataverse object to check for role assignments.
+ * @return A List containing all effective RoleAssignments for the user. Never null.
+ */
+ public List getEffectiveRoleAssignments(AuthenticatedUser user, DvObject dvObject) {
+ Stream directAssignments = assignmentsFor(user, dvObject).stream();
+
+ Stream groupAssignments = groupService.groupsFor(user, dvObject)
+ .stream()
+ .flatMap(group -> assignmentsFor(group, dvObject).stream());
+
+ return Stream.concat(directAssignments, groupAssignments)
+ .collect(Collectors.toList());
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java
index 121d03ef0c7..51e36746f3a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Shib.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java
@@ -76,21 +76,6 @@ public class Shib implements java.io.Serializable {
private final String loginpage = "/loginpage.xhtml";
private final String identityProviderProblem = "Problem with Identity Provider";
- /**
- * We only have one field in which to store a unique
- * useridentifier/persistentuserid so we have to jam the the "entityId" for
- * a Shibboleth Identity Provider (IdP) and the unique persistent identifier
- * per user into the same field and a separator between these two would be
- * nice, in case we ever want to answer questions like "How many users
- * logged in from Harvard's Identity Provider?".
- *
- * A pipe ("|") is used as a separator because it's considered "unwise" to
- * use in a URL and the "entityId" for a Shibboleth Identity Provider (IdP)
- * looks like a URL:
- * http://stackoverflow.com/questions/1547899/which-characters-make-a-url-invalid
- */
- private String persistentUserIdSeparator = "|";
-
/**
* The Shibboleth Identity Provider (IdP), an "entityId" which often but not
* always looks like a URL.
@@ -248,7 +233,7 @@ else if (ShibAffiliationOrder.equals("firstAffiliation")) {
// emailAddress = "willFailBeanValidation"; // for testing createAuthenticatedUser exceptions
displayInfo = new AuthenticatedUserDisplayInfo(firstName, lastName, emailAddress, affiliation, null);
- userPersistentId = shibIdp + persistentUserIdSeparator + shibUserIdentifier;
+ userPersistentId = ShibUtil.createUserPersistentIdentifier(shibIdp, shibUserIdentifier);
ShibAuthenticationProvider shibAuthProvider = new ShibAuthenticationProvider();
AuthenticatedUser au = authSvc.lookupUser(shibAuthProvider.getId(), userPersistentId);
if (au != null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/Template.java b/src/main/java/edu/harvard/iq/dataverse/Template.java
index 05c6df51197..2768cf417a4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Template.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Template.java
@@ -137,7 +137,11 @@ public List getDatasetFields() {
@Transient
private Map instructionsMap = null;
-
+
+ public void setInstructionsMap(Map instructionsMap) {
+ this.instructionsMap = instructionsMap;
+ }
+
@Transient
private TreeMap> metadataBlocksForView = new TreeMap<>();
@Transient
diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java
index 228e4b19c38..2fb5ea35c45 100644
--- a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java
@@ -87,7 +87,12 @@ public UserNotification find(Object pk) {
public UserNotification save(UserNotification userNotification) {
return em.merge(userNotification);
}
-
+
+ public UserNotification markAsRead(UserNotification userNotification) {
+ userNotification.setReadNotification(true);
+ return em.merge(userNotification);
+ }
+
public void delete(UserNotification userNotification) {
em.remove(em.merge(userNotification));
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
index 018657bff4d..76ef91fbd3a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
@@ -28,6 +28,7 @@
import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.util.DateUtil;
import edu.harvard.iq.dataverse.util.FileUtil;
import edu.harvard.iq.dataverse.util.SystemConfig;
import edu.harvard.iq.dataverse.util.json.JsonParser;
@@ -52,6 +53,7 @@
import java.io.InputStream;
import java.net.URI;
+import java.time.Instant;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.logging.Level;
@@ -447,10 +449,22 @@ public Command handleLatestPublished() {
return dsv;
}
- protected void validateInternalVersionNumberIsNotOutdated(Dataset dataset, int internalVersion) throws WrappedResponse {
- if (dataset.getLatestVersion().getVersion() > internalVersion) {
+ protected void validateInternalTimestampIsNotOutdated(DvObject dvObject, String sourceLastUpdateTime) throws WrappedResponse {
+ Date date = sourceLastUpdateTime != null ? DateUtil.parseDate(sourceLastUpdateTime, "yyyy-MM-dd'T'HH:mm:ss'Z'") : null;
+ if (date == null) {
throw new WrappedResponse(
- badRequest(BundleUtil.getStringFromBundle("abstractApiBean.error.datasetInternalVersionNumberIsOutdated", Collections.singletonList(Integer.toString(internalVersion))))
+ badRequest(BundleUtil.getStringFromBundle("jsonparser.error.parsing.date", Collections.singletonList(sourceLastUpdateTime)))
+ );
+ }
+ Instant instant = date.toInstant();
+ Instant updateTimestamp =
+ (dvObject instanceof DataFile) ? ((DataFile) dvObject).getFileMetadata().getDatasetVersion().getLastUpdateTime().toInstant() :
+ (dvObject instanceof Dataset) ? ((Dataset) dvObject).getLatestVersion().getLastUpdateTime().toInstant() :
+ instant;
+ // compare at second granularity, since the JSON output only reports dates to the second
+ if (updateTimestamp.getEpochSecond() != instant.getEpochSecond()) {
+ throw new WrappedResponse(
+ badRequest(BundleUtil.getStringFromBundle("abstractApiBean.error.internalVersionTimestampIsOutdated", Collections.singletonList(sourceLastUpdateTime)))
);
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 94f51dd4ccc..729174dedfc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -17,6 +17,7 @@
import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
import edu.harvard.iq.dataverse.dataaccess.*;
import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
+import software.amazon.awssdk.services.s3.model.CompletedPart;
import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse;
import edu.harvard.iq.dataverse.dataset.*;
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
@@ -65,8 +66,7 @@
import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.core.*;
import jakarta.ws.rs.core.Response.Status;
-import software.amazon.awssdk.services.s3.model.CompletedPart;
-
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.util.Strings;
import org.eclipse.microprofile.openapi.annotations.Operation;
@@ -115,7 +115,7 @@ public class Datasets extends AbstractApiBean {
private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName());
private static final Pattern dataFilePattern = Pattern.compile("^[0-9a-f]{11}-[0-9a-f]{12}\\.?.*");
-
+
@Inject DataverseSession session;
@EJB
@@ -132,19 +132,19 @@ public class Datasets extends AbstractApiBean {
@EJB
UserNotificationServiceBean userNotificationService;
-
+
@EJB
PermissionServiceBean permissionService;
-
+
@EJB
AuthenticationServiceBean authenticationServiceBean;
-
+
@EJB
DDIExportServiceBean ddiExportService;
@EJB
MetadataBlockServiceBean metadataBlockService;
-
+
@EJB
DataFileServiceBean fileService;
@@ -153,20 +153,20 @@ public class Datasets extends AbstractApiBean {
@EJB
EjbDataverseEngine commandEngine;
-
+
@EJB
IndexServiceBean indexService;
@EJB
S3PackageImporter s3PackageImporter;
-
+
@EJB
SettingsServiceBean settingsService;
// TODO: Move to AbstractApiBean
@EJB
DatasetMetricsServiceBean datasetMetricsSvc;
-
+
@EJB
DatasetExternalCitationsServiceBean datasetExternalCitationsService;
@@ -178,13 +178,13 @@ public class Datasets extends AbstractApiBean {
@Inject
MakeDataCountLoggingServiceBean mdcLogService;
-
+
@Inject
DataverseRequestServiceBean dvRequestService;
@Inject
WorkflowServiceBean wfService;
-
+
@Inject
DataverseRoleServiceBean dataverseRoleService;
@@ -203,9 +203,12 @@ public class Datasets extends AbstractApiBean {
@Inject
DatasetFieldsValidator datasetFieldsValidator;
+ @Inject
+ DataFileCategoryServiceBean dataFileCategoryService;
+
/**
* Used to consolidate the way we parse and handle dataset versions.
- * @param
+ * @param
*/
public interface DsVersionHandler {
T handleLatest();
@@ -213,7 +216,7 @@ public interface DsVersionHandler {
T handleSpecific( long major, long minor );
T handleLatestPublished();
}
-
+
@GET
@AuthRequired
@Path("{id}")
@@ -297,12 +300,12 @@ public Response exportDataset(@Context ContainerRequestContext crc, @QueryParam(
public Response deleteDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
// Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
// (and there's a comment that says "TODO: remove this command")
- // do we need an exposed API call for it?
- // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand,
- // if the dataset only has 1 version... In other words, the functionality
+ // do we need an exposed API call for it?
+ // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand,
+ // if the dataset only has 1 version... In other words, the functionality
// currently provided by this API is covered between the "deleteDraftVersion" and
- // "destroyDataset" API calls.
- // (The logic below follows the current implementation of the underlying
+ // "destroyDataset" API calls.
+ // (The logic below follows the current implementation of the underlying
// commands!)
User u = getRequestUser(crc);
@@ -310,7 +313,7 @@ public Response deleteDataset(@Context ContainerRequestContext crc, @PathParam("
Dataset doomed = findDatasetOrDie(id);
DatasetVersion doomedVersion = doomed.getLatestVersion();
boolean destroy = false;
-
+
if (doomed.getVersions().size() == 1) {
if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets"));
@@ -321,26 +324,26 @@ public Response deleteDataset(@Context ContainerRequestContext crc, @PathParam("
throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. This API can only delete the latest version if it is a DRAFT"));
}
}
-
- // Gather the locations of the physical files that will need to be
+
+ // Gather the locations of the physical files that will need to be
// deleted once the destroy command execution has been finalized:
Map deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy);
-
+
execCommand( new DeleteDatasetCommand(req, findDatasetOrDie(id)));
-
- // If we have gotten this far, the destroy command has succeeded,
+
+ // If we have gotten this far, the destroy command has succeeded,
// so we can finalize it by permanently deleting the physical files:
- // (DataFileService will double-check that the datafiles no
- // longer exist in the database, before attempting to delete
+ // (DataFileService will double-check that the datafiles no
+ // longer exist in the database, before attempting to delete
// the physical files)
if (!deleteStorageLocations.isEmpty()) {
fileService.finalizeFileDeletes(deleteStorageLocations);
}
-
+
return ok("Dataset " + id + " deleted");
}, u);
}
-
+
@DELETE
@AuthRequired
@Path("{id}/destroy")
@@ -355,16 +358,16 @@ public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam(
throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
}
- // Gather the locations of the physical files that will need to be
+ // Gather the locations of the physical files that will need to be
// deleted once the destroy command execution has been finalized:
Map deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
execCommand(new DestroyDatasetCommand(doomed, req));
- // If we have gotten this far, the destroy command has succeeded,
+ // If we have gotten this far, the destroy command has succeeded,
// so we can finalize permanently deleting the physical files:
- // (DataFileService will double-check that the datafiles no
- // longer exist in the database, before attempting to delete
+ // (DataFileService will double-check that the datafiles no
+ // longer exist in the database, before attempting to delete
// the physical files)
if (!deleteStorageLocations.isEmpty()) {
fileService.finalizeFileDeletes(deleteStorageLocations);
@@ -373,7 +376,7 @@ public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam(
return ok("Dataset " + id + " destroyed");
}, u);
}
-
+
@DELETE
@AuthRequired
@Path("{id}/versions/{versionId}")
@@ -385,33 +388,33 @@ public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathPa
return response( req -> {
Dataset dataset = findDatasetOrDie(id);
DatasetVersion doomed = dataset.getLatestVersion();
-
+
if (!doomed.isDraft()) {
throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version"));
}
-
- // Gather the locations of the physical files that will need to be
+
+ // Gather the locations of the physical files that will need to be
// deleted once the destroy command execution has been finalized:
-
+
Map deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
-
+
execCommand( new DeleteDatasetVersionCommand(req, dataset));
-
- // If we have gotten this far, the delete command has succeeded -
- // by either deleting the Draft version of a published dataset,
- // or destroying an unpublished one.
+
+ // If we have gotten this far, the delete command has succeeded -
+ // by either deleting the Draft version of a published dataset,
+ // or destroying an unpublished one.
// This means we can finalize permanently deleting the physical files:
- // (DataFileService will double-check that the datafiles no
- // longer exist in the database, before attempting to delete
+ // (DataFileService will double-check that the datafiles no
+ // longer exist in the database, before attempting to delete
// the physical files)
if (!deleteStorageLocations.isEmpty()) {
fileService.finalizeFileDeletes(deleteStorageLocations);
}
-
+
return ok("Draft version of dataset " + id + " deleted");
}, getRequestUser(crc));
}
-
+
@DELETE
@AuthRequired
@Path("{datasetId}/deleteLink/{linkedDataverseId}")
@@ -422,7 +425,7 @@ public Response deleteDatasetLinkingDataverse(@Context ContainerRequestContext c
return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted");
}, getRequestUser(crc));
}
-
+
@PUT
@AuthRequired
@Path("{id}/citationdate")
@@ -443,7 +446,7 @@ public Response setCitationDate(@Context ContainerRequestContext crc, @PathParam
return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? dsfType.getDisplayName() : "default"));
}, getRequestUser(crc));
}
-
+
@DELETE
@AuthRequired
@Path("{id}/citationdate")
@@ -453,7 +456,7 @@ public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @Pa
return ok("Citation Date for dataset " + id + " set to default");
}, getRequestUser(crc));
}
-
+
@GET
@AuthRequired
@Path("{id}/versions")
@@ -470,7 +473,7 @@ public Response listVersions(@Context ContainerRequestContext crc, @PathParam("i
.collect(toJsonArray()));
}, getRequestUser(crc));
}
-
+
@GET
@AuthRequired
@Path("{id}/versions/{versionId}")
@@ -484,16 +487,16 @@ public Response getVersion(@Context ContainerRequestContext crc,
@Context UriInfo uriInfo,
@Context HttpHeaders headers) {
return response( req -> {
-
- //If excludeFiles is null the default is to provide the files and because of this we need to check permissions.
+
+ //If excludeFiles is null the default is to provide the files and because of this we need to check permissions.
boolean checkPerms = excludeFiles == null ? true : !excludeFiles;
-
+
Dataset dataset = findDatasetOrDie(datasetId);
- DatasetVersion requestedDatasetVersion = getDatasetVersionOrDie(req,
- versionId,
- dataset,
- uriInfo,
- headers,
+ DatasetVersion requestedDatasetVersion = getDatasetVersionOrDie(req,
+ versionId,
+ dataset,
+ uriInfo,
+ headers,
includeDeaccessioned,
checkPerms);
@@ -507,7 +510,7 @@ public Response getVersion(@Context ContainerRequestContext crc,
Boolean includeMetadataBlocks = excludeMetadataBlocks == null ? true : !excludeMetadataBlocks;
JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion,
- null,
+ null,
excludeFiles == null ? true : !excludeFiles,
returnOwners, includeMetadataBlocks);
return ok(jsonBuilder);
@@ -584,7 +587,7 @@ public Response getVersionFileCounts(@Context ContainerRequestContext crc,
} catch (IllegalArgumentException e) {
return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
}
- DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
+ DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned, false);
JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria)));
@@ -638,7 +641,7 @@ public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @P
folderName = folderName == null ? "" : folderName;
versionId = versionId == null ? DS_VERSION_LATEST_PUBLISHED : versionId;
-
+
DatasetVersion version;
try {
DataverseRequest req = createDataverseRequest(getRequestUser(crc));
@@ -646,27 +649,27 @@ public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @P
} catch (WrappedResponse wr) {
return wr.getResponse();
}
-
+
String output = FileUtil.formatFolderListingHtml(folderName, version, "", originals != null && originals);
-
+
// return "NOT FOUND" if there is no such folder in the dataset version:
-
+
if ("".equals(output)) {
return notFound("Folder " + folderName + " does not exist");
}
-
-
+
+
String indexFileName = folderName.equals("") ? ".index.html"
: ".index-" + folderName.replace('/', '_') + ".html";
response.setHeader("Content-disposition", "filename=\"" + indexFileName + "\"");
-
+
return Response.ok()
.entity(output)
//.type("application/html").
.build();
}
-
+
@GET
@AuthRequired
@Path("{id}/versions/{versionId}/metadata")
@@ -676,7 +679,7 @@ public Response getVersionMetadata(@Context ContainerRequestContext crc, @PathPa
getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers )
.getDatasetFields())), getRequestUser(crc));
}
-
+
@GET
@AuthRequired
@Path("{id}/versions/{versionNumber}/metadata/{block}")
@@ -686,10 +689,10 @@ public Response getVersionMetadataBlock(@Context ContainerRequestContext crc,
@PathParam("block") String blockName,
@Context UriInfo uriInfo,
@Context HttpHeaders headers) {
-
+
return response( req -> {
DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers );
-
+
Map> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
for ( Map.Entry> p : fieldsByBlock.entrySet() ) {
if ( p.getKey().getName().equals(blockName) ) {
@@ -711,7 +714,7 @@ public Response getVersionMetadataBlock(@Context ContainerRequestContext crc,
@GET
@AuthRequired
@Path("{id}/versions/{versionId}/linkset")
- public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId,
+ public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId,
@Context UriInfo uriInfo, @Context HttpHeaders headers) {
if (DS_VERSION_DRAFT.equals(versionId)) {
return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version");
@@ -742,7 +745,7 @@ public Response updateDatasetTargetURL(@Context ContainerRequestContext crc, @Pa
return ok("Dataset " + id + " target url updated");
}, getRequestUser(crc));
}
-
+
@POST
@AuthRequired
@Path("/modifyRegistrationAll")
@@ -758,7 +761,7 @@ public Response updateDatasetTargetURLAll(@Context ContainerRequestContext crc)
return ok("Update All Dataset target url completed");
}, getRequestUser(crc));
}
-
+
@POST
@AuthRequired
@Path("{id}/modifyRegistrationMetadata")
@@ -780,7 +783,7 @@ public Response updateDatasetPIDMetadata(@Context ContainerRequestContext crc, @
return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
}, getRequestUser(crc));
}
-
+
@POST
@AuthRequired
@Path("/modifyRegistrationPIDMetadataAll")
@@ -803,7 +806,7 @@ public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc
return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
}, getRequestUser(crc));
}
-
+
@PUT
@AuthRequired
@Path("{id}/versions/{versionId}")
@@ -812,13 +815,13 @@ public Response updateDraftVersion(@Context ContainerRequestContext crc, String
if (!DS_VERSION_DRAFT.equals(versionId)) {
return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated");
}
-
+
try {
DataverseRequest req = createDataverseRequest(getRequestUser(crc));
Dataset ds = findDatasetOrDie(id);
JsonObject json = JsonUtil.getJsonObject(jsonBody);
DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);
-
+
// clear possibly stale fields from the incoming dataset version.
// creation and modification dates are updated by the commands.
incomingVersion.setId(null);
@@ -828,13 +831,13 @@ public Response updateDraftVersion(@Context ContainerRequestContext crc, String
incomingVersion.setDataset(ds);
incomingVersion.setCreateTime(null);
incomingVersion.setLastUpdateTime(null);
-
+
if (!incomingVersion.getFileMetadatas().isEmpty()){
return error( Response.Status.BAD_REQUEST, "You may not add files via this api.");
}
-
+
boolean updateDraft = ds.getLatestVersion().isDraft();
-
+
DatasetVersion managedVersion;
if (updateDraft) {
final DatasetVersion editVersion = ds.getOrCreateEditVersion();
@@ -855,14 +858,14 @@ public Response updateDraftVersion(@Context ContainerRequestContext crc, String
managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
}
return ok( json(managedVersion, true) );
-
+
} catch (JsonParseException ex) {
logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
return error( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() );
-
+
} catch (WrappedResponse ex) {
return ex.getResponse();
-
+
}
}
@@ -1100,9 +1103,9 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav
return ex.getResponse();
}
-
+
}
-
+
private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){
String returnString = "";
for (DatasetField dsf : dscv.getChildDatasetFields()) {
@@ -1114,16 +1117,18 @@ private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){
}
return returnString;
}
-
+
@PUT
@AuthRequired
@Path("{id}/editMetadata")
- public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @QueryParam("replace") boolean replaceData, @QueryParam("sourceInternalVersionNumber") Integer sourceInternalVersionNumber) {
+ public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id,
+ @QueryParam("replace") boolean replaceData,
+ @QueryParam("sourceLastUpdateTime") String sourceLastUpdateTime) {
try {
Dataset dataset = findDatasetOrDie(id);
- if (sourceInternalVersionNumber != null) {
- validateInternalVersionNumberIsNotOutdated(dataset, sourceInternalVersionNumber);
+ if (sourceLastUpdateTime != null) {
+ validateInternalTimestampIsNotOutdated(dataset, sourceLastUpdateTime);
}
JsonObject json = JsonUtil.getJsonObject(jsonBody);
@@ -1191,12 +1196,12 @@ public Response publishDataset(@Context ContainerRequestContext crc, @PathParam(
}
Dataset ds = findDatasetOrDie(id);
-
+
boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(ds.getLatestVersion().getTermsOfUseAndAccess(), null);
if (!hasValidTerms) {
return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
}
-
+
if (mustBeIndexed) {
logger.fine("IT: " + ds.getIndexTime());
logger.fine("MT: " + ds.getModificationTime());
@@ -1422,9 +1427,9 @@ public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathPar
} catch (WrappedResponse ex) {
return ex.getResponse();
}
-
+
boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);
-
+
if (!hasValidTerms){
return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
}
@@ -1755,7 +1760,7 @@ public Response createFileRetention(@Context ContainerRequestContext crc, @PathP
return error(Status.BAD_REQUEST, "Date unavailable can not be earlier than MinRetentionDurationInMonths: "+minRetentionDurationInMonths + " from now");
}
}
-
+
try {
String reason = json.getString("reason");
retention.setReason(reason);
@@ -2079,20 +2084,23 @@ public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versio
public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied ) {
try {
User u = getRequestUser(crc);
- if (!u.isSuperuser()) {
- return error(Response.Status.FORBIDDEN, "Not a superuser");
- }
Dataset dataset = findDatasetOrDie(idSupplied);
+ if (!dataset.isReleased() && !permissionService.hasPermissionsFor(u, dataset, EnumSet.of(Permission.ViewUnpublishedDataset))) {
+ return error(Response.Status.FORBIDDEN, "User is not allowed to list the link(s) of this dataset");
+ }
+
long datasetId = dataset.getId();
List dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
- JsonObjectBuilder datasetBuilder = Json.createObjectBuilder();
- datasetBuilder.add("id", dataverse.getId());
- datasetBuilder.add("alias", dataverse.getAlias());
- datasetBuilder.add("displayName", dataverse.getDisplayName());
- dataversesThatLinkToThisDatasetIdBuilder.add(datasetBuilder.build());
+ if (dataverse.isReleased() || this.permissionService.hasPermissionsFor(u, dataverse, EnumSet.of(Permission.ViewUnpublishedDataverse))) {
+ JsonObjectBuilder datasetBuilder = Json.createObjectBuilder();
+ datasetBuilder.add("id", dataverse.getId());
+ datasetBuilder.add("alias", dataverse.getAlias());
+ datasetBuilder.add("displayName", dataverse.getDisplayName());
+ dataversesThatLinkToThisDatasetIdBuilder.add(datasetBuilder.build());
+ }
}
JsonObjectBuilder response = Json.createObjectBuilder();
response.add("id", datasetId);
@@ -2116,12 +2124,12 @@ public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id")
public Response createAssignment(@Context ContainerRequestContext crc, RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
try {
Dataset dataset = findDatasetOrDie(id);
-
+
RoleAssignee assignee = findAssignee(ra.getAssignee());
if (assignee == null) {
return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.not.found.error"));
}
-
+
DataverseRole theRole;
Dataverse dv = dataset.getOwner();
theRole = null;
@@ -2143,13 +2151,18 @@ public Response createAssignment(@Context ContainerRequestContext crc, RoleAssig
return ok(
json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(getRequestUser(crc)), privateUrlToken))));
} catch (WrappedResponse ex) {
+ var message = ExceptionUtils.getRootCause(ex).getMessage();
+ if (message != null && message.contains("duplicate key")) {
+ // concurrent update
+ return error(Status.CONFLICT, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.has.role.error"));
+ }
List args = Arrays.asList(ex.getMessage());
logger.log(Level.WARNING, BundleUtil.getStringFromBundle("datasets.api.grant.role.cant.create.assignment.error", args));
return ex.getResponse();
}
}
-
+
@DELETE
@AuthRequired
@Path("{identifier}/assignments/{id}")
@@ -2203,7 +2216,7 @@ public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathPara
public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
return deletePreviewUrl(crc, idSupplied);
}
-
+
@GET
@AuthRequired
@Path("{id}/previewUrl")
@@ -2326,13 +2339,13 @@ public Response setDataFileAsThumbnail(@Context ContainerRequestContext crc, @Pa
@Path("{id}/thumbnail")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces("application/json")
- @Operation(summary = "Uploads a logo for a dataset",
+ @Operation(summary = "Uploads a logo for a dataset",
description = "Uploads a logo for a dataset")
@APIResponse(responseCode = "200",
description = "Dataset logo uploaded successfully")
- @Tag(name = "uploadDatasetLogo",
+ @Tag(name = "uploadDatasetLogo",
description = "Uploads a logo for a dataset")
- @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response uploadDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream) {
try {
DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
@@ -2368,7 +2381,7 @@ public Response getRsync(@Context ContainerRequestContext crc, @PathParam("ident
dataset = findDatasetOrDie(id);
AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
-
+
DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
if (lock == null) {
logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
@@ -2381,7 +2394,7 @@ public Response getRsync(@Context ContainerRequestContext crc, @PathParam("ident
return error(Response.Status.INTERNAL_SERVER_ERROR, "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex));
}
}
-
+
/**
* This api endpoint triggers the creation of a "package" file in a dataset
* after that package has been moved onto the same filesystem via the Data Capture Module.
@@ -2420,7 +2433,7 @@ public Response receiveChecksumValidationResults(@Context ContainerRequestContex
String uploadFolder = jsonFromDcm.getString("uploadFolder");
int totalSize = jsonFromDcm.getInt("totalSize");
String storageDriverType = System.getProperty("dataverse.file." + storageDriver + ".type");
-
+
if (storageDriverType.equals("file")) {
logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId());
@@ -2438,14 +2451,14 @@ public Response receiveChecksumValidationResults(@Context ContainerRequestContex
return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'.");
}
} else if(storageDriverType.equals(DataAccess.S3)) {
-
+
logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId());
try {
-
+
//Where the lifting is actually done, moving the s3 files over and having dataverse know of the existance of the package
s3PackageImporter.copyFromS3(dataset, uploadFolder);
DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, new Long(totalSize));
-
+
if (packageFile == null) {
logger.log(Level.SEVERE, "S3 File package import failed.");
return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed.");
@@ -2457,7 +2470,7 @@ public Response receiveChecksumValidationResults(@Context ContainerRequestContex
datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload);
dataset.removeLock(dcmLock);
}
-
+
// update version using the command engine to enforce user permissions and constraints
if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) {
try {
@@ -2475,7 +2488,7 @@ public Response receiveChecksumValidationResults(@Context ContainerRequestContex
JsonObjectBuilder job = Json.createObjectBuilder();
return ok(job);
-
+
} catch (IOException e) {
String message = e.getMessage();
return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'.");
@@ -2502,7 +2515,7 @@ public Response receiveChecksumValidationResults(@Context ContainerRequestContex
return ex.getResponse();
}
}
-
+
@POST
@AuthRequired
@@ -2511,9 +2524,9 @@ public Response submitForReview(@Context ContainerRequestContext crc, @PathParam
try {
Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied)));
JsonObjectBuilder result = Json.createObjectBuilder();
-
+
boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview);
-
+
result.add("inReview", inReview);
result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review.");
return ok(result);
@@ -2551,6 +2564,27 @@ public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam(
}
}
+ @GET
+ @AuthRequired
+ @Path("{id}/availableFileCategories")
+ public Response getAvailableFileCategories(@Context ContainerRequestContext crc,
+ @PathParam("id") String idSupplied) {
+
+ try {
+ Dataset ds = findDatasetOrDie(idSupplied);
+ List datasetFileCategories = dataFileCategoryService.mergeDatasetFileCategories(ds.getCategories());
+ JsonArrayBuilder fileCategoriesArrayBuilder = Json.createArrayBuilder();
+ for (String fieldName : datasetFileCategories) {
+ fileCategoriesArrayBuilder.add(fieldName);
+ }
+ return ok(fileCategoriesArrayBuilder);
+
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+
+ }
+
@GET
@AuthRequired
@Path("{id}/curationStatus")
@@ -2860,13 +2894,13 @@ public Response completeMPUpload(@Context ContainerRequestContext crc, String pa
@Path("{id}/add")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces("application/json")
- @Operation(summary = "Uploads a file for a dataset",
+ @Operation(summary = "Uploads a file for a dataset",
description = "Uploads a file for a dataset")
@APIResponse(responseCode = "200",
description = "File uploaded successfully to dataset")
- @Tag(name = "addFileToDataset",
+ @Tag(name = "addFileToDataset",
description = "Uploads a file for a dataset")
- @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response addFileToDataset(@Context ContainerRequestContext crc,
@PathParam("id") String idSupplied,
@FormDataParam("jsonData") String jsonData,
@@ -2887,21 +2921,21 @@ public Response addFileToDataset(@Context ContainerRequestContext crc,
// -------------------------------------
// (2) Get the Dataset Id
- //
+ //
// -------------------------------------
Dataset dataset;
-
+
try {
dataset = findDatasetOrDie(idSupplied);
} catch (WrappedResponse wr) {
return wr.getResponse();
}
-
+
//------------------------------------
// (2a) Make sure dataset does not have package file
//
// --------------------------------------
-
+
for (DatasetVersion dv : dataset.getVersions()) {
if (dv.isHasPackageFile()) {
return error(Response.Status.FORBIDDEN,
@@ -2923,7 +2957,7 @@ public Response addFileToDataset(@Context ContainerRequestContext crc,
catch (ClassCastException | com.google.gson.JsonParseException ex) {
return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing"));
}
-
+
// -------------------------------------
// (3) Get the file name and content type
// -------------------------------------
@@ -2934,7 +2968,7 @@ public Response addFileToDataset(@Context ContainerRequestContext crc,
if (optionalFileParams.hasStorageIdentifier()) {
newStorageIdentifier = optionalFileParams.getStorageIdentifier();
newStorageIdentifier = DataAccess.expandStorageIdentifierIfNeeded(newStorageIdentifier);
-
+
if(!DataAccess.uploadToDatasetAllowed(dataset, newStorageIdentifier)) {
return error(BAD_REQUEST,
"Dataset store configuration does not allow provided storageIdentifier.");
@@ -3029,7 +3063,7 @@ public Response addFileToDataset(@Context ContainerRequestContext crc,
}
}
-
+
} // end: addFileToDataset
@@ -3052,7 +3086,7 @@ public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("i
} catch (WrappedResponse wr) {
return wr.getResponse();
}
-
+
// check permissions
if (!permissionSvc.permissionsFor(createDataverseRequest(authUser), dataset).contains(Permission.EditDataset)) {
return error(Response.Status.INTERNAL_SERVER_ERROR, "Access denied!");
@@ -3077,7 +3111,7 @@ public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("i
}
return ok("Found: " + datasetFilenames.stream().collect(Collectors.joining(", ")) + "\n" + "Deleted: " + deleted.stream().collect(Collectors.joining(", ")));
-
+
}
@GET
@@ -3100,7 +3134,7 @@ public Response getCompareVersions(@Context ContainerRequestContext crc, @PathPa
return wr.getResponse();
}
}
-
+
@GET
@AuthRequired
@Path("{id}/versions/compareSummary")
@@ -3156,7 +3190,7 @@ public Response getCompareVersionsSummary(@Context ContainerRequestContext crc,
return wr.getResponse();
}
}
-
+
private JsonObject getDeaccessionJson(DatasetVersion dv) {
JsonObjectBuilder compositionBuilder = Json.createObjectBuilder();
@@ -3172,7 +3206,7 @@ private JsonObject getDeaccessionJson(DatasetVersion dv) {
JsonObject json = Json.createObjectBuilder()
.add("deaccessioned", compositionBuilder)
.build();
-
+
return json;
}
@@ -3239,42 +3273,31 @@ public static T handleVersion(String versionId, DsVersionHandler hdl)
* includeDeaccessioned default to false and checkPermsWhenDeaccessioned to false. Use it only when you are sure that the you don't need to work with
* a deaccessioned dataset.
*/
- private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req,
- String versionNumber,
+ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req,
+ String versionNumber,
final Dataset ds,
- UriInfo uriInfo,
+ UriInfo uriInfo,
HttpHeaders headers) throws WrappedResponse {
- //The checkPerms was added to check the permissions ONLY when the dataset is deaccessioned.
- boolean checkFilePerms = false;
+ boolean checkPermsWhenDeaccessioned = false;
boolean includeDeaccessioned = false;
- return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkFilePerms);
+ return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned);
}
-
+
/*
* checkPermsWhenDeaccessioned default to true. Be aware that the version will be only be obtainable if the user has edit permissions.
*/
private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse {
boolean checkPermsWhenDeaccessioned = true;
- boolean bypassAccessCheck = false;
- return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
+ return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned);
}
- /*
- * checkPermsWhenDeaccessioned default to true. Be aware that the version will be only be obtainable if the user has edit permissions.
- */
- private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
- UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse {
- boolean bypassAccessCheck = false;
- return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
- }
/*
* Will allow to define when the permissions should be checked when a deaccesioned dataset is requested. If the user doesn't have edit permissions will result in an error.
*/
private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
- UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned,
- boolean bypassAccessCheck)
+ UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned)
throws WrappedResponse {
DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned);
@@ -3289,7 +3312,7 @@ private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String
}
return dsv;
}
-
+
@GET
@Path("{identifier}/locks")
public Response getLocksForDataset(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
@@ -3309,7 +3332,7 @@ public Response getLocksForDataset(@PathParam("identifier") String id, @QueryPar
locks.add(lock);
}
}
-
+
return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
} catch (WrappedResponse wr) {
@@ -3329,7 +3352,7 @@ public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("id
return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
}
Dataset dataset = findDatasetOrDie(id);
-
+
if (lockType == null) {
Set locks = new HashSet<>();
for (DatasetLock lock : dataset.getLocks()) {
@@ -3341,7 +3364,7 @@ public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("id
// refresh the dataset:
dataset = findDatasetOrDie(id);
}
- // kick of dataset reindexing, in case the locks removed
+ // kick of dataset reindexing, in case the locks removed
// affected the search card:
indexService.asyncIndexDataset(dataset, true);
return ok("locks removed");
@@ -3354,7 +3377,7 @@ public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("id
execCommand(new RemoveLockCommand(req, dataset, lock.getReason()));
// refresh the dataset:
dataset = findDatasetOrDie(id);
- // ... and kick of dataset reindexing, in case the lock removed
+ // ... and kick of dataset reindexing, in case the lock removed
// affected the search card:
indexService.asyncIndexDataset(dataset, true);
return ok("lock type " + lock.getReason() + " removed");
@@ -3367,7 +3390,7 @@ public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("id
}, getRequestUser(crc));
}
-
+
@POST
@AuthRequired
@Path("{identifier}/lock/{type}")
@@ -3397,14 +3420,14 @@ public Response lockDataset(@Context ContainerRequestContext crc, @PathParam("id
}, getRequestUser(crc));
}
-
+
@GET
@AuthRequired
@Path("locks")
public Response listLocks(@Context ContainerRequestContext crc, @QueryParam("type") String lockType, @QueryParam("userIdentifier") String userIdentifier) { //DatasetLock.Reason lockType) {
// This API is here, under /datasets, and not under /admin, because we
- // likely want it to be accessible to admin users who may not necessarily
- // have localhost access, that would be required to get to /api/admin in
+ // likely want it to be accessible to admin users who may not necessarily
+ // have localhost access, that would be required to get to /api/admin in
// most installations. It is still reasonable however to limit access to
// this api to admin users only.
AuthenticatedUser apiUser;
@@ -3416,17 +3439,17 @@ public Response listLocks(@Context ContainerRequestContext crc, @QueryParam("typ
if (!apiUser.isSuperuser()) {
return error(Response.Status.FORBIDDEN, "Superusers only.");
}
-
+
// Locks can be optinally filtered by type, user or both.
DatasetLock.Reason lockTypeValue = null;
- AuthenticatedUser user = null;
-
- // For the lock type, we use a QueryParam of type String, instead of
- // DatasetLock.Reason; that would be less code to write, but this way
- // we can check if the value passed matches a valid lock type ("reason")
- // and provide a helpful error message if it doesn't. If you use a
- // QueryParam of an Enum type, trying to pass an invalid value to it
- // results in a potentially confusing "404/NOT FOUND - requested
+ AuthenticatedUser user = null;
+
+ // For the lock type, we use a QueryParam of type String, instead of
+ // DatasetLock.Reason; that would be less code to write, but this way
+ // we can check if the value passed matches a valid lock type ("reason")
+ // and provide a helpful error message if it doesn't. If you use a
+ // QueryParam of an Enum type, trying to pass an invalid value to it
+ // results in a potentially confusing "404/NOT FOUND - requested
// resource is not available".
if (lockType != null && !lockType.isEmpty()) {
try {
@@ -3436,30 +3459,30 @@ public Response listLocks(@Context ContainerRequestContext crc, @QueryParam("typ
for (Reason r: Reason.values()) {
reasonJoiner.add(r.name());
};
- String errorMessage = "Invalid lock type value: " + lockType +
+ String errorMessage = "Invalid lock type value: " + lockType +
"; valid lock types: " + reasonJoiner.toString();
return error(Response.Status.BAD_REQUEST, errorMessage);
}
}
-
+
if (userIdentifier != null && !userIdentifier.isEmpty()) {
user = authSvc.getAuthenticatedUser(userIdentifier);
if (user == null) {
return error(Response.Status.BAD_REQUEST, "Unknown user identifier: "+userIdentifier);
}
}
-
+
//List locks = datasetService.getDatasetLocksByType(lockType);
List locks = datasetService.listLocks(lockTypeValue, user);
-
+
return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
- }
-
-
+ }
+
+
@GET
@Path("{id}/makeDataCount/citations")
public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) {
-
+
try {
Dataset dataset = findDatasetOrDie(idSupplied);
JsonArrayBuilder datasetsCitations = Json.createArrayBuilder();
@@ -3534,7 +3557,7 @@ public Response getDownloadSize(@Context ContainerRequestContext crc,
} catch (IllegalArgumentException e) {
return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode);
}
- DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned);
+ DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned, false);
long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode);
String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize);
JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
@@ -3652,31 +3675,31 @@ public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @Path
return error(BAD_REQUEST, e.getMessage());
}
}
-
+
@GET
@AuthRequired
@Path("{identifier}/storageDriver")
public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
- @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
-
- Dataset dataset;
-
+ @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
+
+ Dataset dataset;
+
try {
dataset = findDatasetOrDie(dvIdtf);
} catch (WrappedResponse ex) {
return error(Response.Status.NOT_FOUND, "No such dataset");
}
-
+
return response(req -> ok(dataset.getEffectiveStorageDriverId()), getRequestUser(crc));
}
-
+
@PUT
@AuthRequired
@Path("{identifier}/storageDriver")
public Response setFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
String storageDriverLabel,
@Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
-
+
// Superuser-only:
AuthenticatedUser user;
try {
@@ -3695,8 +3718,8 @@ public Response setFileStore(@Context ContainerRequestContext crc, @PathParam("i
} catch (WrappedResponse ex) {
return error(Response.Status.NOT_FOUND, "No such dataset");
}
-
- // We don't want to allow setting this to a store id that does not exist:
+
+ // We don't want to allow setting this to a store id that does not exist:
for (Entry store : DataAccess.getStorageDriverLabels().entrySet()) {
if (store.getKey().equals(storageDriverLabel)) {
dataset.setStorageDriverId(store.getValue());
@@ -3707,13 +3730,13 @@ public Response setFileStore(@Context ContainerRequestContext crc, @PathParam("i
return error(Response.Status.BAD_REQUEST,
"No Storage Driver found for : " + storageDriverLabel);
}
-
+
@DELETE
@AuthRequired
@Path("{identifier}/storageDriver")
public Response resetFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
@Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
-
+
// Superuser-only:
AuthenticatedUser user;
try {
@@ -3732,7 +3755,7 @@ public Response resetFileStore(@Context ContainerRequestContext crc, @PathParam(
} catch (WrappedResponse ex) {
return error(Response.Status.NOT_FOUND, "No such dataset");
}
-
+
dataset.setStorageDriverId(null);
datasetService.merge(dataset);
return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
@@ -3944,24 +3967,24 @@ public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam("
/****************************
* Globus Support Section:
- *
+ *
* Globus transfer in (upload) and out (download) involve three basic steps: The
* app is launched and makes a callback to the
* globusUploadParameters/globusDownloadParameters method to get all of the info
* needed to set up it's display.
- *
+ *
* At some point after that, the user will make a selection as to which files to
* transfer and the app will call requestGlobusUploadPaths/requestGlobusDownload
* to indicate a transfer is about to start. In addition to providing the
* details of where to transfer the files to/from, Dataverse also grants the
* Globus principal involved the relevant rw or r permission for the dataset.
- *
+ *
* Once the transfer is started, the app records the task id and sends it to
* Dataverse in the addGlobusFiles/monitorGlobusDownload call. Dataverse then
* monitors the transfer task and when it ultimately succeeds for fails it
* revokes the principal's permission and, for the transfer in case, adds the
* files to the dataset. (The dataset is locked until the transfer completes.)
- *
+ *
* (If no transfer is started within a specified timeout, permissions will
* automatically be revoked - see the GlobusServiceBean for details.)
*
@@ -3976,7 +3999,7 @@ public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam("
* transfer. This api endpoint is expected to be called as a signed callback
* after the globus-dataverse app/other app is launched, but it will accept
* other forms of authentication.
- *
+ *
* @param crc
* @param datasetId
*/
@@ -4083,7 +4106,7 @@ public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @Pat
/**
* Provides specific storageIdentifiers to use for each file amd requests permissions for a given globus user to upload to the dataset
- *
+ *
* @param crc
* @param datasetId
* @param jsonData - an object that must include the id of the globus "principal" involved and the "numberOfFiles" that will be transferred.
@@ -4174,7 +4197,7 @@ public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathP
}
/** A method analogous to /addFiles that must also include the taskIdentifier of the transfer-in-progress to monitor
- *
+ *
* @param crc
* @param datasetId
* @param jsonData - see /addFiles documentation, aditional "taskIdentifier" key in the main object is required.
@@ -4189,13 +4212,13 @@ public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathP
@Path("{id}/addGlobusFiles")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces("application/json")
- @Operation(summary = "Uploads a Globus file for a dataset",
+ @Operation(summary = "Uploads a Globus file for a dataset",
description = "Uploads a Globus file for a dataset")
@APIResponse(responseCode = "200",
description = "Globus file uploaded successfully to dataset")
- @Tag(name = "addGlobusFilesToDataset",
+ @Tag(name = "addGlobusFilesToDataset",
description = "Uploads a Globus file for a dataset")
- @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
@PathParam("id") String datasetId,
@FormDataParam("jsonData") String jsonData,
@@ -4225,33 +4248,33 @@ public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
} catch (WrappedResponse wr) {
return wr.getResponse();
}
-
- // Is Globus upload service available?
-
+
+ // Is Globus upload service available?
+
// ... on this Dataverse instance?
if (!systemConfig.isGlobusUpload()) {
return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.globusUploadDisabled"));
}
- // ... and on this specific Dataset?
+ // ... and on this specific Dataset?
String storeId = dataset.getEffectiveStorageDriverId();
// acceptsGlobusTransfers should only be true for an S3 or globus store
if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
&& !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
}
-
+
// Check if the dataset is already locked
- // We are reusing the code and logic used by various command to determine
- // if there are any locks on the dataset that would prevent the current
+ // We are reusing the code and logic used by various command to determine
+ // if there are any locks on the dataset that would prevent the current
// users from modifying it:
try {
DataverseRequest dataverseRequest = createDataverseRequest(authUser);
- permissionService.checkEditDatasetLock(dataset, dataverseRequest, null);
+ permissionService.checkEditDatasetLock(dataset, dataverseRequest, null);
} catch (IllegalCommandException icex) {
return error(Response.Status.FORBIDDEN, "Dataset " + datasetId + " is locked: " + icex.getLocalizedMessage());
}
-
+
JsonObject jsonObject = null;
try {
jsonObject = JsonUtil.getJsonObject(jsonData);
@@ -4287,7 +4310,7 @@ public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
}
String requestUrl = SystemConfig.getDataverseSiteUrlStatic();
-
+
// Async Call
try {
globusService.globusUpload(jsonObject, dataset, requestUrl, authUser);
@@ -4298,18 +4321,18 @@ public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
return ok("Async call to Globus Upload started ");
}
-
+
/**
* Retrieve the parameters and signed URLs required to perform a globus
* transfer/download. This api endpoint is expected to be called as a signed
* callback after the globus-dataverse app/other app is launched, but it will
* accept other forms of authentication.
- *
+ *
* @param crc
* @param datasetId
* @param locale
* @param downloadId - an id to a cached object listing the files involved. This is generated via Dataverse and provided to the dataverse-globus app in a signedURL.
- * @return - JSON containing the parameters and URLs needed by the dataverse-globus app. The format is analogous to that for external tools.
+ * @return - JSON containing the parameters and URLs needed by the dataverse-globus app. The format is analogous to that for external tools.
*/
@GET
@AuthRequired
@@ -4397,9 +4420,9 @@ public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @P
/**
* Requests permissions for a given globus user to download the specified files
* the dataset and returns information about the paths to transfer from.
- *
+ *
* When called directly rather than in response to being given a downloadId, the jsonData can include a "fileIds" key with an array of file ids to transfer.
- *
+ *
* @param crc
* @param datasetId
* @param jsonData - a JSON object that must include the id of the Globus "principal" that will be transferring the files in the case where Dataverse manages the Globus endpoint. For remote endpoints, the principal is not required.
@@ -4541,7 +4564,7 @@ public Response requestGlobusDownload(@Context ContainerRequestContext crc, @Pat
/**
* Monitors a globus download and removes permissions on the dir/dataset when
* the specified transfer task is completed.
- *
+ *
* @param crc
* @param datasetId
* @param jsonData - a JSON Object containing the key "taskIdentifier" with the
@@ -4590,7 +4613,7 @@ public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @Pat
return badRequest("Error parsing json body");
}
-
+
// Async Call
globusService.globusDownload(jsonObject, dataset, authUser);
@@ -4610,13 +4633,13 @@ public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @Pat
@Path("{id}/addFiles")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces("application/json")
- @Operation(summary = "Uploads a set of files to a dataset",
+ @Operation(summary = "Uploads a set of files to a dataset",
description = "Uploads a set of files to a dataset")
@APIResponse(responseCode = "200",
description = "Files uploaded successfully to dataset")
- @Tag(name = "addFilesToDataset",
+ @Tag(name = "addFilesToDataset",
description = "Uploads a set of files to a dataset")
- @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,
@FormDataParam("jsonData") String jsonData) {
@@ -4685,13 +4708,13 @@ public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathPar
@Path("{id}/replaceFiles")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces("application/json")
- @Operation(summary = "Replace a set of files to a dataset",
+ @Operation(summary = "Replace a set of files to a dataset",
description = "Replace a set of files to a dataset")
@APIResponse(responseCode = "200",
description = "Files replaced successfully to dataset")
- @Tag(name = "replaceFilesInDataset",
+ @Tag(name = "replaceFilesInDataset",
description = "Replace a set of files to a dataset")
- @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response replaceFilesInDataset(@Context ContainerRequestContext crc,
@PathParam("id") String idSupplied,
@FormDataParam("jsonData") String jsonData) {
@@ -5042,10 +5065,10 @@ public Response setDatasetVersionArchivalStatus(@Context ContainerRequestContext
if (!au.isSuperuser()) {
return error(Response.Status.FORBIDDEN, "Superusers only.");
}
-
+
//Verify we have valid json after removing any HTML tags (the status gets displayed in the UI, so we want plain text).
JsonObject update= JsonUtil.getJsonObject(MarkupChecker.stripAllTags(newStatus));
-
+
if (update.containsKey(DatasetVersion.ARCHIVAL_STATUS) && update.containsKey(DatasetVersion.ARCHIVAL_STATUS_MESSAGE)) {
String status = update.getString(DatasetVersion.ARCHIVAL_STATUS);
if (status.equals(DatasetVersion.ARCHIVAL_STATUS_PENDING) || status.equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)
@@ -5081,7 +5104,7 @@ public Response setDatasetVersionArchivalStatus(@Context ContainerRequestContext
}
return error(Status.BAD_REQUEST, "Unacceptable status format");
}
-
+
@DELETE
@AuthRequired
@Produces(MediaType.APPLICATION_JSON)
@@ -5113,7 +5136,7 @@ public Response deleteDatasetVersionArchivalStatus(@Context ContainerRequestCont
return wr.getResponse();
}
}
-
+
private boolean isSingleVersionArchiving() {
String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName, null);
if (className != null) {
@@ -5129,7 +5152,7 @@ private boolean isSingleVersionArchiving() {
}
return false;
}
-
+
// This method provides a callback for an external tool to retrieve it's
// parameters/api URLs. If the request is authenticated, e.g. by it being
// signed, the api URLs will be signed. If a guest request is made, the URLs
@@ -5150,7 +5173,7 @@ public Response getExternalToolDVParams(@Context ContainerRequestContext crc,
if (target == null) {
return error(BAD_REQUEST, "DatasetVersion not found.");
}
-
+
ExternalTool externalTool = externalToolService.findById(externalToolId);
if(externalTool==null) {
return error(BAD_REQUEST, "External tool not found.");
@@ -5206,7 +5229,7 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String
}
return ok(responseJson);
}
-
+
@GET
@Path("previewUrlDatasetVersion/{previewUrlToken}")
public Response getPreviewUrlDatasetVersion(@PathParam("previewUrlToken") String previewUrlToken, @QueryParam("returnOwners") boolean returnOwners) {
@@ -5232,7 +5255,7 @@ public Response getPreviewUrlDatasetVersion(@PathParam("previewUrlToken") String
}
return ok(responseJson);
}
-
+
@GET
@Path("privateUrlDatasetVersion/{privateUrlToken}/citation")
@@ -5245,7 +5268,7 @@ public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken"
return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
: ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
}
-
+
@GET
@Path("previewUrlDatasetVersion/{previewUrlToken}/citation")
public Response getPreviewUrlDatasetVersionCitation(@PathParam("previewUrlToken") String previewUrlToken) {
@@ -5273,7 +5296,7 @@ public Response getDatasetVersionInternalCitation(@Context ContainerRequestConte
}
}
- /**
+ /**
* Returns one of the DataCitation.Format types as a raw file download (not wrapped in our ok json)
* @param crc
* @param datasetId
@@ -5340,9 +5363,9 @@ public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathPa
}
}
execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false));
-
- return ok("Dataset " +
- (":persistentId".equals(datasetId) ? datasetVersion.getDataset().getGlobalId().asString() : datasetId) +
+
+ return ok("Dataset " +
+ (":persistentId".equals(datasetId) ? datasetVersion.getDataset().getGlobalId().asString() : datasetId) +
" deaccessioned for version " + versionId);
} catch (JsonParsingException jpe) {
return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
@@ -5639,17 +5662,17 @@ public Response addDatasetType(@Context ContainerRequestContext crc, String json
if (jsonIn == null || jsonIn.isEmpty()) {
return error(BAD_REQUEST, "JSON input was null or empty!");
}
-
+
String nameIn = null;
-
+
JsonArrayBuilder datasetTypesAfter = Json.createArrayBuilder();
List metadataBlocksToSave = new ArrayList<>();
List licensesToSave = new ArrayList<>();
-
+
try {
JsonObject datasetTypeObj = JsonUtil.getJsonObject(jsonIn);
nameIn = datasetTypeObj.getString("name");
-
+
JsonArray arr = datasetTypeObj.getJsonArray("linkedMetadataBlocks");
if (arr != null && !arr.isEmpty()) {
for (JsonString jsonValue : arr.getValuesAs(JsonString.class)) {
@@ -5798,7 +5821,7 @@ public Response updateDatasetTypeLinksWithMetadataBlocks(@Context ContainerReque
return ex.getResponse();
}
}
-
+
@AuthRequired
@PUT
@Path("datasetTypes/{idOrName}/licenses")
@@ -5964,7 +5987,7 @@ public Response addVersionNote(@Context ContainerRequestContext crc, @PathParam(
@Path("{id}/versions/{versionId}/versionNote")
public Response deleteVersionNote(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
if(!FeatureFlags.VERSION_NOTE.enabled()) {
- return notFound(BundleUtil.getStringFromBundle("datasets.api.addVersionNote.notEnabled"));
+ return notFound(BundleUtil.getStringFromBundle("datasets.api.addVersionNote.notEnabled"));
}
if (!DS_VERSION_DRAFT.equals(versionId)) {
AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index b1d56b6b8a9..ae82ff46522 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -34,10 +34,7 @@
import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
-import edu.harvard.iq.dataverse.util.json.JSONLDUtil;
-import edu.harvard.iq.dataverse.util.json.JsonParseException;
-import edu.harvard.iq.dataverse.util.json.JsonPrinter;
-import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import edu.harvard.iq.dataverse.util.json.*;
import java.io.*;
import java.util.*;
@@ -154,7 +151,7 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body,
}
AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc);
- newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facets, inputLevels, metadataBlocks));
+ newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facets, inputLevels, metadataBlocks, true));
return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse));
} catch (WrappedResponse ww) {
@@ -410,9 +407,9 @@ public Response createDataset(@Context ContainerRequestContext crc, String jsonB
ds.setIdentifier(null);
ds.setProtocol(null);
ds.setGlobalIdCreateTime(null);
- Dataset managedDs = null;
+ Dataset managedDs;
try {
- managedDs = execCommand(new CreateNewDatasetCommand(ds, createDataverseRequest(u), null, validate));
+ managedDs = execCommand(new CreateNewDatasetCommand(ds, createDataverseRequest(u), validate, true));
} catch (WrappedResponse ww) {
Throwable cause = ww.getCause();
StringBuilder sb = new StringBuilder();
@@ -1695,25 +1692,36 @@ public Response listLinks(@Context ContainerRequestContext crc, @PathParam("iden
List dvsThisDvHasLinkedToList = dataverseSvc.findDataversesThisIdHasLinkedTo(dv.getId());
JsonArrayBuilder dvsThisDvHasLinkedToBuilder = Json.createArrayBuilder();
for (Dataverse dataverse : dvsThisDvHasLinkedToList) {
- dvsThisDvHasLinkedToBuilder.add(dataverse.getAlias());
+ JsonObjectBuilder job = Json.createObjectBuilder();
+ job.add("id", dataverse.getId());
+ job.add("alias", dataverse.getAlias());
+ job.add("displayName", dataverse.getDisplayName());
+ dvsThisDvHasLinkedToBuilder.add(job);
}
List dvsThatLinkToThisDvList = dataverseSvc.findDataversesThatLinkToThisDvId(dv.getId());
JsonArrayBuilder dvsThatLinkToThisDvBuilder = Json.createArrayBuilder();
for (Dataverse dataverse : dvsThatLinkToThisDvList) {
- dvsThatLinkToThisDvBuilder.add(dataverse.getAlias());
+ JsonObjectBuilder job = Json.createObjectBuilder();
+ job.add("id", dataverse.getId());
+ job.add("alias", dataverse.getAlias());
+ job.add("displayName", dataverse.getDisplayName());
+ dvsThatLinkToThisDvBuilder.add(job);
}
List datasetsThisDvHasLinkedToList = dataverseSvc.findDatasetsThisIdHasLinkedTo(dv.getId());
JsonArrayBuilder datasetsThisDvHasLinkedToBuilder = Json.createArrayBuilder();
for (Dataset dataset : datasetsThisDvHasLinkedToList) {
- datasetsThisDvHasLinkedToBuilder.add(dataset.getLatestVersion().getTitle());
+ JsonObjectBuilder ds = new NullSafeJsonBuilder();
+ ds.add("title", dataset.getLatestVersion().getTitle());
+ ds.add("identifier", dataset.getProtocol() + ":" + dataset.getAuthority() + "/" + dataset.getIdentifier());
+ datasetsThisDvHasLinkedToBuilder.add(ds);
}
JsonObjectBuilder response = Json.createObjectBuilder();
- response.add("dataverses that the " + dv.getAlias() + " dataverse has linked to", dvsThisDvHasLinkedToBuilder);
- response.add("dataverses that link to the " + dv.getAlias(), dvsThatLinkToThisDvBuilder);
- response.add("datasets that the " + dv.getAlias() + " has linked to", datasetsThisDvHasLinkedToBuilder);
+ response.add("linkedDataverses", dvsThisDvHasLinkedToBuilder);
+ response.add("dataversesLinkingToThis", dvsThatLinkToThisDvBuilder);
+ response.add("linkedDatasets", datasetsThisDvHasLinkedToBuilder);
return ok(response);
} catch (WrappedResponse wr) {
@@ -1931,4 +1939,34 @@ public Response deleteFeaturedItems(@Context ContainerRequestContext crc, @PathP
return e.getResponse();
}
}
+
+ @GET
+ @AuthRequired
+ @Path("{identifier}/templates")
+ public Response getTemplates(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
+ try {
+ Dataverse dataverse = findDataverseOrDie(dvIdtf);
+ return ok(jsonTemplates(execCommand(new ListDataverseTemplatesCommand(createDataverseRequest(getRequestUser(crc)), dataverse))));
+ } catch (WrappedResponse e) {
+ return e.getResponse();
+ }
+ }
+
+ @POST
+ @AuthRequired
+ @Path("{identifier}/templates")
+ public Response createTemplate(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String dvIdtf) {
+ try {
+ Dataverse dataverse = findDataverseOrDie(dvIdtf);
+ NewTemplateDTO newTemplateDTO;
+ try {
+ newTemplateDTO = NewTemplateDTO.fromRequestBody(body, jsonParser());
+ } catch (JsonParseException ex) {
+ return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.createTemplate.error.jsonParseMetadataFields"), ex.getMessage()));
+ }
+ return ok(jsonTemplate(execCommand(new CreateTemplateCommand(newTemplateDTO.toTemplate(), createDataverseRequest(getRequestUser(crc)), dataverse, true))));
+ } catch (WrappedResponse e) {
+ return e.getResponse();
+ }
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 61a69236f57..5834e7e0008 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -410,8 +410,7 @@ public Response deleteFileInDataset(@Context ContainerRequestContext crc, @PathP
@AuthRequired
@Path("{id}/metadata")
public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDataParam("jsonData") String jsonData,
- @PathParam("id") String fileIdOrPersistentId
- ) throws DataFileTagException, CommandException {
+ @PathParam("id") String fileIdOrPersistentId, @QueryParam("sourceLastUpdateTime") String sourceLastUpdateTime) {
FileMetadata upFmd = null;
@@ -429,6 +428,13 @@ public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDa
return error(BAD_REQUEST, "Error attempting get the requested data file.");
}
+ if (sourceLastUpdateTime != null) {
+ try {
+ validateInternalTimestampIsNotOutdated(df, sourceLastUpdateTime);
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+ }
//You shouldn't be trying to edit a datafile that has been replaced
List result = em.createNamedQuery("DataFile.findDataFileThatReplacedId", Long.class)
@@ -519,7 +525,7 @@ public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDa
return error(Response.Status.INTERNAL_SERVER_ERROR, "Error adding metadata to DataFile: " + e);
}
- } catch (WrappedResponse wr) {
+ } catch (CommandException | WrappedResponse ex) {
return error(BAD_REQUEST, "An error has occurred attempting to update the requested DataFile, likely related to permissions.");
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
index 7d84681afa5..45539424e85 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
@@ -1,21 +1,16 @@
package edu.harvard.iq.dataverse.api;
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.net.URL;
-import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
-import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
-import jakarta.ws.rs.Produces;
-import org.apache.commons.io.IOUtils;
+import edu.harvard.iq.dataverse.customization.CustomizationConstants;
+import jakarta.ws.rs.*;
+import jakarta.ws.rs.client.Client;
+import jakarta.ws.rs.client.ClientBuilder;
+import jakarta.ws.rs.client.WebTarget;
import edu.harvard.iq.dataverse.export.ExportService;
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.SystemConfig;
import io.gdcc.spi.export.Exporter;
import io.gdcc.spi.export.ExportException;
@@ -24,9 +19,6 @@
import jakarta.json.Json;
import jakarta.json.JsonObjectBuilder;
import jakarta.json.JsonValue;
-import jakarta.ws.rs.GET;
-import jakarta.ws.rs.Path;
-import jakarta.ws.rs.QueryParam;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import org.eclipse.microprofile.openapi.annotations.Operation;
@@ -139,6 +131,26 @@ public Response getExportFormats() {
return ok(responseModel);
}
+ @GET
+ @Path("settings/customization/{customizationFileType}")
+ public Response getCustomizationFile(@PathParam("customizationFileType") String customizationFileType) {
+ String type = customizationFileType != null ? customizationFileType.toLowerCase() : "";
+ if (!CustomizationConstants.validTypes.contains(type)) {
+ return badRequest("Customization type unknown or missing. Must be one of the following: " + CustomizationConstants.validTypes);
+ }
+ Client client = ClientBuilder.newClient();
+ WebTarget endpoint = client.target("http://localhost:8080/CustomizationFilesServlet");
+ Response response = endpoint.queryParam("customFileType", type)
+ .request(MediaType.MEDIA_TYPE_WILDCARD)
+ .get();
+
+ if (response.getLength() < 1) {
+ return notFound(type + " not found.");
+ } else {
+ return response;
+ }
+ }
+
private Response getSettingResponseByKey(SettingsServiceBean.Key key) {
String setting = settingsService.getValueForKey(key);
if (setting != null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
index ca8f59a71be..562fd7fcb81 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
@@ -160,7 +160,7 @@ public Response updateCitationsForDataset(@PathParam("id") String id) throws IOE
url = new URI(JvmSettings.DATACITE_REST_API_URL.lookup(pidProvider.getId()) +
"/events?doi=" +
authorityPlusIdentifier +
- "&source=crossref&page[size]=1000").toURL();
+ "&source=crossref&page[size]=1000&page[cursor]=1").toURL();
} catch (URISyntaxException e) {
//Nominally this means a config error/ bad DATACITE_REST_API_URL for this provider
logger.warning("Unable to create URL for " + persistentId + ", pidProvider " + pidProvider.getId());
@@ -175,11 +175,14 @@ public Response updateCitationsForDataset(@PathParam("id") String id) throws IOE
int status = connection.getResponseCode();
if (status != 200) {
logger.warning("Failed to get citations from " + url.toString());
+ connection.disconnect();
return error(Status.fromStatusCode(status), "Failed to get citations from " + url.toString());
}
JsonObject report;
try (InputStream inStream = connection.getInputStream()) {
report = JsonUtil.getJsonObject(inStream);
+ } finally {
+ connection.disconnect();
}
JsonObject links = report.getJsonObject("links");
JsonArray data = report.getJsonArray("data");
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java b/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java
index df172f36973..eebdce5e509 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java
@@ -1,240 +1,218 @@
package edu.harvard.iq.dataverse.api;
-import edu.harvard.iq.dataverse.MailServiceBean;
import edu.harvard.iq.dataverse.UserNotification;
-import edu.harvard.iq.dataverse.UserNotification.Type;
import edu.harvard.iq.dataverse.api.auth.AuthRequired;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.authorization.users.User;
-import edu.harvard.iq.dataverse.workflows.WorkflowUtil;
+
import java.util.List;
import java.util.Optional;
import java.util.Set;
-import jakarta.ejb.EJB;
import jakarta.ejb.Stateless;
import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonObjectBuilder;
-import jakarta.ws.rs.DELETE;
-import jakarta.ws.rs.GET;
-import jakarta.ws.rs.PUT;
-import jakarta.ws.rs.Path;
-import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.*;
import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.core.Context;
import jakarta.ws.rs.core.Response;
-import edu.harvard.iq.dataverse.util.MailUtil;
-import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
+import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
@Stateless
@Path("notifications")
public class Notifications extends AbstractApiBean {
- @EJB
- MailServiceBean mailService;
-
@GET
@AuthRequired
@Path("/all")
- public Response getAllNotificationsForUser(@Context ContainerRequestContext crc) {
- User user = getRequestUser(crc);
- if (!(user instanceof AuthenticatedUser)) {
- // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
- return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
+ public Response getAllNotificationsForUser(@Context ContainerRequestContext crc, @QueryParam("inAppNotificationFormat") boolean inAppNotificationFormat) {
+ try {
+ AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
+ List userNotifications = userNotificationSvc.findByUser(authenticatedUser.getId());
+ return ok(Json.createObjectBuilder().add("notifications", json(userNotifications, authenticatedUser, inAppNotificationFormat)));
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
}
- AuthenticatedUser authenticatedUser = (AuthenticatedUser) user;
- JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder();
- List notifications = userNotificationSvc.findByUser(authenticatedUser.getId());
- for (UserNotification notification : notifications) {
- NullSafeJsonBuilder notificationObjectBuilder = jsonObjectBuilder();
- JsonArrayBuilder reasonsForReturn = Json.createArrayBuilder();
- Type type = notification.getType();
- notificationObjectBuilder.add("id", notification.getId());
- notificationObjectBuilder.add("type", type.toString());
- /* FIXME - Re-add reasons for return if/when they are added to the notifications page.
- if (Type.RETURNEDDS.equals(type) || Type.SUBMITTEDDS.equals(type)) {
- JsonArrayBuilder reasons = getReasonsForReturn(notification);
- for (JsonValue reason : reasons.build()) {
- reasonsForReturn.add(reason);
- }
- notificationObjectBuilder.add("reasonsForReturn", reasonsForReturn);
- }
- */
- Object objectOfNotification = mailService.getObjectOfNotification(notification);
- if (objectOfNotification != null){
- String subjectText = MailUtil.getSubjectTextBasedOnNotification(notification, objectOfNotification);
- String messageText = mailService.getMessageTextBasedOnNotification(notification, objectOfNotification, null, notification.getRequestor());
- notificationObjectBuilder.add("subjectText", subjectText);
- notificationObjectBuilder.add("messageText", messageText);
- }
- notificationObjectBuilder.add("sentTimestamp", notification.getSendDateTimestamp());
- jsonArrayBuilder.add(notificationObjectBuilder);
+ }
+
+ @GET
+ @AuthRequired
+ @Path("/unreadCount")
+ public Response getUnreadNotificationsCountForUser(@Context ContainerRequestContext crc) {
+ try {
+ AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
+ long unreadCount = userNotificationSvc.getUnreadNotificationCountByUser(au.getId());
+ return ok(Json.createObjectBuilder()
+ .add("unreadCount", unreadCount));
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
}
- JsonObjectBuilder result = Json.createObjectBuilder().add("notifications", jsonArrayBuilder);
- return ok(result);
}
- private JsonArrayBuilder getReasonsForReturn(UserNotification notification) {
- Long objectId = notification.getObjectId();
- return WorkflowUtil.getAllWorkflowComments(datasetVersionSvc.find(objectId));
+ @PUT
+ @AuthRequired
+ @Path("/{id}/markAsRead")
+ public Response markNotificationAsReadForUser(@Context ContainerRequestContext crc, @PathParam("id") long id) {
+ try {
+ AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
+ Long userId = au.getId();
+ Optional notification = userNotificationSvc.findByUser(userId).stream().filter(x -> x.getId().equals(id)).findFirst();
+ if (notification.isPresent()) {
+ UserNotification saved = userNotificationSvc.markAsRead(notification.get());
+ if (saved.isReadNotification()) {
+ return ok("Notification " + id + " marked as read.");
+ } else {
+ return badRequest("Notification " + id + " could not be marked as read.");
+ }
+ } else {
+ return notFound("Notification " + id + " not found.");
+ }
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
}
@DELETE
@AuthRequired
@Path("/{id}")
public Response deleteNotificationForUser(@Context ContainerRequestContext crc, @PathParam("id") long id) {
- User user = getRequestUser(crc);
- if (!(user instanceof AuthenticatedUser)) {
- // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
- return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
- }
+ try {
+ AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
+ Long userId = authenticatedUser.getId();
+ Optional notification = userNotificationSvc.findByUser(userId).stream().filter(x -> x.getId().equals(id)).findFirst();
- AuthenticatedUser authenticatedUser = (AuthenticatedUser) user;
- Long userId = authenticatedUser.getId();
- Optional notification = userNotificationSvc.findByUser(userId).stream().filter(x -> x.getId().equals(id)).findFirst();
+ if (notification.isPresent()) {
+ userNotificationSvc.delete(notification.get());
+ return ok("Notification " + id + " deleted.");
+ }
- if (notification.isPresent()) {
- userNotificationSvc.delete(notification.get());
- return ok("Notification " + id + " deleted.");
+ return notFound("Notification " + id + " not found.");
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
}
-
- return notFound("Notification " + id + " not found.");
}
@GET
@AuthRequired
@Path("/mutedEmails")
public Response getMutedEmailsForUser(@Context ContainerRequestContext crc) {
- User user = getRequestUser(crc);
- if (!(user instanceof AuthenticatedUser)) {
- // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
- return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
+ try {
+ AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
+ JsonArrayBuilder mutedEmails = Json.createArrayBuilder();
+ authenticatedUser.getMutedEmails().stream().forEach(
+ x -> mutedEmails.add(jsonObjectBuilder().add("name", x.name()).add("description", x.getDescription()))
+ );
+ JsonObjectBuilder result = Json.createObjectBuilder().add("mutedEmails", mutedEmails);
+ return ok(result);
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
}
-
- AuthenticatedUser authenticatedUser = (AuthenticatedUser) user;
- JsonArrayBuilder mutedEmails = Json.createArrayBuilder();
- authenticatedUser.getMutedEmails().stream().forEach(
- x -> mutedEmails.add(jsonObjectBuilder().add("name", x.name()).add("description", x.getDescription()))
- );
- JsonObjectBuilder result = Json.createObjectBuilder().add("mutedEmails", mutedEmails);
- return ok(result);
}
@PUT
@AuthRequired
@Path("/mutedEmails/{typeName}")
public Response muteEmailsForUser(@Context ContainerRequestContext crc, @PathParam("typeName") String typeName) {
- User user = getRequestUser(crc);
- if (!(user instanceof AuthenticatedUser)) {
- // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
- return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
- }
-
UserNotification.Type notificationType;
try {
notificationType = UserNotification.Type.valueOf(typeName);
} catch (Exception ignore) {
return notFound("Notification type " + typeName + " not found.");
}
- AuthenticatedUser authenticatedUser = (AuthenticatedUser) user;
- Set mutedEmails = authenticatedUser.getMutedEmails();
- mutedEmails.add(notificationType);
- authenticatedUser.setMutedEmails(mutedEmails);
- authSvc.update(authenticatedUser);
- return ok("Notification emails of type " + typeName + " muted.");
+ try {
+ AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
+ Set mutedEmails = authenticatedUser.getMutedEmails();
+ mutedEmails.add(notificationType);
+ authenticatedUser.setMutedEmails(mutedEmails);
+ authSvc.update(authenticatedUser);
+ return ok("Notification emails of type " + typeName + " muted.");
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
}
@DELETE
@AuthRequired
@Path("/mutedEmails/{typeName}")
public Response unmuteEmailsForUser(@Context ContainerRequestContext crc, @PathParam("typeName") String typeName) {
- User user = getRequestUser(crc);
- if (!(user instanceof AuthenticatedUser)) {
- // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
- return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
- }
-
UserNotification.Type notificationType;
try {
notificationType = UserNotification.Type.valueOf(typeName);
} catch (Exception ignore) {
return notFound("Notification type " + typeName + " not found.");
}
- AuthenticatedUser authenticatedUser = (AuthenticatedUser) user;
- Set mutedEmails = authenticatedUser.getMutedEmails();
- mutedEmails.remove(notificationType);
- authenticatedUser.setMutedEmails(mutedEmails);
- authSvc.update(authenticatedUser);
- return ok("Notification emails of type " + typeName + " unmuted.");
+ try {
+ AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
+ Set mutedEmails = authenticatedUser.getMutedEmails();
+ mutedEmails.remove(notificationType);
+ authenticatedUser.setMutedEmails(mutedEmails);
+ authSvc.update(authenticatedUser);
+ return ok("Notification emails of type " + typeName + " unmuted.");
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
}
@GET
@AuthRequired
@Path("/mutedNotifications")
public Response getMutedNotificationsForUser(@Context ContainerRequestContext crc) {
- User user = getRequestUser(crc);
- if (!(user instanceof AuthenticatedUser)) {
- // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
- return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
+ try {
+ AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
+ JsonArrayBuilder mutedNotifications = Json.createArrayBuilder();
+ authenticatedUser.getMutedNotifications().stream().forEach(
+ x -> mutedNotifications.add(jsonObjectBuilder().add("name", x.name()).add("description", x.getDescription()))
+ );
+ JsonObjectBuilder result = Json.createObjectBuilder().add("mutedNotifications", mutedNotifications);
+ return ok(result);
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
}
-
- AuthenticatedUser authenticatedUser = (AuthenticatedUser) user;
- JsonArrayBuilder mutedNotifications = Json.createArrayBuilder();
- authenticatedUser.getMutedNotifications().stream().forEach(
- x -> mutedNotifications.add(jsonObjectBuilder().add("name", x.name()).add("description", x.getDescription()))
- );
- JsonObjectBuilder result = Json.createObjectBuilder().add("mutedNotifications", mutedNotifications);
- return ok(result);
}
@PUT
@AuthRequired
@Path("/mutedNotifications/{typeName}")
public Response muteNotificationsForUser(@Context ContainerRequestContext crc, @PathParam("typeName") String typeName) {
- User user = getRequestUser(crc);
- if (!(user instanceof AuthenticatedUser)) {
- // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
- return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
- }
-
UserNotification.Type notificationType;
try {
notificationType = UserNotification.Type.valueOf(typeName);
} catch (Exception ignore) {
return notFound("Notification type " + typeName + " not found.");
}
- AuthenticatedUser authenticatedUser = (AuthenticatedUser) user;
- Set mutedNotifications = authenticatedUser.getMutedNotifications();
- mutedNotifications.add(notificationType);
- authenticatedUser.setMutedNotifications(mutedNotifications);
- authSvc.update(authenticatedUser);
- return ok("Notification of type " + typeName + " muted.");
+ try {
+ AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
+ Set mutedNotifications = authenticatedUser.getMutedNotifications();
+ mutedNotifications.add(notificationType);
+ authenticatedUser.setMutedNotifications(mutedNotifications);
+ authSvc.update(authenticatedUser);
+ return ok("Notification of type " + typeName + " muted.");
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
}
@DELETE
@AuthRequired
@Path("/mutedNotifications/{typeName}")
public Response unmuteNotificationsForUser(@Context ContainerRequestContext crc, @PathParam("typeName") String typeName) {
- User user = getRequestUser(crc);
- if (!(user instanceof AuthenticatedUser)) {
- // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
- return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
- }
-
UserNotification.Type notificationType;
try {
notificationType = UserNotification.Type.valueOf(typeName);
} catch (Exception ignore) {
return notFound("Notification type " + typeName + " not found.");
}
- AuthenticatedUser authenticatedUser = (AuthenticatedUser) user;
- Set mutedNotifications = authenticatedUser.getMutedNotifications();
- mutedNotifications.remove(notificationType);
- authenticatedUser.setMutedNotifications(mutedNotifications);
- authSvc.update(authenticatedUser);
- return ok("Notification of type " + typeName + " unmuted.");
+ try {
+ AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
+ Set mutedNotifications = authenticatedUser.getMutedNotifications();
+ mutedNotifications.remove(notificationType);
+ authenticatedUser.setMutedNotifications(mutedNotifications);
+ authSvc.update(authenticatedUser);
+ return ok("Notification of type " + typeName + " unmuted.");
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java
index 13eb6e0b48a..33beabd0f97 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java
@@ -75,6 +75,7 @@ public Response search(
@QueryParam("geo_point") String geoPointRequested,
@QueryParam("geo_radius") String geoRadiusRequested,
@QueryParam("show_type_counts") boolean showTypeCounts,
+ @QueryParam("show_collections") boolean showCollections,
@QueryParam("search_service") String searchServiceName,
@Context HttpServletResponse response
) {
@@ -134,11 +135,11 @@ public Response search(
List totalFilterQueries = new ArrayList<>();
totalFilterQueries.addAll(filterQueries);
totalFilterQueries.add(SearchFields.TYPE + allTypes);
-
+
try {
-
+
SolrQueryResponse resp = searchService.search(requestUser, dataverseSubtrees, query, totalFilterQueries, null, null, 0,
- dataRelatedToMe, 1, false, null, null, false, false);
+ dataRelatedToMe, 1, false, null, null, false, false, false);
if (resp != null) {
for (FacetCategory facetCategory : resp.getTypeFacetCategories()) {
for (FacetLabel facetLabel : facetCategory.getFacetLabel()) {
@@ -193,11 +194,12 @@ public Response search(
paginationStart,
dataRelatedToMe,
numResultsPerPage,
- queryEntities,
+ queryEntities,
geoPoint,
geoRadius,
showFacets, // facets are expensive, no need to ask for them if not requested
- showRelevance // no need for highlights unless requested either
+ showRelevance, // no need for highlights unless requested either
+ showCollections // same for collections
);
} catch (SearchException ex) {
Throwable cause = ex;
@@ -284,15 +286,15 @@ public Response search(
}
}
-
+
@GET
@Path("/services")
public Response getSearchEngines() {
Map availableEngines = searchServiceFactory.getAvailableServices();
String defaultServiceName = JvmSettings.DEFAULT_SEARCH_SERVICE.lookupOptional().orElse(SearchServiceFactory.INTERNAL_SOLR_SERVICE_NAME);
-
+
JsonArrayBuilder enginesArray = Json.createArrayBuilder();
-
+
for (String engine : availableEngines.keySet()) {
JsonObjectBuilder engineObject = Json.createObjectBuilder()
.add("name", engine)
@@ -302,10 +304,10 @@ public Response getSearchEngines() {
JsonObjectBuilder response = Json.createObjectBuilder()
.add("services", enginesArray)
.add("defaultService", defaultServiceName);
-
+
return ok(response);
}
-
+
private User getUser(ContainerRequestContext crc) throws WrappedResponse {
User userToExecuteSearchAs = GuestUser.get();
try {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/NewTemplateDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/NewTemplateDTO.java
new file mode 100644
index 00000000000..28e21ffc6c0
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/NewTemplateDTO.java
@@ -0,0 +1,75 @@
+package edu.harvard.iq.dataverse.api.dto;
+
+import edu.harvard.iq.dataverse.*;
+import edu.harvard.iq.dataverse.util.json.JsonParseException;
+import edu.harvard.iq.dataverse.util.json.JsonParser;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import jakarta.json.*;
+
+import java.sql.Timestamp;
+import java.util.*;
+
+public class NewTemplateDTO {
+
+ private String name;
+ private List datasetFields;
+ private Map instructionsMap;
+ private boolean isDefault;
+
+ public static NewTemplateDTO fromRequestBody(String requestBody, JsonParser jsonParser) throws JsonParseException {
+ NewTemplateDTO newTemplateDTO = new NewTemplateDTO();
+
+ JsonObject jsonObject = JsonUtil.getJsonObject(requestBody);
+
+ newTemplateDTO.name = jsonObject.getString("name");
+ newTemplateDTO.datasetFields = jsonParser.parseMultipleFields(jsonObject);
+ newTemplateDTO.instructionsMap = parseRequestBodyInstructionsMap(jsonObject);
+ newTemplateDTO.isDefault = jsonObject.getBoolean("isDefault", false);
+
+ return newTemplateDTO;
+ }
+
+ public Template toTemplate() {
+ Template template = new Template();
+
+ template.setDatasetFields(getDatasetFields());
+ template.setName(getName());
+ template.setInstructionsMap(getInstructionsMap());
+ template.updateInstructions();
+ template.setCreateTime(new Timestamp(new Date().getTime()));
+ template.setUsageCount(0L);
+
+ return template;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public List getDatasetFields() {
+ return datasetFields;
+ }
+
+ public Map getInstructionsMap() {
+ return instructionsMap;
+ }
+
+ public boolean isDefault() {
+ return isDefault;
+ }
+
+ private static Map parseRequestBodyInstructionsMap(JsonObject jsonObject) {
+ Map instructionsMap = new HashMap<>();
+ JsonArray instructionsJsonArray = jsonObject.getJsonArray("instructions");
+ if (instructionsJsonArray == null) {
+ return null;
+ }
+ for (JsonObject instructionJsonObject : instructionsJsonArray.getValuesAs(JsonObject.class)) {
+ instructionsMap.put(
+ instructionJsonObject.getString("instructionField"),
+ instructionJsonObject.getString("instructionText")
+ );
+ }
+ return instructionsMap;
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
index b49fa70cea1..b43bc0e0bb0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
@@ -14,6 +14,7 @@
import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.OrcidOAuth2AP;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider;
+import edu.harvard.iq.dataverse.authorization.providers.shib.ShibUtil;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
@@ -995,10 +996,23 @@ public AuthenticatedUser lookupUserByOIDCBearerToken(String bearerToken) throws
// TODO: Get the identifier from an invalidating cache to avoid lookup bursts of the same token.
// Tokens in the cache should be removed after some (configurable) time.
OAuth2UserRecord oAuth2UserRecord = verifyOIDCBearerTokenAndGetOAuth2UserRecord(bearerToken);
- if (FeatureFlags.API_BEARER_AUTH_USE_BUILTIN_USER_ON_ID_MATCH.enabled()) {
- AuthenticatedUser builtinAuthenticatedUser = lookupUser(BuiltinAuthenticationProvider.PROVIDER_ID, oAuth2UserRecord.getUsername());
- return (builtinAuthenticatedUser != null) ? builtinAuthenticatedUser : lookupUser(oAuth2UserRecord.getUserRecordIdentifier());
+ AuthenticatedUser authenticatedUser;
+ if (FeatureFlags.API_BEARER_AUTH_USE_SHIB_USER_ON_ID_MATCH.enabled() && oAuth2UserRecord.hasShibAttributes()) {
+ logger.log(Level.FINE, "OAuth2UserRecord has Shibboleth attributes");
+ String userPersistentId = ShibUtil.createUserPersistentIdentifier(oAuth2UserRecord.getShibIdp(), oAuth2UserRecord.getShibUniquePersistentIdentifier());
+ authenticatedUser = lookupUser(ShibAuthenticationProvider.PROVIDER_ID, userPersistentId);
+ if (authenticatedUser != null) {
+ logger.log(Level.FINE, "Shibboleth user found for the given bearer token");
+ return authenticatedUser;
+ }
+ } else if (FeatureFlags.API_BEARER_AUTH_USE_BUILTIN_USER_ON_ID_MATCH.enabled()) {
+ authenticatedUser = lookupUser(BuiltinAuthenticationProvider.PROVIDER_ID, oAuth2UserRecord.getUsername());
+ if (authenticatedUser != null) {
+ logger.log(Level.FINE, "Builtin user found for the given bearer token");
+ return authenticatedUser;
+ }
}
+
return lookupUser(oAuth2UserRecord.getUserRecordIdentifier());
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/Permission.java b/src/main/java/edu/harvard/iq/dataverse/authorization/Permission.java
index 32937098118..77ff12fdee7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/Permission.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/Permission.java
@@ -49,10 +49,12 @@ public enum Permission implements java.io.Serializable {
ManageFilePermissions(BundleUtil.getStringFromBundle("permission.managePermissionsDataFile"), true, DataFile.class),
PublishDataverse(BundleUtil.getStringFromBundle("permission.publishDataverse"), true, Dataverse.class),
PublishDataset(BundleUtil.getStringFromBundle("permission.publishDataset"), true, Dataset.class, Dataverse.class),
+ LinkDataverse(BundleUtil.getStringFromBundle("permission.linkDataverse"), true, Dataverse.class),
+ LinkDataset(BundleUtil.getStringFromBundle("permission.linkDataset"), true, Dataset.class, Dataverse.class),
// Delete
DeleteDataverse(BundleUtil.getStringFromBundle("permission.deleteDataverse"), true, Dataverse.class),
DeleteDatasetDraft(BundleUtil.getStringFromBundle("permission.deleteDataset"), true, Dataset.class);
-
+
// FUTURE:
//RestrictMetadata("Mark metadata as restricted", DvObject.class),
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
index 2ad67ac9fef..928cc3d8038 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
@@ -563,6 +563,7 @@ public void displayNotification() {
userNotification.setDisplayAsRead(userNotification.isReadNotification());
if (userNotification.isReadNotification() == false) {
userNotification.setReadNotification(true);
+ // TODO: consider switching to userNotificationService.markAsRead()
userNotificationService.save(userNotification);
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2UserRecord.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2UserRecord.java
index 234c2828ab5..3f75e882d82 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2UserRecord.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2UserRecord.java
@@ -2,39 +2,75 @@
import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
+
+import java.io.Serializable;
import java.util.List;
/**
* Describes a single user on a remote IDP that uses OAuth2.
* Normally generated by {@link AbstractOAuth2Idp}.
- *
+ *
* @author michael
*/
-public class OAuth2UserRecord implements java.io.Serializable {
-
+public class OAuth2UserRecord implements Serializable {
+
private final String serviceId;
-
- /** An immutable value, probably a number. Not a username that may change. */
+
+ /**
+ * An immutable value, probably a number. Not a username that may change.
+ */
private final String idInService;
- /** A potentially mutable String that is easier on the eye than a number. */
+ /**
+ * A potentially mutable String that is easier on the eye than a number.
+ */
private final String username;
-
+
+ /**
+ * For users originally coming from a Shibboleth IdP
+ */
+ private final String shibUniquePersistentIdentifier;
+ private final String shibIdp;
+
private final AuthenticatedUserDisplayInfo displayInfo;
-
private final List availableEmailAddresses;
-
private final OAuth2TokenData tokenData;
-
- public OAuth2UserRecord(String aServiceId, String anIdInService, String aUsername,
- OAuth2TokenData someTokenData, AuthenticatedUserDisplayInfo aDisplayInfo,
- List someAvailableEmailAddresses) {
- serviceId = aServiceId;
- idInService = anIdInService;
- username = aUsername;
- tokenData = someTokenData;
- displayInfo = aDisplayInfo;
- availableEmailAddresses = someAvailableEmailAddresses;
+
+ /**
+ * Constructor for users without Shibboleth attributes.
+ */
+ public OAuth2UserRecord(
+ String serviceId,
+ String idInService,
+ String username,
+ OAuth2TokenData tokenData,
+ AuthenticatedUserDisplayInfo displayInfo,
+ List availableEmailAddresses
+ ) {
+ this(serviceId, idInService, username, null, null, tokenData, displayInfo, availableEmailAddresses);
+ }
+
+ /**
+ * Full constructor for OAuth2 user records.
+ */
+ public OAuth2UserRecord(
+ String serviceId,
+ String idInService,
+ String username,
+ String shibUniquePersistentIdentifier,
+ String shibIdp,
+ OAuth2TokenData tokenData,
+ AuthenticatedUserDisplayInfo displayInfo,
+ List availableEmailAddresses
+ ) {
+ this.serviceId = serviceId;
+ this.idInService = idInService;
+ this.username = username;
+ this.shibUniquePersistentIdentifier = shibUniquePersistentIdentifier;
+ this.shibIdp = shibIdp;
+ this.tokenData = tokenData;
+ this.displayInfo = displayInfo;
+ this.availableEmailAddresses = availableEmailAddresses;
}
public String getServiceId() {
@@ -49,10 +85,18 @@ public String getUsername() {
return username;
}
+ public String getShibUniquePersistentIdentifier() {
+ return shibUniquePersistentIdentifier;
+ }
+
+ public String getShibIdp() {
+ return shibIdp;
+ }
+
public List getAvailableEmailAddresses() {
return availableEmailAddresses;
}
-
+
public AuthenticatedUserDisplayInfo getDisplayInfo() {
return displayInfo;
}
@@ -61,12 +105,19 @@ public OAuth2TokenData getTokenData() {
return tokenData;
}
- @Override
- public String toString() {
- return "OAuth2UserRecord{" + "serviceId=" + serviceId + ", idInService=" + idInService + '}';
- }
-
public UserRecordIdentifier getUserRecordIdentifier() {
return new UserRecordIdentifier(serviceId, idInService);
}
+
+ public boolean hasShibAttributes() {
+ return shibIdp != null && shibUniquePersistentIdentifier != null;
+ }
+
+ @Override
+ public String toString() {
+ return "OAuth2UserRecord{" +
+ "serviceId='" + serviceId + '\'' +
+ ", idInService='" + idInService + '\'' +
+ '}';
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
index 5cf8ca2ea55..4085777f180 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
@@ -33,11 +33,11 @@
import com.nimbusds.openid.connect.sdk.op.OIDCProviderConfigurationRequest;
import com.nimbusds.openid.connect.sdk.op.OIDCProviderMetadata;
import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
-import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationSetupException;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord;
+import edu.harvard.iq.dataverse.authorization.providers.shib.ShibUtil;
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -47,11 +47,8 @@
import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.List;
-import java.util.Map;
import java.util.Optional;
-import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
-import java.util.logging.Level;
import java.util.logging.Logger;
/**
@@ -231,16 +228,34 @@ public OAuth2UserRecord getUserRecord(String code, String state, String redirect
* @return the usable user record for processing ing {@link edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2LoginBackingBean}
*/
public OAuth2UserRecord getUserRecord(UserInfo userInfo) {
+ // Extract Shibboleth attributes if present
+ Object shibUniqueIdObj = userInfo.getClaim(ShibUtil.uniquePersistentIdentifier);
+ Object shibIdpObj = userInfo.getClaim(ShibUtil.shibIdpAttribute);
+
+ String shibUniqueId = (shibUniqueIdObj != null) ? shibUniqueIdObj.toString() : null;
+ String shibIdp = (shibIdpObj != null) ? shibIdpObj.toString() : null;
+
+ // Build display info from user attributes
+ AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(
+ userInfo.getGivenName(),
+ userInfo.getFamilyName(),
+ userInfo.getEmailAddress(),
+ "",
+ ""
+ );
+
return new OAuth2UserRecord(
- this.getId(),
- userInfo.getSubject().getValue(),
- userInfo.getPreferredUsername(),
- null,
- new AuthenticatedUserDisplayInfo(userInfo.getGivenName(), userInfo.getFamilyName(), userInfo.getEmailAddress(), "", ""),
- null
+ this.getId(),
+ userInfo.getSubject().getValue(),
+ userInfo.getPreferredUsername(),
+ shibUniqueId,
+ shibIdp,
+ null,
+ displayInfo,
+ null
);
}
-
+
/**
* Retrieve the Access Token from provider. Encapsulate for testing.
* @param grant
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java
index 4cf41903405..a2081d44834 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java
@@ -405,4 +405,23 @@ public static void printAttributes(HttpServletRequest request) {
logger.fine("shib values: " + shibValues);
}
+ /**
+ * Creates a persistent identifier for a user authenticated via a Shibboleth Identity Provider (IdP).
+ *
+ * This method combines the IdP's entity ID and the user's unique identifier into a single string,
+ * using a pipe character ("|") as a separator. This is necessary because there is only one field
+ * available to store the full identifier.
+ *
+ * The pipe character is chosen because it's considered "unwise" to use in URLs, and the
+ * Shibboleth IdP entity ID often resembles a URL. Using this separator allows for future parsing,
+ * such as answering questions like "How many users logged in from Harvard's Identity Provider?"
+ *
+ * @param shibIdp the entity ID of the Shibboleth Identity Provider
+ * @param shibUserIdentifier the unique persistent identifier for the user from the IdP
+ * @return a combined string containing both the IdP and user identifier, separated by a pipe
+ */
+ public static String createUserPersistentIdentifier(String shibIdp, String shibUserIdentifier) {
+ String persistentUserIdSeparator = "|";
+ return shibIdp + persistentUserIdSeparator + shibUserIdentifier;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/customization/CustomizationConstants.java b/src/main/java/edu/harvard/iq/dataverse/customization/CustomizationConstants.java
index 86bab2e1f60..767bd66518d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/customization/CustomizationConstants.java
+++ b/src/main/java/edu/harvard/iq/dataverse/customization/CustomizationConstants.java
@@ -5,6 +5,8 @@
*/
package edu.harvard.iq.dataverse.customization;
+import java.util.List;
+
/**
*
* @author rmp553
@@ -23,5 +25,5 @@ public class CustomizationConstants {
public static String fileTypeLogo = "logo";
-
+ public static List validTypes = List.of(fileTypeHomePage, fileTypeHeader, fileTypeFooter, fileTypeStyle, fileTypeAnalytics, fileTypeLogo);
} // end CustomizationConstants
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
index 54844160163..f7df81b6386 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -194,46 +194,28 @@ public boolean getTabIngest() {
return this.tabIngest;
}
- public boolean hasCategories(){
- if ((categories == null)||(this.categories.isEmpty())){
- return false;
- }
- return true;
+ public boolean hasCategories() {
+ return categories != null;
}
- public boolean hasFileDataTags(){
- if ((dataFileTags == null)||(this.dataFileTags.isEmpty())){
- return false;
- }
- return true;
+ public boolean hasFileDataTags() {
+ return dataFileTags != null;
}
public boolean hasDescription(){
- if ((description == null)||(this.description.isEmpty())){
- return false;
- }
- return true;
+ return description != null;
}
- public boolean hasDirectoryLabel(){
- if ((directoryLabel == null)||(this.directoryLabel.isEmpty())){
- return false;
- }
- return true;
+ public boolean hasDirectoryLabel() {
+ return directoryLabel != null;
}
- public boolean hasLabel(){
- if ((label == null)||(this.label.isEmpty())){
- return false;
- }
- return true;
+ public boolean hasLabel() {
+ return label != null;
}
- public boolean hasProvFreeform(){
- if ((provFreeForm == null)||(this.provFreeForm.isEmpty())){
- return false;
- }
- return true;
+ public boolean hasProvFreeform() {
+ return provFreeForm != null;
}
public boolean hasStorageIdentifier() {
@@ -245,7 +227,7 @@ public String getStorageIdentifier() {
}
public boolean hasFileName() {
- return ((fileName!=null)&&(!fileName.isEmpty()));
+ return fileName != null;
}
public String getFileName() {
@@ -253,7 +235,7 @@ public String getFileName() {
}
public boolean hasMimetype() {
- return ((mimeType!=null)&&(!mimeType.isEmpty()));
+ return mimeType != null;
}
public String getMimeType() {
@@ -266,7 +248,7 @@ public void setCheckSum(String checkSum, ChecksumType type) {
}
public boolean hasCheckSum() {
- return ((checkSumValue!=null)&&(!checkSumValue.isEmpty()));
+ return checkSumValue != null;
}
public String getCheckSum() {
@@ -294,15 +276,10 @@ public void setFileSize(long fileSize) {
* @param tags
*/
public void setCategories(List newCategories) {
-
if (newCategories != null) {
newCategories = Util.removeDuplicatesNullsEmptyStrings(newCategories);
- if (newCategories.isEmpty()) {
- newCategories = null;
- }
+ this.categories = newCategories;
}
-
- this.categories = newCategories;
}
/**
@@ -495,27 +472,20 @@ private void addFileDataTags(List potentialTags) throws DataFileTagExcep
}
potentialTags = Util.removeDuplicatesNullsEmptyStrings(potentialTags);
-
- if (potentialTags.isEmpty()){
- return;
- }
-
+
// Make a new list
- this.dataFileTags = new ArrayList<>();
+ List newList = new ArrayList<>();
// Add valid potential tags to the list
for (String tagToCheck : potentialTags){
if (DataFileTag.isDataFileTag(tagToCheck)){
- this.dataFileTags.add(tagToCheck);
+ newList.add(tagToCheck);
}else{
String errMsg = BundleUtil.getStringFromBundle("file.addreplace.error.invalid_datafile_tag");
throw new DataFileTagException(errMsg + " [" + tagToCheck + "]. Please use one of the following: " + DataFileTag.getListofLabelsAsString());
}
}
- // Shouldn't happen....
- if (dataFileTags.isEmpty()){
- dataFileTags = null;
- }
+ this.dataFileTags = newList;
}
private void msg(String s){
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
index f7a268abf64..1945d44cd78 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
@@ -4,6 +4,7 @@
import edu.harvard.iq.dataverse.dataset.DatasetFieldsValidator;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItemServiceBean;
+import edu.harvard.iq.dataverse.license.LicenseServiceBean;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.search.SearchService;
import edu.harvard.iq.dataverse.search.SearchServiceFactory;
@@ -140,4 +141,6 @@ public interface CommandContext {
public DataverseFeaturedItemServiceBean dataverseFeaturedItems();
public DatasetFieldsValidator datasetFieldsValidator();
+
+ public LicenseServiceBean licenses();
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
index 247e5844659..ef46e1b61c3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
@@ -28,19 +28,23 @@
@RequiredPermissions(Permission.AddDataverse)
public class CreateDataverseCommand extends AbstractWriteDataverseCommand {
+ private final boolean sendNotificationOnSuccess;
+
public CreateDataverseCommand(Dataverse created,
DataverseRequest request,
List facets,
List inputLevels) {
- this(created, request, facets, inputLevels, null);
+ this(created, request, facets, inputLevels, null, false);
}
public CreateDataverseCommand(Dataverse created,
DataverseRequest request,
List facets,
List inputLevels,
- List metadataBlocks) {
+ List metadataBlocks,
+ boolean sendNotificationOnSuccess) {
super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks);
+ this.sendNotificationOnSuccess = sendNotificationOnSuccess;
}
@Override
@@ -143,8 +147,30 @@ protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandExcep
return managedDv;
}
+ /**
+ * Handles the successful creation of the dataverse by sending a notification
+ * and triggering indexing.
+ *
+ * The {@code sendNotificationOnSuccess} flag is used because this command is
+ * consumed from two different places: the JSF front-end and the API.
+ *
+ * - From JSF: The flag is {@code false}, as the user notification is
+ * sent separately by the UI logic.
+ * - From the API: The flag is {@code true} to ensure users receive a
+ * notification when creating a dataverse through the API.
+ *
+ *
+ * @param ctxt The command context.
+ * @param r The created Dataverse object, returned from the command execution.
+ * @return {@code true} if the dataverse was indexed successfully.
+ */
@Override
public boolean onSuccess(CommandContext ctxt, Object r) {
+ if (sendNotificationOnSuccess) {
+ AuthenticatedUser authenticatedUser = (AuthenticatedUser) getUser();
+ ctxt.notifications().sendNotification(authenticatedUser, dataverse.getCreateDate(), UserNotification.Type.CREATEDV, dataverse.getId());
+ }
+
return ctxt.dataverses().index((Dataverse) r);
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java
index c22a2cdb4a2..b1f3147e97f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java
@@ -2,13 +2,13 @@
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.GlobalId;
import edu.harvard.iq.dataverse.RoleAssignment;
import edu.harvard.iq.dataverse.Template;
import edu.harvard.iq.dataverse.UserNotification;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
@@ -18,10 +18,9 @@
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty;
-import java.util.logging.Logger;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
-import java.util.List;
+
import java.sql.Timestamp;
import java.time.Instant;
@@ -45,10 +44,14 @@
// line above, AND un-comment out the getRequiredPermissions() method below.
public class CreateNewDatasetCommand extends AbstractCreateDatasetCommand {
- private static final Logger logger = Logger.getLogger(CreateNewDatasetCommand.class.getName());
-
+
private final Template template;
- private final Dataverse dv;
+ private boolean allowSelfNotification = false;
+
+ public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boolean validate, boolean allowSelfNotification) {
+ this(theDataset, aRequest, null, validate);
+ this.allowSelfNotification = allowSelfNotification;
+ }
public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest) {
this( theDataset, aRequest, null);
@@ -57,13 +60,11 @@ public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest) {
public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest, Template template) {
super(theDataset, aRequest);
this.template = template;
- dv = theDataset.getOwner();
}
public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest, Template template, boolean validate) {
super(theDataset, aRequest, false, validate);
this.template = template;
- dv = theDataset.getOwner();
}
/**
@@ -119,9 +120,9 @@ protected void postPersist( Dataset theDataset, CommandContext ctxt ){
// (saveDataset, that the command returns). This may have been the reason
// for the github issue #4783 - where the users were losing their contributor
// permissions, when creating datasets AND uploading files in one step.
- // In that scenario, an additional UpdateDatasetCommand is exectued on the
+ // In that scenario, an additional UpdateDatasetCommand is executed on the
// dataset returned by the Create command. That issue was fixed by adding
- // a full refresh of the datast with datasetService.find() between the
+ // a full refresh of the dataset with datasetService.find() between the
// two commands. But it may be a good idea to make sure they are properly
// linked here (?)
theDataset.setPermissionModificationTime(getTimestamp());
@@ -131,33 +132,43 @@ protected void postPersist( Dataset theDataset, CommandContext ctxt ){
ctxt.templates().incrementUsageCount(template.getId());
}
}
-
- /* Emails those able to publish the dataset (except the creator themselves who already gets an email)
- * that a new dataset exists.
- * NB: Needs dataset id so has to be postDBFlush (vs postPersist())
+
+ /**
+ * Sends notifications to those able to publish the dataset upon the successful creation of a new dataset.
+ *
+ * This method checks if dataset creation notifications are enabled. If so, it
+ * notifies all users with {@code Permission.PublishDataset} on the new dataset.
+ * The user who initiated the action can be included or excluded from this
+ * notification based on the {@code allowSelfNotification} flag.
+ *
+ * @param dataset The newly created {@code Dataset}.
+ * @param ctxt The {@code CommandContext} providing access to application services.
*/
- protected void postDBFlush( Dataset theDataset, CommandContext ctxt ){
- if(ctxt.settings().isTrueForKey(SettingsServiceBean.Key.SendNotificationOnDatasetCreation, false)) {
- //QDR - alert curators that a dataset has been created
- //Should this create a notification too? (which would let us use the notification mailcapbilities to generate the subject/body.
- AuthenticatedUser requestor = getUser().isAuthenticated() ? (AuthenticatedUser) getUser() : null;
- List authUsers = ctxt.permissions().getUsersWithPermissionOn(Permission.PublishDataset, theDataset);
- for (AuthenticatedUser au : authUsers) {
- if(!au.equals(requestor)) {
- ctxt.notifications().sendNotification(
- au,
+ protected void postDBFlush(Dataset dataset, CommandContext ctxt) {
+ // 1. Exit early if the SendNotificationOnDatasetCreation setting is disabled.
+ if (!ctxt.settings().isTrueForKey(SettingsServiceBean.Key.SendNotificationOnDatasetCreation, false)) {
+ return;
+ }
+
+ // 2. Identify the user who initiated the action.
+ final User user = getUser();
+ final AuthenticatedUser requestor = user.isAuthenticated() ? (AuthenticatedUser) user : null;
+
+ // 3. Get all users with publish permission and notify them.
+ ctxt.permissions().getUsersWithPermissionOn(Permission.PublishDataset, dataset)
+ .stream()
+ .filter(recipient -> allowSelfNotification || !recipient.equals(requestor))
+ .forEach(recipient -> ctxt.notifications().sendNotification(
+ recipient,
Timestamp.from(Instant.now()),
UserNotification.Type.DATASETCREATED,
- theDataset.getId(),
+ dataset.getId(),
null,
requestor,
true
- );
- }
- }
- }
+ ));
}
-
+
// Re-enabling the method below will change the permission setup to dynamic.
// This will make it so that in an unpublished dataverse only users with the
// permission to view it will be allowed to create child datasets.
@@ -181,5 +192,5 @@ public Map> getRequiredPermissions() {
}
return ret;
}*/
-
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateTemplateCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateTemplateCommand.java
index 03177ee8c1f..32c63e55cb4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateTemplateCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateTemplateCommand.java
@@ -1,7 +1,6 @@
-
package edu.harvard.iq.dataverse.engine.command.impl;
-import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.Template;
+
+import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
@@ -9,25 +8,81 @@
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.util.DatasetFieldUtil;
+
+import java.util.ArrayList;
+import java.util.List;
+
/**
- *
* @author skraffmiller
+ * Creates a new {@link Template} within a {@link Dataverse}.
*/
-@RequiredPermissions( Permission.EditDataverse )
+@RequiredPermissions(Permission.EditDataverse)
public class CreateTemplateCommand extends AbstractCommand<Template> {
- private final Template created;
- private final Dataverse dv;
-
- public CreateTemplateCommand(Template template, DataverseRequest aRequest, Dataverse anAffectedDataverse) {
- super(aRequest, anAffectedDataverse);
- created = template;
- dv = anAffectedDataverse;
- }
-
- @Override
- public Template execute(CommandContext ctxt) throws CommandException {
-
- return ctxt.templates().save(created);
- }
-
+ private final Template template;
+ private final Dataverse dataverse;
+
+ private final boolean initialize;
+
+ public CreateTemplateCommand(Template template, DataverseRequest request, Dataverse dataverse) {
+ this(template, request, dataverse, false);
+ }
+
+ public CreateTemplateCommand(Template template, DataverseRequest request, Dataverse dataverse, boolean initialize) {
+ super(request, dataverse);
+ this.template = template;
+ this.dataverse = dataverse;
+ this.initialize = initialize;
+ }
+
+ @Override
+ public Template execute(CommandContext ctxt) throws CommandException {
+ if (initialize) {
+ template.setDataverse(dataverse);
+ template.setMetadataValueBlocks(getSystemMetadataBlocks(ctxt));
+
+ updateTermsOfUseAndAccess(ctxt, template);
+ updateDatasetFieldInputLevels(template, ctxt);
+
+ DatasetFieldUtil.tidyUpFields(template.getDatasetFields(), false);
+ }
+
+ return ctxt.templates().save(template);
+ }
+
+ private static void updateTermsOfUseAndAccess(CommandContext ctxt, Template template) {
+ TermsOfUseAndAccess terms = new TermsOfUseAndAccess();
+ terms.setFileAccessRequest(true);
+ terms.setTemplate(template);
+ terms.setLicense(ctxt.licenses().getDefault());
+ template.setTermsOfUseAndAccess(terms);
+ }
+
+ private void updateDatasetFieldInputLevels(Template template, CommandContext ctxt) {
+ Long dvIdForInputLevel = this.dataverse.getId();
+ if (!this.dataverse.isMetadataBlockRoot()) {
+ dvIdForInputLevel = this.dataverse.getMetadataRootId();
+ }
+
+ for (DatasetField dsf : template.getFlatDatasetFields()) {
+ DataverseFieldTypeInputLevel inputLevel = ctxt.fieldTypeInputLevels().findByDataverseIdDatasetFieldTypeId(
+ dvIdForInputLevel,
+ dsf.getDatasetFieldType().getId()
+ );
+ if (inputLevel != null) {
+ dsf.setInclude(inputLevel.isInclude());
+ } else {
+ dsf.setInclude(true);
+ }
+ }
+ }
+
+ private static List<MetadataBlock> getSystemMetadataBlocks(CommandContext ctxt) {
+ List<MetadataBlock> systemMetadataBlocks = new ArrayList<>();
+ for (MetadataBlock mdb : ctxt.metadataBlocks().listMetadataBlocks()) {
+ JvmSettings.MDB_SYSTEM_KEY_FOR.lookupOptional(mdb.getName()).ifPresent(smdbString -> systemMetadataBlocks.add(mdb));
+ }
+ return systemMetadataBlocks;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java
index 7f5672c0cd7..7eb3ad71a94 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java
@@ -7,6 +7,7 @@
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetLinkingDataverse;
+import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
@@ -15,6 +16,8 @@
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
import org.apache.solr.client.solrj.SolrServerException;
@@ -23,14 +26,14 @@
* @author sarahferry
*/
-@RequiredPermissions( Permission.PublishDataset )
+@RequiredPermissions( Permission.LinkDataset )
public class DeleteDatasetLinkingDataverseCommand extends AbstractCommand<Dataset>{
private final DatasetLinkingDataverse doomed;
private final Dataset editedDs;
private final boolean index;
public DeleteDatasetLinkingDataverseCommand(DataverseRequest aRequest, Dataset editedDs , DatasetLinkingDataverse doomed, boolean index) {
- super(aRequest, editedDs);
+ super(aRequest, doomed.getLinkingDataverse());
this.editedDs = editedDs;
this.doomed = doomed;
this.index = index;
@@ -42,12 +45,17 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
DatasetLinkingDataverse doomedAndMerged = ctxt.em().merge(doomed);
ctxt.em().remove(doomedAndMerged);
- try {
- ctxt.index().indexDataverse(doomed.getLinkingDataverse());
- } catch (IOException | SolrServerException e) {
- String failureLogText = "Post delete linking dataverse indexing failed for Dataverse. ";
- failureLogText += "\r\n" + e.getLocalizedMessage();
- LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, doomed.getLinkingDataverse());
+ List<Dataverse> toReindex = new ArrayList<>();
+ toReindex.add(doomed.getLinkingDataverse());
+ toReindex.addAll(doomed.getLinkingDataverse().getOwners());
+ for (Dataverse dv : toReindex) {
+ try {
+ ctxt.index().indexDataverse(dv);
+ } catch (IOException | SolrServerException e) {
+ String failureLogText = "Post delete linking dataverse indexing failed for Dataverse. ";
+ failureLogText += "\r\n" + e.getLocalizedMessage();
+ LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dv);
+ }
}
return merged;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java
index be3e28029e4..14dafd8397c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java
@@ -49,12 +49,13 @@ public class DestroyDatasetCommand extends AbstractVoidCommand {
private List<String> datasetAndFileSolrIdsToDelete;
- private Dataverse toReIndex;
+ private List<Dataverse> toReIndex;
public DestroyDatasetCommand(Dataset doomed, DataverseRequest aRequest) {
super(aRequest, doomed);
this.doomed = doomed;
datasetAndFileSolrIdsToDelete = new ArrayList<>();
+ toReIndex = new ArrayList<>();
}
@Override
@@ -116,7 +117,12 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
}
}
- toReIndex = managedDoomed.getOwner();
+ toReIndex.add(managedDoomed.getOwner());
+ toReIndex.addAll(managedDoomed.getOwner().getOwners());
+ managedDoomed.getDatasetLinkingDataverses().forEach(dld -> {
+ toReIndex.add(dld.getLinkingDataverse());
+ toReIndex.addAll(dld.getLinkingDataverse().getOwners());
+ });
// add potential Solr IDs of datasets to list for deletion
String solrIdOfPublishedDatasetVersion = IndexServiceBean.solrDocIdentifierDataset + managedDoomed.getId();
@@ -145,13 +151,15 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
logger.log(Level.FINE, "Result of attempt to delete dataset and file IDs from the search index: {0}", resultOfSolrDeletionAttempt.getMessage());
// reindex
- try {
- ctxt.index().indexDataverse(toReIndex);
- } catch (IOException | SolrServerException e) {
- String failureLogText = "Post-destroy dataset indexing of the owning dataverse failed. You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + toReIndex.getId().toString();
- failureLogText += "\r\n" + e.getLocalizedMessage();
- LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, toReIndex);
- retVal = false;
+ for (Dataverse dv : toReIndex) {
+ try {
+ ctxt.index().indexDataverse(dv);
+ } catch (IOException | SolrServerException e) {
+ String failureLogText = "Post-destroy dataset indexing of an owning or linking dataverse failed. You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + dv.getId().toString();
+ failureLogText += "\r\n" + e.getLocalizedMessage();
+ LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dv);
+ retVal = false;
+ }
}
return retVal;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
index 4ba8d39a949..d4bc6b83613 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
@@ -33,8 +33,7 @@
import java.awt.datatransfer.StringSelection;
import java.io.IOException;
import java.sql.Timestamp;
-import java.util.Date;
-import java.util.List;
+import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -42,7 +41,7 @@
import edu.harvard.iq.dataverse.dataaccess.StorageIO;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.util.FileUtil;
-import java.util.ArrayList;
+
import java.util.concurrent.Future;
import org.apache.logging.log4j.util.Strings;
@@ -67,7 +66,7 @@ public class FinalizeDatasetPublicationCommand extends AbstractPublishDatasetCom
*/
final boolean datasetExternallyReleased;
- List<Dataverse> dataversesToIndex = new ArrayList<>();
+ Set<Dataverse> dataversesToIndex = new HashSet<>();
public static final String FILE_VALIDATION_ERROR = "FILE VALIDATION ERROR";
@@ -209,6 +208,15 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
}
+ // The owning dataverse plus all dataverses linking to this dataset must be re-indexed to update their
+ // datasetCount
+ dataversesToIndex.add(getDataset().getOwner());
+ dataversesToIndex.addAll(getDataset().getOwner().getOwners());
+ getDataset().getDatasetLinkingDataverses().forEach(dld -> {
+ dataversesToIndex.add(dld.getLinkingDataverse());
+ dataversesToIndex.addAll(dld.getLinkingDataverse().getOwners());
+ });
+
List previouslyCalled = ctxt.getCommandsCalled();
PrivateUrl privateUrl = ctxt.engine().submit(new GetPrivateUrlCommand(getRequest(), theDataset));
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java
index 1225c892ac7..21d36f906c9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java
@@ -19,14 +19,17 @@
import edu.harvard.iq.dataverse.util.BundleUtil;
import java.io.IOException;
import java.sql.Timestamp;
+import java.util.ArrayList;
import java.util.Date;
+import java.util.List;
+
import org.apache.solr.client.solrj.SolrServerException;
/**
*
* @author skraffmiller
*/
-@RequiredPermissions(Permission.PublishDataset)
+@RequiredPermissions(Permission.LinkDataset)
public class LinkDatasetCommand extends AbstractCommand<DatasetLinkingDataverse> {
private final Dataset linkedDataset;
@@ -68,6 +71,20 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
ctxt.index().asyncIndexDataset(dld.getDataset(), true);
+ List<Dataverse> toReindex = new ArrayList<>();
+ toReindex.add(dld.getLinkingDataverse());
+ toReindex.addAll(dld.getLinkingDataverse().getOwners());
+ for (Dataverse dv : toReindex) {
+ try {
+ ctxt.index().indexDataverse(dv);
+ } catch (IOException | SolrServerException e) {
+ String failureLogText = "Indexing of linking dataverse failed. You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + dv.getId().toString();
+ failureLogText += "\r\n" + e.getLocalizedMessage();
+ LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dv);
+ return false;
+ }
+ }
+
return retVal;
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java
index 55fe96556a5..2e1aecc9a84 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java
@@ -31,7 +31,7 @@
*
* @author skraffmiller
*/
-@RequiredPermissions(Permission.PublishDataverse)
+@RequiredPermissions(Permission.LinkDataverse)
public class LinkDataverseCommand extends AbstractCommand<DataverseLinkingDataverse> {
private final Dataverse linkedDataverse;
@@ -47,7 +47,7 @@ public LinkDataverseCommand(DataverseRequest aRequest, Dataverse dataverse, Data
public DataverseLinkingDataverse execute(CommandContext ctxt) throws CommandException {
if ((!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser())) {
throw new PermissionException("Link Dataverse can only be called by superusers.",
- this, Collections.singleton(Permission.PublishDataverse), linkingDataverse);
+ this, Collections.singleton(Permission.LinkDataverse), linkingDataverse);
}
if (linkedDataverse.equals(linkingDataverse)) {
throw new IllegalCommandException("Can't link a dataverse to itself", this);
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseTemplatesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseTemplatesCommand.java
new file mode 100644
index 00000000000..f1104b5af15
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseTemplatesCommand.java
@@ -0,0 +1,39 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.Template;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+
+import java.util.*;
+
+/**
+ * Lists the templates {@link Template} of a {@link Dataverse}.
+ */
+@RequiredPermissions(Permission.EditDataverse)
+public class ListDataverseTemplatesCommand extends AbstractCommand<List<Template>> {
+
+ private final Dataverse dataverse;
+
+ public ListDataverseTemplatesCommand(DataverseRequest request, Dataverse dataverse) {
+ super(request, dataverse);
+ this.dataverse = dataverse;
+ }
+
+ @Override
+ public List<Template> execute(CommandContext ctxt) throws CommandException {
+ List<Template> templates = new ArrayList<>();
+
+ if (dataverse.getOwner() != null) {
+ templates.addAll(dataverse.getParentTemplates());
+ }
+
+ templates.addAll(dataverse.getTemplates());
+
+ return templates;
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java
index 7ac58cb1860..cb28691a596 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java
@@ -31,6 +31,7 @@
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.solr.client.solrj.SolrServerException;
import org.xml.sax.SAXException;
import io.gdcc.xoai.model.oaipmh.results.Record;
@@ -191,6 +192,17 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId
hdLogger.log(Level.INFO, String.format("Datasets created/updated: %s, datasets deleted: %s, datasets failed: %s", harvestedDatasetIds.size(), deletedIdentifiers.size(), failedIdentifiers.size()));
+ // Reindex dataverse to update datasetCount
+ List<Dataverse> toReindex = new ArrayList<>();
+ toReindex.add(harvestingClientConfig.getDataverse());
+ toReindex.addAll(harvestingClientConfig.getDataverse().getOwners());
+ for (Dataverse dv : toReindex) {
+ try {
+ indexService.indexDataverse(dv);
+ } catch (IOException | SolrServerException e) {
+ hdLogger.log(Level.SEVERE, "Dataverse indexing failed. You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + dv.getId().toString());
+ }
+ }
}
} catch (StopHarvestException she) {
hdLogger.log(Level.INFO, "HARVEST INTERRUPTED BY EXTERNAL REQUEST");
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
index 2f76fed1a11..4b491a5e722 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
@@ -1,12 +1,10 @@
package edu.harvard.iq.dataverse.harvest.client;
-import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.DataFileServiceBean;
-import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
-import edu.harvard.iq.dataverse.DataverseServiceBean;
-import edu.harvard.iq.dataverse.EjbDataverseEngine;
+import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.timer.DataverseTimerServiceBean;
+
+import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@@ -22,6 +20,7 @@
import jakarta.persistence.NoResultException;
import jakarta.persistence.NonUniqueResultException;
import jakarta.persistence.PersistenceContext;
+import org.apache.solr.client.solrj.SolrServerException;
/**
*
@@ -154,6 +153,19 @@ public void deleteClient(Long clientId) {
}
em.remove(merged);
+
+ // Reindex dataverse to update datasetCount
+ List<Dataverse> toReindex = new ArrayList<>();
+ toReindex.add(victim.getDataverse());
+ toReindex.addAll(victim.getDataverse().getOwners());
+ for (Dataverse dv : toReindex) {
+ try {
+ indexService.indexDataverse(dv);
+ } catch (IOException | SolrServerException e) {
+ logger.severe("Dataverse indexing failed. You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + dv.getId().toString());
+ }
+ }
+
} catch (Exception e) {
errorMessage = "Failed to delete cleint. Unknown exception: " + e.getMessage();
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java
index 336e7735659..e8605da09bc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java
@@ -233,7 +233,8 @@ private SolrQueryResponse getTotalCountsFromSolr(DataverseRequest dataverseReque
null,
null,
false, // no need to request facets here ...
- false // ... same for highlights
+ false, // ... same for highlights
+ false // ... same for collections
);
} catch (SearchException ex) {
logger.severe("Search for total counts failed with filter query");
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/AbstractExternalSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/AbstractExternalSearchServiceBean.java
index b564b27dc5f..7a2ef96b615 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/AbstractExternalSearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/AbstractExternalSearchServiceBean.java
@@ -50,7 +50,7 @@ public void setSettingsService(SettingsServiceBean settingsService) {
* @throws Exception
*/
protected SolrQueryResponse postProcessResponse(String responseString, DataverseRequest dataverseRequest,
- boolean retrieveEntities, boolean addFacets, boolean addHighlights) throws Exception {
+ boolean retrieveEntities, boolean addFacets, boolean addHighlights, boolean addCollections) throws Exception {
JsonObject responseObject = JsonUtil.getJsonObject(responseString);
JsonArray resultsArray = responseObject.getJsonArray("results");
@@ -82,7 +82,7 @@ protected SolrQueryResponse postProcessResponse(String responseString, Dataverse
// Execute Solr query
SolrQueryResponse solrResponse = solrSearchService.search(dataverseRequest, null, solrQuery,
Collections.emptyList(), null, null, 0, false, pids.size(), retrieveEntities, null, null, addFacets,
- addHighlights);
+ addHighlights, addCollections);
// Reorder results based on distance, lowest values first
List<SolrSearchResult> reorderedResults = solrResponse.getSolrSearchResults().stream()
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/GetExternalSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/GetExternalSearchServiceBean.java
index f5b31094557..4902bddccc9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/GetExternalSearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/GetExternalSearchServiceBean.java
@@ -44,7 +44,7 @@ public void setSolrSearchService(SearchService solrSearchService) {
public SolrQueryResponse search(DataverseRequest dataverseRequest, List dataverses, String query,
List filterQueries, String sortField, String sortOrder, int paginationStart,
boolean onlyDataRelatedToMe, int numResultsPerPage, boolean retrieveEntities, String geoPoint,
- String geoRadius, boolean addFacets, boolean addHighlights) throws SearchException {
+ String geoRadius, boolean addFacets, boolean addHighlights, boolean addCollections) throws SearchException {
String externalSearchUrl = settingsService.getValueForKey(SettingsServiceBean.Key.GetExternalSearchUrl);
if (externalSearchUrl == null || externalSearchUrl.isEmpty()) {
@@ -53,7 +53,7 @@ public SolrQueryResponse search(DataverseRequest dataverseRequest, List filterQueries, boolean addHighlights, boolean addFacets,
+ String sortOrder, List filterQueries, boolean addHighlights, boolean addCollections, boolean addFacets,
boolean onlyDataRelatedToMe, boolean retrieveEntities, String geoPoint, String geoRadius) {
StringBuilder queryParams = new StringBuilder();
queryParams.append("q=").append(URLEncoder.encode(query, StandardCharsets.UTF_8));
@@ -94,6 +94,7 @@ private String prepareQuery(String query, int paginationStart, int numResultsPer
}
queryParams.append("&show_relevance=").append(addHighlights);
+ queryParams.append("&show_collections=").append(addCollections);
queryParams.append("&show_facets=").append(addFacets);
queryParams.append("&show_entity_ids=true");
queryParams.append("&show_api_urls=true");
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/GoldenOldiesSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/GoldenOldiesSearchServiceBean.java
index 1baa3875573..f49398ae7c8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/GoldenOldiesSearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/GoldenOldiesSearchServiceBean.java
@@ -49,6 +49,7 @@ public String getDisplayName() {
* @param geoRadius e.g. "5"
* @param addFacets boolean
* @param addHighlights boolean
+ * @param addCollections boolean
* @return
* @throws SearchException
*/
@@ -67,7 +68,8 @@ public SolrQueryResponse search(
String geoPoint,
String geoRadius,
boolean addFacets,
- boolean addHighlights
+ boolean addHighlights,
+ boolean addCollections
) throws SearchException {
// Execute the query using SolrSearchService
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
index 22fdb841b28..4aa1b5abb20 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
@@ -320,6 +320,11 @@ public Future indexDataverse(Dataverse dataverse, boolean processPaths)
}
solrInputDocument.addField(SearchFields.SUBTREE, dataversePaths);
+
+ if (dataverse.isReleased()) {
+ solrInputDocument.addField(SearchFields.DATASET_COUNT, dataverseService.getDatasetCount(dataverse.getId()));
+ }
+
docs.add(solrInputDocument);
String status;
@@ -2124,7 +2129,7 @@ private List<String> retrieveDVOPaths(DvObject dvo) {
} catch (Exception ex) {
logger.info("failed to find dataverseSegments for dataversePaths for " + SearchFields.SUBTREE + ": " + ex);
}
- List<String> dataversePaths = getDataversePathsFromSegments(dataverseSegments);
+ Set<String> dataversePaths = new HashSet<>(getDataversePathsFromSegments(dataverseSegments));
if (dataversePaths.size() > 0 && dvo.isInstanceofDataverse()) {
// removing the dataverse's own id from the paths
// fixes bug where if my parent dv was linked my dv was shown as linked to myself
@@ -2134,7 +2139,7 @@ private List retrieveDVOPaths(DvObject dvo) {
add linking paths
*/
dataversePaths.addAll(findLinkingDataversePaths(findAllLinkingDataverses(dvo)));
- return dataversePaths;
+ return new ArrayList<>(dataversePaths);
}
public String delete(Dataverse doomed) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/OddlyEnoughSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/OddlyEnoughSearchServiceBean.java
index 7872239a62a..06ad4856a9c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/OddlyEnoughSearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/OddlyEnoughSearchServiceBean.java
@@ -49,6 +49,7 @@ public String getDisplayName() {
* @param geoRadius e.g. "5"
* @param addFacets boolean
* @param addHighlights boolean
+ * @param addCollections boolean
* @return
* @throws SearchException
*/
@@ -67,12 +68,13 @@ public SolrQueryResponse search(
String geoPoint,
String geoRadius,
boolean addFacets,
- boolean addHighlights
+ boolean addHighlights,
+ boolean addCollections
) throws SearchException {
logger.info("Search query: " + query + "handled by OddlyEnough search service");
// Execute the query using SolrSearchService
- SolrQueryResponse queryResponse = solrSearchService.search(dataverseRequest, dataverses, query, filterQueries, sortField, sortOrder, 0, onlyDatatRelatedToMe, 1000, retrieveEntities, geoPoint, geoRadius, addFacets, addHighlights);
+ SolrQueryResponse queryResponse = solrSearchService.search(dataverseRequest, dataverses, query, filterQueries, sortField, sortOrder, 0, onlyDatatRelatedToMe, 1000, retrieveEntities, geoPoint, geoRadius, addFacets, addHighlights, addCollections);
// Process the results
List<SolrSearchResult> solrSearchResults = queryResponse.getSolrSearchResults();
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/PostExternalSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/PostExternalSearchServiceBean.java
index 23a5fd2c938..f7e9fe5fb0f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/PostExternalSearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/PostExternalSearchServiceBean.java
@@ -45,7 +45,7 @@ private JsonObject prepareQuery(String query, int paginationStart, int numResult
public SolrQueryResponse search(DataverseRequest dataverseRequest, List dataverses, String query,
List filterQueries, String sortField, String sortOrder, int paginationStart,
boolean onlyDataRelatedToMe, int numResultsPerPage, boolean retrieveEntities, String geoPoint,
- String geoRadius, boolean addFacets, boolean addHighlights) throws SearchException {
+ String geoRadius, boolean addFacets, boolean addHighlights, boolean addCollections) throws SearchException {
String externalSearchUrl = settingsService.getValueForKey(SettingsServiceBean.Key.PostExternalSearchUrl);
if (externalSearchUrl == null || externalSearchUrl.isEmpty()) {
@@ -68,7 +68,7 @@ public SolrQueryResponse search(DataverseRequest dataverseRequest, List dataverses = new ArrayList<>();
dataverses.add(dataverse);
- solrQueryResponse = searchServiceFactory.getDefaultSearchService().search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinal, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null, !isFacetsDisabled(), true);
+ solrQueryResponse = searchServiceFactory.getDefaultSearchService().search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinal, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null, !isFacetsDisabled(), true, false);
if (solrQueryResponse.hasError()){
logger.info(solrQueryResponse.getError());
setSolrErrorEncountered(true);
@@ -416,7 +416,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused
logger.fine("second pass query: " + queryToPassToSolr);
logger.fine("second pass filter query: "+filterQueriesFinalSecondPass.toString());
- solrQueryResponseSecondPass = searchServiceFactory.getDefaultSearchService().search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalSecondPass, null, sortOrder.toString(), 0, onlyDataRelatedToMe, 1, false, null, null, false, false);
+ solrQueryResponseSecondPass = searchServiceFactory.getDefaultSearchService().search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalSecondPass, null, sortOrder.toString(), 0, onlyDataRelatedToMe, 1, false, null, null, false, false, false);
if (solrQueryResponseSecondPass != null) {
@@ -1561,7 +1561,7 @@ public boolean canSeeCurationStatus(Long datasetId) {
return canPublishDataset(datasetId);
}
}
-
+
public enum SortOrder {
asc, desc
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchService.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchService.java
index f72e10fdcee..543d3e964dd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchService.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchService.java
@@ -65,7 +65,7 @@ default SolrQueryResponse search(DataverseRequest dataverseRequest, List filterQueries, String sortField, String sortOrder, int paginationStart,
boolean onlyDataRelatedToMe, int numResultsPerPage, boolean retrieveEntities, String geoPoint,
String geoRadius) throws SearchException{
- return search(dataverseRequest, dataverses, query, filterQueries, sortField, sortOrder, paginationStart, onlyDataRelatedToMe, numResultsPerPage, true, null, null, true, true);
+ return search(dataverseRequest, dataverses, query, filterQueries, sortField, sortOrder, paginationStart, onlyDataRelatedToMe, numResultsPerPage, true, null, null, true, true, false);
}
/**
@@ -83,13 +83,14 @@ default SolrQueryResponse search(DataverseRequest dataverseRequest, List dataverses, String query,
List filterQueries, String sortField, String sortOrder, int paginationStart,
boolean onlyDataRelatedToMe, int numResultsPerPage, boolean retrieveEntities, String geoPoint,
- String geoRadius, boolean addFacets, boolean addHighlights) throws SearchException;
+ String geoRadius, boolean addFacets, boolean addHighlights, boolean addCollections) throws SearchException;
/** Provide a way for other search engines to use the solr search engine
*
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
index e5e89e042a0..67af99e4b78 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
@@ -25,6 +25,8 @@
import edu.harvard.iq.dataverse.util.json.JsonPrinter;
import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
+import javax.xml.crypto.Data;
+
public class SolrSearchResult {
private static final Logger logger = Logger.getLogger(SolrSearchResult.class.getCanonicalName());
@@ -82,6 +84,10 @@ public class SolrSearchResult {
* Only Dataset can have a file count
*/
private Long fileCount;
+ /**
+ * Only Dataverses can have a dataset count
+ */
+ private Long datasetCount;
/**
* Files and datasets might have a UNF. Dataverses don't.
*/
@@ -102,6 +108,7 @@ public class SolrSearchResult {
private String dataverseAlias;
private String dataverseParentAlias;
private String dataverseParentName;
+ private List<Dataverse> collections;
// private boolean statePublished;
/**
* @todo Investigate/remove this "unpublishedState" variable. For files that
@@ -698,10 +705,24 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool
nullSafeJsonBuilder.add("metadataBlocks", metadataFieldBuilder);
}
+
+ if (this.collections != null && !this.collections.isEmpty()) {
+ JsonArrayBuilder collections = Json.createArrayBuilder();
+ for (Dataverse collection : this.collections) {
+ NullSafeJsonBuilder dvBuilder = jsonObjectBuilder();
+ dvBuilder.add("id", collection.getId());
+ dvBuilder.add("name", collection.getName());
+ dvBuilder.add("alias", collection.getAlias());
+ collections.add(dvBuilder);
+ }
+ nullSafeJsonBuilder.add("collections", collections);
+ }
+
} else if (this.entity.isInstanceofDataverse()) {
nullSafeJsonBuilder.add("affiliation", dataverseAffiliation);
nullSafeJsonBuilder.add("parentDataverseName", dataverseParentName);
nullSafeJsonBuilder.add("parentDataverseIdentifier", dataverseParentAlias);
+ nullSafeJsonBuilder.add("datasetCount", this.datasetCount);
} else if (this.entity.isInstanceofDataFile()) {
// "published_at" field is only set when the version state is not draft.
// On the contrary, this field also takes into account DataFiles in draft version,
@@ -1300,6 +1321,10 @@ public void setDataverseParentName(String dataverseParentName) {
this.dataverseParentName = dataverseParentName;
}
+ public void setCollections(List<Dataverse> collections) {
+ this.collections = collections;
+ }
+
public float getScore() {
return score;
}
@@ -1402,4 +1427,12 @@ public Long getFileCount() {
public void setFileCount(Long fileCount) {
this.fileCount = fileCount;
}
+
+ public Long getDatasetCount() {
+ return datasetCount;
+ }
+
+ public void setDatasetCount(Long datasetCount) {
+ this.datasetCount = datasetCount;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchServiceBean.java
index 2a6bd335f43..530d3f9ef7e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchServiceBean.java
@@ -36,6 +36,8 @@
import jakarta.ejb.TransactionRolledbackLocalException;
import jakarta.inject.Inject;
import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
import jakarta.persistence.NoResultException;
import org.apache.commons.lang3.StringUtils;
@@ -70,6 +72,8 @@ public class SolrSearchServiceBean implements SearchService {
@EJB
DatasetVersionServiceBean datasetVersionService;
@EJB
+ DataverseServiceBean dataverseService;
+ @EJB
DatasetFieldServiceBean datasetFieldService;
@EJB
GroupServiceBean groupService;
@@ -82,12 +86,12 @@ public class SolrSearchServiceBean implements SearchService {
@Inject
ThumbnailServiceWrapper thumbnailServiceWrapper;
-
+
@Override
public String getServiceName() {
return SearchServiceFactory.INTERNAL_SOLR_SERVICE_NAME;
}
-
+
@Override
public String getDisplayName() {
return "Dataverse Standard Search";
@@ -107,6 +111,7 @@ public String getDisplayName() {
* @param geoRadius e.g. "5"
* @param addFacets boolean
* @param addHighlights boolean
+ * @param addCollections boolean
* @return
* @throws SearchException
*/
@@ -125,7 +130,8 @@ public SolrQueryResponse search(
String geoPoint,
String geoRadius,
boolean addFacets,
- boolean addHighlights
+ boolean addHighlights,
+ boolean addCollections
) throws SearchException {
if (paginationStart < 0) {
@@ -188,7 +194,7 @@ public SolrQueryResponse search(
solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS);
solrQuery.addFacetField(SearchFields.DATASET_LICENSE);
solrQuery.addFacetField(SearchFields.CURATION_STATUS);
-
+
/**
* @todo when a new method on datasetFieldService is available
* (retrieveFacetsByDataverse?) only show the facets that the
@@ -450,11 +456,13 @@ public SolrQueryResponse search(
String dataverseAffiliation = (String) solrDocument.getFieldValue(SearchFields.DATAVERSE_AFFILIATION);
String dataverseParentAlias = (String) solrDocument.getFieldValue(SearchFields.DATAVERSE_PARENT_ALIAS);
String dataverseParentName = (String) solrDocument.getFieldValue(SearchFields.PARENT_NAME);
+ List<String> subtreePaths = (List) solrDocument.getFieldValues(SearchFields.SUBTREE);
Long embargoEndDate = (Long) solrDocument.getFieldValue(SearchFields.EMBARGO_END_DATE);
Long retentionEndDate = (Long) solrDocument.getFieldValue(SearchFields.RETENTION_END_DATE);
//
Boolean datasetValid = (Boolean) solrDocument.getFieldValue(SearchFields.DATASET_VALID);
Long fileCount = (Long) solrDocument.getFieldValue(SearchFields.FILE_COUNT);
+ Long datasetCount = (Long) solrDocument.getFieldValue(SearchFields.DATASET_COUNT);
List matchedFields = new ArrayList<>();
@@ -529,6 +537,7 @@ public SolrQueryResponse search(
solrSearchResult.setDvTree(dvTree);
solrSearchResult.setDatasetValid(datasetValid);
solrSearchResult.setFileCount(fileCount);
+ solrSearchResult.setDatasetCount(datasetCount);
if (Boolean.TRUE.equals((Boolean) solrDocument.getFieldValue(SearchFields.IS_HARVESTED))) {
solrSearchResult.setHarvested(true);
@@ -584,6 +593,19 @@ public SolrQueryResponse search(
solrSearchResult.setIdentifierOfDataverse(identifierOfDataverse);
solrSearchResult.setNameOfDataverse(nameOfDataverse);
+ if (addCollections && subtreePaths != null) {
+ List<Dataverse> collections = new ArrayList<>();
+ for (String subtreePath : subtreePaths) {
+ String[] pathSegments = subtreePath.split("/");
+ if (pathSegments.length == 0) {
+ // Skip unexpected malformed subtree path
+ continue;
+ }
+ collections.add(dataverseService.find(Long.valueOf(pathSegments[pathSegments.length - 1])));
+ }
+ solrSearchResult.setCollections(collections);
+ }
+
if (title != null) {
// solrSearchResult.setTitle((String) titles.get(0));
solrSearchResult.setTitle(title);
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java
index 27c65ed067c..b0aaae7b0d4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java
@@ -71,6 +71,20 @@ public enum FeatureFlags {
* @since Dataverse @6.7:
*/
API_BEARER_AUTH_USE_BUILTIN_USER_ON_ID_MATCH("api-bearer-auth-use-builtin-user-on-id-match"),
+
+ /**
+ * Allows the use of a Shibboleth user account when an identity match is found during API bearer authentication.
+ * This feature enables automatic association of an incoming IdP identity with an existing Shibboleth user account,
+ * bypassing the need for additional user registration steps.
+ *
+ * The value of this feature flag is only considered when the feature flag
+ * {@link #API_BEARER_AUTH} is enabled.
+ *
+ * @apiNote Raise flag by setting "dataverse.feature.api-bearer-auth-use-shib-user-on-id-match"
+ * @since Dataverse @TODO:
+ */
+ API_BEARER_AUTH_USE_SHIB_USER_ON_ID_MATCH("api-bearer-auth-use-shib-user-on-id-match"),
+
/**
* For published (public) objects, don't use a join when searching Solr.
* Experimental! Requires a reindex with the following feature flag enabled,
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
index c80e206ec69..69f9262ab5b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
@@ -288,9 +288,13 @@ public String getPageURLWithQueryString() {
}
public String getGuidesBaseUrl() {
+ return getGuidesBaseUrl(true);
+ }
+
+ public String getGuidesBaseUrl(boolean includeLang) {
String saneDefault = "https://guides.dataverse.org";
String guidesBaseUrl = settingsService.getValueForKey(SettingsServiceBean.Key.GuidesBaseUrl, saneDefault);
- return guidesBaseUrl + "/" + getGuidesLanguage();
+ return includeLang ? guidesBaseUrl + "/" + getGuidesLanguage() : guidesBaseUrl;
}
private String getGuidesLanguage() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/InAppNotificationsJsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/InAppNotificationsJsonPrinter.java
new file mode 100644
index 00000000000..9a98f3b8413
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/InAppNotificationsJsonPrinter.java
@@ -0,0 +1,263 @@
+package edu.harvard.iq.dataverse.util.json;
+
+import edu.harvard.iq.dataverse.*;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.branding.BrandingUtil;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+
+import static edu.harvard.iq.dataverse.dataset.DatasetUtil.getLocaleCurationStatusLabel;
+import static edu.harvard.iq.dataverse.util.json.JsonPrinter.jsonRoleAssignments;
+
+/**
+ * A helper class to build a JSON representation of a UserNotification.
+ *
+ * It is responsible for adding the correct fields to a JSON object based on the
+ * notification type.
+ */
+@Stateless
+public class InAppNotificationsJsonPrinter {
+
+ public static final String KEY_ROLE_ASSIGNMENTS = "roleAssignments";
+ public static final String KEY_DATAVERSE_ALIAS = "dataverseAlias";
+ public static final String KEY_DATAVERSE_DISPLAY_NAME = "dataverseDisplayName";
+ public static final String KEY_DATASET_PERSISTENT_ID = "datasetPersistentIdentifier";
+ public static final String KEY_DATASET_DISPLAY_NAME = "datasetDisplayName";
+ public static final String KEY_OWNER_PERSISTENT_ID = "ownerPersistentIdentifier";
+ public static final String KEY_OWNER_ALIAS = "ownerAlias";
+ public static final String KEY_OWNER_DISPLAY_NAME = "ownerDisplayName";
+ public static final String KEY_REQUESTOR_FIRST_NAME = "requestorFirstName";
+ public static final String KEY_REQUESTOR_LAST_NAME = "requestorLastName";
+ public static final String KEY_REQUESTOR_EMAIL = "requestorEmail";
+ public static final String KEY_DATAFILE_ID = "dataFileId";
+ public static final String KEY_DATAFILE_DISPLAY_NAME = "dataFileDisplayName";
+ public static final String KEY_GUIDES_BASE_URL = "userGuidesBaseUrl";
+ public static final String KEY_GUIDES_VERSION = "userGuidesVersion";
+ public static final String KEY_GUIDES_SECTION_PATH = "userGuidesSectionPath";
+ public static final String KEY_CURATION_STATUS = "currentCurationStatus";
+ public static final String KEY_ADDITIONAL_INFO = "additionalInfo";
+ public static final String KEY_OBJECT_DELETED = "objectDeleted";
+ public static final String KEY_INSTALLATION_BRAND_NAME = "installationBrandName";
+
+ public static final String GUIDES_SECTION_PATH_DATAVERSE_MANAGEMENT_HTML = "user/dataverse-management.html";
+ public static final String GUIDES_SECTION_PATH_DATASET_MANAGEMENT_HTML = "user/dataset-management.html";
+ public static final String GUIDES_SECTION_PATH_DATASET_MANAGEMENT_TABULAR_FILES_HTML = "user/dataset-management.html#tabular-data-files";
+ public static final String GUIDES_SECTION_PATH_USER_HTML = "user/index.html";
+
+ @EJB
+ private DataverseServiceBean dataverseService;
+ @EJB
+ private DatasetServiceBean datasetService;
+ @EJB
+ private DatasetVersionServiceBean datasetVersionService;
+ @EJB
+ private DataFileServiceBean dataFileService;
+ @EJB
+ private PermissionServiceBean permissionService;
+ @EJB
+ private SystemConfig systemConfig;
+
+ /**
+ * Populates a JSON builder with fields specific to the notification type.
+ *
+ * @param notificationJson The JSON builder to add fields to.
+ * @param authenticatedUser The user receiving the notification.
+ * @param userNotification The notification object containing the details.
+ */
+ public void addFieldsByType(final NullSafeJsonBuilder notificationJson, final AuthenticatedUser authenticatedUser, final UserNotification userNotification) {
+ final AuthenticatedUser requestor = userNotification.getRequestor();
+
+ switch (userNotification.getType()) {
+ case ASSIGNROLE:
+ case REVOKEROLE:
+ addRoleFields(notificationJson, authenticatedUser, userNotification);
+ break;
+ case CREATEDV:
+ addCreateDataverseFields(notificationJson, userNotification);
+ break;
+ case REQUESTFILEACCESS:
+ addRequestFileAccessFields(notificationJson, userNotification, requestor);
+ break;
+ case REQUESTEDFILEACCESS:
+ case GRANTFILEACCESS:
+ case REJECTFILEACCESS:
+ addDataFileFields(notificationJson, userNotification);
+ break;
+ case DATASETCREATED:
+ addDatasetCreatedFields(notificationJson, userNotification, requestor);
+ break;
+ case CREATEDS:
+ addCreateDatasetFields(notificationJson, userNotification);
+ break;
+ case SUBMITTEDDS:
+ addSubmittedDatasetFields(notificationJson, userNotification, requestor);
+ break;
+ case PUBLISHEDDS:
+ case PUBLISHFAILED_PIDREG:
+ case RETURNEDDS:
+ case WORKFLOW_SUCCESS:
+ case WORKFLOW_FAILURE:
+ case PIDRECONCILED:
+ case FILESYSTEMIMPORT:
+ case CHECKSUMIMPORT:
+ addDatasetVersionFields(notificationJson, userNotification);
+ break;
+ case STATUSUPDATED:
+ addDatasetVersionFields(notificationJson, userNotification, true);
+ break;
+ case CREATEACC:
+ addCreateAccountFields(notificationJson);
+ break;
+ case GLOBUSUPLOADCOMPLETED:
+ case GLOBUSDOWNLOADCOMPLETED:
+ case GLOBUSUPLOADCOMPLETEDWITHERRORS:
+ case GLOBUSUPLOADREMOTEFAILURE:
+ case GLOBUSUPLOADLOCALFAILURE:
+ case GLOBUSDOWNLOADCOMPLETEDWITHERRORS:
+ case CHECKSUMFAIL:
+ addDatasetFields(notificationJson, userNotification);
+ break;
+ case INGESTCOMPLETED:
+ case INGESTCOMPLETEDWITHERRORS:
+ addIngestFields(notificationJson, userNotification);
+ break;
+ case DATASETMENTIONED:
+ addDatasetMentionedFields(notificationJson, userNotification);
+ break;
+ }
+ }
+
+ private void addRoleFields(final NullSafeJsonBuilder notificationJson, final AuthenticatedUser authenticatedUser, final UserNotification userNotification) {
+ Dataverse dataverse = dataverseService.find(userNotification.getObjectId());
+ if (dataverse != null) {
+ notificationJson.add(KEY_ROLE_ASSIGNMENTS, jsonRoleAssignments(permissionService.getEffectiveRoleAssignments(authenticatedUser, dataverse)));
+ notificationJson.add(KEY_DATAVERSE_ALIAS, dataverse.getAlias());
+ notificationJson.add(KEY_DATAVERSE_DISPLAY_NAME, dataverse.getDisplayName());
+ } else {
+ Dataset dataset = datasetService.find(userNotification.getObjectId());
+ if (dataset != null) {
+ notificationJson.add(KEY_ROLE_ASSIGNMENTS, jsonRoleAssignments(permissionService.getEffectiveRoleAssignments(authenticatedUser, dataset)));
+ notificationJson.add(KEY_DATASET_PERSISTENT_ID, dataset.getGlobalId().asString());
+ notificationJson.add(KEY_DATASET_DISPLAY_NAME, dataset.getDisplayName());
+ } else {
+ DataFile datafile = dataFileService.find(userNotification.getObjectId());
+ if (datafile != null) {
+ notificationJson.add(KEY_ROLE_ASSIGNMENTS, jsonRoleAssignments(permissionService.getEffectiveRoleAssignments(authenticatedUser, datafile)));
+ notificationJson.add(KEY_OWNER_PERSISTENT_ID, datafile.getOwner().getGlobalId().asString());
+ notificationJson.add(KEY_OWNER_DISPLAY_NAME, datafile.getOwner().getDisplayName());
+ } else {
+ notificationJson.add(KEY_OBJECT_DELETED, true);
+ }
+ }
+ }
+ }
+
+ private void addCreateDataverseFields(final NullSafeJsonBuilder notificationJson, final UserNotification userNotification) {
+ final Dataverse dataverse = dataverseService.find(userNotification.getObjectId());
+ if (dataverse != null) {
+ notificationJson.add(KEY_DATAVERSE_ALIAS, dataverse.getAlias());
+ notificationJson.add(KEY_DATAVERSE_DISPLAY_NAME, dataverse.getDisplayName());
+ Dataverse owner = dataverse.getOwner();
+ if (owner != null) {
+ notificationJson.add(KEY_OWNER_ALIAS, owner.getAlias());
+ notificationJson.add(KEY_OWNER_DISPLAY_NAME, owner.getDisplayName());
+ }
+ } else {
+ notificationJson.add(KEY_OBJECT_DELETED, true);
+ }
+ addGuidesFields(notificationJson, GUIDES_SECTION_PATH_DATAVERSE_MANAGEMENT_HTML);
+ }
+
+ private void addCreateAccountFields(final NullSafeJsonBuilder notificationJson) {
+ notificationJson.add(KEY_INSTALLATION_BRAND_NAME, BrandingUtil.getInstallationBrandName());
+ addGuidesFields(notificationJson, GUIDES_SECTION_PATH_USER_HTML);
+ }
+
+ private void addRequestFileAccessFields(final NullSafeJsonBuilder notificationJson, final UserNotification userNotification, final AuthenticatedUser requestor) {
+ addRequestorFields(notificationJson, requestor);
+ addDataFileFields(notificationJson, userNotification);
+ }
+
+ private void addDataFileFields(final NullSafeJsonBuilder notificationJson, final UserNotification userNotification) {
+ final DataFile dataFile = dataFileService.find(userNotification.getObjectId());
+ if (dataFile != null) {
+ notificationJson.add(KEY_DATAFILE_ID, dataFile.getId());
+ notificationJson.add(KEY_DATAFILE_DISPLAY_NAME, dataFile.getDisplayName());
+ } else {
+ notificationJson.add(KEY_OBJECT_DELETED, true);
+ }
+ }
+
+ private void addDatasetCreatedFields(final NullSafeJsonBuilder notificationJson, final UserNotification userNotification, final AuthenticatedUser requestor) {
+ addDatasetFields(notificationJson, userNotification);
+ addRequestorFields(notificationJson, requestor);
+ }
+
+ private void addRequestorFields(final NullSafeJsonBuilder notificationJson, final AuthenticatedUser requestor) {
+ notificationJson.add(KEY_REQUESTOR_FIRST_NAME, requestor.getFirstName());
+ notificationJson.add(KEY_REQUESTOR_LAST_NAME, requestor.getLastName());
+ notificationJson.add(KEY_REQUESTOR_EMAIL, requestor.getEmail());
+ }
+
+ private void addDatasetFields(final NullSafeJsonBuilder notificationJson, final UserNotification userNotification) {
+ final Dataset dataset = datasetService.find(userNotification.getObjectId());
+ if (dataset != null) {
+ notificationJson.add(KEY_DATASET_PERSISTENT_ID, dataset.getGlobalId().asString());
+ notificationJson.add(KEY_DATASET_DISPLAY_NAME, dataset.getDisplayName());
+ notificationJson.add(KEY_OWNER_ALIAS, dataset.getOwner().getAlias());
+ notificationJson.add(KEY_OWNER_DISPLAY_NAME, dataset.getOwner().getDisplayName());
+ } else {
+ notificationJson.add(KEY_OBJECT_DELETED, true);
+ }
+ }
+
+ private void addCreateDatasetFields(final NullSafeJsonBuilder notificationJson, final UserNotification userNotification) {
+ addGuidesFields(notificationJson, GUIDES_SECTION_PATH_DATASET_MANAGEMENT_HTML);
+ addDatasetVersionFields(notificationJson, userNotification);
+ }
+
+ private void addGuidesFields(final NullSafeJsonBuilder notificationJson, String guidesSectionPath) {
+ notificationJson.add(KEY_GUIDES_BASE_URL, systemConfig.getGuidesBaseUrl(false));
+ notificationJson.add(KEY_GUIDES_VERSION, systemConfig.getGuidesVersion());
+
+ if (guidesSectionPath != null) {
+ notificationJson.add(KEY_GUIDES_SECTION_PATH, guidesSectionPath);
+ }
+ }
+
+ private void addSubmittedDatasetFields(final NullSafeJsonBuilder notificationJson, final UserNotification userNotification, final AuthenticatedUser requestor) {
+ addDatasetFields(notificationJson, userNotification);
+ addRequestorFields(notificationJson, requestor);
+ }
+
+ private void addDatasetVersionFields(final NullSafeJsonBuilder notificationJson, final UserNotification userNotification) {
+ addDatasetVersionFields(notificationJson, userNotification, false);
+ }
+
+ private void addDatasetVersionFields(final NullSafeJsonBuilder notificationJson, final UserNotification userNotification, final boolean addCurationStatus) {
+ final DatasetVersion datasetVersion = datasetVersionService.find(userNotification.getObjectId());
+ if (datasetVersion != null) {
+ Dataset dataset = datasetVersion.getDataset();
+ notificationJson.add(KEY_DATASET_PERSISTENT_ID, dataset.getGlobalId().asString());
+ notificationJson.add(KEY_DATASET_DISPLAY_NAME, dataset.getDisplayName());
+ notificationJson.add(KEY_OWNER_ALIAS, dataset.getOwner().getAlias());
+ notificationJson.add(KEY_OWNER_DISPLAY_NAME, dataset.getOwner().getDisplayName());
+ if (addCurationStatus) {
+ notificationJson.add(KEY_CURATION_STATUS, getLocaleCurationStatusLabel(datasetVersion.getCurrentCurationStatus()));
+ }
+ } else {
+ notificationJson.add(KEY_OBJECT_DELETED, true);
+ }
+ }
+
+ private void addIngestFields(final NullSafeJsonBuilder notificationJson, final UserNotification userNotification) {
+ addDatasetFields(notificationJson, userNotification);
+ addGuidesFields(notificationJson, GUIDES_SECTION_PATH_DATASET_MANAGEMENT_TABULAR_FILES_HTML);
+ }
+
+ private void addDatasetMentionedFields(final NullSafeJsonBuilder notificationJson, final UserNotification userNotification) {
+ addDatasetFields(notificationJson, userNotification);
+ notificationJson.add(KEY_ADDITIONAL_INFO, userNotification.getAdditionalInfo());
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index 23bdd460160..cf4426468db 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -36,6 +36,7 @@
import edu.harvard.iq.dataverse.util.DatasetFieldWalker;
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
+import edu.harvard.iq.dataverse.util.MailUtil;
import edu.harvard.iq.dataverse.workflow.Workflow;
import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData;
@@ -76,11 +77,24 @@ public class JsonPrinter {
@EJB
static DatasetServiceBean datasetService;
+
+ @EJB
+ static MailServiceBean mailService;
+
+ @EJB
+ static InAppNotificationsJsonPrinter inAppNotificationsJsonPrinter;
- public static void injectSettingsService(SettingsServiceBean ssb, DatasetFieldServiceBean dfsb, DataverseFieldTypeInputLevelServiceBean dfils, DatasetServiceBean ds) {
+ public static void injectSettingsService(SettingsServiceBean ssb,
+ DatasetFieldServiceBean dfsb,
+ DataverseFieldTypeInputLevelServiceBean dfils,
+ DatasetServiceBean ds,
+ MailServiceBean ms,
+ InAppNotificationsJsonPrinter njp) {
settingsService = ssb;
datasetFieldService = dfsb;
datasetService = ds;
+ mailService = ms;
+ inAppNotificationsJsonPrinter = njp;
}
public JsonPrinter() {
@@ -128,11 +142,18 @@ public static JsonObjectBuilder json(AuthenticatedUser authenticatedUser) {
return builder;
}
+ public static JsonArrayBuilder jsonRoleAssignments(List<RoleAssignment> roleAssignments) {
+ JsonArrayBuilder bld = Json.createArrayBuilder();
+ roleAssignments.forEach(roleAssignment -> bld.add(json(roleAssignment)));
+ return bld;
+ }
+
public static JsonObjectBuilder json(RoleAssignment ra) {
return jsonObjectBuilder()
.add("id", ra.getId())
.add("assignee", ra.getAssigneeIdentifier())
.add("roleId", ra.getRole().getId())
+ .add("roleName", ra.getRole().getName())
.add("_roleAlias", ra.getRole().getAlias())
.add("privateUrlToken", ra.getPrivateUrlToken())
.add("definitionPointId", ra.getDefinitionPoint().getId());
@@ -755,6 +776,7 @@ public static JsonObjectBuilder json(DatasetFieldType fld, Dataverse ownerDatave
fieldsBld.add("description", fld.getDescription());
fieldsBld.add("multiple", fld.isAllowMultiples());
fieldsBld.add("isControlledVocabulary", fld.isControlledVocabulary());
+ fieldsBld.add("isAdvancedSearchFieldType", fld.isAdvancedSearchFieldType());
fieldsBld.add("displayFormat", fld.getDisplayFormat());
fieldsBld.add("displayOrder", fld.getDisplayOrder());
@@ -863,6 +885,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo
}
fileName = fileMetadata.getLabel();
+ String directoryLabel = fileMetadata.getDirectoryLabel();
GlobalId filePid = df.getGlobalId();
String pidURL = (filePid!=null)? filePid.asURL(): null;
//For backward compatibility - prior to #8674, asString() returned "" for the value when no PID exists.
@@ -905,7 +928,9 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo
.add("tabularData", df.isTabularData())
.add("tabularTags", getTabularFileTags(df))
.add("creationDate", df.getCreateDateFormattedYYYYMMDD())
- .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD());
+ .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD())
+ .add("directoryLabel", directoryLabel)
+ .add("lastUpdateTime", format(fileMetadata.getDatasetVersion().getLastUpdateTime()));
Dataset dfOwner = df.getOwner();
if (dfOwner != null) {
builder.add("fileAccessRequest", dfOwner.isFileAccessRequest());
@@ -1554,4 +1579,91 @@ public static JsonObjectBuilder json(DataverseFeaturedItem dataverseFeaturedItem
}
return job;
}
+
+ public static JsonArrayBuilder jsonTemplates(List<Template> templates) {
+ JsonArrayBuilder templatesArrayBuilder = Json.createArrayBuilder();
+ for (Template template : templates) {
+ templatesArrayBuilder.add(jsonTemplate(template));
+ }
+ return templatesArrayBuilder;
+ }
+
+ public static JsonObjectBuilder jsonTemplate(Template template) {
+ return jsonObjectBuilder()
+ .add("id", template.getId())
+ .add("name", template.getName())
+ .add("usageCount", template.getUsageCount())
+ .add("createTime", template.getCreateTime().toString())
+ .add("createDate", template.getCreateDate())
+ .add("termsOfUseAndAccess", jsonTermsOfUseAndAccess(template.getTermsOfUseAndAccess()))
+ .add("datasetFields", jsonByBlocks(template.getDatasetFields()))
+ .add("instructions", jsonTemplateInstructions(template.getInstructionsMap()))
+ .add("dataverseAlias", template.getDataverse().getAlias());
+ }
+
+ public static JsonObjectBuilder jsonTermsOfUseAndAccess(TermsOfUseAndAccess termsOfUseAndAccess) {
+ return jsonObjectBuilder()
+ .add("id", termsOfUseAndAccess.getId())
+ .add("license", json(termsOfUseAndAccess.getLicense()))
+ .add("termsOfUse", termsOfUseAndAccess.getTermsOfUse())
+ .add("termsOfAccess", termsOfUseAndAccess.getTermsOfAccess())
+ .add("confidentialityDeclaration", termsOfUseAndAccess.getConfidentialityDeclaration())
+ .add("specialPermissions", termsOfUseAndAccess.getSpecialPermissions())
+ .add("restrictions", termsOfUseAndAccess.getRestrictions())
+ .add("citationRequirements", termsOfUseAndAccess.getCitationRequirements())
+ .add("depositorRequirements", termsOfUseAndAccess.getDepositorRequirements())
+ .add("conditions", termsOfUseAndAccess.getConditions())
+ .add("disclaimer", termsOfUseAndAccess.getDisclaimer())
+ .add("dataAccessPlace", termsOfUseAndAccess.getDataAccessPlace())
+ .add("originalArchive", termsOfUseAndAccess.getOriginalArchive())
+ .add("availabilityStatus", termsOfUseAndAccess.getAvailabilityStatus())
+ .add("sizeOfCollection", termsOfUseAndAccess.getSizeOfCollection())
+ .add("studyCompletion", termsOfUseAndAccess.getStudyCompletion());
+ }
+
+ public static JsonArrayBuilder jsonTemplateInstructions(Map<String, String> templateInstructions) {
+ JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder();
+
+ for (Map.Entry<String, String> entry : templateInstructions.entrySet()) {
+ JsonObjectBuilder instructionObject = Json.createObjectBuilder()
+ .add("instructionField", entry.getKey())
+ .add("instructionText", entry.getValue());
+ jsonArrayBuilder.add(instructionObject);
+ }
+
+ return jsonArrayBuilder;
+ }
+
+ public static JsonArrayBuilder json(List<UserNotification> notifications, AuthenticatedUser authenticatedUser, boolean inAppNotificationFormat) {
+ JsonArrayBuilder notificationsArray = Json.createArrayBuilder();
+
+ for (UserNotification notification : notifications) {
+ NullSafeJsonBuilder notificationJson = jsonObjectBuilder();
+ UserNotification.Type type = notification.getType();
+
+ notificationJson.add("id", notification.getId());
+ notificationJson.add("type", type.toString());
+ notificationJson.add("displayAsRead", notification.isReadNotification());
+ notificationJson.add("sentTimestamp", notification.getSendDateTimestamp());
+
+ if (inAppNotificationFormat) {
+ inAppNotificationsJsonPrinter.addFieldsByType(notificationJson, authenticatedUser, notification);
+ } else {
+ Object relatedObject = mailService.getObjectOfNotification(notification);
+ if (relatedObject != null) {
+ String subjectText = MailUtil.getSubjectTextBasedOnNotification(notification, relatedObject);
+ String messageText = mailService.getMessageTextBasedOnNotification(
+ notification, relatedObject, null, notification.getRequestor()
+ );
+
+ notificationJson.add("subjectText", subjectText);
+ notificationJson.add("messageText", messageText);
+ }
+ }
+
+ notificationsArray.add(notificationJson);
+ }
+
+ return notificationsArray;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java
index aeba4ba797f..a9a05f1b699 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java
@@ -3,6 +3,7 @@
import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevelServiceBean;
+import edu.harvard.iq.dataverse.MailServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import jakarta.annotation.PostConstruct;
@@ -11,7 +12,7 @@
import jakarta.ejb.Startup;
/**
- * This is a small helper bean
+ * This is a small helper bean
* As it is a singleton and built at application start (=deployment), it will inject the (stateless)
* settings service into the OREMap once it's ready.
*/
@@ -20,18 +21,31 @@
public class JsonPrinterHelper {
@EJB
SettingsServiceBean settingsSvc;
-
+
@EJB
DatasetFieldServiceBean datasetFieldSvc;
-
+
@EJB
DataverseFieldTypeInputLevelServiceBean datasetFieldInpuLevelSvc;
@EJB
DatasetServiceBean datasetSvc;
-
+
+ @EJB
+ MailServiceBean mailSvc;
+
+ @EJB
+ InAppNotificationsJsonPrinter inAppNotificationsJsonPrinter;
+
@PostConstruct
public void injectService() {
- JsonPrinter.injectSettingsService(settingsSvc, datasetFieldSvc, datasetFieldInpuLevelSvc, datasetSvc);
+ JsonPrinter.injectSettingsService(
+ settingsSvc,
+ datasetFieldSvc,
+ datasetFieldInpuLevelSvc,
+ datasetSvc,
+ mailSvc,
+ inAppNotificationsJsonPrinter
+ );
}
}
diff --git a/src/main/java/propertyFiles/BuiltInRoles.properties b/src/main/java/propertyFiles/BuiltInRoles.properties
index 026df600a9c..50dbb1ba80f 100644
--- a/src/main/java/propertyFiles/BuiltInRoles.properties
+++ b/src/main/java/propertyFiles/BuiltInRoles.properties
@@ -3,7 +3,7 @@ role.admin.description=A person who has all permissions for dataverses, datasets
role.contributor.name=Contributor
role.contributor.description=For datasets, a person who can edit License + Terms, and then submit them for review.
role.curator.name=Curator
-role.curator.description=For datasets, a person who can edit License + Terms, edit Permissions, and publish datasets.
+role.curator.description=For datasets, a person who can edit License + Terms, edit Permissions, and publish and link datasets.
role.dscontributor.name=Dataset Creator
role.dscontributor.description=A person who can add datasets within a dataverse.
role.fullcontributor.name=Dataverse + Dataset Creator
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index 6560b914f56..486a4e9b7f2 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -1029,6 +1029,7 @@ dataverse.update.featuredItems.error.invalidTypeAndDvObject=Invalid type and DvO
dataverse.update.featuredItems.error.invalidType=Unknown 'type' must be one of {0}
dataverse.update.featuredItems.error.typeAndDvObjectMismatch=The 'type' passed does not match the dvObject type.
dataverse.delete.featuredItems.success=All featured items of this Dataverse have been successfully deleted.
+dataverse.createTemplate.error.jsonParseMetadataFields=Error parsing the POSTed template dataset fields: {0}
# rolesAndPermissionsFragment.xhtml
# advanced.xhtml
@@ -2563,6 +2564,8 @@ permission.deleteDataset=Delete a dataset draft
permission.deleteDataverse=Delete an unpublished dataverse
permission.publishDataset=Publish a dataset
permission.publishDataverse=Publish a dataverse
+permission.linkDataset=Link a dataset to a dataverse
+permission.linkDataverse=Link a dataverse to another dataverse
permission.managePermissionsDataFile=Manage permissions for a file
permission.managePermissionsDataset=Manage permissions for a dataset
permission.managePermissionsDataverse=Manage permissions for a dataverse
@@ -2923,6 +2926,8 @@ permission.ManageDataversePermissions.label=ManageDataversePermissions
permission.ManageDatasetPermissions.label=ManageDatasetPermissions
permission.PublishDataverse.label=PublishDataverse
permission.PublishDataset.label=PublishDataset
+permission.LinkDataverse.label=LinkDataverse
+permission.LinkDataset.label=LinkDataset
permission.DeleteDataverse.label=DeleteDataverse
permission.DeleteDatasetDraft.label=DeleteDatasetDraft
permission.ManageFilePermissions.label=ManageFilePermissions
@@ -2932,6 +2937,8 @@ permission.DeleteDatasetDraft.desc=Delete a dataset draft
permission.DeleteDataverse.desc=Delete an unpublished dataverse
permission.PublishDataset.desc=Publish a dataset
permission.PublishDataverse.desc=Publish a dataverse
+permission.LinkDataset.desc=Link a dataset
+permission.LinkDataverse.desc=Link a dataverse
permission.ManageFilePermissions.desc=Manage permissions for a file
permission.ManageDatasetPermissions.desc=Manage permissions for a dataset
permission.ManageDataversePermissions.desc=Manage permissions for a dataverse
@@ -3220,7 +3227,7 @@ datasetFieldValidator.error.emptyRequiredSingleValueForField=Empty required valu
updateDatasetFieldsCommand.api.processDatasetUpdate.parseError=Error parsing dataset update: {0}
#AbstractApiBean.java
-abstractApiBean.error.datasetInternalVersionNumberIsOutdated=Dataset internal version number {0} is outdated
+abstractApiBean.error.internalVersionTimestampIsOutdated=Internal version timestamp {0} is outdated
#RoleAssigneeServiceBean.java
roleAssigneeServiceBean.error.dataverseRequestCannotBeNull=DataverseRequest cannot be null.
diff --git a/src/main/java/propertyFiles/citation.properties b/src/main/java/propertyFiles/citation.properties
index 9a1e6f280ec..00f25740a19 100644
--- a/src/main/java/propertyFiles/citation.properties
+++ b/src/main/java/propertyFiles/citation.properties
@@ -4250,8 +4250,8 @@ controlledvocabulary.language.magdalena_penasco_mixtec=Magdalena Peñasco Mixtec
controlledvocabulary.language.magoma=Magoma
controlledvocabulary.language.magori=Magori
controlledvocabulary.language.maguindanaon=Maguindanaon
-controlledvocabulary.language.magɨ_(madang_province)=Magɨ (Madang Province)
-controlledvocabulary.language.magɨyi=Magɨyi
+controlledvocabulary.language.magi_(madang_province)=Magɨ (Madang Province)
+controlledvocabulary.language.magiyi=Magɨyi
controlledvocabulary.language.mahali=Mahali
controlledvocabulary.language.mahasu_pahari=Mahasu Pahari
controlledvocabulary.language.mahican=Mahican
diff --git a/src/main/resources/db/migration/V6.7.1.1.sql b/src/main/resources/db/migration/V6.7.1.1.sql
new file mode 100644
index 00000000000..3d7a9b103c2
--- /dev/null
+++ b/src/main/resources/db/migration/V6.7.1.1.sql
@@ -0,0 +1,39 @@
+/* We're adding new permissions at bit 13 (4096 LinkDataverse) and bit 14 (8192 LinkDataset).
+Each should be set for any role that currently has the corresponding publish permission: bit 11 (1024 PublishDataverse)
+for LinkDataverse, and bit 12 (2048 PublishDataset) for LinkDataset.
+We also need to move the permissions with current bits 13-14 to bits 15-16 to make room for the new permissions.
+
+Finally, to make this idempotent — at least under the assumption that the standard admin role with all permissions
+exists (or some role with current permission bit 13 (DeleteDataverse) or bit 14 (DeleteDatasetDraft) set) — we check
+that no role already has bit 15 or bit 16 set before applying the migration.
+
+*/
+DO
+$do$
+ BEGIN
+ -- Skip if migration already applied (i.e., bits 15-16 in use)
+ IF (SELECT MAX(permissionbits) FROM dataverserole) < 16384 THEN
+
+ -- If bit 13 is set, move it to bit 15 and clear it
+ UPDATE dataverserole
+ SET permissionbits = (permissionbits | 16384) & ~4096
+ WHERE (permissionbits & 4096) != 0;
+
+ -- If bit 14 is set, move it to bit 16 and clear it
+ UPDATE dataverserole
+ SET permissionbits = (permissionbits | 32768) & ~8192
+ WHERE (permissionbits & 8192) != 0;
+
+ -- Set bit 13 (4096 LinkDataverse) if bit 11 (1024 PublishDataverse) is set
+ UPDATE dataverserole
+ SET permissionbits = permissionbits | 4096
+ WHERE (permissionbits & 1024) != 0;
+
+ -- Set bit 14 (8192 LinkDataset) if bit 12 (2048 PublishDataset) is set
+ UPDATE dataverserole
+ SET permissionbits = permissionbits | 8192
+ WHERE (permissionbits & 2048) != 0;
+
+ END IF;
+ END
+$do$;
\ No newline at end of file
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 8ed482679f7..f1f72b50634 100644
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -19,79 +19,103 @@
+
-
-
-
+
+
+
+
+
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
+ and canUpdateDataset
+ and !canIssuePublishDatasetCommand}"/>
+
+
-
-
-
+
+
-
+
-
-
+
+
-
-
+
+
-
+
-
+
-
-
+
+
-
-
+
+
-
-
+
+
-
+
@@ -141,20 +165,20 @@
#{DatasetPage.datasetVersionUI.title.value}
-
-
-
-
+
+
+
+
-
+
-
+
-
-
+
+
@@ -163,10 +187,10 @@
-
+
-
+
@@ -184,70 +208,70 @@