diff --git a/doc/release-notes/9692-files-api-extension.md b/doc/release-notes/9692-files-api-extension.md new file mode 100644 index 00000000000..baa8e2f87cd --- /dev/null +++ b/doc/release-notes/9692-files-api-extension.md @@ -0,0 +1,7 @@ +The following API endpoints have been added: + +- /api/files/{id}/downloadCount +- /api/files/{id}/dataTables +- /access/datafile/{id}/userPermissions + +The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support pagination and ordering. diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index e76ea167587..d714c90372a 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -403,3 +403,19 @@ This method returns a list of Authenticated Users who have requested access to t A curl example using an ``id``:: curl -H "X-Dataverse-key:$API_TOKEN" -X GET http://$SERVER/api/access/datafile/{id}/listRequests + +Get User Permissions on a File +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``/api/access/datafile/{id}/userPermissions`` + +This method returns the permissions that the calling user has on a particular file.
+ +In particular, the user permissions that this method checks, returned as booleans, are the following: + +* Can download the file +* Can edit the file owner dataset + +A curl example using an ``id``:: + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "http://$SERVER/api/access/datafile/{id}/userPermissions" diff --git a/doc/sphinx-guides/source/api/metrics.rst b/doc/sphinx-guides/source/api/metrics.rst index 28ac33ea228..613671e49d1 100755 --- a/doc/sphinx-guides/source/api/metrics.rst +++ b/doc/sphinx-guides/source/api/metrics.rst @@ -163,3 +163,10 @@ The following table lists the available metrics endpoints (not including the Mak /api/info/metrics/uniquefiledownloads/toMonth/{yyyy-MM},"count by id, pid","json, csv",collection subtree,published,y,cumulative up to month specified,unique download counts per file id to the specified month. PIDs are also included in output if they exist /api/info/metrics/tree,"id, ownerId, alias, depth, name, children",json,collection subtree,published,y,"tree of dataverses starting at the root or a specified parentAlias with their id, owner id, alias, name, a computed depth, and array of children dataverses","underlying code can also include draft dataverses, this is not currently accessible via api, depth starts at 0" /api/info/metrics/tree/toMonth/{yyyy-MM},"id, ownerId, alias, depth, name, children",json,collection subtree,published,y,"tree of dataverses in existence as of specified date starting at the root or a specified parentAlias with their id, owner id, alias, name, a computed depth, and array of children dataverses","underlying code can also include draft dataverses, this is not currently accessible via api, depth starts at 0" + +Related API Endpoints +--------------------- + +The following endpoints are not under the metrics namespace but also return counts: + +- :ref:`file-download-count` diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 4d9466703e4..214ca440d1d 
100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -958,6 +958,29 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files" +This endpoint supports optional pagination, through the ``limit`` and ``offset`` query params: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?limit=10&offset=20" + +Ordering criteria for sorting the results is also optionally supported. In particular, by the following possible values: + +* ``NameAZ`` (Default) +* ``NameZA`` +* ``Newest`` +* ``Oldest`` +* ``Size`` +* ``Type`` + +Please note that these values are case sensitive and must be correctly typed for the endpoint to recognize them. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?orderCriteria=Newest" + View Dataset Files and Folders as a Directory Index ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -2702,6 +2725,85 @@ The fully expanded example above (without environment variables) looks like this Note: The ``id`` returned in the json response is the id of the file metadata version. +Getting File Data Tables +~~~~~~~~~~~~~~~~~~~~~~~~ + +This endpoint is oriented toward tabular files and provides a JSON representation of the file data tables for an existing tabular file. ``ID`` is the database id of the file to get the data tables from or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl $SERVER_URL/api/files/$ID/dataTables + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl https://demo.dataverse.org/api/files/24/dataTables + +A curl example using a ``PERSISTENT_ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl "$SERVER_URL/api/files/:persistentId/dataTables?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/files/:persistentId/dataTables?persistentId=doi:10.5072/FK2/AAA000" + +Note that if the requested file is not tabular, the endpoint will return an error. + +.. _file-download-count: + +Getting File Download Count +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Provides the download count for a particular file, where ``ID`` is the database id of the file to get the download count from or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl "$SERVER_URL/api/files/$ID/downloadCount" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/files/24/downloadCount" + +A curl example using a ``PERSISTENT_ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl "$SERVER_URL/api/files/:persistentId/downloadCount?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/files/:persistentId/downloadCount?persistentId=doi:10.5072/FK2/AAA000" + +If you are interested in download counts for multiple files, see :doc:`/api/metrics`. 
Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 28243c37eee..6f087f9eabc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -48,7 +48,23 @@ public class DatasetVersionServiceBean implements java.io.Serializable { private static final Logger logger = Logger.getLogger(DatasetVersionServiceBean.class.getCanonicalName()); private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); - + + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL = "SELECT fm FROM FileMetadata fm" + + " WHERE fm.datasetVersion.id=:datasetVersionId" + + " ORDER BY fm.label"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE = "SELECT fm FROM FileMetadata fm, DvObject dvo" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = dvo.id" + + " ORDER BY CASE WHEN dvo.publicationDate IS NOT NULL THEN dvo.publicationDate ELSE dvo.createDate END"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE = "SELECT fm FROM FileMetadata fm, DataFile df" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = df.id" + + " ORDER BY df.filesize"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE = "SELECT fm FROM FileMetadata fm, DataFile df" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = df.id" + + " ORDER BY df.contentType"; + @EJB DatasetServiceBean datasetService; @@ -149,7 +165,19 @@ public DatasetVersion getDatasetVersion(){ return this.datasetVersionForResponse; } } // end RetrieveDatasetVersionResponse - + + /** + * Different criteria to sort the results of FileMetadata queries used in {@link 
DatasetVersionServiceBean#getFileMetadatas} + */ + public enum FileMetadatasOrderCriteria { + NameAZ, + NameZA, + Newest, + Oldest, + Size, + Type + } + public DatasetVersion find(Object pk) { return em.find(DatasetVersion.class, pk); } @@ -1224,4 +1252,50 @@ public List getUnarchivedDatasetVersions(){ return null; } } // end getUnarchivedDatasetVersions + + /** + * Returns a FileMetadata list of files in the specified DatasetVersion + * + * @param datasetVersion the DatasetVersion to access + * @param limit for pagination, can be null + * @param offset for pagination, can be null + * @param orderCriteria a FileMetadatasOrderCriteria to order the results + * @return a FileMetadata list of the specified DatasetVersion + */ + public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { + TypedQuery query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria), FileMetadata.class) + .setParameter("datasetVersionId", datasetVersion.getId()); + if (limit != null) { + query.setMaxResults(limit); + } + if (offset != null) { + query.setFirstResult(offset); + } + return query.getResultList(); + } + + private String getQueryStringFromFileMetadatasOrderCriteria(FileMetadatasOrderCriteria orderCriteria) { + String queryString; + switch (orderCriteria) { + case NameZA: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL + " DESC"; + break; + case Newest: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE + " DESC"; + break; + case Oldest: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE; + break; + case Size: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE; + break; + case Type: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE; + break; + default: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL; + break; + } + return queryString; + } } // end class diff --git 
a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index e2b07717358..6bf68f908ed 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.CreateGuestbookResponseCommand; import edu.harvard.iq.dataverse.engine.command.impl.RequestAccessCommand; @@ -571,5 +572,15 @@ public String getDirectStorageLocatrion(String storageLocation) { return null; } - + + /** + * Checks if the DataverseRequest, which contains IP Groups, has permission to download the file + * + * @param dataverseRequest the DataverseRequest + * @param dataFile the DataFile to check permissions + * @return boolean + */ + public boolean canDownloadFile(DataverseRequest dataverseRequest, DataFile dataFile) { + return permissionService.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 0341f8c1127..ccdec19456c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -1945,5 +1945,22 @@ private URI handleCustomZipDownload(User user, String customZipServiceUrl, Strin throw new BadRequestException(); } return redirectUri; - } + } + + @GET + @AuthRequired + @Path("/datafile/{id}/userPermissions") + public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; 
+ try { + dataFile = findDataFileOrDie(dataFileId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + User requestUser = getRequestUser(crc); + jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); + jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); + return ok(jsonObjectBuilder); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index dbea63cb1c8..98bc42f75b0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -488,9 +488,17 @@ public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id" @GET @AuthRequired @Path("{id}/versions/{versionId}/files") - public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> ok( jsonFileMetadatas( - getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getFileMetadatas())), getRequestUser(crc)); + public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset, @QueryParam("orderCriteria") String orderCriteria, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response( req -> { + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + DatasetVersionServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; + try { + fileMetadatasOrderCriteria = orderCriteria != null ? 
DatasetVersionServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ; + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); + } + return ok(jsonFileMetadatas(datasetversionService.getFileMetadatas(datasetVersion, limit, offset, fileMetadatasOrderCriteria))); + }, getRequestUser(crc)); } @GET diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 3324523afbc..fec60f10f3f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -3,6 +3,7 @@ import com.google.gson.Gson; import com.google.gson.JsonObject; import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileServiceBean; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.DatasetServiceBean; @@ -11,10 +12,13 @@ import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; +import edu.harvard.iq.dataverse.FileDownloadServiceBean; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccessValidator; import edu.harvard.iq.dataverse.UserNotificationServiceBean; import edu.harvard.iq.dataverse.api.auth.AuthRequired; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; @@ -73,7 +77,11 @@ import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; + +import static 
edu.harvard.iq.dataverse.util.json.JsonPrinter.jsonDT; import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; + import jakarta.ws.rs.core.UriInfo; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; @@ -102,7 +110,9 @@ public class Files extends AbstractApiBean { SettingsServiceBean settingsService; @Inject MakeDataCountLoggingServiceBean mdcLogService; - + @Inject + GuestbookResponseServiceBean guestbookResponseService; + private static final Logger logger = Logger.getLogger(Files.class.getName()); @@ -818,4 +828,37 @@ public Response getExternalToolFMParams(@Context ContainerRequestContext crc, @P public Response getFixityAlgorithm() { return ok(systemConfig.getFileFixityChecksumAlgorithm().toString()); } + + @GET + @AuthRequired + @Path("{id}/downloadCount") + public Response getFileDownloadCount(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + return response(req -> { + DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); + return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(dataFile.getId()).toString()); + }, getRequestUser(crc)); + } + + @GET + @AuthRequired + @Path("{id}/dataTables") + public Response getFileDataTables(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; + try { + dataFile = findDataFileOrDie(dataFileId); + } catch (WrappedResponse e) { + return error(Response.Status.NOT_FOUND, "File not found for given id."); + } + if (dataFile.isRestricted() || FileUtil.isActivelyEmbargoed(dataFile)) { + DataverseRequest dataverseRequest = createDataverseRequest(getRequestUser(crc)); + boolean hasPermissionToDownloadFile = permissionSvc.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile); + if (!hasPermissionToDownloadFile) { + return error(FORBIDDEN, 
"Insufficient permissions to access the requested information."); + } + } + if (!dataFile.isTabularData()) { + return error(BAD_REQUEST, "This operation is only available for tabular files."); + } + return ok(jsonDT(dataFile.getDataTables())); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index b6026998bb7..36085f7ead7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -689,7 +689,8 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue())) .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue())) .add("tabularTags", getTabularFileTags(df)) - .add("creationDate", df.getCreateDateFormattedYYYYMMDD()); + .add("creationDate", df.getCreateDateFormattedYYYYMMDD()) + .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()); /* * The restricted state was not included prior to #9175 so to avoid backward * incompatability, it is now only added when generating json for the @@ -756,7 +757,7 @@ public static JsonObjectBuilder json(DataVariable dv) { .add("variableMetadata",jsonVarMetadata(dv.getVariableMetadatas())) .add("invalidRanges", dv.getInvalidRanges().isEmpty() ? null : JsonPrinter.jsonInvalidRanges(dv.getInvalidRanges())) .add("summaryStatistics", dv.getSummaryStatistics().isEmpty() ? null : JsonPrinter.jsonSumStat(dv.getSummaryStatistics())) - .add("variableCategories", dv.getCategories().isEmpty() ? null : JsonPrinter.jsonCatStat(dv.getCategories())) + .add("variableCategories", dv.getCategories().isEmpty() ? 
null : JsonPrinter.jsonCatStat(dv.getCategories())) ; } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index 606e8fa120e..76012882ef5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -6,7 +6,6 @@ package edu.harvard.iq.dataverse.api; import io.restassured.RestAssured; -import static io.restassured.RestAssured.given; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; import edu.harvard.iq.dataverse.DataFile; @@ -14,7 +13,7 @@ import java.io.IOException; import java.util.zip.ZipInputStream; -import org.hamcrest.MatcherAssert; +import jakarta.json.Json; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -22,9 +21,10 @@ import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.util.HashMap; -import static jakarta.ws.rs.core.Response.Status.OK; + import org.hamcrest.collection.IsMapContaining; +import static jakarta.ws.rs.core.Response.Status.*; import static org.hamcrest.MatcherAssert.*; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -629,7 +629,39 @@ public void testZipUploadAndDownload() throws IOException { System.out.println("MD5 checksums of the unzipped file streams are correct."); System.out.println("Zip upload-and-download round trip test: success!"); - } + @Test + public void testGetUserPermissionsOnFile() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = 
UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert user permissions on file + int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); + Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(testFileId), apiToken); + + getUserPermissionsOnFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean canDownloadFile = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canDownloadFile"); + assertTrue(canDownloadFile); + boolean canEditOwnerDataset = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canEditOwnerDataset"); + assertTrue(canEditOwnerDataset); + + // Call with invalid file id + Response getUserPermissionsOnFileInvalidIdResponse = UtilIT.getUserPermissionsOnFile("testInvalidId", apiToken); + getUserPermissionsOnFileInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b353b4488d0..3b6d4d1ecdf 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; 
import io.restassured.RestAssured; import static io.restassured.RestAssured.given; @@ -3262,4 +3263,131 @@ public void getDatasetVersionCitation() { // We check that the returned message contains information expected for the citation string .body("data.message", containsString("DRAFT VERSION")); } + + @Test + public void getVersionFiles() throws IOException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + String testFileName1 = "test_1.txt"; + String testFileName2 = "test_2.txt"; + String testFileName3 = "test_3.txt"; + String testFileName4 = "test_4.txt"; + String testFileName5 = "test_5.png"; + + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[50], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[200], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName3, new byte[100], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName5, new byte[300], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName4, new byte[400], apiToken); + + String testDatasetVersion = ":latest"; + + // Test pagination and NameAZ order criteria (the default criteria) + int testPageSize = 2; + + 
// Test page 1 + Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, apiToken); + + int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(testPageSize, fileMetadatasCount); + + getVersionFilesResponsePaginated.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName2)); + + // Test page 2 + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, apiToken); + + fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(testPageSize, fileMetadatasCount); + + getVersionFilesResponsePaginated.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName3)) + .body("data[1].label", equalTo(testFileName4)); + + // Test page 3 (last) + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, apiToken); + + fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + getVersionFilesResponsePaginated.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName5)); + + // Test NameZA order criteria + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + + getVersionFilesResponseNameZACriteria.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName5)) + .body("data[1].label", equalTo(testFileName4)) + .body("data[2].label", equalTo(testFileName3)) + .body("data[3].label", equalTo(testFileName2)) + .body("data[4].label", equalTo(testFileName1)); + + // 
Test Newest order criteria + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + + getVersionFilesResponseNewestCriteria.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName4)) + .body("data[1].label", equalTo(testFileName5)) + .body("data[2].label", equalTo(testFileName3)) + .body("data[3].label", equalTo(testFileName2)) + .body("data[4].label", equalTo(testFileName1)); + + // Test Oldest order criteria + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + + getVersionFilesResponseOldestCriteria.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName2)) + .body("data[2].label", equalTo(testFileName3)) + .body("data[3].label", equalTo(testFileName5)) + .body("data[4].label", equalTo(testFileName4)); + + // Test Size order criteria + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + + getVersionFilesResponseSizeCriteria.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName3)) + .body("data[2].label", equalTo(testFileName2)) + .body("data[3].label", equalTo(testFileName5)) + .body("data[4].label", equalTo(testFileName4)); + + // Test Type order criteria + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); + + 
getVersionFilesResponseTypeCriteria.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName5)) + .body("data[1].label", equalTo(testFileName1)) + .body("data[2].label", equalTo(testFileName2)) + .body("data[3].label", equalTo(testFileName3)) + .body("data[4].label", equalTo(testFileName4)); + + // Test invalid order criteria + String invalidOrderCriteria = "invalidOrderCriteria"; + Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, invalidOrderCriteria, apiToken); + getVersionFilesResponseInvalidOrderCriteria.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Invalid order criteria: " + invalidOrderCriteria)); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index d0f20a8642b..0a16bca7008 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2114,4 +2114,101 @@ public void testFilePIDsBehavior() { UtilIT.deleteSetting(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection); } } + + @Test + public void testGetFileDownloadCount() throws InterruptedException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + 
+ // Upload test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Publish collection and dataset + UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + // Download test file + int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); + + Response downloadResponse = UtilIT.downloadFile(testFileId, apiToken); + downloadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Ensure download count is updated + sleep(2000); + + // Get download count and assert it is 1 + Response getFileDownloadCountResponse = UtilIT.getFileDownloadCount(Integer.toString(testFileId), apiToken); + getFileDownloadCountResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("1")); + + // Call with invalid file id + Response getFileDownloadCountInvalidIdResponse = UtilIT.getFileDownloadCount("testInvalidId", apiToken); + getFileDownloadCountInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } + + @Test + public void testGetFileDataTables() throws InterruptedException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = 
UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload non-tabular file + String pathToNonTabularTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadNonTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToNonTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadNonTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that getting data tables for non-tabular file fails + int testNonTabularFileId = JsonPath.from(uploadNonTabularFileResponse.body().asString()).getInt("data.files[0].dataFile.id"); + Response getFileDataTablesForNonTabularFileResponse = UtilIT.getFileDataTables(Integer.toString(testNonTabularFileId), apiToken); + getFileDataTablesForNonTabularFileResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + + // Upload tabular file + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Ensure tabular file is ingested + sleep(2000); + + String testTabularFileId = Integer.toString(JsonPath.from(uploadTabularFileResponse.body().asString()).getInt("data.files[0].dataFile.id")); + + // Get file data tables for the tabular file and assert data is obtained + Response getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, apiToken); + getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + int dataTablesNumber = JsonPath.from(getFileDataTablesForTabularFileResponse.body().asString()).getList("data").size(); + 
assertTrue(dataTablesNumber > 0); + + // Get file data tables for a restricted tabular file as the owner and assert data is obtained + Response restrictFileResponse = UtilIT.restrictFile(testTabularFileId, true, apiToken); + restrictFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, apiToken); + getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get file data tables for a restricted tabular file as other user and assert forbidden error is thrown + Response createRandomUser = UtilIT.createRandomUser(); + createRandomUser.then().assertThat().statusCode(OK.getStatusCode()); + String randomUserApiToken = UtilIT.getApiTokenFromResponse(createRandomUser); + getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, randomUserApiToken); + getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e47971f9b92..f61b392c898 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3,6 +3,8 @@ import io.restassured.http.ContentType; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; + +import java.io.*; import java.util.UUID; import java.util.logging.Logger; import jakarta.json.Json; @@ -10,8 +12,6 @@ import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; -import java.io.File; -import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; @@ -28,7 +28,6 @@ import com.mashape.unirest.http.Unirest; import com.mashape.unirest.http.exceptions.UnirestException; import com.mashape.unirest.request.GetRequest; -import java.io.InputStream; import 
edu.harvard.iq.dataverse.util.FileUtil; import java.util.Base64; import org.apache.commons.io.IOUtils; @@ -39,7 +38,6 @@ import org.hamcrest.Description; import org.hamcrest.Matcher; -import static io.restassured.RestAssured.put; import static io.restassured.path.xml.XmlPath.from; import static io.restassured.RestAssured.given; import edu.harvard.iq.dataverse.DatasetField; @@ -47,11 +45,9 @@ import edu.harvard.iq.dataverse.DatasetFieldValue; import edu.harvard.iq.dataverse.util.StringUtil; -import java.io.StringReader; import java.util.Collections; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.*; -import static org.junit.jupiter.api.Assertions.*; public class UtilIT { @@ -66,7 +62,7 @@ public class UtilIT { public static final int MAXIMUM_INGEST_LOCK_DURATION = 15; public static final int MAXIMUM_PUBLISH_LOCK_DURATION = 15; public static final int MAXIMUM_IMPORT_DURATION = 1; - + private static SwordConfigurationImpl swordConfiguration = new SwordConfigurationImpl(); static Matcher equalToCI( String value ) { @@ -2607,7 +2603,7 @@ static Boolean sleepForDeadlock(int duration) { } while (true); return i <= duration; } - + //Helper function that returns true if a given search returns a non-zero response within a fixed time limit // a given duration returns false if still zero results after given duration static Boolean sleepForSearch(String searchPart, String apiToken, String subTree, int duration) { @@ -3279,4 +3275,51 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, Str .get("/api/datasets/" + datasetId + "/versions/" + version + "/citation"); return response; } + + static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String orderCriteria, String apiToken) { + RequestSpecification requestSpecification = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .contentType("application/json"); + if (limit != null) { + 
requestSpecification = requestSpecification.queryParam("limit", limit); + } + if (offset != null) { + requestSpecification = requestSpecification.queryParam("offset", offset); + } + if (orderCriteria != null) { + requestSpecification = requestSpecification.queryParam("orderCriteria", orderCriteria); + } + return requestSpecification.get("/api/datasets/" + datasetId + "/versions/" + version + "/files"); + } + + static Response createAndUploadTestFile(String persistentId, String testFileName, byte[] testFileContentInBytes, String apiToken) throws IOException { + Path pathToTempDir = Paths.get(Files.createTempDirectory(null).toString()); + String pathToTestFile = pathToTempDir + File.separator + testFileName; + File testFile = new File(pathToTestFile); + FileOutputStream fileOutputStream = new FileOutputStream(testFile); + + fileOutputStream.write(testFileContentInBytes); + fileOutputStream.flush(); + fileOutputStream.close(); + + return uploadZipFileViaSword(persistentId, pathToTestFile, apiToken); + } + + static Response getFileDownloadCount(String dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/files/" + dataFileId + "/downloadCount"); + } + + static Response getFileDataTables(String dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/files/" + dataFileId + "/dataTables"); + } + + static Response getUserPermissionsOnFile(String dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/access/datafile/" + dataFileId + "/userPermissions"); + } } diff --git a/src/test/resources/tab/test.tab b/src/test/resources/tab/test.tab new file mode 100644 index 00000000000..d750d42d995 --- /dev/null +++ b/src/test/resources/tab/test.tab @@ -0,0 +1,11 @@ +position name age +1 "Belle" 36 +2 "Lola" 37 +3 "Jayden" 45 +4 "Margaret" 37 +5 "Russell" 40 +6 "Bertie" 60 +7 "Maud" 34 +8 "Mabel" 31 +9 "Trevor" 51 +10 "Duane" 26