diff --git a/doc/release-notes/9686-move-harvesting-client-id.md b/doc/release-notes/9686-move-harvesting-client-id.md
deleted file mode 100644
index 110fcc6ca6e..00000000000
--- a/doc/release-notes/9686-move-harvesting-client-id.md
+++ /dev/null
@@ -1 +0,0 @@
-With this release the harvesting client id will be available for harvested files. A database update will copy the id to previously harvested files./
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
index e2788e6acc6..a2f560bc959 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -752,9 +752,21 @@ public void setDatasetExternalCitations(List<DatasetExternalCitations> datasetEx
         this.datasetExternalCitations = datasetExternalCitations;
     }
 
+    @ManyToOne
+    @JoinColumn(name="harvestingClient_id")
+    private HarvestingClient harvestedFrom;
-    
+    public HarvestingClient getHarvestedFrom() {
+        return this.harvestedFrom;
+    }
+
+    public void setHarvestedFrom(HarvestingClient harvestingClientConfig) {
+        this.harvestedFrom = harvestingClientConfig;
+    }
+
+    public boolean isHarvested() {
+        return this.harvestedFrom != null;
+    }
 
     private String harvestIdentifier;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index 4c4aafdd1ec..c6df2a2e1ab 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -583,6 +583,54 @@ public Long getDatasetVersionCardImage(Long versionId, User user) {
         return null;
     }
 
+    /**
+     * Used to identify and properly display Harvested objects on the dataverse page.
+     *
+     * @param datasetIds
+     * @return
+     */
+    public Map<Long, String> getArchiveDescriptionsForHarvestedDatasets(Set<Long> datasetIds){
+        if (datasetIds == null || datasetIds.size() < 1) {
+            return null;
+        }
+
+        String datasetIdStr = StringUtils.join(datasetIds, ", ");
+
+        String qstr = "SELECT d.id, h.archiveDescription FROM harvestingClient h, dataset d WHERE d.harvestingClient_id = h.id AND d.id IN (" + datasetIdStr + ")";
+        List<Object[]> searchResults;
+
+        try {
+            searchResults = em.createNativeQuery(qstr).getResultList();
+        } catch (Exception ex) {
+            searchResults = null;
+        }
+
+        if (searchResults == null) {
+            return null;
+        }
+
+        Map<Long, String> ret = new HashMap<>();
+
+        for (Object[] result : searchResults) {
+            Long dsId;
+            if (result[0] != null) {
+                try {
+                    dsId = (Long)result[0];
+                } catch (Exception ex) {
+                    dsId = null;
+                }
+                if (dsId == null) {
+                    continue;
+                }
+
+                ret.put(dsId, (String)result[1]);
+            }
+        }
+
+        return ret;
+    }
+
+
     public boolean isDatasetCardImageAvailable(DatasetVersion datasetVersion, User user) {
         if (datasetVersion == null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java
index 46955f52878..cc5d7620969 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java
@@ -1,7 +1,6 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
 import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 import edu.harvard.iq.dataverse.storageuse.StorageQuota;
 
@@ -372,22 +371,6 @@ public GlobalId getGlobalId() {
         return globalId;
     }
 
-    @ManyToOne
-    @JoinColumn(name="harvestingClient_id")
-    private HarvestingClient harvestedFrom;
-
-    public HarvestingClient getHarvestedFrom() {
-        return this.harvestedFrom;
-    }
-
-    public void setHarvestedFrom(HarvestingClient harvestingClientConfig) {
-        this.harvestedFrom = harvestingClientConfig;
-    }
-
-    public boolean isHarvested() {
-        return this.harvestedFrom != null;
-    }
-
     public abstract <T> T accept(Visitor<T> v);
 
     @Override
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
index 58a246b364a..d4219c36149 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
@@ -383,54 +383,6 @@ public Map<Long, String> getObjectPathsByIds(Set<Long> objectIds){
         return ret;
     }
 
-    /**
-     * Used to identify and properly display Harvested objects on the dataverse page.
-     *
-     * @param dvObjectIds
-     * @return
-     */
-    public Map<Long, String> getArchiveDescriptionsForHarvestedDvObjects(Set<Long> dvObjectIds){
-
-        if (dvObjectIds == null || dvObjectIds.size() < 1) {
-            return null;
-        }
-
-        String dvObjectIsString = StringUtils.join(dvObjectIds, ", ");
-        String qstr = "SELECT d.id, h.archiveDescription FROM harvestingClient h, DvObject d WHERE d.harvestingClient_id = h.id AND d.id IN (" + dvObjectIsString + ")";
-        List<Object[]> searchResults;
-
-        try {
-            searchResults = em.createNativeQuery(qstr).getResultList();
-        } catch (Exception ex) {
-            searchResults = null;
-        }
-
-        if (searchResults == null) {
-            return null;
-        }
-
-        Map<Long, String> ret = new HashMap<>();
-
-        for (Object[] result : searchResults) {
-            Long dvObjId;
-            if (result[0] != null) {
-                try {
-                    Integer castResult = (Integer) result[0];
-                    dvObjId = Long.valueOf(castResult);
-                } catch (Exception ex) {
-                    dvObjId = null;
-                }
-                if (dvObjId == null) {
-                    continue;
-                }
-                ret.put(dvObjId, (String)result[1]);
-            }
-        }
-
-        return ret;
-    }
-
-
     public String generateNewIdentifierByStoredProcedure() {
         StoredProcedureQuery query = this.em.createNamedStoredProcedureQuery("Dataset.generateIdentifierFromStoredProcedure");
         query.execute();
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
index c5812403f31..c17ba909230 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
@@ -332,11 +332,6 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve
 
         Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId().asString());
 
-        //adding the harvesting client id to harvested files #9686
-        for (DataFile df : ds.getFiles()){
-            df.setHarvestedFrom(harvestingClient);
-        }
-
         if (existingDs != null) {
             // If this dataset already exists IN ANOTHER DATAVERSE
             // we are just going to skip it!
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
index 5747c64d217..7ec6d75a41c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
@@ -199,8 +199,8 @@ public void recordHarvestJobStatus(Long hcId, Date finishTime, int harvestedCoun
 
     public Long getNumberOfHarvestedDatasetsByAllClients() {
         try {
-            return (Long) em.createNativeQuery("SELECT count(d.id) FROM dvobject d "
-                    + " WHERE d.harvestingclient_id IS NOT NULL and d.dtype = 'Dataset'").getSingleResult();
+            return (Long) em.createNativeQuery("SELECT count(d.id) FROM dataset d "
+                    + " WHERE d.harvestingclient_id IS NOT NULL").getSingleResult();
 
         } catch (Exception ex) {
             logger.info("Warning: exception looking up the total number of harvested datasets: " + ex.getMessage());
diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java
index 9ae0c7cbb8f..1b5619c53e0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java
@@ -138,8 +138,8 @@ public JsonArray getDatasetsTimeSeries(UriInfo uriInfo, String dataLocation, Dat
                 + "from datasetversion\n"
                 + "where versionstate='RELEASED' \n"
                 + (((d == null)&&(DATA_LOCATION_ALL.equals(dataLocation))) ? "" : "and dataset_id in (select dataset.id from dataset, dvobject where dataset.id=dvobject.id\n")
-                + ((DATA_LOCATION_LOCAL.equals(dataLocation)) ? "and dvobject.harvestingclient_id IS NULL and publicationdate is not null\n " : "")
-                + ((DATA_LOCATION_REMOTE.equals(dataLocation)) ? "and dvobject.harvestingclient_id IS NOT NULL\n " : "")
+                + ((DATA_LOCATION_LOCAL.equals(dataLocation)) ? "and dataset.harvestingclient_id IS NULL and publicationdate is not null\n " : "")
+                + ((DATA_LOCATION_REMOTE.equals(dataLocation)) ? "and dataset.harvestingclient_id IS NOT NULL\n " : "")
                 + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n ")
                 + (((d == null)&&(DATA_LOCATION_ALL.equals(dataLocation))) ? "" : ")\n")
                 + "group by dataset_id) as subq group by subq.date order by date;"
@@ -156,11 +156,11 @@ public JsonArray getDatasetsTimeSeries(UriInfo uriInfo, String dataLocation, Dat
      * @param d
      */
     public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) {
-        String dataLocationLine = "(date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM') and dvobject.harvestingclient_id IS NULL)\n";
+        String dataLocationLine = "(date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM') and dataset.harvestingclient_id IS NULL)\n";
 
         if (!DATA_LOCATION_LOCAL.equals(dataLocation)) { // Default api state is DATA_LOCATION_LOCAL
             //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated
-            String harvestBaseLine = "(date_trunc('month', createtime) <= to_date('" + yyyymm + "','YYYY-MM') and dvobject.harvestingclient_id IS NOT NULL)\n";
+            String harvestBaseLine = "(date_trunc('month', createtime) <= to_date('" + yyyymm + "','YYYY-MM') and dataset.harvestingclient_id IS NOT NULL)\n";
             if (DATA_LOCATION_REMOTE.equals(dataLocation)) {
                 dataLocationLine = harvestBaseLine; // replace
             } else if (DATA_LOCATION_ALL.equals(dataLocation)) {
@@ -189,7 +189,7 @@ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) {
                 + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n"
                 + "from datasetversion\n"
                 + "join dataset on dataset.id = datasetversion.dataset_id\n"
-                + "join dvobject on dvobject.id = dataset.id\n"
+                + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n")
                 + "where versionstate='RELEASED' \n"
                 + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n ")
                 + "and \n"
@@ -198,6 +198,7 @@ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) {
                 +") sub_temp"
         );
         logger.log(Level.FINE, "Metric query: {0}", query);
+
         return (long) query.getSingleResult();
     }
 
@@ -206,17 +207,16 @@ public List<Object[]> datasetsBySubjectToMonth(String yyyymm, String dataLocatio
         // A published local datasets may have more than one released version!
         // So that's why we have to jump through some extra hoops below
         // in order to select the latest one:
-        String originClause = "(datasetversion.dataset_id || ':' || datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber) in\n"
-                + "(\n"
-                + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n"
-                + " from datasetversion\n"
-                + " join dataset on dataset.id = datasetversion.dataset_id\n"
-                + " join dvobject on dataset.id = dvobject.id\n"
-                + " where versionstate='RELEASED'\n"
-                + " and dvobject.harvestingclient_id is null"
-                + " and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n"
-                + " group by dataset_id\n"
-                + "))\n";
+        String originClause = "(datasetversion.dataset_id || ':' || datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber) in\n"
+                + "(\n"
+                + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n"
+                + " from datasetversion\n"
+                + " join dataset on dataset.id = datasetversion.dataset_id\n"
+                + " where versionstate='RELEASED'\n"
+                + " and dataset.harvestingclient_id is null\n"
+                + " and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n"
+                + " group by dataset_id\n"
+                + "))\n";
 
         if (!DATA_LOCATION_LOCAL.equals(dataLocation)) { // Default api state is DATA_LOCATION_LOCAL
             //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated
@@ -225,7 +225,7 @@ public List<Object[]> datasetsBySubjectToMonth(String yyyymm, String dataLocatio
             // so the query is simpler:
             String harvestOriginClause = "(\n" +
                     "   datasetversion.dataset_id = dataset.id\n" +
-                    "   AND dvobject.harvestingclient_id IS NOT null \n" +
+                    "   AND dataset.harvestingclient_id IS NOT null \n" +
                     "   AND date_trunc('month', datasetversion.createtime) <= to_date('" + yyyymm + "','YYYY-MM')\n" +
                     ")\n";
 
@@ -244,7 +244,7 @@ public List<Object[]> datasetsBySubjectToMonth(String yyyymm, String dataLocatio
                 + "JOIN datasetfieldtype ON datasetfieldtype.id = controlledvocabularyvalue.datasetfieldtype_id\n"
                 + "JOIN datasetversion ON datasetversion.id = datasetfield.datasetversion_id\n"
                 + "JOIN dataset ON dataset.id = datasetversion.dataset_id\n"
-                + "JOIN dvobject ON dvobject.id = dataset.id\n"
+                + ((d == null) ? "" : "JOIN dvobject ON dvobject.id = dataset.id\n")
                 + "WHERE\n"
                 + originClause
                 + "AND datasetfieldtype.name = 'subject'\n"
@@ -258,11 +258,11 @@ public List<Object[]> datasetsBySubjectToMonth(String yyyymm, String dataLocatio
     }
 
     public long datasetsPastDays(int days, String dataLocation, Dataverse d) {
-        String dataLocationLine = "(releasetime > current_date - interval '" + days + "' day and dvobject.harvestingclient_id IS NULL)\n";
+        String dataLocationLine = "(releasetime > current_date - interval '" + days + "' day and dataset.harvestingclient_id IS NULL)\n";
 
         if (!DATA_LOCATION_LOCAL.equals(dataLocation)) { // Default api state is DATA_LOCATION_LOCAL
             //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated
-            String harvestBaseLine = "(createtime > current_date - interval '" + days + "' day and dvobject.harvestingclient_id IS NOT NULL)\n";
+            String harvestBaseLine = "(createtime > current_date - interval '" + days + "' day and dataset.harvestingclient_id IS NOT NULL)\n";
             if (DATA_LOCATION_REMOTE.equals(dataLocation)) {
                 dataLocationLine = harvestBaseLine; // replace
             } else if (DATA_LOCATION_ALL.equals(dataLocation)) {
@@ -276,7 +276,7 @@ public long datasetsPastDays(int days, String dataLocation, Dataverse d) {
                 + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max\n"
                 + "from datasetversion\n"
                 + "join dataset on dataset.id = datasetversion.dataset_id\n"
-                + "join dvobject on dvobject.id = dataset.id\n"
+                + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n")
                 + "where versionstate='RELEASED' \n"
                 + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n")
                 + "and \n"
@@ -304,7 +304,7 @@ public JsonArray filesTimeSeries(Dataverse d) {
                 + "where datasetversion.id=filemetadata.datasetversion_id\n"
                 + "and versionstate='RELEASED' \n"
                 + "and dataset_id in (select dataset.id from dataset, dvobject where dataset.id=dvobject.id\n"
-                + "and dvobject.harvestingclient_id IS NULL and publicationdate is not null\n "
+                + "and dataset.harvestingclient_id IS NULL and publicationdate is not null\n "
                 + ((d == null) ? ")" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + "))\n ")
                 + "group by filemetadata.id) as subq group by subq.date order by date;");
         logger.log(Level.FINE, "Metric query: {0}", query);
@@ -327,11 +327,11 @@ public long filesToMonth(String yyyymm, Dataverse d) {
                 + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max \n"
                 + "from datasetversion\n"
                 + "join dataset on dataset.id = datasetversion.dataset_id\n"
-                + "join dvobject on dvobject.id = dataset.id\n"
+                + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n")
                 + "where versionstate='RELEASED'\n"
                 + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n")
                 + "and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n"
-                + "and dvobject.harvestingclient_id is null\n"
+                + "and dataset.harvestingclient_id is null\n"
                 + "group by dataset_id \n"
                 + ");"
         );
@@ -350,11 +350,11 @@ public long filesPastDays(int days, Dataverse d) {
                 + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max \n"
                 + "from datasetversion\n"
                 + "join dataset on dataset.id = datasetversion.dataset_id\n"
-                + "join dvobject on dvobject.id = dataset.id\n"
+                + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n")
                 + "where versionstate='RELEASED'\n"
                 + "and releasetime > current_date - interval '" + days + "' day\n"
                 + ((d == null) ? "" : "AND dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n")
-                + "and dvobject.harvestingclient_id is null\n"
+                + "and dataset.harvestingclient_id is null\n"
                 + "group by dataset_id \n"
                 + ");"
         );
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
index 939b39b94ef..5a5d8781726 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
@@ -1367,7 +1367,6 @@ public boolean canPublishDataset(Long datasetId){
     public void setDisplayCardValues() {
 
         Set<Long> harvestedDatasetIds = null;
-        Set<Long> harvestedFileIds = null;
         for (SolrSearchResult result : searchResultsList) {
             //logger.info("checking DisplayImage for the search result " + i++);
             if (result.getType().equals("dataverses")) {
@@ -1393,10 +1392,10 @@ public void setDisplayCardValues() {
             } else if (result.getType().equals("files")) {
                 result.setImageUrl(thumbnailServiceWrapper.getFileCardImageAsBase64Url(result));
                 if (result.isHarvested()) {
-                    if (harvestedFileIds == null) {
-                        harvestedFileIds = new HashSet<>();
+                    if (harvestedDatasetIds == null) {
+                        harvestedDatasetIds = new HashSet<>();
                     }
-                    harvestedFileIds.add(result.getEntityId());
+                    harvestedDatasetIds.add(result.getParentIdAsLong());
                 }
             }
         }
@@ -1408,35 +1407,25 @@ public void setDisplayCardValues() {
         // SQL query:
 
         if (harvestedDatasetIds != null) {
-            Map<Long, String> descriptionsForHarvestedDatasets = dvObjectService.getArchiveDescriptionsForHarvestedDvObjects(harvestedDatasetIds);
-            if (descriptionsForHarvestedDatasets != null && !descriptionsForHarvestedDatasets.isEmpty()) {
+            Map<Long, String> descriptionsForHarvestedDatasets = datasetService.getArchiveDescriptionsForHarvestedDatasets(harvestedDatasetIds);
+            if (descriptionsForHarvestedDatasets != null && descriptionsForHarvestedDatasets.size() > 0) {
                 for (SolrSearchResult result : searchResultsList) {
-                    if (result.isHarvested() && result.getType().equals("datasets") && descriptionsForHarvestedDatasets.containsKey(result.getEntityId())) {
-                        result.setHarvestingDescription(descriptionsForHarvestedDatasets.get(result.getEntityId()));
+                    if (result.isHarvested()) {
+                        if (result.getType().equals("files")) {
+                            if (descriptionsForHarvestedDatasets.containsKey(result.getParentIdAsLong())) {
+                                result.setHarvestingDescription(descriptionsForHarvestedDatasets.get(result.getParentIdAsLong()));
+                            }
+                        } else if (result.getType().equals("datasets")) {
+                            if (descriptionsForHarvestedDatasets.containsKey(result.getEntityId())) {
+                                result.setHarvestingDescription(descriptionsForHarvestedDatasets.get(result.getEntityId()));
+                            }
+                        }
                     }
                 }
             }
             descriptionsForHarvestedDatasets = null;
             harvestedDatasetIds = null;
         }
-
-        if (harvestedFileIds != null) {
-
-            Map<Long, String> descriptionsForHarvestedFiles = dvObjectService.getArchiveDescriptionsForHarvestedDvObjects(harvestedFileIds);
-            if (descriptionsForHarvestedFiles != null && !descriptionsForHarvestedFiles.isEmpty()) {
-                for (SolrSearchResult result : searchResultsList) {
-                    if (result.isHarvested() && result.getType().equals("files") && descriptionsForHarvestedFiles.containsKey(result.getEntityId())) {
-
-                        result.setHarvestingDescription(descriptionsForHarvestedFiles.get(result.getEntityId()));
-
-                    }
-                }
-            }
-            descriptionsForHarvestedFiles = null;
-            harvestedDatasetIds = null;
-
-        }
-
         // determine which of the objects are linked:
diff --git a/src/main/resources/db/migration/V6.1.0.2__9686-move-harvestingclient-id.sql b/src/main/resources/db/migration/V6.1.0.2__9686-move-harvestingclient-id.sql
deleted file mode 100644
index 67ba026745f..00000000000
--- a/src/main/resources/db/migration/V6.1.0.2__9686-move-harvestingclient-id.sql
+++ /dev/null
@@ -1,14 +0,0 @@
-ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS harvestingclient_id BIGINT;
-
---add harvesting client id to dvobject records of harvested datasets
-update dvobject dvo set harvestingclient_id = s.harvestingclient_id from
-(select id, harvestingclient_id from dataset d where d.harvestingclient_id is not null) s
-where s.id = dvo.id;
-
---add harvesting client id to dvobject records of harvested files
-update dvobject dvo set harvestingclient_id = s.harvestingclient_id from
-(select id, harvestingclient_id from dataset d where d.harvestingclient_id is not null) s
-where s.id = dvo.owner_id;
-
-ALTER TABLE dataset drop COLUMN IF EXISTS harvestingclient_id;
-
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 087db4858b2..9b51be4b365 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -2548,8 +2548,6 @@ public void testLinkingDatasets() {
         EntityManager entityManager = entityManagerFactory.createEntityManager();
         entityManager.getTransaction().begin();
         // Do stuff...
-        //SEK 01/22/2024 - as of 6.2 harvestingclient_id will be on the dv object table
-        // so if this is ever implemented change will probably need to happen in the updatequery below
         entityManager.createNativeQuery("UPDATE dataset SET harvestingclient_id=1 WHERE id="+datasetId2).executeUpdate();
         entityManager.getTransaction().commit();
         entityManager.close();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java
index 1425b7bc5d9..e3328eefb4a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java
@@ -5,8 +5,6 @@
 import edu.harvard.iq.dataverse.metrics.MetricsUtil;
 import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 import static jakarta.ws.rs.core.Response.Status.OK;
-import java.time.LocalDate;
-import java.time.format.DateTimeFormatter;
 
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
@@ -18,13 +16,10 @@
 //To improve these tests we should try adding data and see if the number DOESN'T
 //go up to show that the caching worked
 public class MetricsIT {
-
-    private static String yyyymm;
 
     @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
-        yyyymm = LocalDate.now().format(DateTimeFormatter.ofPattern(MetricsUtil.YEAR_AND_MONTH_PATTERN));
         UtilIT.clearMetricCache();
     }
 
@@ -35,7 +30,8 @@ public static void cleanUpClass() {
 
     @Test
     public void testGetDataversesToMonth() {
-
+        String yyyymm = "2018-04";
+//        yyyymm = null;
         Response response = UtilIT.metricsDataversesToMonth(yyyymm, null);
         String precache = response.prettyPrint();
         response.then().assertThat()
@@ -58,7 +54,8 @@ public void testGetDataversesToMonth() {
 
     @Test
     public void testGetDatasetsToMonth() {
-
+        String yyyymm = "2018-04";
+//        yyyymm = null;
         Response response = UtilIT.metricsDatasetsToMonth(yyyymm, null);
         String precache = response.prettyPrint();
         response.then().assertThat()
@@ -80,7 +77,8 @@ public void testGetDatasetsToMonth() {
 
     @Test
     public void testGetFilesToMonth() {
-
+        String yyyymm = "2018-04";
+//        yyyymm = null;
         Response response = UtilIT.metricsFilesToMonth(yyyymm, null);
         String precache = response.prettyPrint();
         response.then().assertThat()
@@ -102,7 +100,8 @@ public void testGetFilesToMonth() {
 
     @Test
     public void testGetDownloadsToMonth() {
-
+        String yyyymm = "2018-04";
+//        yyyymm = null;
         Response response = UtilIT.metricsDownloadsToMonth(yyyymm, null);
         String precache = response.prettyPrint();
         response.then().assertThat()