From c803e94a9acbdfeb4f5efeeb6cba1416d3303eb2 Mon Sep 17 00:00:00 2001 From: Niels Erik Date: Wed, 18 Sep 2024 23:27:21 +0200 Subject: [PATCH] MODHAADM-71 log pruning (#118) * MODHAADM-71 add timer process for purging logs * add Okapi timer process for deleting old jobs, record failures and logs * add APIs for importing jobs, record failures and logs, i.e. from exports or from other FOLIO instances * MODHAADM-71 add timer process for purging logs * add Okapi timer process for deleting old jobs, record failures and logs * add APIs for importing jobs, record failures and logs, i.e. from exports or from other FOLIO instances * MODHAADM-71 wip Reorganize Java classes (rename packages, classes, move classes) Update logging dependencies Add Okapi client for configuration look-up Preparations for unit tests (settable clock, APIs for populating jobs and logs with sample data) * MODHAADM-71 wip Remove test resources directory * MODHAADM-71 wip, test infrastructure Expand test harness to cover local mod-harvester-admin APIs (purge old jobs) and FOLIO APIs (/configuration/entries) along existing legacy harvester interaction tests. * MODHAADM-71 wip, unit tests * MODHAADM-71 revert renaming of test suite class After renaming the test suite from HarvesterAdminTestSuite to HarvesterAdminTestSuiteIT, the environment variables set in the configuration of the maven-surefire-plugin are no longer available in the module when running the unit tests by `mvn install` on the command line. Reverting the name to HarvesterAdminTestSuite, at least until it is clear why the env vars disappear. When running the tests in IDEA, the environment variables seems to be forwarded fine with either name for the unit test suite class. 
* MODHAADM-71 separate test suites for exclude/include Harvester Adding profile for running tests against installed and running Harvester, activate with `-PharvesterTests` Default is to run unit tests that doesn't require Harvester * MODHAADM-71 Tests: adding fake configurations module * MODHAADM-71 Reorganize, fix tests * MODHAADM-71 Clean up db init code, error reporting * MODHAADM-71 support purge setting in German * MODHAADM-71 Add integration test suite - requires Harvester to be running (at localhost:8080) * MODHAADM-71 Documentation. Default schedule for timer process. * MODHAADM-71 SC --- NEWS.MD | 4 + README.MD | 27 + descriptors/ModuleDescriptor-template.json | 52 +- pom.xml | 60 ++- .../folio/harvesteradmin/MainVerticle.java | 2 +- .../foliodata/ConfigurationsClient.java | 37 ++ .../folio/harvesteradmin/foliodata/Folio.java | 22 + .../JobLauncher.java | 6 +- .../LegacyHarvesterStorage.java | 77 +-- .../dataconverters/HarvesterXml2Json.java | 3 +- .../HarvesterXml2JsonFailedRecords.java | 6 +- .../dataconverters/JsonToHarvesterXml.java | 5 +- .../ProcessedHarvesterResponse.java | 2 +- .../ProcessedHarvesterResponseDelete.java | 4 +- .../ProcessedHarvesterResponseGet.java | 15 +- .../ProcessedHarvesterResponseGetById.java | 6 +- ...essedHarvesterResponseGetUniqueByName.java | 2 +- .../ProcessedHarvesterResponsePost.java | 2 +- .../ProcessedHarvesterResponsePut.java | 2 +- .../statics/ApiPaths.java | 2 +- .../statics/EntityRootNames.java | 12 +- .../statics/LegacyServiceConfig.java | 2 +- .../statics/RequestParameters.java | 2 +- .../harvesteradmin/moduledata/HarvestJob.java | 34 +- .../moduledata/JsonProperty.java | 4 +- .../harvesteradmin/moduledata/LogLine.java | 14 +- .../harvesteradmin/moduledata/PgColumn.java | 2 +- .../moduledata/RecordFailure.java | 24 +- .../moduledata/StoredEntity.java | 4 +- .../moduledata/database/DatabaseInit.java | 70 +++ .../database/ModuleStorageAccess.java} | 111 +++-- .../moduledata/{ => database}/SqlQuery.java | 2 +- 
.../moduledata/database/Tables.java | 9 + .../harvesteradmin/modulestorage/Schema.java | 84 ---- .../service/HarvestAdminService.java | 210 ++++++-- .../harvesteradmin/utils/Miscellaneous.java | 50 ++ .../harvesteradmin/utils/SettableClock.java | 109 ++++ .../resources/openapi/harvest-admin-1.0.yaml | 127 +++-- .../schemas/failedRecordPreviousJob.json | 2 +- .../org/folio/harvesteradmin/test/Api.java | 195 ++++---- ...ava => HarvesterIntegrationTestSuite.java} | 143 ++---- .../test/NoHarvesterTestSuite.java | 427 ++++++++++++++++ .../folio/harvesteradmin/test/Statics.java | 21 + .../fakestorage/ConfigurationStorage.java | 19 + .../test/fakestorage/FakeFolioApis.java | 114 +++++ .../test/fakestorage/FolioApiRecord.java | 123 +++++ .../test/fakestorage/RecordStorage.java | 392 +++++++++++++++ .../test/fakestorage/StorageResponse.java | 12 + test-resources/apis/harvester-admin-endpoints | 8 - test-resources/apis/inventory-endpoints | 7 - test-resources/delete-admin-record-by-id.sh | 17 - test-resources/delete-admin-records.sh | 17 - test-resources/delete-inventory-record.sh | 22 - .../demo/diku/harvestable-diku-900020.json | 41 -- .../demo/diku/holdings-diku-900011.json | 13 - .../demo/diku/locations-diku-900012.json | 12 - .../demo/diku/oai-2-inst-diku-900010.json | 12 - .../demo/diku/post-demo-config-diku.sh | 25 - .../demo/diku/storage-diku-900001.json | 18 - .../demo/diku/transformation-diku-900015.json | 23 - .../demo/diku/xml-2-json-diku-900013.json | 14 - test-resources/demo/job-800020.json | 3 - test-resources/demo/job-900020.json | 3 - .../demo/north/harvestable-north-800020.json | 41 -- .../demo/north/holdings-north-800011.json | 13 - .../demo/north/locations-north-800012.json | 12 - .../demo/north/oai-2-inst-north-800010.json | 12 - .../demo/north/post-demo-config-north.sh | 25 - .../demo/north/storage-north-800001.json | 19 - .../north/transformation-north-800015.json | 23 - .../demo/north/xml-2-json-north-800013.json | 13 - 
test-resources/demo/start-job.sh | 16 - test-resources/demo/stop-job.sh | 16 - test-resources/get-admin-record-by-id.sh | 23 - test-resources/get-admin-records.sh | 24 - test-resources/get-inventory-record-by-id.sh | 19 - test-resources/get-inventory-records.sh | 24 - test-resources/get-script-by-step-id.sh | 16 - test-resources/logins/diku@localhost:9130.sh | 7 - test-resources/logins/login.sh | 26 - test-resources/logins/north@localhost:9130.sh | 7 - test-resources/post-admin-record.sh | 22 - test-resources/post-inventory-record.sh | 22 - test-resources/put-admin-record.sh | 23 - test-resources/put-inventory-record.sh | 23 - test-resources/put-script-to-step.sh | 18 - test-resources/step-800010.xslt | 465 ------------------ 87 files changed, 2152 insertions(+), 1646 deletions(-) create mode 100644 src/main/java/org/folio/harvesteradmin/foliodata/ConfigurationsClient.java create mode 100644 src/main/java/org/folio/harvesteradmin/foliodata/Folio.java rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/JobLauncher.java (97%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/LegacyHarvesterStorage.java (95%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/dataconverters/HarvesterXml2Json.java (98%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/dataconverters/HarvesterXml2JsonFailedRecords.java (94%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/dataconverters/JsonToHarvesterXml.java (96%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/responsehandlers/ProcessedHarvesterResponse.java (94%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/responsehandlers/ProcessedHarvesterResponseDelete.java (87%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/responsehandlers/ProcessedHarvesterResponseGet.java (74%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => 
legacydata}/responsehandlers/ProcessedHarvesterResponseGetById.java (93%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/responsehandlers/ProcessedHarvesterResponseGetUniqueByName.java (85%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/responsehandlers/ProcessedHarvesterResponsePost.java (97%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/responsehandlers/ProcessedHarvesterResponsePut.java (93%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/statics/ApiPaths.java (98%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/statics/EntityRootNames.java (90%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/statics/LegacyServiceConfig.java (98%) rename src/main/java/org/folio/harvesteradmin/{dataaccess => legacydata}/statics/RequestParameters.java (97%) create mode 100644 src/main/java/org/folio/harvesteradmin/moduledata/database/DatabaseInit.java rename src/main/java/org/folio/harvesteradmin/{modulestorage/Storage.java => moduledata/database/ModuleStorageAccess.java} (67%) rename src/main/java/org/folio/harvesteradmin/moduledata/{ => database}/SqlQuery.java (97%) create mode 100644 src/main/java/org/folio/harvesteradmin/moduledata/database/Tables.java delete mode 100644 src/main/java/org/folio/harvesteradmin/modulestorage/Schema.java create mode 100644 src/main/java/org/folio/harvesteradmin/utils/Miscellaneous.java create mode 100644 src/main/java/org/folio/harvesteradmin/utils/SettableClock.java rename src/test/java/org/folio/harvesteradmin/test/{HarvesterAdminTestSuite.java => HarvesterIntegrationTestSuite.java} (87%) create mode 100644 src/test/java/org/folio/harvesteradmin/test/NoHarvesterTestSuite.java create mode 100644 src/test/java/org/folio/harvesteradmin/test/Statics.java create mode 100644 src/test/java/org/folio/harvesteradmin/test/fakestorage/ConfigurationStorage.java create mode 100644 
src/test/java/org/folio/harvesteradmin/test/fakestorage/FakeFolioApis.java create mode 100644 src/test/java/org/folio/harvesteradmin/test/fakestorage/FolioApiRecord.java create mode 100644 src/test/java/org/folio/harvesteradmin/test/fakestorage/RecordStorage.java create mode 100644 src/test/java/org/folio/harvesteradmin/test/fakestorage/StorageResponse.java delete mode 100644 test-resources/apis/harvester-admin-endpoints delete mode 100644 test-resources/apis/inventory-endpoints delete mode 100755 test-resources/delete-admin-record-by-id.sh delete mode 100755 test-resources/delete-admin-records.sh delete mode 100755 test-resources/delete-inventory-record.sh delete mode 100644 test-resources/demo/diku/harvestable-diku-900020.json delete mode 100644 test-resources/demo/diku/holdings-diku-900011.json delete mode 100644 test-resources/demo/diku/locations-diku-900012.json delete mode 100644 test-resources/demo/diku/oai-2-inst-diku-900010.json delete mode 100755 test-resources/demo/diku/post-demo-config-diku.sh delete mode 100644 test-resources/demo/diku/storage-diku-900001.json delete mode 100644 test-resources/demo/diku/transformation-diku-900015.json delete mode 100644 test-resources/demo/diku/xml-2-json-diku-900013.json delete mode 100644 test-resources/demo/job-800020.json delete mode 100644 test-resources/demo/job-900020.json delete mode 100644 test-resources/demo/north/harvestable-north-800020.json delete mode 100644 test-resources/demo/north/holdings-north-800011.json delete mode 100644 test-resources/demo/north/locations-north-800012.json delete mode 100644 test-resources/demo/north/oai-2-inst-north-800010.json delete mode 100755 test-resources/demo/north/post-demo-config-north.sh delete mode 100644 test-resources/demo/north/storage-north-800001.json delete mode 100644 test-resources/demo/north/transformation-north-800015.json delete mode 100644 test-resources/demo/north/xml-2-json-north-800013.json delete mode 100755 test-resources/demo/start-job.sh delete mode 
100755 test-resources/demo/stop-job.sh delete mode 100755 test-resources/get-admin-record-by-id.sh delete mode 100755 test-resources/get-admin-records.sh delete mode 100755 test-resources/get-inventory-record-by-id.sh delete mode 100755 test-resources/get-inventory-records.sh delete mode 100755 test-resources/get-script-by-step-id.sh delete mode 100755 test-resources/logins/diku@localhost:9130.sh delete mode 100755 test-resources/logins/login.sh delete mode 100755 test-resources/logins/north@localhost:9130.sh delete mode 100755 test-resources/post-admin-record.sh delete mode 100755 test-resources/post-inventory-record.sh delete mode 100755 test-resources/put-admin-record.sh delete mode 100755 test-resources/put-inventory-record.sh delete mode 100755 test-resources/put-script-to-step.sh delete mode 100644 test-resources/step-800010.xslt diff --git a/NEWS.MD b/NEWS.MD index 59ed117..f85843f 100644 --- a/NEWS.MD +++ b/NEWS.MD @@ -1,3 +1,7 @@ +## 1.3.0 IN PROGRESS + +* [MODHAADM-71](https://issues.folio.org/browse/MODHAADM-71) Provides timer process for automatic purge of past job runs and logs. + ## 1.2.1 2024-09-06 * [MODHAADM-94](https://issues.folio.org/browse/MODHAADM-94) Observes Harvester's timezone when fetching logs for latest harvest run. diff --git a/README.MD b/README.MD index 55c0f48..900cd7c 100644 --- a/README.MD +++ b/README.MD @@ -759,6 +759,32 @@ to push the correct, up-to-date [status information in the POST body](src/main/resources/openapi/schemas/harvestJobStatus.json) to the history. +#### Automatic clean up of past job runs and job logs + +The module has a scheduled process for cleaning up old job runs together with their logs and any failed records saved for the jobs. By default, +the job is set to run each night at 2 AM in the Central European time zone (CET), and it will then by default remove jobs that +are more than three months old. 
+ +The timer process can be disabled with + +``` +curl -XPATCH -d'{"id":"mod-harvester-admin_0","routingEntry":{"delay":"0"}}' \ + http://localhost:9130/_/proxy/tenants/{tenantId}/timers +``` + +The age at which old jobs should be deleted can be changed by posting a configuration like this to `configurations/entries` : + +``` +{ + "module": "HARVESTER_ADMIN", + "configName": "PURGE_LOGS_AFTER", + "value": "2 MONTHS" +} +``` + +The format for `value` is an integer followed by a time unit that can be any of "DAY[S]", "TAG[E]", "WEEK[S]", +"WOCHE[N]", "MONTH[S]", or "MONAT[E]". It can be uppercase or lowercase. + #### View current and historic harvest job logs and error reports If the logs are saved to history, there are thus two sets of APIs for retrieving configurations and logs; one for @@ -808,6 +834,7 @@ updates happens several times a day in which case the current logs will frequent | Content | [Error report for a single incoming record](src/main/resources/openapi/schemas/failedRecordCurrentJob.json) | [Error report for a single incoming record](src/main/resources/openapi/schemas/failedRecordPreviousJob.json) | | Identifier | `harvestableId` and `recordNumber` | `id` (uuid) | | Mutating? | Yes, the error report can disappear with next job run. | No. Unless the administrator decides to delete old logs and error reports.
| + ### Running harvest jobs diff --git a/descriptors/ModuleDescriptor-template.json b/descriptors/ModuleDescriptor-template.json index cb65558..65aa437 100644 --- a/descriptors/ModuleDescriptor-template.json +++ b/descriptors/ModuleDescriptor-template.json @@ -83,6 +83,10 @@ "pathPattern": "/harvester-admin/previous-jobs", "permissionsRequired": ["harvester-admin.previous-jobs.collection.get"] }, { + "methods": ["POST"], + "pathPattern": "/harvester-admin/previous-jobs", + "permissionsRequired": ["harvester-admin.previous-jobs.item.post"] + },{ "methods": ["GET"], "pathPattern": "/harvester-admin/previous-jobs/failed-records", "permissionsRequired": ["harvester-admin.previous-jobs.failed-records.collection.get"] @@ -102,10 +106,18 @@ "methods": ["GET"], "pathPattern": "/harvester-admin/previous-jobs/{id}/log", "permissionsRequired": ["harvester-admin.previous-jobs.log.get"] + },{ + "methods": ["POST"], + "pathPattern": "/harvester-admin/previous-jobs/{id}/log", + "permissionsRequired": ["harvester-admin.previous-jobs.log.post"] },{ "methods": ["GET"], "pathPattern": "/harvester-admin/previous-jobs/{id}/failed-records", "permissionsRequired": ["harvester-admin.previous-jobs.failed-records.collection.get"] + },{ + "methods": ["POST"], + "pathPattern": "/harvester-admin/previous-jobs/{id}/failed-records", + "permissionsRequired": ["harvester-admin.previous-jobs.failed-records.collection.post"] },{ "methods": ["GET"], "pathPattern": "/harvester-admin/storages", @@ -224,6 +236,26 @@ "permissionsRequired": [] } ] + }, + { + "id": "_timer", + "version": "1.0", + "interfaceType": "system", + "handlers": [ + { + "methods": [ + "POST" + ], + "pathPattern": "/harvester-admin/purge-aged-logs", + "modulePermissions": [ + "configuration.entries.collection.get" + ], + "schedule": { + "cron": "0 2 * * *", + "zone": "CET" + } + } + ] } ], "requires": [], @@ -288,6 +320,11 @@ "displayName": "harvester admin - view info about a finished harvest job ", "description": "view info about 
a finished harvest job" }, + { + "permissionName": "harvester-admin.previous-jobs.item.post", + "displayName": "harvester admin - backdoor for adding job logs", + "description": "add previous job information directly to the database independently of a job run, i.e. by import from a different FOLIO instance" + }, { "permissionName": "harvester-admin.previous-jobs.item.delete", "displayName": "harvester admin - delete a previous job run with all its logs", @@ -298,11 +335,21 @@ "displayName": "harvester admin - view past harvest job logs", "description": "get log statements for past harvest jobs" }, + { + "permissionName": "harvester-admin.previous-jobs.log.post", + "displayName": "harvester-admin - backdoor for creating logs for a job", + "description": "creating logs for a job without running a job, for example to import logs from another FOLIO instance" + }, { "permissionName": "harvester-admin.previous-jobs.failed-records.collection.get", "displayName": "harvester admin - view failed records for a past harvest job", "description": "get failed records for past harvest jobs" }, + { + "permissionName": "harvester-admin.previous-jobs.failed-records.collection.post", + "displayName": "harvester admin - backdoor for adding failed record entries", + "description": "add failed record entries without running a job, for example to import failure records from another FOLIO instance" + }, { "permissionName": "harvester-admin.storages.collection.get", "displayName": "harvester admin - get storage collection", @@ -483,10 +530,13 @@ "harvester-admin.harvestables.failed-records.item.get", "harvester-admin.previous-jobs.collection.get", "harvester-admin.previous-jobs.item.get", + "harvester-admin.previous-jobs.item.post", "harvester-admin.previous-jobs.item.delete", "harvester-admin.previous-jobs.log.get", + "harvester-admin.previous-jobs.log.post", "harvester-admin.previous-jobs.failed-records.collection.get", - "harvester-admin.previous-jobs.failed-records.item.get" + 
"harvester-admin.previous-jobs.failed-records.item.get", + "harvester-admin.previous-jobs.failed-records.collection.post" ] } ], diff --git a/pom.xml b/pom.xml index 0dc9fee..682615d 100644 --- a/pom.xml +++ b/pom.xml @@ -3,14 +3,13 @@ 4.0.0 org.folio mod-harvester-admin - 1.2.2-SNAPSHOT + 1.3.0-SNAPSHOT UTF-8 5.3.0 4.5.3 18 18 - true @@ -73,7 +72,7 @@ org.testcontainers testcontainers-bom - 1.19.6 + 1.20.1 pom import @@ -143,11 +142,17 @@ org.apache.logging.log4j - log4j-api + log4j-core org.apache.logging.log4j - log4j-core + log4j-api + + + org.slf4j + slf4j-jdk14 + 2.1.0-alpha1 + test org.folio.okapi @@ -179,17 +184,6 @@ rest-assured test - - - org.apache.logging.log4j - log4j-slf4j-impl - test - - - org.apache.logging.log4j - log4j-jul - test - org.folio.okapi okapi-testing @@ -256,15 +250,10 @@ 8080 false - - **/package/**XYZ.class - - **/test/HarvesterAdminTestSuite.class - **/UnitTest.class + **/test/NoHarvesterTestSuite.class - org.apache.maven.plugins @@ -444,6 +433,33 @@ + + + harvesterTests + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.22.2 + + false + + http + localhost + 8080 + false + + + **/test/HarvesterIntegrationTestSuite.class + **/test/NoHarvesterTestSuite.class + + + + + + + folio-nexus diff --git a/src/main/java/org/folio/harvesteradmin/MainVerticle.java b/src/main/java/org/folio/harvesteradmin/MainVerticle.java index afa1056..5097c26 100644 --- a/src/main/java/org/folio/harvesteradmin/MainVerticle.java +++ b/src/main/java/org/folio/harvesteradmin/MainVerticle.java @@ -3,7 +3,7 @@ import io.vertx.core.AbstractVerticle; import io.vertx.core.Promise; import io.vertx.core.http.HttpServerOptions; -import org.folio.harvesteradmin.dataaccess.statics.LegacyServiceConfig; +import org.folio.harvesteradmin.legacydata.statics.LegacyServiceConfig; import org.folio.harvesteradmin.service.HarvestAdminService; import org.folio.okapi.common.Config; import org.folio.tlib.RouterCreator; diff --git 
a/src/main/java/org/folio/harvesteradmin/foliodata/ConfigurationsClient.java b/src/main/java/org/folio/harvesteradmin/foliodata/ConfigurationsClient.java new file mode 100644 index 0000000..110d47c --- /dev/null +++ b/src/main/java/org/folio/harvesteradmin/foliodata/ConfigurationsClient.java @@ -0,0 +1,37 @@ +package org.folio.harvesteradmin.foliodata; + +import io.vertx.core.Future; +import io.vertx.core.json.JsonArray; +import io.vertx.core.json.JsonObject; +import io.vertx.ext.web.RoutingContext; +import io.vertx.reactivex.core.Promise; + +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; + +public class ConfigurationsClient { + private static final String CONFIGURATIONS_PATH = "/configurations/entries"; + private static final String RECORDS = "configs"; + public static final String MODULE_HARVESTER_ADMIN = "HARVESTER_ADMIN"; + public static final String CONFIG_NAME_PURGE_LOGS_AFTER = "PURGE_LOGS_AFTER"; + + public static Future getStringValue (RoutingContext routingContext, String moduleName, String configName) { + String query = "module==" + moduleName + " and configName==" + configName + " and enabled=true"; + Promise promise = Promise.promise(); + Folio.okapiClient(routingContext).get(CONFIGURATIONS_PATH + + "?query=(" + URLEncoder.encode(query, StandardCharsets.UTF_8) +")") + .onComplete(response -> { + JsonObject json = new JsonObject(response.result()); + JsonArray entries = json.getJsonArray(RECORDS); + if (entries.isEmpty()) { + promise.complete(null); + + } else { + JsonObject entry = entries.getJsonObject(0); + promise.complete(entry.getString("value")); + } + }); + return promise.future(); + } + +} diff --git a/src/main/java/org/folio/harvesteradmin/foliodata/Folio.java b/src/main/java/org/folio/harvesteradmin/foliodata/Folio.java new file mode 100644 index 0000000..6a2701f --- /dev/null +++ b/src/main/java/org/folio/harvesteradmin/foliodata/Folio.java @@ -0,0 +1,22 @@ +package org.folio.harvesteradmin.foliodata; + +import 
io.vertx.ext.web.RoutingContext; +import org.folio.okapi.common.OkapiClient; +import org.folio.okapi.common.WebClientFactory; + +import java.util.HashMap; +import java.util.Map; + +public class Folio { + public static OkapiClient okapiClient(RoutingContext ctx) { + OkapiClient client = new OkapiClient(WebClientFactory.getWebClient(ctx.vertx()), ctx); + Map headers = new HashMap<>(); + headers.put("Content-type", "application/json"); + if (ctx.request().getHeader("X-Okapi-Tenant") != null) headers.put("X-Okapi-Tenant", ctx.request().getHeader("X-Okapi-Tenant")); + if (ctx.request().getHeader("X-Okapi-Token") != null) headers.put("X-Okapi-Token", ctx.request().getHeader("X-Okapi-Token")); + headers.put("Accept", "application/json, text/plain"); + client.setHeaders(headers); + return client; + } + +} diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/JobLauncher.java b/src/main/java/org/folio/harvesteradmin/legacydata/JobLauncher.java similarity index 97% rename from src/main/java/org/folio/harvesteradmin/dataaccess/JobLauncher.java rename to src/main/java/org/folio/harvesteradmin/legacydata/JobLauncher.java index 9014f0c..8f4ea7b 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/JobLauncher.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/JobLauncher.java @@ -1,6 +1,6 @@ -package org.folio.harvesteradmin.dataaccess; +package org.folio.harvesteradmin.legacydata; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_HARVESTABLES_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_HARVESTABLES_PATH; import static org.folio.okapi.common.HttpResponse.responseJson; import static org.folio.okapi.common.HttpResponse.responseText; @@ -22,7 +22,7 @@ public class JobLauncher extends LegacyHarvesterStorage { private static final int BAD_REQUEST = 400; private static final int OK = 200; - private static SimpleDateFormat dateFormat; + private final SimpleDateFormat dateFormat; /** * 
Constructor. diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/LegacyHarvesterStorage.java b/src/main/java/org/folio/harvesteradmin/legacydata/LegacyHarvesterStorage.java similarity index 95% rename from src/main/java/org/folio/harvesteradmin/dataaccess/LegacyHarvesterStorage.java rename to src/main/java/org/folio/harvesteradmin/legacydata/LegacyHarvesterStorage.java index cab1a2b..4f11ec3 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/LegacyHarvesterStorage.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/LegacyHarvesterStorage.java @@ -1,17 +1,17 @@ -package org.folio.harvesteradmin.dataaccess; +package org.folio.harvesteradmin.legacydata; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_HARVESTABLES_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_STEPS_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_STORAGES_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_TRANSFORMATIONS_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_TSAS_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.harvesterPathByRequestPath; -import static org.folio.harvesteradmin.dataaccess.statics.EntityRootNames.mapToNameOfArrayOfEntities; -import static org.folio.harvesteradmin.dataaccess.statics.EntityRootNames.mapToNameOfRootOfEntity; -import static org.folio.harvesteradmin.dataaccess.statics.EntityRootNames.typeToEmbeddedTypeMap; -import static org.folio.harvesteradmin.dataaccess.statics.RequestParameters.crosswalkCqlFieldNames; -import static org.folio.harvesteradmin.dataaccess.statics.RequestParameters.crosswalkRequestParameterNames; -import static org.folio.harvesteradmin.dataaccess.statics.RequestParameters.supportedGetRequestParameters; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_HARVESTABLES_PATH; +import static 
org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_STEPS_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_STORAGES_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_TRANSFORMATIONS_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_TSAS_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.harvesterPathByRequestPath; +import static org.folio.harvesteradmin.legacydata.statics.EntityRootNames.mapToNameOfArrayOfEntities; +import static org.folio.harvesteradmin.legacydata.statics.EntityRootNames.mapToNameOfRootOfEntity; +import static org.folio.harvesteradmin.legacydata.statics.EntityRootNames.typeToEmbeddedTypeMap; +import static org.folio.harvesteradmin.legacydata.statics.RequestParameters.crosswalkCqlFieldNames; +import static org.folio.harvesteradmin.legacydata.statics.RequestParameters.crosswalkRequestParameterNames; +import static org.folio.harvesteradmin.legacydata.statics.RequestParameters.supportedGetRequestParameters; import static org.folio.okapi.common.HttpResponse.responseText; import io.vertx.core.AsyncResult; @@ -33,31 +33,31 @@ import java.io.StringReader; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; -import java.time.Instant; +import java.security.SecureRandom; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Random; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.folio.harvesteradmin.dataaccess.dataconverters.JsonToHarvesterXml; -import org.folio.harvesteradmin.dataaccess.responsehandlers.ProcessedHarvesterResponse; 
-import org.folio.harvesteradmin.dataaccess.responsehandlers.ProcessedHarvesterResponseDelete; -import org.folio.harvesteradmin.dataaccess.responsehandlers.ProcessedHarvesterResponseGet; -import org.folio.harvesteradmin.dataaccess.responsehandlers.ProcessedHarvesterResponseGetById; -import org.folio.harvesteradmin.dataaccess.responsehandlers.ProcessedHarvesterResponseGetUniqueByName; -import org.folio.harvesteradmin.dataaccess.responsehandlers.ProcessedHarvesterResponsePost; -import org.folio.harvesteradmin.dataaccess.responsehandlers.ProcessedHarvesterResponsePut; -import org.folio.harvesteradmin.dataaccess.statics.ApiPaths; -import org.folio.harvesteradmin.dataaccess.statics.EntityRootNames; -import org.folio.harvesteradmin.dataaccess.statics.LegacyServiceConfig; +import org.folio.harvesteradmin.legacydata.dataconverters.JsonToHarvesterXml; +import org.folio.harvesteradmin.legacydata.responsehandlers.ProcessedHarvesterResponse; +import org.folio.harvesteradmin.legacydata.responsehandlers.ProcessedHarvesterResponseDelete; +import org.folio.harvesteradmin.legacydata.responsehandlers.ProcessedHarvesterResponseGet; +import org.folio.harvesteradmin.legacydata.responsehandlers.ProcessedHarvesterResponseGetById; +import org.folio.harvesteradmin.legacydata.responsehandlers.ProcessedHarvesterResponseGetUniqueByName; +import org.folio.harvesteradmin.legacydata.responsehandlers.ProcessedHarvesterResponsePost; +import org.folio.harvesteradmin.legacydata.responsehandlers.ProcessedHarvesterResponsePut; +import org.folio.harvesteradmin.legacydata.statics.ApiPaths; +import org.folio.harvesteradmin.legacydata.statics.EntityRootNames; +import org.folio.harvesteradmin.legacydata.statics.LegacyServiceConfig; +import org.folio.harvesteradmin.utils.SettableClock; import org.folio.okapi.common.GenericCompositeFuture; import org.xml.sax.SAXException; @@ -80,8 +80,9 @@ public class LegacyHarvesterStorage { private static final DateTimeFormatter iso_instant = 
DateTimeFormatter.ISO_INSTANT; private static final Logger logger = LogManager.getLogger(LegacyHarvesterStorage.class); + private static final SecureRandom random = new SecureRandom(); private static final Iterator fifteenDigitLongs = - new Random().longs(100000000000000L, 999999999999999L).iterator(); + random.longs(100000000000000L, 999999999999999L).iterator(); public LegacyHarvesterStorage(Vertx vertx, String tenant) { this.tenant = tenant; @@ -338,7 +339,7 @@ public Future doPostConfigRecord(RoutingContext } else { JsonObject jsonToPost = routingContext.body().asJsonObject(); if (harvesterPath.equals(HARVESTER_HARVESTABLES_PATH)) { - jsonToPost.put("lastUpdated", iso_instant.format(Instant.now())); + jsonToPost.put("lastUpdated", iso_instant.format(SettableClock.getInstant())); JsonObject transformationReference = jsonToPost.getJsonObject("transformation"); if (!transformationReference.containsKey("entityType")) { transformationReference.put("entityType", "basicTransformation"); @@ -414,7 +415,7 @@ public Future putConfigRecord(RoutingContext rout if (harvesterPath.equals(HARVESTER_TRANSFORMATIONS_PATH)) { return putTransformation(routingContext); } else if (harvesterPath.equals(HARVESTER_HARVESTABLES_PATH)) { - jsonToPut.put("lastUpdated", iso_instant.format(Instant.now())); + jsonToPut.put("lastUpdated", iso_instant.format(SettableClock.getInstant())); } } return putConfigRecord(routingContext, harvesterPath, jsonToPut, id); @@ -718,7 +719,19 @@ private Future doPostAndPutTransformation( } }); } else { - logger.error(transformationPost.cause().getMessage()); + if (!transformationPost.succeeded()) { + promise.complete( + new ProcessedHarvesterResponsePost(500, + "There was a problem posting transformation to legacy harvester " + transformationPost.cause().getMessage())); + } else { + if (!(transformationPost.result().statusCode() == CREATED)) { + promise.complete( + new ProcessedHarvesterResponsePost(500, + "There was a problem posting transformation to 
legacy harvester, status was " + transformationPost.result().statusCode())); + + } + } + logger.error(transformationPost.cause()); } }); } @@ -1136,7 +1149,9 @@ public Future putScript(RoutingContext routingCon */ private static String validateScriptAsXml(String script) { try { - DocumentBuilder parser = DocumentBuilderFactory.newInstance().newDocumentBuilder(); + DocumentBuilderFactory builder = DocumentBuilderFactory.newInstance(); + builder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + DocumentBuilder parser = builder.newDocumentBuilder(); parser.parse(new ByteArrayInputStream(script.getBytes(StandardCharsets.UTF_8))); } catch (ParserConfigurationException | IOException | SAXException pe) { return "Validation failed for script [ " + script + "]: " + pe.getMessage(); diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/dataconverters/HarvesterXml2Json.java b/src/main/java/org/folio/harvesteradmin/legacydata/dataconverters/HarvesterXml2Json.java similarity index 98% rename from src/main/java/org/folio/harvesteradmin/dataaccess/dataconverters/HarvesterXml2Json.java rename to src/main/java/org/folio/harvesteradmin/legacydata/dataconverters/HarvesterXml2Json.java index cf8943b..8cdb170 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/dataconverters/HarvesterXml2Json.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/dataconverters/HarvesterXml2Json.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.dataaccess.dataconverters; +package org.folio.harvesteradmin.legacydata.dataconverters; import io.vertx.core.json.DecodeException; import io.vertx.core.json.JsonArray; @@ -274,6 +274,7 @@ protected static Iterable iterable(Node node) { protected static Document xmlStringToXmlDocument(String xmlString) throws IOException, ParserConfigurationException, SAXException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + 
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); DocumentBuilder builder = factory.newDocumentBuilder(); return builder.parse(new InputSource(new StringReader(xmlString))); } diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/dataconverters/HarvesterXml2JsonFailedRecords.java b/src/main/java/org/folio/harvesteradmin/legacydata/dataconverters/HarvesterXml2JsonFailedRecords.java similarity index 94% rename from src/main/java/org/folio/harvesteradmin/dataaccess/dataconverters/HarvesterXml2JsonFailedRecords.java rename to src/main/java/org/folio/harvesteradmin/legacydata/dataconverters/HarvesterXml2JsonFailedRecords.java index 26d839c..58b4b5a 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/dataconverters/HarvesterXml2JsonFailedRecords.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/dataconverters/HarvesterXml2JsonFailedRecords.java @@ -1,13 +1,13 @@ -package org.folio.harvesteradmin.dataaccess.dataconverters; +package org.folio.harvesteradmin.legacydata.dataconverters; -import static org.folio.harvesteradmin.dataaccess.dataconverters.JsonToHarvesterXml.writeXmlNodeToString; +import static org.folio.harvesteradmin.legacydata.dataconverters.JsonToHarvesterXml.writeXmlNodeToString; import io.vertx.core.json.DecodeException; import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; import java.util.Map; import javax.xml.transform.TransformerException; -import org.w3c.dom.Document; + import org.w3c.dom.Element; import org.w3c.dom.Node; diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/dataconverters/JsonToHarvesterXml.java b/src/main/java/org/folio/harvesteradmin/legacydata/dataconverters/JsonToHarvesterXml.java similarity index 96% rename from src/main/java/org/folio/harvesteradmin/dataaccess/dataconverters/JsonToHarvesterXml.java rename to src/main/java/org/folio/harvesteradmin/legacydata/dataconverters/JsonToHarvesterXml.java index 6d0f8a6..a0bd667 100644 --- 
a/src/main/java/org/folio/harvesteradmin/dataaccess/dataconverters/JsonToHarvesterXml.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/dataconverters/JsonToHarvesterXml.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.dataaccess.dataconverters; +package org.folio.harvesteradmin.legacydata.dataconverters; import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; @@ -50,6 +50,7 @@ public static String convertToHarvesterRecord(JsonObject json, String rootProper private static Document recordJsonToHarvesterXml(JsonObject json) throws DOMException, ParserConfigurationException { DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); + docFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); DocumentBuilder docBuilder = docFactory.newDocumentBuilder(); Document doc = docBuilder.newDocument(); @@ -134,6 +135,7 @@ private static void iterateJsonArray(String arrayName, JsonArray array, Document */ private static String writeXmlDocumentToString(Document xmlDocument) throws TransformerException { TransformerFactory tf = TransformerFactory.newInstance(); + //tf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); Transformer transformer; transformer = tf.newTransformer(); StringWriter writer = new StringWriter(); @@ -146,6 +148,7 @@ private static String writeXmlDocumentToString(Document xmlDocument) throws Tran */ public static String writeXmlNodeToString(Node node) throws TransformerException { TransformerFactory tf = TransformerFactory.newInstance(); + tf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); Transformer transformer; transformer = tf.newTransformer(); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponse.java b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponse.java similarity index 94% 
rename from src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponse.java rename to src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponse.java index beed30f..d61adf8 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponse.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponse.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.dataaccess.responsehandlers; +package org.folio.harvesteradmin.legacydata.responsehandlers; import io.vertx.core.json.JsonObject; diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseDelete.java b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseDelete.java similarity index 87% rename from src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseDelete.java rename to src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseDelete.java index efef1bd..8a6797a 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseDelete.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseDelete.java @@ -1,6 +1,6 @@ -package org.folio.harvesteradmin.dataaccess.responsehandlers; +package org.folio.harvesteradmin.legacydata.responsehandlers; -import static org.folio.harvesteradmin.dataaccess.LegacyHarvesterStorage.NO_CONTENT; +import static org.folio.harvesteradmin.legacydata.LegacyHarvesterStorage.NO_CONTENT; import io.vertx.core.AsyncResult; import io.vertx.core.buffer.Buffer; diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseGet.java b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseGet.java similarity index 74% 
rename from src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseGet.java rename to src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseGet.java index 89f5e0c..6a39aab 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseGet.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseGet.java @@ -1,16 +1,12 @@ -package org.folio.harvesteradmin.dataaccess.responsehandlers; +package org.folio.harvesteradmin.legacydata.responsehandlers; import io.vertx.core.AsyncResult; import io.vertx.core.buffer.Buffer; import io.vertx.core.json.JsonObject; import io.vertx.ext.web.client.HttpResponse; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.folio.harvesteradmin.dataaccess.dataconverters.HarvesterXml2Json; +import org.folio.harvesteradmin.legacydata.dataconverters.HarvesterXml2Json; public class ProcessedHarvesterResponseGet extends ProcessedHarvesterResponse { - private static final Pattern exceptionDescriptionPattern = - Pattern.compile("(Exception Description:.*?[\\n\\r]+.*?[\\n\\r]+)", Pattern.DOTALL); /** * Constructor. 
@@ -27,12 +23,7 @@ public ProcessedHarvesterResponseGet(AsyncResult> response, } else if (harvesterStatusCode == 500 && bodyAsString.contains( "An exception occurred while creating a query in EntityManager")) { - Matcher m = exceptionDescriptionPattern.matcher(bodyAsString); - if (m.find()) { - errorMessage = "Query failed: " + m.group(1).replaceAll("'", "'"); - } else { - errorMessage = "Query failed: " + bodyAsString; - } + errorMessage = "Query failed: " + bodyAsString; statusCode = 400; jsonObject = new JsonObject(); } else { diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseGetById.java b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseGetById.java similarity index 93% rename from src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseGetById.java rename to src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseGetById.java index 5f8a160..b094f73 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseGetById.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseGetById.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.dataaccess.responsehandlers; +package org.folio.harvesteradmin.legacydata.responsehandlers; import io.vertx.core.AsyncResult; import io.vertx.core.buffer.Buffer; @@ -6,8 +6,8 @@ import io.vertx.ext.web.client.HttpResponse; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.folio.harvesteradmin.dataaccess.dataconverters.HarvesterXml2Json; -import org.folio.harvesteradmin.dataaccess.statics.LegacyServiceConfig; +import org.folio.harvesteradmin.legacydata.dataconverters.HarvesterXml2Json; +import org.folio.harvesteradmin.legacydata.statics.LegacyServiceConfig; public class ProcessedHarvesterResponseGetById extends 
ProcessedHarvesterResponse { diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseGetUniqueByName.java b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseGetUniqueByName.java similarity index 85% rename from src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseGetUniqueByName.java rename to src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseGetUniqueByName.java index 4a5827b..94f5291 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponseGetUniqueByName.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponseGetUniqueByName.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.dataaccess.responsehandlers; +package org.folio.harvesteradmin.legacydata.responsehandlers; import io.vertx.core.json.JsonObject; diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponsePost.java b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponsePost.java similarity index 97% rename from src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponsePost.java rename to src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponsePost.java index bfbb7ad..c7507c6 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponsePost.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponsePost.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.dataaccess.responsehandlers; +package org.folio.harvesteradmin.legacydata.responsehandlers; import io.vertx.core.AsyncResult; import io.vertx.core.buffer.Buffer; diff --git 
a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponsePut.java b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponsePut.java similarity index 93% rename from src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponsePut.java rename to src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponsePut.java index 302f788..3853e2e 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/responsehandlers/ProcessedHarvesterResponsePut.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/responsehandlers/ProcessedHarvesterResponsePut.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.dataaccess.responsehandlers; +package org.folio.harvesteradmin.legacydata.responsehandlers; import io.vertx.core.AsyncResult; import io.vertx.core.buffer.Buffer; diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/statics/ApiPaths.java b/src/main/java/org/folio/harvesteradmin/legacydata/statics/ApiPaths.java similarity index 98% rename from src/main/java/org/folio/harvesteradmin/dataaccess/statics/ApiPaths.java rename to src/main/java/org/folio/harvesteradmin/legacydata/statics/ApiPaths.java index 34f1887..3c578fe 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/statics/ApiPaths.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/statics/ApiPaths.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.dataaccess.statics; +package org.folio.harvesteradmin.legacydata.statics; import java.util.HashMap; import java.util.Map; diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/statics/EntityRootNames.java b/src/main/java/org/folio/harvesteradmin/legacydata/statics/EntityRootNames.java similarity index 90% rename from src/main/java/org/folio/harvesteradmin/dataaccess/statics/EntityRootNames.java rename to src/main/java/org/folio/harvesteradmin/legacydata/statics/EntityRootNames.java index 
41387ca..f6bb4a0 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/statics/EntityRootNames.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/statics/EntityRootNames.java @@ -1,10 +1,10 @@ -package org.folio.harvesteradmin.dataaccess.statics; +package org.folio.harvesteradmin.legacydata.statics; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_HARVESTABLES_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_STEPS_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_STORAGES_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_TRANSFORMATIONS_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_TSAS_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_HARVESTABLES_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_STEPS_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_STORAGES_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_TRANSFORMATIONS_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_TSAS_PATH; import java.util.HashMap; import java.util.Map; diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/statics/LegacyServiceConfig.java b/src/main/java/org/folio/harvesteradmin/legacydata/statics/LegacyServiceConfig.java similarity index 98% rename from src/main/java/org/folio/harvesteradmin/dataaccess/statics/LegacyServiceConfig.java rename to src/main/java/org/folio/harvesteradmin/legacydata/statics/LegacyServiceConfig.java index 8c7f42d..a934c84 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/statics/LegacyServiceConfig.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/statics/LegacyServiceConfig.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.dataaccess.statics; +package 
org.folio.harvesteradmin.legacydata.statics; import java.lang.management.ManagementFactory; import org.apache.logging.log4j.Level; diff --git a/src/main/java/org/folio/harvesteradmin/dataaccess/statics/RequestParameters.java b/src/main/java/org/folio/harvesteradmin/legacydata/statics/RequestParameters.java similarity index 97% rename from src/main/java/org/folio/harvesteradmin/dataaccess/statics/RequestParameters.java rename to src/main/java/org/folio/harvesteradmin/legacydata/statics/RequestParameters.java index 01eb48e..6a7b2f3 100644 --- a/src/main/java/org/folio/harvesteradmin/dataaccess/statics/RequestParameters.java +++ b/src/main/java/org/folio/harvesteradmin/legacydata/statics/RequestParameters.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.dataaccess.statics; +package org.folio.harvesteradmin.legacydata.statics; import java.util.HashMap; import java.util.HashSet; diff --git a/src/main/java/org/folio/harvesteradmin/moduledata/HarvestJob.java b/src/main/java/org/folio/harvesteradmin/moduledata/HarvestJob.java index 02b8465..d3a3ce1 100644 --- a/src/main/java/org/folio/harvesteradmin/moduledata/HarvestJob.java +++ b/src/main/java/org/folio/harvesteradmin/moduledata/HarvestJob.java @@ -10,7 +10,8 @@ import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.folio.harvesteradmin.modulestorage.Storage; + +import org.folio.harvesteradmin.moduledata.database.Tables; import org.folio.tlib.postgres.PgCqlDefinition; import org.folio.tlib.postgres.cqlfield.PgCqlFieldAlwaysMatches; @@ -55,6 +56,33 @@ public static HarvestJob fromHarvestableJson(JsonObject harvestableJson) { return harvestJob; } + /** + * Test purposes: For importing harvest job samples without running jobs to create them first + * @param harvestJobJson Could be harvest job JSON exported from another service + * @return POJO + */ + public static HarvestJob fromHarvestJobJson(JsonObject harvestJobJson) { + HarvestJob harvestJob = new HarvestJob(); + 
harvestJob.setId(UUID.fromString(harvestJobJson.getString(HarvestJobField.ID.propertyName()))); + harvestJob.setHarvestableId(harvestJobJson.getString(HarvestJobField.HARVESTABLE_ID.propertyName())); + harvestJob.setName(harvestJobJson.getString(HarvestJobField.HARVESTABLE_NAME.propertyName())); + harvestJob.setType(harvestJobJson.getString(HarvestJobField.HARVESTABLE_TYPE.propertyName())); + harvestJob.setUrl(harvestJobJson.getString(HarvestJobField.URL.propertyName())); + harvestJob.setAllowErrors(harvestJobJson.getString(HarvestJobField.ALLOW_ERRORS.propertyName())); + harvestJob.setRecordLimit(harvestJobJson.getString(HarvestJobField.RECORD_LIMIT.propertyName())); + harvestJob.setBatchSize(harvestJobJson.getString(HarvestJobField.BATCH_SIZE.propertyName())); + harvestJob.setTransformation(harvestJobJson.getString(HarvestJobField.TRANSFORMATION.propertyName())); + harvestJob.setStorage(harvestJobJson.getString(HarvestJobField.STORAGE.propertyName())); + harvestJob.setStatus(harvestJobJson.getString(HarvestJobField.STATUS.propertyName())); + harvestJob.setStartedAndFinished( + harvestJobJson.getString(HarvestJobField.STARTED.propertyName()), + harvestJobJson.getString(HarvestJobField.FINISHED.propertyName())); + harvestJob.setAmountHarvested(harvestJobJson.getInteger(HarvestJobField.AMOUNT_HARVESTED.propertyName())); + harvestJob.setMessage(harvestJobJson.getString(HarvestJobField.MESSAGE.propertyName())); + return harvestJob; + } + + /** * HarvestJob to JSON mapping. */ @@ -71,7 +99,7 @@ public String makeCreateTableSql(String schema) { .forEach(field -> columnsDdl.append(field.pgColumn().getColumnDdl()).append(",")); columnsDdl.deleteCharAt(columnsDdl.length() - 1); // remove ending comma - return "CREATE TABLE IF NOT EXISTS " + schema + "." + Storage.Table.harvest_job + return "CREATE TABLE IF NOT EXISTS " + schema + "." + Tables.harvest_job + "(" + columnsDdl + ")"; @@ -81,7 +109,7 @@ public String makeCreateTableSql(String schema) { * INSERT INTO statement. 
*/ public String makeInsertTemplate(String schema) { - return "INSERT INTO " + schema + "." + Storage.Table.harvest_job + return "INSERT INTO " + schema + "." + Tables.harvest_job + " (" + HarvestJobField.ID + ", " + HarvestJobField.HARVESTABLE_ID.columnName() + ", " diff --git a/src/main/java/org/folio/harvesteradmin/moduledata/JsonProperty.java b/src/main/java/org/folio/harvesteradmin/moduledata/JsonProperty.java index bb63821..5ca6bd0 100644 --- a/src/main/java/org/folio/harvesteradmin/moduledata/JsonProperty.java +++ b/src/main/java/org/folio/harvesteradmin/moduledata/JsonProperty.java @@ -6,13 +6,13 @@ public class JsonProperty { public final String type; public final String format; - enum Type { + public enum Type { string, integer, undefined } - enum Format { + public enum Format { undefined } diff --git a/src/main/java/org/folio/harvesteradmin/moduledata/LogLine.java b/src/main/java/org/folio/harvesteradmin/moduledata/LogLine.java index 518f15f..ecf68f4 100644 --- a/src/main/java/org/folio/harvesteradmin/moduledata/LogLine.java +++ b/src/main/java/org/folio/harvesteradmin/moduledata/LogLine.java @@ -11,7 +11,9 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; -import org.folio.harvesteradmin.modulestorage.Storage; + +import org.folio.harvesteradmin.moduledata.database.SqlQuery; +import org.folio.harvesteradmin.moduledata.database.Tables; import org.folio.tlib.postgres.PgCqlDefinition; import org.folio.tlib.postgres.cqlfield.PgCqlFieldAlwaysMatches; @@ -130,11 +132,11 @@ public static LogLine entity() { * CREATE TABLE SQL template. */ public String makeCreateTableSql(String schema) { - return "CREATE TABLE IF NOT EXISTS " + schema + "." + Storage.Table.log_statement + return "CREATE TABLE IF NOT EXISTS " + schema + "." + Tables.log_statement + "(" + LogLineField.ID.columnName() + " UUID PRIMARY KEY, " + LogLineField.HARVEST_JOB_ID.columnName() + " UUID NOT NULL REFERENCES " - + schema + "." 
+ Storage.Table.harvest_job + "(" + HarvestJobField.ID.columnName() + "), " + + schema + "." + Tables.harvest_job + "(" + HarvestJobField.ID.columnName() + "), " + LogLineField.SEQUENCE_NUMBER.columnName() + " INTEGER NOT NULL, " + LogLineField.TIME_STAMP.columnName() + " TIMESTAMP NOT NULL, " + LogLineField.LOG_LEVEL.columnName() + " TEXT NOT NULL, " @@ -149,7 +151,9 @@ public RowMapper getRowMapper() { LogLine logLine = new LogLine(); logLine.id = row.getUUID(LogLineField.ID.columnName()); logLine.harvestJobId = row.getUUID(LogLineField.HARVEST_JOB_ID.columnName()); - logLine.timeStamp = row.getLocalDateTime(LogLineField.TIME_STAMP.columnName()).toString(); + // Display in original legacy harvester date format, not the pg date format (supports importing the output) + logLine.timeStamp = row.getLocalDateTime(LogLineField.TIME_STAMP.columnName()) + .toString().replace("T", " ").replace(".",","); logLine.logLevel = row.getString(LogLineField.LOG_LEVEL.columnName()); logLine.jobLabel = row.getString(LogLineField.JOB_LABEL.columnName()); logLine.line = row.getString(LogLineField.LOG_STATEMENT.columnName()); @@ -161,7 +165,7 @@ public RowMapper getRowMapper() { * INSERT INTO statement. */ public String makeInsertTemplate(String schema) { - return "INSERT INTO " + schema + "." + Storage.Table.log_statement + return "INSERT INTO " + schema + "." 
+ Tables.log_statement + " (" + LogLineField.ID.columnName() + ", " + LogLineField.HARVEST_JOB_ID.columnName() + ", " diff --git a/src/main/java/org/folio/harvesteradmin/moduledata/PgColumn.java b/src/main/java/org/folio/harvesteradmin/moduledata/PgColumn.java index 2757ab5..123104c 100644 --- a/src/main/java/org/folio/harvesteradmin/moduledata/PgColumn.java +++ b/src/main/java/org/folio/harvesteradmin/moduledata/PgColumn.java @@ -13,7 +13,7 @@ public class PgColumn { public final String nullable; public final Boolean isPrimaryKey; - enum Type { + public enum Type { TEXT, INTEGER, BIGINT, diff --git a/src/main/java/org/folio/harvesteradmin/moduledata/RecordFailure.java b/src/main/java/org/folio/harvesteradmin/moduledata/RecordFailure.java index 9de1b8c..567a962 100644 --- a/src/main/java/org/folio/harvesteradmin/moduledata/RecordFailure.java +++ b/src/main/java/org/folio/harvesteradmin/moduledata/RecordFailure.java @@ -9,7 +9,8 @@ import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.folio.harvesteradmin.modulestorage.Storage; + +import org.folio.harvesteradmin.moduledata.database.Tables; import org.folio.tlib.postgres.PgCqlDefinition; import org.folio.tlib.postgres.cqlfield.PgCqlFieldAlwaysMatches; import org.folio.tlib.postgres.cqlfield.PgCqlFieldNumber; @@ -82,15 +83,28 @@ public static RecordFailure fromLegacyHarvesterJson(UUID harvestJobId, JsonObjec return recordFailure; } + public static RecordFailure fromHarvesterAdminJson(UUID harvestJobId, JsonObject json) { + RecordFailure recordFailure = new RecordFailure(); + recordFailure.id = UUID.fromString(json.getString("id")); + recordFailure.harvestJobId = harvestJobId; + recordFailure.recordNumber = json.getString("recordNumber"); + recordFailure.timeStamp = json.getString("timeStamp").replace("T", " "); + recordFailure.originalRecord = json.getString("originalRecord"); + recordFailure.transformedRecord = json.getJsonObject("transformedRecord"); + 
recordFailure.recordErrors = json.getJsonArray("recordErrors"); + return recordFailure; + + } + /** * CREATE TABLE statement. */ public String makeCreateTableSql(String schema) { - return "CREATE TABLE IF NOT EXISTS " + schema + "." + Storage.Table.record_failure + return "CREATE TABLE IF NOT EXISTS " + schema + "." + Tables.record_failure + "(" + Column.id + " UUID PRIMARY KEY, " + Column.harvest_job_id + " UUID NOT NULL REFERENCES " - + schema + "." + Storage.Table.harvest_job + "(" + HarvestJobField.ID.columnName() + "), " + + schema + "." + Tables.harvest_job + "(" + HarvestJobField.ID.columnName() + "), " + Column.record_number + " TEXT, " + Column.time_stamp + " TIMESTAMP, " + Column.record_errors + " JSONB NOT NULL, " @@ -105,7 +119,7 @@ public String makeCreateTableSql(String schema) { public RowMapper getRowMapper() { return row -> { RecordFailure recordFailure = new RecordFailure(); - recordFailure.id = row.getUUID(RecordFailure.Column.id.name()); + recordFailure.id = row.getUUID(Column.id.name()); recordFailure.harvestableId = row.getLong(Column.harvestable_id.name()); recordFailure.harvestableName = row.getString(Column.harvestable_name.name()); recordFailure.harvestJobId = row.getUUID(Column.harvest_job_id.name()); @@ -121,7 +135,7 @@ public RowMapper getRowMapper() { @Override public String makeInsertTemplate(String schema) { - return "INSERT INTO " + schema + "." + Storage.Table.record_failure + return "INSERT INTO " + schema + "." 
+ Tables.record_failure + " (" + Column.id + ", " + Column.harvest_job_id + ", " diff --git a/src/main/java/org/folio/harvesteradmin/moduledata/StoredEntity.java b/src/main/java/org/folio/harvesteradmin/moduledata/StoredEntity.java index 7c75587..9050d75 100644 --- a/src/main/java/org/folio/harvesteradmin/moduledata/StoredEntity.java +++ b/src/main/java/org/folio/harvesteradmin/moduledata/StoredEntity.java @@ -7,6 +7,8 @@ import io.vertx.sqlclient.templates.RowMapper; import io.vertx.sqlclient.templates.TupleMapper; import java.util.Map; + +import org.folio.harvesteradmin.moduledata.database.SqlQuery; import org.folio.tlib.postgres.PgCqlDefinition; import org.folio.tlib.postgres.PgCqlQuery; @@ -40,7 +42,7 @@ public abstract class StoredEntity { /** * Map of JSON property names to Postgres table column definitions (PgColumns). */ - public abstract Map getFieldMap(); + public abstract Map getFieldMap(); /** * Gets a SQL query string. diff --git a/src/main/java/org/folio/harvesteradmin/moduledata/database/DatabaseInit.java b/src/main/java/org/folio/harvesteradmin/moduledata/database/DatabaseInit.java new file mode 100644 index 0000000..c380fda --- /dev/null +++ b/src/main/java/org/folio/harvesteradmin/moduledata/database/DatabaseInit.java @@ -0,0 +1,70 @@ +package org.folio.harvesteradmin.moduledata.database; + +import io.vertx.core.Future; +import io.vertx.core.Promise; +import io.vertx.sqlclient.Query; +import io.vertx.sqlclient.Row; +import io.vertx.sqlclient.RowSet; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.folio.harvesteradmin.moduledata.HarvestJob; +import org.folio.harvesteradmin.moduledata.LogLine; +import org.folio.harvesteradmin.moduledata.RecordFailure; +import org.folio.tlib.postgres.TenantPgPool; + +public class DatabaseInit { + + /** + * Creates tables and views. 
+ */ + public static Future createDatabase(TenantPgPool pool) { + final Promise promise = Promise.promise(); + Query> createHarvestJob = pool.query(HarvestJob.entity().makeCreateTableSql(pool.getSchema())); + Query> createLogLine = pool.query(LogLine.entity().makeCreateTableSql(pool.getSchema())); + Query> createRecordFailure = pool.query(RecordFailure.entity().makeCreateTableSql(pool.getSchema())); + Query> createRecordFailureView = pool.query(createRecordFailureView(pool.getSchema())); + createHarvestJob.execute().onSuccess(a -> + createLogLine.execute().onSuccess( + b -> createRecordFailure.execute().onSuccess( + c -> createRecordFailureView.execute().onSuccess( + d -> promise.complete(null) + ).onFailure(d -> promise.fail("CREATE VIEW record_failure_view failed " + d.getMessage())) + ).onFailure(c -> promise.fail("CREATE TABLE record_failure failed " + c.getMessage())) + ).onFailure(b -> promise.fail("CREATE TABLE log_statement failed " + b.getMessage())) + ).onFailure(a -> promise.fail("CREATE TABLE harvest_job failed: " + a.getMessage())); + return promise.future(); + + /* Template for processing parameters in init. + JsonArray parameters = tenantAttributes.getJsonArray("parameters"); + if (parameters != null) { + for (int i = 0; i < parameters.size(); i++) { + JsonObject parameter = parameters.getJsonObject(i); + if ("loadSample".equals(parameter.getString("key")) + && "true".equals(parameter.getString("value"))) { + } + } + } + */ + } + + /** + * Creates view. + */ + public static String createRecordFailureView(String schema) { + String ddl; + ddl = "CREATE OR REPLACE VIEW " + schema + "." 
+ Tables.record_failure_view + + " AS SELECT rf.id AS id, " + + " rf.harvest_job_Id AS harvest_job_id, " + + " hj.harvestable_id AS harvestable_id, " + + " hj.harvestable_name AS harvestable_name, " + + " rf.record_number AS record_number, " + + " rf.time_stamp AS time_stamp, " + + " rf.record_errors AS record_errors, " + + " rf.original_record AS original_record, " + + " rf.transformed_record AS transformed_record " + + " FROM " + schema + ".record_failure AS rf, " + + " " + schema + ".harvest_job as hj " + + " WHERE rf.harvest_job_id = hj.id"; + return ddl; + } +} diff --git a/src/main/java/org/folio/harvesteradmin/modulestorage/Storage.java b/src/main/java/org/folio/harvesteradmin/moduledata/database/ModuleStorageAccess.java similarity index 67% rename from src/main/java/org/folio/harvesteradmin/modulestorage/Storage.java rename to src/main/java/org/folio/harvesteradmin/moduledata/database/ModuleStorageAccess.java index 8c1df7b..fa5b4b6 100644 --- a/src/main/java/org/folio/harvesteradmin/modulestorage/Storage.java +++ b/src/main/java/org/folio/harvesteradmin/moduledata/database/ModuleStorageAccess.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.modulestorage; +package org.folio.harvesteradmin.moduledata.database; import io.vertx.core.Future; import io.vertx.core.Promise; @@ -6,44 +6,35 @@ import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; import io.vertx.sqlclient.RowIterator; +import io.vertx.sqlclient.SqlResult; import io.vertx.sqlclient.templates.RowMapper; import io.vertx.sqlclient.templates.SqlTemplate; import java.io.BufferedReader; import java.io.IOException; import java.io.StringReader; +import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.UUID; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.folio.harvesteradmin.moduledata.HarvestJob; -import org.folio.harvesteradmin.moduledata.HarvestJobField; 
-import org.folio.harvesteradmin.moduledata.LogLine; -import org.folio.harvesteradmin.moduledata.RecordFailure; -import org.folio.harvesteradmin.moduledata.SqlQuery; -import org.folio.harvesteradmin.moduledata.StoredEntity; +import org.folio.harvesteradmin.moduledata.*; import org.folio.tlib.postgres.TenantPgPool; -public class Storage { +public class ModuleStorageAccess { TenantPgPool pool; - private static final Logger logger = LogManager.getLogger(Storage.class); + private static final Logger logger = LogManager.getLogger(ModuleStorageAccess.class); - public enum Table { - harvest_job, - log_statement, - record_failure, - record_failure_view - } /** * Constructor. */ - public Storage(Vertx vertx, String tenant) { + public ModuleStorageAccess(Vertx vertx, String tenant) { pool = TenantPgPool.pool(vertx, tenant); } - public String schemaDotTable(Table table) { + public String schemaDotTable(Tables table) { return pool.getSchema() + "." + table.name(); } @@ -61,7 +52,7 @@ public Future init(JsonObject tenantAttributes) { if (!tenantAttributes.containsKey("module_to")) { return Future.succeededFuture(); // doing nothing for disable } else { - return Schema.createDatabase(pool); + return DatabaseInit.createDatabase(pool); } } @@ -92,9 +83,12 @@ public Future storeLogStatements(UUID harvestJobId, String log) { long startParse = System.currentTimeMillis(); while ((line = bufReader.readLine()) != null) { if (line.length() > 100) { - LogLine logLine = new LogLine(harvestJobId, line, ++sequence); + StringBuilder str = new StringBuilder(line); + str.setCharAt(10,' '); + str.setCharAt(19,','); + LogLine logLine = new LogLine(harvestJobId, str.toString(), ++sequence); if (logLine.getId() != null) { - logLines.add(new LogLine(harvestJobId, line, sequence)); + logLines.add(logLine); } else { logger.error("Could not parse " + line); nonMatches++; @@ -121,20 +115,16 @@ public Future storeLogStatements(UUID harvestJobId, String log) { } } + /** * Stores failed records. 
*/ - public Future storeFailedRecords(UUID harvestJobId, JsonArray failedRecords) { + public Future storeFailedRecords(UUID harvestJobId, List failedRecords) { if (failedRecords != null && ! failedRecords.isEmpty()) { - List list = new ArrayList<>(); - for (Object rec : failedRecords) { - JsonObject failedRecord = (JsonObject) rec; - list.add(RecordFailure.fromLegacyHarvesterJson(harvestJobId, failedRecord)); - } - return SqlTemplate.forUpdate(pool.getPool(), + return SqlTemplate.forUpdate(pool.getPool(), RecordFailure.entity().makeInsertTemplate(pool.getSchema())) .mapFrom(RecordFailure.entity().getTupleMapper()) - .executeBatch(list) + .executeBatch(failedRecords) .onFailure(res -> logger.error("Didn't save record failures: " + res.getMessage())) .mapEmpty(); @@ -144,6 +134,7 @@ public Future storeFailedRecords(UUID harvestJobId, JsonArray failedRecord } } + /** * Gets previous jobs from module's storage. */ @@ -165,7 +156,7 @@ public Future> getPreviousJobs(String query) { public Future getPreviousJobById(UUID id) { return SqlTemplate.forQuery(pool.getPool(), "SELECT * " - + "FROM " + schemaDotTable(Table.harvest_job) + " " + + "FROM " + schemaDotTable(Tables.harvest_job) + " " + "WHERE id = #{id}") .mapTo(HarvestJob.entity().getRowMapper()) .execute(Collections.singletonMap("id", id)) @@ -181,7 +172,8 @@ public Future getPreviousJobById(UUID id) { public Future getLogsForPreviousJob(UUID id, SqlQuery queryFromCql) { Promise promise = Promise.promise(); final StringBuilder log = new StringBuilder(); - String query = queryFromCql.withAdditionalWhereClause("harvest_job_id = #{id}").toString(); + String query = queryFromCql + .withAdditionalWhereClause("harvest_job_id = #{id}").toString(); SqlTemplate.forQuery(pool.getPool(), query) .mapTo(LogLine.entity().getRowMapper()) .execute(Collections.singletonMap("id", id)) @@ -246,7 +238,7 @@ public Future getFailedRecordForPreviousJob(UUID id) { Promise promise = Promise.promise(); SqlTemplate.forQuery(pool.getPool(), 
"SELECT * " - + "FROM " + schemaDotTable(Table.record_failure) + " " + + "FROM " + schemaDotTable(Tables.record_failure) + " " + "WHERE id = #{id} ") .mapTo(RecordFailure.entity().getRowMapper()) .execute(Collections.singletonMap("id", id)) @@ -274,21 +266,21 @@ public Future deletePreviousJob(UUID id) { if (previousJob.result() == null) { promise.fail("No job history found with job ID " + id + ". Nothing deleted."); } else { - logger.info("Found job " + previousJob.result().getId()); + logger.info("Found job to delete: " + previousJob.result().getId()); SqlTemplate.forUpdate(pool.getPool(), - "DELETE FROM " + schemaDotTable(Table.log_statement) + "DELETE FROM " + schemaDotTable(Tables.log_statement) + " WHERE " + LogLine.LogLineField.HARVEST_JOB_ID + " = #{id} ") .execute(Collections.singletonMap("id", id)) .onComplete(deletedLogs -> { if (deletedLogs.succeeded()) { SqlTemplate.forUpdate(pool.getPool(), - "DELETE FROM " + schemaDotTable(Table.record_failure) + "DELETE FROM " + schemaDotTable(Tables.record_failure) + " WHERE " + RecordFailure.Column.harvest_job_id + " = #{id} ") .execute(Collections.singletonMap("id", id)) .onComplete(deletedFailedRecords -> { if (deletedFailedRecords.succeeded()) { SqlTemplate.forUpdate(pool.getPool(), - "DELETE FROM " + schemaDotTable(Table.harvest_job) + "DELETE FROM " + schemaDotTable(Tables.harvest_job) + " WHERE " + HarvestJobField.ID + " = #{id} ") .execute(Collections.singletonMap("id", id)) .onComplete(deletedJobRun -> { @@ -316,6 +308,56 @@ public Future deletePreviousJob(UUID id) { return promise.future(); } + public Future> purgePreviousJobsByAge (LocalDateTime untilDate) { + Promise promise = Promise.promise(); + SqlTemplate.forUpdate(pool.getPool(), + "DELETE FROM " + schemaDotTable(Tables.log_statement) + + " WHERE " + LogLine.LogLineField.HARVEST_JOB_ID + + " IN (SELECT " + HarvestJobField.ID + + " FROM " + schemaDotTable(Tables.harvest_job) + + " WHERE " + HarvestJobField.STARTED + " < #{untilDate} )") + 
.execute(Collections.singletonMap("untilDate", untilDate)) + .onComplete(deletedLogs -> { + if (deletedLogs.succeeded()) { + SqlTemplate.forUpdate(pool.getPool(), + "DELETE FROM " + schemaDotTable(Tables.record_failure) + + " WHERE " + RecordFailure.Column.harvest_job_id + + " IN (SELECT " + HarvestJobField.ID + + " FROM " + schemaDotTable(Tables.harvest_job) + + " WHERE " + HarvestJobField.STARTED + " < #{untilDate} )") + .execute(Collections.singletonMap("untilDate", untilDate)) + .onComplete(deletedFailedRecords -> { + if (deletedFailedRecords.succeeded()) { + SqlTemplate.forUpdate(pool.getPool(), + "DELETE FROM " + schemaDotTable(Tables.harvest_job) + + " WHERE " + HarvestJobField.STARTED + " < #{untilDate} ") + .execute(Collections.singletonMap("untilDate", untilDate)) + .onSuccess( result -> { + logger.info("Timer process purged " + result.rowCount() + " harvest job runs from before " + untilDate); + promise.complete(); + }) + .onFailure( result -> { + logger.error("Timer process: Purge of previous jobs failed. " + result.getMessage()); + promise.fail("Could not delete job runs with finish dates before " + untilDate + + ": " + result.getMessage()); + }); + } else { + logger.error("Purge of failed records failed. " + deletedFailedRecords.cause().getMessage()); + promise.fail("Could not delete job runs with finish dates before " + untilDate + + " because deletion of its failed records failed: " + + deletedFailedRecords.cause().getMessage()); + } + }); + } else { + logger.error("Purge of log statements failed. " + deletedLogs.cause().getMessage()); + promise.fail("Could not delete job runs with finish dates before " + untilDate + + " because deletion of its logs failed: " + + deletedLogs.cause().getMessage()); + } + }); + return promise.future(); + } + + /** * Gets record count. 
*/ diff --git a/src/main/java/org/folio/harvesteradmin/moduledata/SqlQuery.java b/src/main/java/org/folio/harvesteradmin/moduledata/database/SqlQuery.java similarity index 97% rename from src/main/java/org/folio/harvesteradmin/moduledata/SqlQuery.java rename to src/main/java/org/folio/harvesteradmin/moduledata/database/SqlQuery.java index 84f8164..60e8647 100644 --- a/src/main/java/org/folio/harvesteradmin/moduledata/SqlQuery.java +++ b/src/main/java/org/folio/harvesteradmin/moduledata/database/SqlQuery.java @@ -1,4 +1,4 @@ -package org.folio.harvesteradmin.moduledata; +package org.folio.harvesteradmin.moduledata.database; import io.vertx.ext.web.validation.RequestParameter; diff --git a/src/main/java/org/folio/harvesteradmin/moduledata/database/Tables.java b/src/main/java/org/folio/harvesteradmin/moduledata/database/Tables.java new file mode 100644 index 0000000..afd6095 --- /dev/null +++ b/src/main/java/org/folio/harvesteradmin/moduledata/database/Tables.java @@ -0,0 +1,9 @@ +package org.folio.harvesteradmin.moduledata.database; + +public enum Tables { + harvest_job, + log_statement, + record_failure, + record_failure_view + +} diff --git a/src/main/java/org/folio/harvesteradmin/modulestorage/Schema.java b/src/main/java/org/folio/harvesteradmin/modulestorage/Schema.java deleted file mode 100644 index 429cd84..0000000 --- a/src/main/java/org/folio/harvesteradmin/modulestorage/Schema.java +++ /dev/null @@ -1,84 +0,0 @@ -package org.folio.harvesteradmin.modulestorage; - -import io.vertx.core.CompositeFuture; -import io.vertx.core.Future; -import io.vertx.core.Promise; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.folio.harvesteradmin.moduledata.HarvestJob; -import org.folio.harvesteradmin.moduledata.LogLine; -import org.folio.harvesteradmin.moduledata.RecordFailure; -import org.folio.harvesteradmin.moduledata.StoredEntity; -import 
org.folio.tlib.postgres.TenantPgPool; - -public class Schema { - - private static final Logger logger = LogManager.getLogger(Storage.class); - - /** - * Creates tables and views. - */ - public static Future createDatabase(TenantPgPool pool) { - final Promise promise = Promise.promise(); - @SuppressWarnings("rawtypes") List tables = new ArrayList<>(); - for (StoredEntity entity : Arrays.asList( - HarvestJob.entity(), - LogLine.entity(), - RecordFailure.entity())) { - tables.add(pool.query(entity.makeCreateTableSql(pool.getSchema())).execute().mapEmpty()); - } - CompositeFuture.all(tables).onComplete( - creates -> { - if (creates.succeeded()) { - pool.query(createRecordFailureView(pool.getSchema())) - .execute().mapEmpty().onComplete(view -> { - if (view.succeeded()) { - promise.complete(); - } else { - promise.fail(view.cause().getMessage()); - } - }); - } else { - promise.fail(creates.cause().getMessage()); - } - }); - return promise.future(); - - /* Template for processing parameters in init. - JsonArray parameters = tenantAttributes.getJsonArray("parameters"); - if (parameters != null) { - for (int i = 0; i < parameters.size(); i++) { - JsonObject parameter = parameters.getJsonObject(i); - if ("loadSample".equals(parameter.getString("key")) - && "true".equals(parameter.getString("value"))) { - } - } - } - */ - } - - /** - * Creates view. 
- */ - public static String createRecordFailureView(String schema) { - String ddl; - ddl = "CREATE OR REPLACE VIEW " + schema + ".record_failure_view " - + "AS SELECT rf.id AS id, " - + " rf.harvest_job_Id AS harvest_job_id, " - + " hj.harvestable_id AS harvestable_id, " - + " hj.harvestable_name AS harvestable_name, " - + " rf.record_number AS record_number, " - + " rf.time_stamp AS time_stamp, " - + " rf.record_errors AS record_errors, " - + " rf.original_record AS original_record, " - + " rf.transformed_record AS transformed_record " - + " FROM " + schema + ".record_failure AS rf, " - + " " + schema + ".harvest_job as hj " - + " WHERE rf.harvest_job_id = hj.id"; - logger.info("View DDL " + ddl); - return ddl; - } -} diff --git a/src/main/java/org/folio/harvesteradmin/service/HarvestAdminService.java b/src/main/java/org/folio/harvesteradmin/service/HarvestAdminService.java index 85c3771..8631dca 100644 --- a/src/main/java/org/folio/harvesteradmin/service/HarvestAdminService.java +++ b/src/main/java/org/folio/harvesteradmin/service/HarvestAdminService.java @@ -1,8 +1,9 @@ package org.folio.harvesteradmin.service; -import static org.folio.harvesteradmin.dataaccess.LegacyHarvesterStorage.getIntOrDefault; -import static org.folio.harvesteradmin.dataaccess.LegacyHarvesterStorage.pagingPlainText; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.HARVESTER_HARVESTABLES_PATH; +import static org.folio.harvesteradmin.legacydata.LegacyHarvesterStorage.getIntOrDefault; +import static org.folio.harvesteradmin.legacydata.LegacyHarvesterStorage.pagingPlainText; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.HARVESTER_HARVESTABLES_PATH; +import static org.folio.harvesteradmin.utils.Miscellaneous.getPeriod; import static org.folio.okapi.common.HttpResponse.responseError; import static org.folio.okapi.common.HttpResponse.responseJson; import static org.folio.okapi.common.HttpResponse.responseText; @@ -20,28 +21,37 @@ import 
io.vertx.ext.web.validation.RequestParameter; import io.vertx.ext.web.validation.RequestParameters; import io.vertx.ext.web.validation.ValidationHandler; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; import java.util.List; import java.util.UUID; + + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.folio.harvesteradmin.dataaccess.JobLauncher; -import org.folio.harvesteradmin.dataaccess.LegacyHarvesterStorage; -import org.folio.harvesteradmin.dataaccess.responsehandlers.ProcessedHarvesterResponseGet; -import org.folio.harvesteradmin.moduledata.HarvestJob; -import org.folio.harvesteradmin.moduledata.HarvestJobField; -import org.folio.harvesteradmin.moduledata.LogLine; -import org.folio.harvesteradmin.moduledata.RecordFailure; -import org.folio.harvesteradmin.moduledata.SqlQuery; -import org.folio.harvesteradmin.modulestorage.Storage; +import org.folio.harvesteradmin.legacydata.JobLauncher; +import org.folio.harvesteradmin.legacydata.LegacyHarvesterStorage; +import org.folio.harvesteradmin.legacydata.responsehandlers.ProcessedHarvesterResponseGet; +import org.folio.harvesteradmin.foliodata.ConfigurationsClient; +import org.folio.harvesteradmin.moduledata.*; +import org.folio.harvesteradmin.moduledata.database.ModuleStorageAccess; +import org.folio.harvesteradmin.moduledata.database.SqlQuery; +import org.folio.harvesteradmin.moduledata.database.Tables; +import org.folio.harvesteradmin.utils.SettableClock; import org.folio.okapi.common.HttpResponse; import org.folio.tlib.RouterCreator; import org.folio.tlib.TenantInitHooks; import org.folio.tlib.postgres.PgCqlException; import org.folio.tlib.util.TenantUtil; +/** + * Main service. 
+ */ public class HarvestAdminService implements RouterCreator, TenantInitHooks { - private static final Logger logger = LogManager.getLogger("harvester-admin"); + public static final Logger logger = LogManager.getLogger("harvester-admin"); @Override public Future createRouter(Vertx vertx) { @@ -145,17 +155,31 @@ private void handlers(Vertx vertx, RouterBuilder routerBuilder) { .handler(ctx -> deletePreviousJob(vertx, ctx) .onFailure(cause -> exceptionResponse(cause, ctx))) .failureHandler(this::routerExceptionResponse); - + routerBuilder + .operation("postPreviousJob") + .handler(ctx -> postPreviousJob(vertx, ctx) + .onFailure(cause -> exceptionResponse(cause, ctx))) + .failureHandler(this::routerExceptionResponse); routerBuilder .operation("getPreviousJobLog") .handler(ctx -> getPreviousJobLog(vertx, ctx) .onFailure(cause -> exceptionResponse(cause, ctx))) .failureHandler(this::routerExceptionResponse); + routerBuilder + .operation("postPreviousJobLog") + .handler(ctx -> postPreviousJobLog(vertx, ctx) + .onFailure(cause -> exceptionResponse(cause, ctx))) + .failureHandler(this::routerExceptionResponse); routerBuilder .operation("getFailedRecordsForPreviousJob") .handler(ctx -> getFailedRecordsForPreviousJobs(vertx, ctx) .onFailure(cause -> exceptionResponse(cause, ctx))) .failureHandler(this::routerExceptionResponse); + routerBuilder + .operation("postFailedRecords") + .handler(ctx -> postFailedRecords(vertx, ctx) + .onFailure(cause -> exceptionResponse(cause, ctx))) + .failureHandler(this::routerExceptionResponse); routerBuilder .operation("getFailedRecordForPreviousJob") .handler(ctx -> getFailedRecordForPreviousJob(vertx, ctx) @@ -286,6 +310,11 @@ private void handlers(Vertx vertx, RouterBuilder routerBuilder) { .handler(ctx -> stopJob(vertx, ctx)) .failureHandler(this::routerExceptionResponse); + routerBuilder + .operation("purgeAgedLogs") + .handler(ctx -> purgeAgedLogs(vertx, ctx)) + .failureHandler(this::routerExceptionResponse); + routerBuilder 
.operation("getIds") .handler(this::generateIds); @@ -319,8 +348,9 @@ private void routerExceptionResponse(RoutingContext ctx) { @Override public Future postInit(Vertx vertx, String tenant, JsonObject tenantAttributes) { - Storage storage = new Storage(vertx, tenant); - return storage.init(tenantAttributes); + return new ModuleStorageAccess(vertx, tenant).init(tenantAttributes) + .onFailure(x -> logger.error("Database initialization failed: " + x.getMessage())) + .onSuccess(x -> logger.info("Tenant '" + tenant + "' database initialized")); } private Future getConfigRecords(Vertx vertx, RoutingContext routingContext) { @@ -445,6 +475,21 @@ private void stopJob(Vertx vertx, RoutingContext routingContext) { launcher.stopJob(routingContext); } + private Future purgeAgedLogs(Vertx vertx, RoutingContext routingContext) { + return ConfigurationsClient.getStringValue(routingContext, + ConfigurationsClient.MODULE_HARVESTER_ADMIN, + ConfigurationsClient.CONFIG_NAME_PURGE_LOGS_AFTER) + .onComplete(val -> { + Period ageForDeletion = getPeriod(val.result(),3, "MONTHS"); + LocalDateTime untilDate = SettableClock.getLocalDateTime().minus(ageForDeletion).truncatedTo(ChronoUnit.MINUTES); + logger.info("Running timer process: purging aged logs from before " + untilDate); + String tenant = TenantUtil.tenant(routingContext); + ModuleStorageAccess moduleStorage = new ModuleStorageAccess(vertx, tenant); + moduleStorage.purgePreviousJobsByAge(untilDate) + .onComplete(x -> routingContext.response().setStatusCode(204).end()); + }); + } + private Future getJobLog(Vertx vertx, RoutingContext routingContext) { String tenant = TenantUtil.tenant(routingContext); LegacyHarvesterStorage legacyStorage = new LegacyHarvesterStorage(vertx, tenant); @@ -515,9 +560,11 @@ private Future pullJobAndSaveItsLogs(Vertx vertx, RoutingContext routingCo .onComplete(logResults -> { HttpResponseImpl logsResponse = logResults.result().resultAt(0); ProcessedHarvesterResponseGet failuresResponse = 
logResults.result().resultAt(1); - Storage storage = new Storage(vertx, tenant); + ModuleStorageAccess moduleStorage = new ModuleStorageAccess(vertx, tenant); HarvestJob job = HarvestJob.fromHarvestableJson(harvestable.result().jsonObject()); + List failedRecords = failedRecordsFromLegacyHarvesterJson(job.getId(), + failuresResponse.jsonObject().getJsonArray("failedRecords")); if (!jobStatus.isEmpty()) { // Job status was included in request, overwrite pulled properties job.setFinished(jobStatus.getString(HarvestJobField.FINISHED.propertyName())); @@ -529,18 +576,18 @@ private Future pullJobAndSaveItsLogs(Vertx vertx, RoutingContext routingCo job.setStarted(harvestStartedDate); job.setStatus(jobStatus.getString(HarvestJobField.STATUS.propertyName())); } - storage.storeHarvestJob(job) + moduleStorage.storeHarvestJob(job) .onComplete(jobStored -> CompositeFuture.all( - storage.storeLogStatements(job.getId(),logsResponse.bodyAsString()), - storage.storeFailedRecords(job.getId(), - failuresResponse.jsonObject().getJsonArray("failedRecords")) + moduleStorage.storeLogStatements(job.getId(), logsResponse.bodyAsString()), + moduleStorage.storeFailedRecords(job.getId(), + failedRecords) ).onComplete( result -> { if (result.succeeded()) { - responseText(routingContext,200) + responseText(routingContext, 200) .end("Saved job with logs and record failures if any."); } else { - responseError(routingContext,500, + responseError(routingContext, 500, "There was an error saving the job or it's logs: " + result.cause().getMessage()); } @@ -554,10 +601,27 @@ private Future pullJobAndSaveItsLogs(Vertx vertx, RoutingContext routingCo }) .mapEmpty(); } + private static List failedRecordsFromLegacyHarvesterJson(UUID harvestJobId, JsonArray failedRecords) { + List list = new ArrayList<>(); + for (Object rec : failedRecords) { + JsonObject failedRecord = (JsonObject) rec; + list.add(RecordFailure.fromLegacyHarvesterJson(harvestJobId, failedRecord)); + } + return list; + } + + private 
static List failedRecordsFromHarvesterAdminJson(UUID harvestJobId, JsonArray failedRecords) { + List list = new ArrayList<>(); + for (Object rec : failedRecords) { + JsonObject failedRecord = (JsonObject) rec; + list.add(RecordFailure.fromHarvesterAdminJson(harvestJobId, failedRecord)); + } + return list; + } private Future getPreviousJobs(Vertx vertx, RoutingContext routingContext) { String tenant = TenantUtil.tenant(routingContext); - Storage storage = new Storage(vertx, tenant); + ModuleStorageAccess moduleStorage = new ModuleStorageAccess(vertx, tenant); String fromDateTime = routingContext.request().getParam("from"); String untilDateTime = routingContext.request().getParam("until"); @@ -573,16 +637,16 @@ private Future getPreviousJobs(Vertx vertx, RoutingContext routingContext) SqlQuery query; try { query = HarvestJob.entity() - .makeSqlFromCqlQuery(routingContext, storage.schemaDotTable(Storage.Table.harvest_job)) + .makeSqlFromCqlQuery(routingContext, moduleStorage.schemaDotTable(Tables.harvest_job)) .withAdditionalWhereClause(timeRange); } catch (PgCqlException pce) { responseText(routingContext, 400) - .end("Could not execute query to retrieve jobs: " + pce.getMessage()); + .end("Could not execute query to retrieve jobs: " + pce.getMessage() + " Request:" + routingContext.request().absoluteURI()); return Future.succeededFuture(); } catch (Exception e) { return Future.failedFuture(e.getMessage()); } - return storage.getPreviousJobs(query.getQueryWithLimits()).onComplete( + return moduleStorage.getPreviousJobs(query.getQueryWithLimits()).onComplete( jobsList -> { if (jobsList.succeeded()) { JsonObject responseJson = new JsonObject(); @@ -592,7 +656,7 @@ private Future getPreviousJobs(Vertx vertx, RoutingContext routingContext) for (HarvestJob job : jobs) { previousJobs.add(job.asJson()); } - storage.getCount(query.getCountingSql()).onComplete( + moduleStorage.getCount(query.getCountingSql()).onComplete( count -> { responseJson.put("totalRecords", 
count.result()); responseJson(routingContext, 200).end(responseJson.encodePrettily()); @@ -606,12 +670,27 @@ private Future getPreviousJobs(Vertx vertx, RoutingContext routingContext) ).mapEmpty(); } + private Future postPreviousJob(Vertx vertx, RoutingContext routingContext) { + String tenant = TenantUtil.tenant(routingContext); + ModuleStorageAccess moduleStorage = new ModuleStorageAccess(vertx, tenant); + HarvestJob job = + HarvestJob.fromHarvestJobJson(routingContext.body().asJsonObject()); + return moduleStorage.storeHarvestJob(job) + .onComplete(harvestJobId -> { + if (harvestJobId.result() == null) { + responseText(routingContext, 400).end("Failed to insert harvest job run: " + harvestJobId.cause().getMessage()); + } else { + responseJson(routingContext, 201).end(job.asJson().encodePrettily()); + } + }).mapEmpty(); + } + private Future getPreviousJobById(Vertx vertx, RoutingContext routingContext) { String tenant = TenantUtil.tenant(routingContext); RequestParameters params = routingContext.get(ValidationHandler.REQUEST_CONTEXT_KEY); UUID id = UUID.fromString(params.pathParameter("id").getString()); - Storage storage = new Storage(vertx, tenant); - return storage.getPreviousJobById(id) + ModuleStorageAccess moduleStorage = new ModuleStorageAccess(vertx, tenant); + return moduleStorage.getPreviousJobById(id) .onComplete(harvestJob -> { if (harvestJob.result() == null) { responseText(routingContext, 404).end("Found no job with id " + id); @@ -625,8 +704,8 @@ private Future deletePreviousJob(Vertx vertx, RoutingContext routingContex String tenant = TenantUtil.tenant(routingContext); RequestParameters params = routingContext.get(ValidationHandler.REQUEST_CONTEXT_KEY); UUID id = UUID.fromString(params.pathParameter("id").getString()); - Storage storage = new Storage(vertx, tenant); - return storage.deletePreviousJob(id) + ModuleStorageAccess moduleStorage = new ModuleStorageAccess(vertx, tenant); + return moduleStorage.deletePreviousJob(id) .onComplete(deleted -> 
{ if (deleted.succeeded()) { responseText(routingContext, 200).end("Job " + id + " and its logs deleted."); @@ -655,16 +734,16 @@ private Future getPreviousJobLog(Vertx vertx, RoutingContext routingContex timeRange = "time_stamp <= '" + untilDateTime + "'"; } - Storage storage = new Storage(vertx, tenant); + ModuleStorageAccess moduleStorage = new ModuleStorageAccess(vertx, tenant); SqlQuery queryFromCql = LogLine.entity() - .makeSqlFromCqlQuery(routingContext, storage.schemaDotTable(Storage.Table.log_statement)) + .makeSqlFromCqlQuery(routingContext, moduleStorage.schemaDotTable(Tables.log_statement)) .withAdditionalWhereClause(timeRange); if (contentType != null && contentType.contains("json")) { - return storage.getLogsAsJsonForPreviousJob(id, queryFromCql) + return moduleStorage.getLogsAsJsonForPreviousJob(id, queryFromCql) .onComplete(jobLog -> { if (jobLog.succeeded()) { - if (jobLog.result().size() == 0) { - storage.getPreviousJobById(id).onComplete(harvestJob -> { + if (jobLog.result().isEmpty()) { + moduleStorage.getPreviousJobById(id).onComplete(harvestJob -> { if (harvestJob.result() == null) { responseText(routingContext, 404) .end("Found no previous job with ID " + id); @@ -682,11 +761,11 @@ private Future getPreviousJobLog(Vertx vertx, RoutingContext routingContex } }).mapEmpty(); } else { - return storage.getLogsForPreviousJob(id, queryFromCql) + return moduleStorage.getLogsForPreviousJob(id, queryFromCql) .onComplete(jobLog -> { if (jobLog.succeeded()) { - if (jobLog.result().length() == 0) { - storage.getPreviousJobById(id).onComplete(harvestJob -> { + if (jobLog.result().isEmpty()) { + moduleStorage.getPreviousJobById(id).onComplete(harvestJob -> { if (harvestJob.result() == null) { responseText(routingContext, 404) .end("Found no previous job with ID " + id); @@ -706,12 +785,29 @@ private Future getPreviousJobLog(Vertx vertx, RoutingContext routingContex } } + public Future postPreviousJobLog (Vertx vertx, RoutingContext routingContext) { + 
String log = routingContext.body().asString(); + String tenant = TenantUtil.tenant(routingContext); + RequestParameters params = routingContext.get(ValidationHandler.REQUEST_CONTEXT_KEY); + UUID id = UUID.fromString(params.pathParameter("id").getString()); + ModuleStorageAccess moduleStorage = new ModuleStorageAccess(vertx, tenant); + return moduleStorage.storeLogStatements(id, log) + .onComplete(failedRecordsResponse -> { + if (failedRecordsResponse.succeeded()) { + responseText(routingContext, 201).end("Job logs saved for job " + id); + } else { + responseText(routingContext, 500).end("Could not store the logs for job " + id + + failedRecordsResponse.cause().getMessage()); + } + }).mapEmpty(); + } + private Future getFailedRecordsForPreviousJobs(Vertx vertx, RoutingContext routingContext) { String tenant = TenantUtil.tenant(routingContext); - Storage storage = new Storage(vertx, tenant); + ModuleStorageAccess moduleStorage = new ModuleStorageAccess(vertx, tenant); SqlQuery queryFromCql = RecordFailure.entity().makeSqlFromCqlQuery( - routingContext, storage.schemaDotTable(Storage.Table.record_failure_view)) + routingContext, moduleStorage.schemaDotTable(Tables.record_failure_view)) .withDefaultLimit("100"); RequestParameters params = routingContext.get(ValidationHandler.REQUEST_CONTEXT_KEY); RequestParameter jobId = params.pathParameter("id"); @@ -735,7 +831,7 @@ private Future getFailedRecordsForPreviousJobs(Vertx vertx, RoutingContext queryFromCql.withAdditionalWhereClause(timeRange); } - return storage.getFailedRecordsForPreviousJobs(queryFromCql).onComplete( + return moduleStorage.getFailedRecordsForPreviousJobs(queryFromCql).onComplete( failuresList -> { if (failuresList.succeeded()) { JsonObject responseJson = new JsonObject(); @@ -745,7 +841,7 @@ private Future getFailedRecordsForPreviousJobs(Vertx vertx, RoutingContext for (RecordFailure failure : failures) { recordFailures.add(failure.asJson()); } - 
storage.getCount(queryFromCql.getCountingSql()).onComplete( + moduleStorage.getCount(queryFromCql.getCountingSql()).onComplete( count -> { responseJson.put("totalRecords", count.result()); responseJson(routingContext, 200).end(responseJson.encodePrettily()); @@ -760,8 +856,8 @@ private Future getFailedRecordForPreviousJob(Vertx vertx, RoutingContext r String tenant = TenantUtil.tenant(routingContext); RequestParameters params = routingContext.get(ValidationHandler.REQUEST_CONTEXT_KEY); UUID id = UUID.fromString(params.pathParameter("id").getString()); - Storage storage = new Storage(vertx, tenant); - return storage.getFailedRecordForPreviousJob(id).onComplete( + ModuleStorageAccess moduleStorage = new ModuleStorageAccess(vertx, tenant); + return moduleStorage.getFailedRecordForPreviousJob(id).onComplete( failureRecord -> { if (failureRecord.succeeded()) { RecordFailure failure = failureRecord.result(); @@ -778,11 +874,31 @@ private Future getFailedRecordForPreviousJob(Vertx vertx, RoutingContext r ).mapEmpty(); } + private Future postFailedRecords(Vertx vertx, RoutingContext routingContext) { + String tenant = TenantUtil.tenant(routingContext); + ModuleStorageAccess moduleStorage = new ModuleStorageAccess(vertx, tenant); + RequestParameters params = routingContext.get(ValidationHandler.REQUEST_CONTEXT_KEY); + UUID id = UUID.fromString(params.pathParameter("id").getString()); + JsonObject failedRecordsJson = routingContext.body().asJsonObject(); + List failedRecords = + failedRecordsFromHarvesterAdminJson(id, failedRecordsJson.getJsonArray("failedRecords")); + return moduleStorage.storeFailedRecords(id, failedRecords) + .onComplete(failedRecordsResponse -> { + if (failedRecordsResponse.succeeded()) { + responseText(routingContext, 201).end("Failed records inserted."); + } else { + responseText(routingContext, 500).end("Could not insert failed records " + + failedRecordsResponse.cause().getMessage()); + } + }).mapEmpty(); + } + + private void 
generateIds(RoutingContext routingContext) { RequestParameters params = routingContext.get(ValidationHandler.REQUEST_CONTEXT_KEY); int count = 1; if (params.queryParameter("count") != null) { - count = Math.min(params.queryParameter("count").getInteger(),100); + count = Math.min(params.queryParameter("count").getInteger(), 100); } StringBuilder response = new StringBuilder(); for (int i = 0; i < count; i++) { diff --git a/src/main/java/org/folio/harvesteradmin/utils/Miscellaneous.java b/src/main/java/org/folio/harvesteradmin/utils/Miscellaneous.java new file mode 100644 index 0000000..a9f41ba --- /dev/null +++ b/src/main/java/org/folio/harvesteradmin/utils/Miscellaneous.java @@ -0,0 +1,50 @@ +package org.folio.harvesteradmin.utils; + +import java.time.Period; + +import static org.folio.harvesteradmin.service.HarvestAdminService.logger; + +public class Miscellaneous { + /** + * Takes a period in the form of "3 WEEKS", for example, and turns it into a temporal amount. + * @param periodAsText a string with an integer followed by DAYS, WEEKS, or MONTHS + * @return temporal amount representing the period. + */ + public static Period getPeriod(String periodAsText, int defaultAmount, String defaultUnit) { + if (periodAsText != null) { + String[] periodAsArray = periodAsText.trim().toUpperCase().split(" "); + if (periodAsArray.length == 2) { + try { + int amount = Integer.parseInt(periodAsArray[0]); + String unit = periodAsArray[1]; + switch (unit) { + case "DAY": + case "DAYS": + case "TAG": + case "TAGE": + return Period.ofDays(amount); + case "WEEK": + case "WEEKS": + case "WOCHE": + case "WOCHEN": + return Period.ofWeeks(amount); + case "MONTH": + case "MONTHS": + case "MONAT": + case "MONATE": + return Period.ofMonths(amount); + } + } catch (NumberFormatException ignored) { + } + } + logger.error("Could not resolve period from [" + periodAsText + "]. 
Expected string on the format: DAY(S)|TAG(E)|WEEK(S)|WOCHE(N)|MONTH(S)|MONAT(E)"); + } + return switch (defaultUnit) { + case "DAYS" -> Period.ofDays(defaultAmount); + case "WEEKS" -> Period.ofWeeks(defaultAmount); + case "MONTHS" -> Period.ofMonths(defaultAmount); + default -> null; + }; + } + +} diff --git a/src/main/java/org/folio/harvesteradmin/utils/SettableClock.java b/src/main/java/org/folio/harvesteradmin/utils/SettableClock.java new file mode 100644 index 0000000..83fe6f1 --- /dev/null +++ b/src/main/java/org/folio/harvesteradmin/utils/SettableClock.java @@ -0,0 +1,109 @@ +package org.folio.harvesteradmin.utils; + +import java.time.*; +import java.time.temporal.ChronoUnit; + +public class SettableClock { + private static java.time.Clock clock = java.time.Clock.systemUTC(); + + private SettableClock() { + throw new UnsupportedOperationException("Do not instantiate"); + } + + /** + * Set the clock assigned to the clock manager to a given clock. + */ + public static void setClock(Clock clock) { + if (clock == null) { + throw new IllegalArgumentException("clock cannot be null"); + } + + SettableClock.clock = clock; + } + + /** + * Set the clock assigned to the clock manager to the system clock. + */ + public static void setDefaultClock() { + clock = Clock.systemUTC(); + } + + /** + * Get the clock assigned the clock manager. + * + * @return The clock currently being used by ClockManager. + */ + public static Clock getClock() { + return clock; + } + + /** + * Get the current system time according to the clock manager. + * + * @return A ZonedDateTime as if now() is called. + * Time is truncated to milliseconds. + */ + public static ZonedDateTime getZonedDateTime() { + return ZonedDateTime.now(clock).truncatedTo(ChronoUnit.MILLIS); + } + + /** + * Get the current system time according to the clock manager. + * + * @return A LocalDateTime as if now() is called. + * Time is truncated to milliseconds. 
+ */ + public static LocalDateTime getLocalDateTime() { + return LocalDateTime.now(clock).truncatedTo(ChronoUnit.MILLIS); + } + + /** + * Get the current system time according to the clock manager. + * + * @return A LocalDate as if now() is called. + */ + public static LocalDate getLocalDate() { + return LocalDate.now(clock); + } + + /** + * Get the current system time according to the clock manager. + * + * @return A LocalTime as if now() is called. + * Time is truncated to milliseconds. + */ + public static LocalTime getLocalTime() { + return LocalTime.now(clock).truncatedTo(ChronoUnit.MILLIS); + } + + /** + * Get the current system time according to the clock manager. + * + * @return An Instant as if now() is called. + * Time is truncated to milliseconds. + */ + public static Instant getInstant() { + return clock.instant(); + } + + /** + * Get the time zone of the system clock according to the clock manager. + * + * @return The current time zone as a ZoneId. + */ + public static ZoneId getZoneId() { + return clock.getZone(); + } + + /** + * Get the time zone of the system clock according to the clock manager. + * + * @return The current time zone as a ZoneOffset. + */ + public static ZoneOffset getZoneOffset() { + return ZoneOffset.of(clock.getZone().getRules().getOffset(clock.instant()) + .getId()); + } + + +} diff --git a/src/main/resources/openapi/harvest-admin-1.0.yaml b/src/main/resources/openapi/harvest-admin-1.0.yaml index f27f86f..f34784f 100644 --- a/src/main/resources/openapi/harvest-admin-1.0.yaml +++ b/src/main/resources/openapi/harvest-admin-1.0.yaml @@ -485,6 +485,27 @@ paths: $ref: "#/components/responses/trait_400" "500": $ref: "#/components/responses/trait_500" + post: + description: Create job log samples for test purposes etc, for example by import from another FOLIO instance. 
+ operationId: postPreviousJob + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/previousJob" + required: true + responses: + "201": + description: Job log created + content: + application/json: + schema: + $ref: "#/components/schemas/previousJob" + "400": + $ref: "#/components/responses/trait_400" + "500": + $ref: "#/components/responses/trait_500" + /harvester-admin/previous-jobs/failed-records: parameters: @@ -620,6 +641,25 @@ paths: "500": $ref: "#/components/responses/trait_500" + post: + operationId: postPreviousJobLog + description: Backdoor for creating logs of a previous harvest job without running a job + requestBody: + content: + text/plain: + schema: + type: string + required: true + responses: + "201": + description: Logs for a previous harvest job created + "400": + $ref: "#/components/responses/trait_400" + "404": + $ref: "#/components/responses/trait_404" + "500": + $ref: "#/components/responses/trait_500" + /harvester-admin/previous-jobs/{id}/failed-records: parameters: - $ref: headers/okapi-tenant.yaml @@ -632,40 +672,41 @@ paths: schema: type: string format: uuid - - in: query - name: query - required: false - description: CQL query, supporting recordNumber, harvestableId, harvestableName in queries - schema: - type: string - - in: query - name: from - required: false - description: date range parameter on error report timestamp - schema: - type: string - - in: query - name: until - required: false - description: date range parameter on error report timestamp - schema: - type: string - - in: query - name: offset - required: false - description: result set start row - schema: - type: string - - in: query - name: limit - required: false - description: result set max rows - schema: - type: string get: operationId: getFailedRecordsForPreviousJob description: Retrieves the failed records of a previous harvest job + parameters: + - in: query + name: query + required: false + description: CQL query, supporting 
recordNumber, harvestableId, harvestableName in queries + schema: + type: string + - in: query + name: from + required: false + description: date range parameter on error report timestamp + schema: + type: string + - in: query + name: until + required: false + description: date range parameter on error report timestamp + schema: + type: string + - in: query + name: offset + required: false + description: result set start row + schema: + type: string + - in: query + name: limit + required: false + description: result set max rows + schema: + type: string responses: "200": description: List of failed records for a previous harvest job @@ -681,6 +722,24 @@ paths: "500": $ref: "#/components/responses/trait_500" + post: + description: Create failed record samples without running a job, for example to import from another FOLIO instance. + operationId: postFailedRecords + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/failedRecordsPreviousJob" + required: true + responses: + "201": + description: Failed record entries created + "400": + $ref: "#/components/responses/trait_400" + "500": + $ref: "#/components/responses/trait_500" + + /harvester-admin/previous-jobs/failed-records/{id}: parameters: - $ref: headers/okapi-tenant.yaml @@ -1170,6 +1229,14 @@ paths: schema: type: string + /harvester-admin/purge-aged-logs: + post: + operationId: purgeAgedLogs + description: Delete old harvest logs from storage + responses: + "201": + description: No content + components: responses: trait_400: diff --git a/src/main/resources/openapi/schemas/failedRecordPreviousJob.json b/src/main/resources/openapi/schemas/failedRecordPreviousJob.json index 8582512..c829bb1 100644 --- a/src/main/resources/openapi/schemas/failedRecordPreviousJob.json +++ b/src/main/resources/openapi/schemas/failedRecordPreviousJob.json @@ -34,7 +34,7 @@ "description": "Short context description" }, "typeOfError": { - "type": "string", + "type": "object", "description": "Error 
category." }, "typeOfRecord": { diff --git a/src/test/java/org/folio/harvesteradmin/test/Api.java b/src/test/java/org/folio/harvesteradmin/test/Api.java index 3ace4d1..9378578 100644 --- a/src/test/java/org/folio/harvesteradmin/test/Api.java +++ b/src/test/java/org/folio/harvesteradmin/test/Api.java @@ -1,104 +1,109 @@ package org.folio.harvesteradmin.test; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.THIS_HARVESTABLES_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.THIS_STEPS_PATH; -import static org.folio.harvesteradmin.test.HarvesterAdminTestSuite.CONTENT_TYPE_JSON; -import static org.folio.harvesteradmin.test.HarvesterAdminTestSuite.CONTENT_TYPE_XML; -import static org.folio.harvesteradmin.test.HarvesterAdminTestSuite.OKAPI_TENANT; +import static io.restassured.RestAssured.given; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.THIS_HARVESTABLES_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.THIS_STEPS_PATH; +import static org.folio.harvesteradmin.test.HarvesterIntegrationTestSuite.*; +import static org.folio.harvesteradmin.test.Statics.*; -import io.restassured.RestAssured; import io.restassured.response.Response; import io.vertx.core.json.JsonObject; public class Api { - public static Response postConfigRecord(JsonObject record, String api ) { - return postConfigRecord(record, api, 201); - } - - public static Response postConfigRecord( JsonObject record, String api, int expectStatus ) { - return RestAssured - .given() - .body(record.encodePrettily()) - .header(CONTENT_TYPE_JSON) - .header(OKAPI_TENANT) - .post(api) - .then() - .log().ifValidationFails().statusCode(expectStatus).extract().response(); - } - - public static Response getConfigRecord(String api, String id) { - return getConfigRecord(api, id, 200); - } - - public static Response getConfigRecord(String api, String id, int expectStatus) { - return RestAssured - .given() - .header(OKAPI_TENANT) - .get(api + 
"/" + id) - .then() - .log().ifValidationFails().statusCode(expectStatus).extract().response(); - } - - public static JsonObject responseJson(Response response) { - return new JsonObject(response.body().asString()); - } - - public static Response getConfigRecords(String api, int expectStatus) { - return getConfigRecords(api, null, expectStatus); - } - - public static Response getConfigRecords(String api, String query, int expectStatus) { - return RestAssured - .given() - .header(OKAPI_TENANT) - .get(api - + (query == null ? "" : "?query=" + query )) - .then() - .log().ifValidationFails().statusCode(expectStatus).extract().response(); - } - - public static Response deleteConfigRecord(String api, String id, int expectStatus) { - return RestAssured.given() - .header(OKAPI_TENANT) - .delete(api + "/" + id) - .then() - .log().ifValidationFails().statusCode(expectStatus).extract().response(); - } - - public static Response putConfigRecord(String api, String id, JsonObject record, int status) { - return RestAssured.given() - .header(OKAPI_TENANT) - .header(CONTENT_TYPE_JSON) - .body(record.encodePrettily()) - .put(api + "/" + id) - .then() - .log().ifValidationFails().statusCode(status).extract().response(); - } - - public static Response putScript(String stepId, String stepName, String xsl, int status) { - return RestAssured.given() - .header(OKAPI_TENANT) - .header(CONTENT_TYPE_XML) - .body(xsl) - .put(THIS_STEPS_PATH + "/" + stepId + "/script?name="+stepName) - .then() - .log().ifValidationFails().statusCode(status).extract().response(); - } - - public static Response getScript(String stepId, int status) { - return RestAssured.given() - .header(OKAPI_TENANT) - .get(THIS_STEPS_PATH + "/" + stepId + "/script") - .then() - .log().ifValidationFails().statusCode(status).extract().response(); - } - - public static Response getJobLog(String harvestableId, int status) { - return RestAssured.given() - .header(OKAPI_TENANT) - .get(THIS_HARVESTABLES_PATH + "/" + harvestableId + 
"/log") - .then() - .log().ifValidationFails().statusCode(status).extract().response(); - } + + public static Response postConfigRecord(JsonObject record, String api) { + return postConfigRecord(record, api, 201); + } + + public static Response postConfigRecord(JsonObject record, String api, int expectStatus) { + return given() + .baseUri(BASE_URI_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .body(record.encodePrettily()) + .header(CONTENT_TYPE_JSON) + .post(api) + .then() + .log().ifValidationFails().statusCode(expectStatus).extract().response(); + } + + public static Response getConfigRecord(String api, String id) { + return getConfigRecord(api, id, 200); + } + + public static Response getConfigRecord(String api, String id, int expectStatus) { + return given() + .baseUri(BASE_URI_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .get(api + "/" + id) + .then() + .log().ifValidationFails().statusCode(expectStatus).extract().response(); + } + + public static JsonObject responseJson(Response response) { + return new JsonObject(response.body().asString()); + } + + public static Response getConfigRecords(String api, int expectStatus) { + return getConfigRecords(api, null, expectStatus); + } + + public static Response getConfigRecords(String api, String query, int expectStatus) { + return given() + .baseUri(BASE_URI_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .get(api + + (query == null ? 
"" : "?query=" + query)) + .then() + .log().ifValidationFails().statusCode(expectStatus).extract().response(); + } + + public static Response deleteConfigRecord(String api, String id, int expectStatus) { + return given() + .baseUri(BASE_URI_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .delete(api + "/" + id) + .then() + .log().ifValidationFails().statusCode(expectStatus).extract().response(); + } + + public static Response putConfigRecord(String api, String id, JsonObject record, int status) { + return given() + .baseUri(BASE_URI_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .header(CONTENT_TYPE_JSON) + .body(record.encodePrettily()) + .put(api + "/" + id) + .then() + .log().ifValidationFails().statusCode(status).extract().response(); + } + + public static Response putScript(String stepId, String stepName, String xsl, int status) { + return given() + .baseUri(BASE_URI_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .header(CONTENT_TYPE_XML) + .body(xsl) + .put(THIS_STEPS_PATH + "/" + stepId + "/script?name=" + stepName) + .then() + .log().ifValidationFails().statusCode(status).extract().response(); + } + + public static Response getScript(String stepId, int status) { + return given() + .baseUri(BASE_URI_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .get(THIS_STEPS_PATH + "/" + stepId + "/script") + .then() + .log().ifValidationFails().statusCode(status).extract().response(); + } + + public static Response getJobLog(String harvestableId, int status) { + return given() + .baseUri(BASE_URI_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .get(THIS_HARVESTABLES_PATH + "/" + harvestableId + "/log") + .then() + .log().ifValidationFails().statusCode(status).extract().response(); + } } diff --git a/src/test/java/org/folio/harvesteradmin/test/HarvesterAdminTestSuite.java b/src/test/java/org/folio/harvesteradmin/test/HarvesterIntegrationTestSuite.java similarity index 87% rename from src/test/java/org/folio/harvesteradmin/test/HarvesterAdminTestSuite.java rename to 
src/test/java/org/folio/harvesteradmin/test/HarvesterIntegrationTestSuite.java index 000991f..b16afef 100644 --- a/src/test/java/org/folio/harvesteradmin/test/HarvesterAdminTestSuite.java +++ b/src/test/java/org/folio/harvesteradmin/test/HarvesterIntegrationTestSuite.java @@ -1,20 +1,15 @@ package org.folio.harvesteradmin.test; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.THIS_HARVESTABLES_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.THIS_STEPS_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.THIS_STORAGES_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.THIS_TRANSFORMATIONS_PATH; -import static org.folio.harvesteradmin.dataaccess.statics.ApiPaths.THIS_TRANSFORMATIONS_STEPS_PATH; -import static org.folio.harvesteradmin.test.Api.deleteConfigRecord; -import static org.folio.harvesteradmin.test.Api.getConfigRecord; -import static org.folio.harvesteradmin.test.Api.getConfigRecords; -import static org.folio.harvesteradmin.test.Api.getJobLog; -import static org.folio.harvesteradmin.test.Api.getScript; -import static org.folio.harvesteradmin.test.Api.putConfigRecord; -import static org.folio.harvesteradmin.test.Api.putScript; -import static org.folio.harvesteradmin.test.Api.responseJson; -import static org.folio.harvesteradmin.test.Api.postConfigRecord; +import static io.restassured.RestAssured.given; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.THIS_HARVESTABLES_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.THIS_STEPS_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.THIS_STORAGES_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.THIS_TRANSFORMATIONS_PATH; +import static org.folio.harvesteradmin.legacydata.statics.ApiPaths.THIS_TRANSFORMATIONS_STEPS_PATH; +import static org.folio.harvesteradmin.test.Api.*; import static 
org.folio.harvesteradmin.test.SampleId.SAMPLES_ID_PREFIX; +import static org.folio.harvesteradmin.test.Statics.BASE_URI_HARVESTER_ADMIN; +import static org.folio.harvesteradmin.test.Statics.PORT_HARVESTER_ADMIN; import static org.folio.harvesteradmin.test.sampleData.Samples.BASE_STORAGE_ID; import static org.folio.harvesteradmin.test.sampleData.Samples.BASE_STORAGE_JSON; import static org.folio.harvesteradmin.test.sampleData.Samples.BASE_TRANSFORMATION_ID; @@ -29,44 +24,43 @@ import static org.junit.Assert.assertTrue; import io.restassured.RestAssured; +import io.restassured.builder.RequestSpecBuilder; +import io.restassured.config.HttpClientConfig; +import io.restassured.http.ContentType; import io.restassured.http.Header; import io.restassured.response.ExtractableResponse; import io.restassured.response.Response; +import io.restassured.specification.RequestSpecification; import io.vertx.core.DeploymentOptions; import io.vertx.core.Vertx; import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; -import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; import org.folio.harvesteradmin.MainVerticle; +import org.folio.harvesteradmin.test.fakestorage.FakeFolioApis; import org.folio.okapi.common.XOkapiHeaders; import org.folio.tlib.postgres.testing.TenantPgPoolContainer; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; +import org.junit.*; import org.junit.rules.TestName; import org.junit.runner.RunWith; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.PostgreSQLContainer; -@SuppressWarnings("JUnitMalformedDeclaration") +import java.time.LocalDateTime; +import java.time.temporal.ChronoUnit; +import java.util.UUID; + @RunWith( VertxUnitRunner.class ) -public class HarvesterAdminTestSuite { - private final Logger logger = LoggerFactory.getLogger( "HarvesterAdminTestSuite" 
); +public class HarvesterIntegrationTestSuite { + private static final Logger logger = LoggerFactory.getLogger( "HarvesterAdminTestSuite" ); static final String TENANT = "mha_test"; - Vertx vertx; - private static final int PORT_HARVESTER_ADMIN = 9031; - public static final Header CONTENT_TYPE_JSON = new Header("Content-Type", "application/json"); - public static final Header CONTENT_TYPE_XML = new Header("Content-Type", "application/xml"); - public static final Header OKAPI_TENANT = new Header ("X-Okapi-Tenant", TENANT); - + static Vertx vertx; + public static final Header OKAPI_TENANT = new Header (XOkapiHeaders.TENANT, TENANT); - public HarvesterAdminTestSuite() {} + public HarvesterIntegrationTestSuite() {} @ClassRule public static PostgreSQLContainer postgresSQLContainer = TenantPgPoolContainer.create(); @@ -74,30 +68,31 @@ public HarvesterAdminTestSuite() {} @Rule public final TestName name = new TestName(); - @Before - public void setUp(TestContext testContext) { - logger.info("setUp " + name.getMethodName()); + @BeforeClass + public static void beforeClass(TestContext context) { vertx = Vertx.vertx(); + RestAssured.enableLoggingOfRequestAndResponseIfValidationFails(); + RestAssured.baseURI = BASE_URI_HARVESTER_ADMIN; + RestAssured.requestSpecification = new RequestSpecBuilder().build(); - // Register the testContext exception handler to catch assertThat - vertx.exceptionHandler(testContext.exceptionHandler()); - RestAssured.port = PORT_HARVESTER_ADMIN; + DeploymentOptions deploymentOptions = new DeploymentOptions(); + deploymentOptions.setConfig(new JsonObject().put("port", Integer.toString(PORT_HARVESTER_ADMIN))); + vertx.deployVerticle(new MainVerticle(), deploymentOptions).onComplete(context.asyncAssertSuccess()); + + } - System.setProperty("port", String.valueOf(PORT_HARVESTER_ADMIN)); - vertx.deployVerticle( - MainVerticle.class.getName(), new DeploymentOptions()) - .onComplete(testContext.asyncAssertSuccess(outcome -> 
deleteSamplesFromLegacyHarvester())); + @AfterClass + public static void afterClass(TestContext context) { + vertx.close().onComplete(context.asyncAssertSuccess()); } @After - public void tearDown(TestContext context) { + public void cleanUpTestRecords() { deleteSamplesFromLegacyHarvester(); - Async async = context.async(); - vertx.close(context.asyncAssertSuccess(res -> async.complete())); } - private void deleteSamplesFromLegacyHarvester() { + private static void deleteSamplesFromLegacyHarvester() { deleteRecordsByIdPrefix(THIS_HARVESTABLES_PATH, "harvestables"); deleteRecordsByIdPrefix(THIS_STORAGES_PATH, "storages"); deleteTsasByStepIdPrefix(); @@ -105,7 +100,7 @@ private void deleteSamplesFromLegacyHarvester() { deleteRecordsByIdPrefix(THIS_STEPS_PATH, "transformationSteps"); } - private void deleteRecordsByIdPrefix(String path, String recordsArrayProperty) { + private static void deleteRecordsByIdPrefix(String path, String recordsArrayProperty) { JsonObject samples = responseJson( getConfigRecords(path, "id=" + SAMPLES_ID_PREFIX + "*", 200)); JsonArray sampleRecords = samples.getJsonArray(recordsArrayProperty); @@ -115,11 +110,11 @@ private void deleteRecordsByIdPrefix(String path, String recordsArrayProperty) { } } - private void deleteTsasByStepIdPrefix() { + private static void deleteTsasByStepIdPrefix() { Response response = getConfigRecords(THIS_TRANSFORMATIONS_STEPS_PATH, "step.id=" +SAMPLES_ID_PREFIX + "*",200); JsonObject transformationStepAssociations = new JsonObject(response.body().asString()); - if (transformationStepAssociations.getJsonArray("transformationStepAssociations").size() > 0) { + if (!transformationStepAssociations.getJsonArray("transformationStepAssociations").isEmpty()) { logger.info("TSAs to delete: "); } for (Object o : transformationStepAssociations.getJsonArray("transformationStepAssociations")) { @@ -337,7 +332,7 @@ public void cannotGetLogsForNonExistingHarvestable() { getJobLog("9865320", 404); } - @Test + //@Test public void 
cannotCreateHarvestableWithWrongStorageId() { final SampleId harvestableId = new SampleId(1); @@ -599,7 +594,7 @@ public void cannotDeleteStorageThatIsInUse() { } @Test - public void cannotPostConfigsWithAlreadyExistingNames() { + public void cannotPostConfigWithAlreadyExistingName() { JsonObject storageWithNoId = BASE_STORAGE_JSON.copy(); storageWithNoId.remove("id"); JsonObject transformationWithNoId = BASE_TRANSFORMATION_JSON.copy(); @@ -642,26 +637,6 @@ public void cannotPostConfigsWithAlreadyExistingNames() { } - - @Test - public void canGetPreviousJob () { - tenantAction(TENANT, new JsonObject() - .put("module_to", "mod-harvester-admin-0.4.0-SNAPSHOT")); - - RestAssured - .given() - .header(OKAPI_TENANT) - .get("harvester-admin/previous-jobs") - .then() - .log().ifValidationFails().statusCode(200).extract().response(); - - tenantAction(TENANT, new JsonObject() - .put("module_from", "mod-harvester-admin-0.4.0-SNAPSHOT") - .put("purge", true)); - - - } - @Test public void deletingNonExistingConfigWillReturnNotFound() { deleteConfigRecord(THIS_TRANSFORMATIONS_PATH, "998877665544", 404); @@ -679,34 +654,4 @@ public void putByIdWithInvalidCharactersReturn400() { putConfigRecord(THIS_STORAGES_PATH, "æøå", BASE_STORAGE_JSON, 400); } - void tenantAction(String tenant, JsonObject tenantAttributes) { - ExtractableResponse response = RestAssured.given() - .header(XOkapiHeaders.TENANT, tenant) - .header("Content-Type", "application/json") - .body(tenantAttributes.encode()) - .post("/_/tenant") - .then() - .extract(); - - if (response.statusCode() == 204) { - return; - } - assertThat(response.statusCode(), is(201)); - String location = response.header("Location"); - JsonObject tenantJob = new JsonObject(response.asString()); - assertThat(location, is("/_/tenant/" + tenantJob.getString("id"))); - - RestAssured.given() - .header(XOkapiHeaders.TENANT, tenant) - .get(location + "?wait=10000") - .then().statusCode(200) - .body("complete", is(true)) - .body("error", is((String) 
null)); - - RestAssured.given() - .header(XOkapiHeaders.TENANT, tenant) - .delete(location) - .then().statusCode(204); - } - } diff --git a/src/test/java/org/folio/harvesteradmin/test/NoHarvesterTestSuite.java b/src/test/java/org/folio/harvesteradmin/test/NoHarvesterTestSuite.java new file mode 100644 index 0000000..6b6f363 --- /dev/null +++ b/src/test/java/org/folio/harvesteradmin/test/NoHarvesterTestSuite.java @@ -0,0 +1,427 @@ +package org.folio.harvesteradmin.test; + +import io.restassured.RestAssured; +import io.restassured.builder.RequestSpecBuilder; +import io.restassured.config.HttpClientConfig; +import io.restassured.http.ContentType; +import io.restassured.http.Header; +import io.restassured.response.ExtractableResponse; +import io.restassured.response.Response; +import io.restassured.specification.RequestSpecification; +import io.vertx.core.DeploymentOptions; +import io.vertx.core.Vertx; +import io.vertx.core.json.JsonObject; +import io.vertx.ext.unit.TestContext; +import io.vertx.ext.unit.junit.VertxUnitRunner; +import org.folio.harvesteradmin.MainVerticle; +import org.folio.harvesteradmin.test.fakestorage.FakeFolioApis; +import org.folio.okapi.common.XOkapiHeaders; +import org.folio.tlib.postgres.testing.TenantPgPoolContainer; +import org.junit.*; +import org.junit.rules.TestName; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.PostgreSQLContainer; + +import java.time.LocalDateTime; +import java.time.temporal.ChronoUnit; +import java.util.UUID; + +import static io.restassured.RestAssured.given; +import static org.folio.harvesteradmin.test.Statics.BASE_URI_OKAPI; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +@RunWith(VertxUnitRunner.class) +public class NoHarvesterTestSuite { + private static final Logger logger = LoggerFactory.getLogger("HarvesterAdminTestSuite"); + + static Vertx vertx; + + private static FakeFolioApis 
fakeFolioApis; + static final String TENANT = "mha_test"; + public static final Header OKAPI_TENANT = new Header(XOkapiHeaders.TENANT, TENANT); + public static final Header OKAPI_URL = new Header(XOkapiHeaders.URL, BASE_URI_OKAPI); + public static final Header OKAPI_TOKEN = new Header(XOkapiHeaders.TOKEN, "eyJhbGciOiJIUzUxMiJ9eyJzdWIiOiJhZG1pbiIsInVzZXJfaWQiOiI3OWZmMmE4Yi1kOWMzLTViMzktYWQ0YS0wYTg0MDI1YWIwODUiLCJ0ZW5hbnQiOiJ0ZXN0X3RlbmFudCJ9BShwfHcNClt5ZXJ8ImQTMQtAM1sQEnhsfWNmXGsYVDpuaDN3RVQ9"); + + + public NoHarvesterTestSuite() { + } + + @ClassRule + public static PostgreSQLContainer postgresSQLContainer = TenantPgPoolContainer.create(); + + + @Rule + public final TestName name = new TestName(); + + @BeforeClass + public static void beforeClass(TestContext context) { + vertx = Vertx.vertx(); + RestAssured.enableLoggingOfRequestAndResponseIfValidationFails(); + RestAssured.baseURI = Statics.BASE_URI_HARVESTER_ADMIN; + RestAssured.requestSpecification = new RequestSpecBuilder().build(); + + DeploymentOptions deploymentOptions = new DeploymentOptions(); + deploymentOptions.setConfig(new JsonObject().put("port", Integer.toString(Statics.PORT_HARVESTER_ADMIN))); + vertx.deployVerticle(new MainVerticle(), deploymentOptions) + .onComplete(context.asyncAssertSuccess(x -> + fakeFolioApis = new FakeFolioApis(vertx, context))); + + } + + @AfterClass + public static void afterClass(TestContext context) { + vertx.close().onComplete(context.asyncAssertSuccess()); + } + + @Before + public void initSchema() { + tenantOp(TENANT, new JsonObject() + .put("module_to", "mod-harvester-admin-1.0.0") + , null); + } + + @After + public void cleanUpTestRecords() { + tenantOp(TENANT, new JsonObject() + .put("module_from", "mod-harvester-admin-1.0.0") + .put("purge", true), null); + fakeFolioApis.configurationStorage.wipeMockRecords(); + } + + void tenantOp(String tenant, JsonObject tenantAttributes, String expectedError) { + ExtractableResponse response = RestAssured.given() + 
.header(XOkapiHeaders.TENANT, tenant) + .contentType(ContentType.JSON) + .body(tenantAttributes.encode()) + .post("/_/tenant") + .then() + .extract(); + + logger.info(response.asString()); + if (response.statusCode() == 204) { + return; + } + assertThat(response.statusCode(), is(201)); + String location = response.header("Location"); + JsonObject tenantJob = new JsonObject(response.asString()); + assertThat(location, is("/_/tenant/" + tenantJob.getString("id"))); + + RestAssured.given() + .header(XOkapiHeaders.TENANT, tenant) + .get(location + "?wait=10000") + .then().statusCode(200) + .body("complete", is(true)) + .body("error", is(expectedError)); + + RestAssured.given() + .header(XOkapiHeaders.TENANT, tenant) + .delete(location) + .then().statusCode(204); + } + + @Test + public void canGetPreviousJobs() { + Response response = given().port(Statics.PORT_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .get("harvester-admin/previous-jobs") + .then() + .log().ifValidationFails().statusCode(200).extract().response(); + logger.info("canGetPreviousJobs response: " + response.asPrettyString()); + } + + @Test + public void willPurgeAgedJobLogsUsingDefaultThreshold() { + Response response = given().port(Statics.PORT_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .get("harvester-admin/previous-jobs") + .then() + .log().ifValidationFails().statusCode(200).extract().response(); + logger.info("will purge jobs response: " + response.asPrettyString()); + + given().port(Statics.PORT_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .get("harvester-admin/previous-jobs") + .then() + .log().ifValidationFails().statusCode(200).extract().response(); + + LocalDateTime now = LocalDateTime.now(); + final LocalDateTime agedJobStartedTime = now.minusMonths(3).minusDays(1).truncatedTo(ChronoUnit.SECONDS); + final LocalDateTime agedJobFinishedTime = agedJobStartedTime.plusMinutes(2); + final LocalDateTime intermediateJobStartedTime = now.minusMonths(2).minusDays(1).truncatedTo(ChronoUnit.SECONDS); + final 
LocalDateTime intermediateJobFinishedTime = intermediateJobStartedTime.plusMinutes(2); + final LocalDateTime newerJobStartedTime = now.minusMonths(2).truncatedTo(ChronoUnit.SECONDS); + final LocalDateTime newerJobFinishedTime = newerJobStartedTime.plusMinutes(3); + + JsonObject agedJobJson = + new JsonObject( + " {\n" + + " \"id\" : \"" + UUID.randomUUID() + "\",\n" + + " \"name\" : \"fake job log\",\n" + + " \"harvestableId\" : 672813240090200,\n" + + " \"type\" : \"xmlBulk\",\n" + + " \"url\" : \"http://fileserver/xml/\",\n" + + " \"allowErrors\" : true,\n" + + " \"transformation\" : \"12345\",\n" + + " \"storage\" : \"Batch Upsert Inventory\",\n" + + " \"status\" : \"OK\",\n" + + " \"started\" : \"" + agedJobStartedTime + "\",\n" + + " \"finished\" : \"" + agedJobFinishedTime + "\",\n" + + " \"amountHarvested\" : 5,\n" + + " \"message\" : \" Instances_processed/loaded/deletions(signals)/failed:__5___5___0(0)___0_ Holdings_records_processed/loaded/deleted/failed:__13___13___0___0_ Items_processed/loaded/deleted/failed:__4___4___0___0_ Source_records_processed/loaded/deleted/failed:__0___0___0___0_\"\n" + + " }\n"); + + JsonObject intermediateJobJson = + new JsonObject( + " {\n" + + " \"id\" : \"" + UUID.randomUUID() + "\",\n" + + " \"name\" : \"fake job log\",\n" + + " \"harvestableId\" : 672813240090200,\n" + + " \"type\" : \"xmlBulk\",\n" + + " \"url\" : \"http://fileserver/xml/\",\n" + + " \"allowErrors\" : true,\n" + + " \"transformation\" : \"12345\",\n" + + " \"storage\" : \"Batch Upsert Inventory\",\n" + + " \"status\" : \"OK\",\n" + + " \"started\" : \"" + intermediateJobStartedTime + "\",\n" + + " \"finished\" : \"" + intermediateJobFinishedTime + "\",\n" + + " \"amountHarvested\" : 5,\n" + + " \"message\" : \" Instances_processed/loaded/deletions(signals)/failed:__5___5___0(0)___0_ Holdings_records_processed/loaded/deleted/failed:__13___13___0___0_ Items_processed/loaded/deleted/failed:__4___4___0___0_ 
Source_records_processed/loaded/deleted/failed:__0___0___0___0_\"\n" + + " }\n"); + + JsonObject newerJobJson = + new JsonObject( + " {\n" + + " \"id\" : \"" + UUID.randomUUID() + "\",\n" + + " \"name\" : \"fake job log\",\n" + + " \"harvestableId\" : 672813240090200,\n" + + " \"type\" : \"xmlBulk\",\n" + + " \"url\" : \"http://fileserver/xml/\",\n" + + " \"allowErrors\" : true,\n" + + " \"transformation\" : \"12345\",\n" + + " \"storage\" : \"Batch Upsert Inventory\",\n" + + " \"status\" : \"OK\",\n" + + " \"started\" : \"" + newerJobStartedTime + "\",\n" + + " \"finished\" : \"" + newerJobFinishedTime + "\",\n" + + " \"amountHarvested\" : 3,\n" + + " \"message\" : \" Instances_processed/loaded/deletions(signals)/failed:__3___3___0(0)___0_ Holdings_records_processed/loaded/deleted/failed:__8___8___0___0_ Items_processed/loaded/deleted/failed:__2___2___0___0_ Source_records_processed/loaded/deleted/failed:__0___0___0___0_\"\n" + + " }\n"); + + given().port(Statics.PORT_HARVESTER_ADMIN).header(OKAPI_TENANT) + .body(agedJobJson.encode()) + .contentType(ContentType.JSON) + .post("harvester-admin/previous-jobs") + .then() + .log().ifValidationFails().statusCode(201).extract().response(); + + given().port(Statics.PORT_HARVESTER_ADMIN).header(OKAPI_TENANT) + .body(intermediateJobJson.encode()) + .contentType(ContentType.JSON) + .post("harvester-admin/previous-jobs") + .then() + .log().ifValidationFails().statusCode(201).extract().response(); + + given().port(Statics.PORT_HARVESTER_ADMIN).header(OKAPI_TENANT) + .body(newerJobJson.encode()) + .contentType(ContentType.JSON) + .post("harvester-admin/previous-jobs") + .then() + .log().ifValidationFails().statusCode(201).extract().response(); + + RestAssured + .given() + .port(Statics.PORT_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .contentType(ContentType.JSON) + .get("harvester-admin/previous-jobs") + .then().statusCode(200) + .body("totalRecords", is(3)); + + final RequestSpecification timeoutConfig = timeoutConfig(10000); 
+ + given() + .port(Statics.PORT_OKAPI) + .header(OKAPI_TENANT) + .header(OKAPI_URL) + .header(OKAPI_TOKEN) + .contentType(ContentType.JSON) + .header(XOkapiHeaders.REQUEST_ID, "purge-aged-logs") + .spec(timeoutConfig) + .when().post("/harvester-admin/purge-aged-logs") + .then().log().ifValidationFails().statusCode(204) + .extract().response(); + + RestAssured + .given() + .port(Statics.PORT_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .contentType(ContentType.JSON) + .get("harvester-admin/previous-jobs") + .then().statusCode(200) + .body("totalRecords", is(2)); + + } + + @Test + public void willPurgeAgedJobLogsUsingConfiguredThreshold() { + Response response = given().port(Statics.PORT_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .get("harvester-admin/previous-jobs") + .then() + .log().ifValidationFails().statusCode(200).extract().response(); + logger.info("will purge jobs response: " + response.asPrettyString()); + + given().port(Statics.PORT_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .get("harvester-admin/previous-jobs") + .then() + .log().ifValidationFails().statusCode(200).extract().response(); + + LocalDateTime now = LocalDateTime.now(); + final LocalDateTime agedJobStartedTime = now.minusMonths(3).minusDays(1).truncatedTo(ChronoUnit.SECONDS); + final LocalDateTime agedJobFinishedTime = agedJobStartedTime.plusMinutes(2); + final LocalDateTime intermediateJobStartedTime = now.minusMonths(2).minusDays(1).truncatedTo(ChronoUnit.SECONDS); + final LocalDateTime intermediateJobFinishedTime = intermediateJobStartedTime.plusMinutes(2); + final LocalDateTime newerJobStartedTime = now.minusMonths(2).truncatedTo(ChronoUnit.SECONDS); + final LocalDateTime newerJobFinishedTime = newerJobStartedTime.plusMinutes(3); + + JsonObject agedJobJson = + new JsonObject( + " {\n" + + " \"id\" : \"" + UUID.randomUUID() + "\",\n" + + " \"name\" : \"fake job log\",\n" + + " \"harvestableId\" : 672813240090200,\n" + + " \"type\" : \"xmlBulk\",\n" + + " \"url\" : 
\"http://fileserver/xml/\",\n" + + " \"allowErrors\" : true,\n" + + " \"transformation\" : \"12345\",\n" + + " \"storage\" : \"Batch Upsert Inventory\",\n" + + " \"status\" : \"OK\",\n" + + " \"started\" : \"" + agedJobStartedTime + "\",\n" + + " \"finished\" : \"" + agedJobFinishedTime + "\",\n" + + " \"amountHarvested\" : 5,\n" + + " \"message\" : \" Instances_processed/loaded/deletions(signals)/failed:__5___5___0(0)___0_ Holdings_records_processed/loaded/deleted/failed:__13___13___0___0_ Items_processed/loaded/deleted/failed:__4___4___0___0_ Source_records_processed/loaded/deleted/failed:__0___0___0___0_\"\n" + + " }\n"); + + JsonObject intermediateJobJson = + new JsonObject( + " {\n" + + " \"id\" : \"" + UUID.randomUUID() + "\",\n" + + " \"name\" : \"fake job log\",\n" + + " \"harvestableId\" : 672813240090200,\n" + + " \"type\" : \"xmlBulk\",\n" + + " \"url\" : \"http://fileserver/xml/\",\n" + + " \"allowErrors\" : true,\n" + + " \"transformation\" : \"12345\",\n" + + " \"storage\" : \"Batch Upsert Inventory\",\n" + + " \"status\" : \"OK\",\n" + + " \"started\" : \"" + intermediateJobStartedTime + "\",\n" + + " \"finished\" : \"" + intermediateJobFinishedTime + "\",\n" + + " \"amountHarvested\" : 5,\n" + + " \"message\" : \" Instances_processed/loaded/deletions(signals)/failed:__5___5___0(0)___0_ Holdings_records_processed/loaded/deleted/failed:__13___13___0___0_ Items_processed/loaded/deleted/failed:__4___4___0___0_ Source_records_processed/loaded/deleted/failed:__0___0___0___0_\"\n" + + " }\n"); + + + JsonObject newerJobJson = + new JsonObject( + " {\n" + + " \"id\" : \"" + UUID.randomUUID() + "\",\n" + + " \"name\" : \"fake job log\",\n" + + " \"harvestableId\" : 672813240090200,\n" + + " \"type\" : \"xmlBulk\",\n" + + " \"url\" : \"http://fileserver/xml/\",\n" + + " \"allowErrors\" : true,\n" + + " \"transformation\" : \"12345\",\n" + + " \"storage\" : \"Batch Upsert Inventory\",\n" + + " \"status\" : \"OK\",\n" + + " \"started\" : \"" + 
newerJobStartedTime + "\",\n" + + " \"finished\" : \"" + newerJobFinishedTime + "\",\n" + + " \"amountHarvested\" : 3,\n" + + " \"message\" : \" Instances_processed/loaded/deletions(signals)/failed:__3___3___0(0)___0_ Holdings_records_processed/loaded/deleted/failed:__8___8___0___0_ Items_processed/loaded/deleted/failed:__2___2___0___0_ Source_records_processed/loaded/deleted/failed:__0___0___0___0_\"\n" + + " }\n"); + + given().port(Statics.PORT_HARVESTER_ADMIN).header(OKAPI_TENANT) + .body(agedJobJson.encode()) + .contentType(ContentType.JSON) + .post("harvester-admin/previous-jobs") + .then() + .log().ifValidationFails().statusCode(201).extract().response(); + + given().port(Statics.PORT_HARVESTER_ADMIN).header(OKAPI_TENANT) + .body(intermediateJobJson.encode()) + .contentType(ContentType.JSON) + .post("harvester-admin/previous-jobs") + .then() + .log().ifValidationFails().statusCode(201).extract().response(); + + given().port(Statics.PORT_HARVESTER_ADMIN).header(OKAPI_TENANT) + .body(newerJobJson.encode()) + .contentType(ContentType.JSON) + .post("harvester-admin/previous-jobs") + .then() + .log().ifValidationFails().statusCode(201).extract().response(); + + RestAssured + .given() + .port(Statics.PORT_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .contentType(ContentType.JSON) + .get("harvester-admin/previous-jobs") + .then().statusCode(200) + .body("totalRecords", is(3)); + + FakeFolioApis.post("/configurations/entries", + new JsonObject() + .put("module", "HARVESTER_ADMIN") + .put("configName", "PURGE_LOGS_AFTER") + .put("value", "2 MONATE")); + + Response responsex = RestAssured + .given() + .baseUri("http://localhost:" + Statics.PORT_OKAPI) + .port(Statics.PORT_OKAPI) + .header(OKAPI_TENANT) + .contentType(ContentType.JSON) + .get("configurations/entries") + .then().statusCode(200) + .body("totalRecords", is(1)) + .extract().response(); + + logger.info(responsex.asPrettyString()); + + final RequestSpecification timeoutConfig = timeoutConfig(10000); + + given() 
+ .port(Statics.PORT_OKAPI) + .header(OKAPI_TENANT) + .header(Statics.OKAPI_URL) + .header(Statics.OKAPI_TOKEN) + .contentType(ContentType.JSON) + .header(XOkapiHeaders.REQUEST_ID, "purge-aged-logs") + .spec(timeoutConfig) + .when().post("/harvester-admin/purge-aged-logs") + .then().log().ifValidationFails().statusCode(204) + .extract().response(); + + RestAssured + .given() + .port(Statics.PORT_HARVESTER_ADMIN) + .header(OKAPI_TENANT) + .contentType(ContentType.JSON) + .get("harvester-admin/previous-jobs") + .then().statusCode(200) + .body("totalRecords", is(1)); + } + + public static RequestSpecification timeoutConfig(int timeOutInMilliseconds) { + return new RequestSpecBuilder() + .setConfig(RestAssured.config() + .httpClient(HttpClientConfig.httpClientConfig() + .setParam("http.connection.timeout", timeOutInMilliseconds) + .setParam("http.socket.timeout", timeOutInMilliseconds))) + .build(); + } + +} diff --git a/src/test/java/org/folio/harvesteradmin/test/Statics.java b/src/test/java/org/folio/harvesteradmin/test/Statics.java new file mode 100644 index 0000000..4fb2072 --- /dev/null +++ b/src/test/java/org/folio/harvesteradmin/test/Statics.java @@ -0,0 +1,21 @@ +package org.folio.harvesteradmin.test; + +import io.restassured.http.Header; +import org.folio.okapi.common.XOkapiHeaders; + +public class Statics { + + static final String TENANT = "mha_test"; + public static final Header CONTENT_TYPE_JSON = new Header("Content-Type", "application/json"); + public static final Header CONTENT_TYPE_XML = new Header("Content-Type", "application/xml"); + public static final Header OKAPI_TOKEN = new Header(XOkapiHeaders.TOKEN,"eyJhbGciOiJIUzUxMiJ9eyJzdWIiOiJhZG1pbiIsInVzZXJfaWQiOiI3OWZmMmE4Yi1kOWMzLTViMzktYWQ0YS0wYTg0MDI1YWIwODUiLCJ0ZW5hbnQiOiJ0ZXN0X3RlbmFudCJ9BShwfHcNClt5ZXJ8ImQTMQtAM1sQEnhsfWNmXGsYVDpuaDN3RVQ9"); + + + public static int PORT_HARVESTER_ADMIN = 9230; + public static String BASE_URI_HARVESTER_ADMIN = "http://localhost:" + PORT_HARVESTER_ADMIN; + public 
static int PORT_OKAPI = 9031; + public static String BASE_URI_OKAPI = "http://localhost:" + PORT_OKAPI; + + public static final Header OKAPI_URL = new Header (XOkapiHeaders.URL, BASE_URI_OKAPI); + +} diff --git a/src/test/java/org/folio/harvesteradmin/test/fakestorage/ConfigurationStorage.java b/src/test/java/org/folio/harvesteradmin/test/fakestorage/ConfigurationStorage.java new file mode 100644 index 0000000..74905f7 --- /dev/null +++ b/src/test/java/org/folio/harvesteradmin/test/fakestorage/ConfigurationStorage.java @@ -0,0 +1,19 @@ +package org.folio.harvesteradmin.test.fakestorage; + +public class ConfigurationStorage extends RecordStorage { + public String getResultSetName() { + return CONFIGURATION_ENTRIES; + } + + @Override + protected void declareDependencies() { + // Instances have none in fake storage + } + + @Override + protected void declareMandatoryProperties() {} + + protected void declareUniqueProperties() {} + + +} diff --git a/src/test/java/org/folio/harvesteradmin/test/fakestorage/FakeFolioApis.java b/src/test/java/org/folio/harvesteradmin/test/fakestorage/FakeFolioApis.java new file mode 100644 index 0000000..5088922 --- /dev/null +++ b/src/test/java/org/folio/harvesteradmin/test/fakestorage/FakeFolioApis.java @@ -0,0 +1,114 @@ +package org.folio.harvesteradmin.test.fakestorage; + +import io.vertx.core.json.JsonObject; +import io.restassured.RestAssured; +import io.restassured.response.Response; +import io.vertx.core.Vertx; +import io.vertx.core.http.HttpServerOptions; +import io.vertx.ext.unit.TestContext; +import io.vertx.ext.web.Router; +import io.vertx.ext.web.handler.BodyHandler; + +import static org.folio.harvesteradmin.test.Statics.*; + + +public class FakeFolioApis { + public final static String CONFIGURATION_STORAGE_PATH = "/configurations/entries"; + + public ConfigurationStorage configurationStorage = new ConfigurationStorage(); + + public FakeFolioApis(Vertx vertx, TestContext testContext) { + 
configurationStorage.attachToFakeStorage(this); + + Router router = Router.router(vertx); + router.get(CONFIGURATION_STORAGE_PATH).handler(configurationStorage::getRecords); + router.get(CONFIGURATION_STORAGE_PATH + "/:id").handler(configurationStorage::getRecordById); + router.post("/*").handler(BodyHandler.create()); + router.post(CONFIGURATION_STORAGE_PATH).handler(configurationStorage::createRecord); + router.put("/*").handler(BodyHandler.create()); + router.put(CONFIGURATION_STORAGE_PATH + "/:id").handler(configurationStorage::updateRecord); + router.delete(CONFIGURATION_STORAGE_PATH + "/:id").handler(configurationStorage::deleteRecord); + HttpServerOptions so = new HttpServerOptions().setHandle100ContinueAutomatically(true); + vertx.createHttpServer(so) + .requestHandler(router) + .listen(PORT_OKAPI) + .onComplete(testContext.asyncAssertSuccess()); + RestAssured.port = PORT_OKAPI; + } + + public static JsonObject getRecordsByQuery(String storagePath, String query) { + return getRecordsByQuery(storagePath, query, 200); + } + + public static JsonObject getRecordsByQuery(String storagePath, String query, int expectedResponseCode) { + Response response = RestAssured.given() + .baseUri(BASE_URI_OKAPI) + .get(storagePath + "?" 
+ query) + .then() + .log().ifValidationFails() + .statusCode(expectedResponseCode).extract().response(); + return new JsonObject(response.getBody().asString()); + } + + public static JsonObject getRecordById(String storagePath, String id) { + return getRecordById(storagePath, id, 200); + } + + public static JsonObject getRecordById(String storagePath, String id, int expectedResponseCode) { + Response response = RestAssured.given() + .baseUri(BASE_URI_OKAPI) + .get(storagePath + "/" + id) + .then() + .log().ifValidationFails() + .statusCode(expectedResponseCode).extract().response(); + return new JsonObject(response.getBody().asString()); + } + + public static JsonObject post(String storagePath, JsonObject recordToPOST) { + return post(storagePath, recordToPOST, 201); + } + + public static JsonObject post(String storagePath, JsonObject recordToPOST, int expectedResponseCode) { + Response response = RestAssured.given() + .baseUri(BASE_URI_OKAPI) + .body(recordToPOST.toString()) + .post(storagePath) + .then() + .log().ifValidationFails() + .statusCode(expectedResponseCode).extract().response(); + if (response.getContentType().equals("application/json")) { + return new JsonObject(response.getBody().asString()); + } else { + return new JsonObject().put("response", response.asString()); + } + } + + public static void put(String storagePath, JsonObject recordToPUT) { + put(storagePath, recordToPUT, 204); + } + + public static void put(String storagePath, JsonObject recordToPUT, int expectedResponseCode) { + RestAssured.given() + .baseUri(BASE_URI_OKAPI) + .body(recordToPUT.toString()) + .put(storagePath + "/" + recordToPUT.getString("id")) + .then() + .log().ifValidationFails() + .statusCode(expectedResponseCode).extract().response(); + } + + public static void delete(String storagePath, String id) { + delete(storagePath, id, 200); + } + + public static void delete(String storagePath, String id, int expectedResponseCode) { + RestAssured.given() + .baseUri(BASE_URI_OKAPI) 
+ .delete(storagePath + "/" + id) + .then() + .log().ifValidationFails() + .statusCode(expectedResponseCode); + + } + +} diff --git a/src/test/java/org/folio/harvesteradmin/test/fakestorage/FolioApiRecord.java b/src/test/java/org/folio/harvesteradmin/test/fakestorage/FolioApiRecord.java new file mode 100644 index 0000000..f79a913 --- /dev/null +++ b/src/test/java/org/folio/harvesteradmin/test/fakestorage/FolioApiRecord.java @@ -0,0 +1,123 @@ +package org.folio.harvesteradmin.test.fakestorage; + +import io.vertx.core.impl.logging.Logger; +import io.vertx.core.json.JsonObject; + +import java.util.UUID; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class FolioApiRecord { + public static final String ID = "id"; + public static final String VERSION = "_version"; + protected JsonObject recordJson; + + private final Logger logger = io.vertx.core.impl.logging.LoggerFactory.getLogger("FolioApiRecord"); + + public FolioApiRecord() { + recordJson = new JsonObject(); + } + + public FolioApiRecord(JsonObject FolioApiRecord) { + recordJson = FolioApiRecord; + } + + + public String getStringValue (String propertyName) { + return recordJson.containsKey(propertyName) ? recordJson.getValue(propertyName).toString() : null; + } + + public JsonObject getJson() { + return recordJson; + } + + public FolioApiRecord setId (String id) { + recordJson.put(ID, id); + return this; + } + + public boolean hasId() { + return recordJson.getString(ID) != null; + } + + public FolioApiRecord generateId () { + recordJson.put(ID, UUID.randomUUID().toString()); + return this; + } + + public String getId () { + return recordJson.getString(ID); + } + + public FolioApiRecord setFirstVersion () { + recordJson.put(VERSION,1); + return this; + } + + public Integer getVersion () { + return recordJson.containsKey( VERSION ) ? 
recordJson.getInteger( VERSION ) : 0; + } + + public FolioApiRecord setVersion (Integer version) { + recordJson.put( VERSION, version); + return this; + } + + public boolean match(String query) { + logger.debug("Matching " + recordJson + " with query " + query); + Pattern orListPattern = Pattern.compile("[(]?(.*)==\\(([^)]*)\\)[)]?"); + Matcher orListMatcher = orListPattern.matcher(query); + if (orListMatcher.find()) { + logger.debug("OR list found"); + String key = orListMatcher.group(1); + String[] values = orListMatcher.group(2).split(" OR "); + for (String value : values) { + if (value.replace("\"","").equals(recordJson.getString(key))) { + return true; + } + } + } else { + String trimmed = query.replace("(", "").replace(")", ""); + String[] orSections = trimmed.split(" and "); + logger.debug( + "orSections: " + ( orSections.length > 1 ? orSections[0] + ", " + orSections[1] : orSections[0] )); + + for (int i = 0; i < orSections.length; i++) { + if (orSections[i].contains(" not ")) { + Pattern pattern = Pattern.compile(" not ([^ ]+)"); + Matcher matcher = pattern.matcher(orSections[i]); + if (matcher.find()) { + String notCriterion = matcher.group(1); + String[] equalityParts = notCriterion.split("=="); + String key = equalityParts[0]; + String value = equalityParts.length > 1 ? equalityParts[1].replace("\"", "") : ""; + if (recordJson.getString(key) != null && recordJson.getString(key).equals( + value)) { + logger.debug("NOT query, no match for " + key + " not equal to " + value + " in " + recordJson); + return false; + } else { + logger.debug("NOT query, have match for " + key + " not equal to " + value + " in " + recordJson); + } + } + } + String[] queryParts = orSections[i].split("=="); + logger.debug("query: " + query); + logger.debug("queryParts[0]: " + queryParts[0]); + String key = queryParts[0]; + String value = queryParts.length > 1 ? 
queryParts[1].replace("\"", "") : ""; + logger.debug("key: " + key); + logger.debug("value: " + value); + logger.debug("recordJson.getString(key): " + recordJson.getString(key)); + logger.debug( + "Query parameter [" + value + "] matches record property [" + key + "(" + recordJson.getString( + key) + ")] ?: " + ( recordJson.getString( + key) != null && recordJson.getString(key).equals(value) )); + if (recordJson.getString(key) != null && recordJson.getString(key).equals(value)) { + return true; + } + } + } + return false; + } + +} diff --git a/src/test/java/org/folio/harvesteradmin/test/fakestorage/RecordStorage.java b/src/test/java/org/folio/harvesteradmin/test/fakestorage/RecordStorage.java new file mode 100644 index 0000000..1cd1984 --- /dev/null +++ b/src/test/java/org/folio/harvesteradmin/test/fakestorage/RecordStorage.java @@ -0,0 +1,392 @@ +package org.folio.harvesteradmin.test.fakestorage; + +import io.vertx.core.impl.logging.Logger; +import io.vertx.core.impl.logging.LoggerFactory; +import io.vertx.core.json.JsonArray; +import io.vertx.core.json.JsonObject; +import io.vertx.ext.web.RoutingContext; + +import java.net.URLDecoder; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.*; + +public abstract class RecordStorage { + public final static String TOTAL_RECORDS = "totalRecords"; + // Property keys, JSON responses + public final static String CONFIGURATION_ENTRIES = "configs"; + + public final String STORAGE_NAME = getClass().getSimpleName(); + public boolean failOnDelete = false; + public boolean failOnCreate = false; + public boolean failOnUpdate = false; + public boolean failOnGetRecordById = false; + public boolean failOnGetRecords = false; + List dependentEntities = new ArrayList<>(); + List masterEntities = new ArrayList<>(); + List mandatoryProperties = new ArrayList<>(); + List uniqueProperties = new ArrayList<>(); + + protected FakeFolioApis fakeStorage; + + protected final Map records = new HashMap<>(); + 
protected final Logger logger = LoggerFactory.getLogger("fake-folio-storage"); + + public void attachToFakeStorage(FakeFolioApis fakeStorage) { + this.fakeStorage = fakeStorage; + declareDependencies(); + declareMandatoryProperties(); + declareUniqueProperties(); + } + + // PROPERTY NAME OF THE OBJECT THAT API RESULTS ARE RETURNED IN, IMPLEMENTED PER STORAGE ENTITY + protected abstract String getResultSetName(); + + // INTERNAL DATABASE OPERATIONS - insert() IS DECLARED PUBLIC SO THE TEST SUITE CAN INITIALIZE DATA OUTSIDE THE API. + public StorageResponse insert (FolioApiRecord FolioApiRecord) { + Resp validation = validateCreate(FolioApiRecord); + + if (validation.statusCode == 201) { + FolioApiRecord.setFirstVersion(); + records.put(FolioApiRecord.getId(), FolioApiRecord); + } + return new StorageResponse(validation.statusCode, validation.message); + } + + public static class Resp { + public int statusCode; + public String message; + public Resp(int status, String message) { + statusCode = status; + this.message = message; + } + } + + public Resp validateCreate(FolioApiRecord FolioApiRecord) { + if (failOnCreate) { + return new Resp(500, "forced fail"); + } + if (!FolioApiRecord.hasId()) { + FolioApiRecord.generateId(); + } + if (records.containsKey(FolioApiRecord.getId())) { + logger.error("Fake record storage already contains a record with id " + FolioApiRecord.getId() + ", cannot create " + FolioApiRecord.getJson().encodePrettily()); + return new Resp(400, "Record storage already contains a record with id " + FolioApiRecord.getId()); + } + for (FolioApiRecord existingRecord : records.values()) { + for (String nameOfUniqueProperty : uniqueProperties) { + if (FolioApiRecord.getStringValue(nameOfUniqueProperty) != null && existingRecord.getStringValue(nameOfUniqueProperty) != null ) { + if (FolioApiRecord.getStringValue(nameOfUniqueProperty).equals(existingRecord.getStringValue(nameOfUniqueProperty))) { + return new Resp(400, this.STORAGE_NAME +" already contains 
a record with " + nameOfUniqueProperty + " = " + FolioApiRecord.getStringValue(nameOfUniqueProperty)); + } + } + } + } + logger.debug("Checking foreign keys"); + logger.debug("Got " + masterEntities.size() + " foreign keys"); + for (ForeignKey fk : masterEntities) { + if (! FolioApiRecord.getJson().containsKey(fk.getDependentPropertyName())) { + logger.error("Foreign key violation, record must contain " + fk.getDependentPropertyName()); + return new Resp(422, "{\"errors\":[{\"message\":\"must not be null\",\"type\":\"1\",\"code\":\"-1\",\"parameters\":[{\"key\":\""+fk.getDependentPropertyName()+"\",\"value\":\"null\"}]}]}"); + } + if (!fk.getMasterStorage().hasId(FolioApiRecord.getJson().getString(fk.getDependentPropertyName()))) { + logger.error("Foreign key violation " + fk.getDependentPropertyName() + " not found in "+ fk.getMasterStorage().getResultSetName() + ", cannot create " + FolioApiRecord.getJson().encodePrettily()); + logger.error(new JsonObject().encode()); + return new Resp (500, new JsonObject("{ \"message\": \"insert or update on table \\\"storage_table\\\" violates foreign key constraint \\\"fkey\\\"\", \"severity\": \"ERROR\", \"code\": \"23503\", \"detail\": \"Key (property value)=(the id) is not present in table \\\"a_referenced_table\\\".\", \"file\": \"ri_triggers.c\", \"line\": \"3266\", \"routine\": \"ri_ReportViolation\", \"schema\": \"diku_mod_inventory_storage\", \"table\": \"storage_table\", \"constraint\": \"a_fkey\" }").encodePrettily()); + } else { + logger.debug("Found " + FolioApiRecord.getJson().getString(fk.getDependentPropertyName()) + " in " + fk.getMasterStorage().getResultSetName()); + } + } + for (String mandatory : mandatoryProperties) { + if (!FolioApiRecord.getJson().containsKey(mandatory)) { + return new Resp(422, new JsonObject("{\"message\" : {\n" + " \"errors\" : [ {\n" + " \"message\" : \"must not be null\",\n" + " \"type\" : \"1\",\n" + " \"code\" : \"javax.validation.constraints.NotNull.message\",\n" + " 
\"parameters\" : [ {\n" + " \"key\" : \"" + mandatory +"\",\n" + " \"value\" : \"null\"\n" + " } ]\n" + " } ]\n" + " }}").encodePrettily()); + } + } + return new Resp(201,"created"); + } + + protected int update (String id, FolioApiRecord FolioApiRecord) { + + Resp validation = validateUpdate(id, FolioApiRecord); + if (validation.statusCode == 204) { + records.put(id, FolioApiRecord); + } + return validation.statusCode; + } + + public Resp validateUpdate (String id, FolioApiRecord FolioApiRecord) { + if (failOnUpdate) { + return new Resp(500, "forced fail on update"); + } + if (FolioApiRecord.hasId() && !id.equals(FolioApiRecord.getId())) { + return new Resp(400, "Fake record storage received request to update a record at an ID that doesn't match the ID in the record"); + } + if (! records.containsKey(id)) { + return new Resp(404,"Record not found, cannot update " + FolioApiRecord.getJson().encodePrettily()); + } + for (ForeignKey fk : masterEntities) { + if (! FolioApiRecord.getJson().containsKey(fk.getDependentPropertyName())) { + return new Resp(422, "Foreign key violation, record must contain " + fk.getDependentPropertyName()); + } + if (!fk.getMasterStorage().hasId(FolioApiRecord.getJson().getString(fk.getDependentPropertyName()))) { + return new Resp(500, "Not found: "+ FolioApiRecord.getJson().getString(fk.getDependentPropertyName()) + " in " + fk.getMasterStorage().getResultSetName()); + } + } + return new Resp(204,""); + } + + protected int delete (String id) { + if (failOnDelete) return 500; + if (!records.containsKey(id)) { + logger.error("Record " + id + " not found, cannot delete"); + return 404; + } + logger.debug("Dependent entities: " + dependentEntities.size()); + for (ForeignKey fk : dependentEntities) { + logger.debug("Deleting. 
Checking dependent " + fk.getDependentStorage().getResultSetName()); + logger.debug("Looking at property " + fk.getDependentPropertyName()); + if (fk.getDependentStorage().hasValue(fk.getDependentPropertyName(), id)) { + logger.error("Foreign key violation " + records.get(id).getJson().toString() + " has a dependent record in " + fk.getDependentStorage().getResultSetName()); + return 400; + } + } + records.remove(id); + return 200; + } + + protected Collection getRecords () { + return records.values(); + } + + private FolioApiRecord getRecord (String id) { + if (failOnGetRecordById) { + return null; + } else { + FolioApiRecord FolioApiRecord = records.get( id ); + if (FolioApiRecord != null) { + FolioApiRecord.setVersion(FolioApiRecord.getVersion() + 1); + } + return FolioApiRecord; + } + } + + // FOREIGN KEY HANDLING + protected boolean hasId (String id) { + return records.containsKey(id); + } + + /** + * Checks if this storage has a record where this property (presumably a foreign key property) has this value + * @param fkPropertyName + * @param value + * @return + */ + protected boolean hasValue (String fkPropertyName, String value) { + for (FolioApiRecord FolioApiRecord : records.values()) { + logger.debug("Checking " + FolioApiRecord.getJson().toString() + " for value " + value); + if (FolioApiRecord.getJson().containsKey(fkPropertyName) && FolioApiRecord.getJson().getString(fkPropertyName).equals(value)) { + return true; + } + } + return false; + } + + // USED BY A DEPENDENT ENTITY TO SET UP ITS FOREIGN KEYS BY CALLS to acceptDependant() + protected abstract void declareDependencies(); + + // METHOD ON THE PRIMARY KEY ENTITY TO REGISTER DEPENDENT ENTITIES + protected void acceptDependant(RecordStorage dependentEntity, String dependentPropertyName) { + ForeignKey fk = new ForeignKey(dependentEntity, dependentPropertyName, this); + dependentEntities.add(fk); + dependentEntity.setMasterEntity(fk); + } + + protected void setMasterEntity (ForeignKey fk) { + 
masterEntities.add(fk); + } + + protected abstract void declareMandatoryProperties (); + + protected void declareUniqueProperties () {} + // API REQUEST HANDLERS + + /** + * Handles GET request with query parameter + * + */ + public void getRecords(RoutingContext routingContext) { + final String optionalQuery = routingContext.request().getParam("query") != null ? + decode(routingContext.request().getParam("query")) : null; + JsonObject responseJson = buildJsonRecordsResponse(optionalQuery); + if (responseJson != null) { + respond(routingContext, buildJsonRecordsResponse(optionalQuery), 200); + } else { + respondWithMessage(routingContext, (failOnGetRecords ? "Forced " : "") + " Error on getting records", 500); + } + } + + /** + * Handles GET by ID + */ + protected void getRecordById(RoutingContext routingContext) { + final String id = routingContext.pathParam("id"); + FolioApiRecord FolioApiRecord = getRecord(id); + + if (FolioApiRecord != null) { + respond(routingContext, FolioApiRecord.getJson(), 200); + } else { + respondWithMessage(routingContext, (failOnGetRecordById ? "Forced error on get from " : "No record with ID " + id + " in ") + STORAGE_NAME, 404); + } + } + + /** + * Handles DELETE + */ + protected void deleteRecord (RoutingContext routingContext) { + final String id = routingContext.pathParam("id"); + int code = delete(id); + + if (code == 200) { + respond(routingContext, new JsonObject(), code); + } else { + respondWithMessage(routingContext, (failOnDelete ? 
"Forced " : "") + "Error deleting from " + STORAGE_NAME, code); + } + } + + /** + * Handles DELETE ALL + */ + protected void deleteAll (RoutingContext routingContext) { + records.clear(); + respond(routingContext, new JsonObject("{\"message\": \"all records deleted\"}"), 200); + } + + public void wipeMockRecords() { + records.clear(); + } + + /** + * Handles POST + * + */ + protected void createRecord(RoutingContext routingContext) { + JsonObject recordJson = new JsonObject(routingContext.body().asString()); + StorageResponse response = insert(new FolioApiRecord(recordJson)); + if (response.statusCode == 201) { + respond(routingContext, recordJson, response.statusCode); + } else { + respondWithMessage(routingContext, response.responseBody, response.statusCode); + } + } + + + /** + * Handles PUT + * + */ + protected void updateRecord(RoutingContext routingContext) { + JsonObject recordJson = new JsonObject(routingContext.body().asString()); + String id = routingContext.pathParam("id"); + int code = update(id, new FolioApiRecord(recordJson)); + if (code == 204) { + respond(routingContext, code); + } else { + respondWithMessage(routingContext, (failOnUpdate ? 
"Forced " : "") + "Error updating record in " + STORAGE_NAME, code); + } + } + + // HELPERS FOR RESPONSE PROCESSING + + JsonObject buildJsonRecordsResponse(String optionalQuery) { + if (failOnGetRecords) return null; + JsonObject response = new JsonObject(); + JsonArray jsonRecords = new JsonArray(); + getRecords().forEach( FolioApiRecord -> { + if (optionalQuery == null || FolioApiRecord.match(optionalQuery)) { + jsonRecords.add(FolioApiRecord.getJson()); + }}); + response.put(getResultSetName(), jsonRecords); + response.put(TOTAL_RECORDS, jsonRecords.size()); + return response; + } + + /** + * Respond with JSON and status code + * + * @param responseJson the response + * @param code the status code + */ + protected static void respond(RoutingContext routingContext, JsonObject responseJson, int code) { + routingContext.response().headers().add("Content-Type", "application/json"); + routingContext.response().setStatusCode(code); + routingContext.response().end(responseJson.encodePrettily()); + } + + /** + * Respond with status code + * + * @param code the status code + */ + protected static void respond(RoutingContext routingContext, int code) { + routingContext.response().headers().add("Content-Type", "application/json"); + routingContext.response().setStatusCode(code); + routingContext.response().end(); + } + + /** + * Respond with text message (error response) + * + * @param res error condition + */ + protected static void respondWithMessage(RoutingContext routingContext, Throwable res) { + routingContext.response().setStatusCode(500); + routingContext.response().end(res.getMessage()); + } + + protected static void respondWithMessage (RoutingContext routingContext, String message, int code) { + routingContext.response().setStatusCode(code); + routingContext.response().end(message); + + } + + + // UTILS + + public static String decode (String string) { + return URLDecoder.decode(string, StandardCharsets.UTF_8); + } + + public static String encode (String string) 
{ + return URLEncoder.encode(string, StandardCharsets.UTF_8); + } + + public void logRecords (Logger logger) { + records.values().stream().forEach(FolioApiRecord -> logger.debug(FolioApiRecord.getJson().encodePrettily())); + } + + public static class ForeignKey { + + private final RecordStorage dependentStorage; + private final String dependentPropertyName; + private final RecordStorage masterStorage; + + public ForeignKey (RecordStorage dependentStorage, String dependentPropertyName, RecordStorage masterStorage) { + this.dependentStorage = dependentStorage; + this.dependentPropertyName = dependentPropertyName; + this.masterStorage = masterStorage; + } + + public RecordStorage getDependentStorage() { + return dependentStorage; + } + + public String getDependentPropertyName() { + return dependentPropertyName; + } + + public RecordStorage getMasterStorage() { + return masterStorage; + } + + } +} diff --git a/src/test/java/org/folio/harvesteradmin/test/fakestorage/StorageResponse.java b/src/test/java/org/folio/harvesteradmin/test/fakestorage/StorageResponse.java new file mode 100644 index 0000000..8ff8f67 --- /dev/null +++ b/src/test/java/org/folio/harvesteradmin/test/fakestorage/StorageResponse.java @@ -0,0 +1,12 @@ +package org.folio.harvesteradmin.test.fakestorage; + +public class StorageResponse +{ + public int statusCode; + public String responseBody; + + public StorageResponse (int statusCode, String responseBody) { + this.statusCode = statusCode; + this.responseBody = responseBody; + } +} diff --git a/test-resources/apis/harvester-admin-endpoints b/test-resources/apis/harvester-admin-endpoints deleted file mode 100644 index 8b87061..0000000 --- a/test-resources/apis/harvester-admin-endpoints +++ /dev/null @@ -1,8 +0,0 @@ -harvestables -transformations -steps -tsas -storages - - - diff --git a/test-resources/apis/inventory-endpoints b/test-resources/apis/inventory-endpoints deleted file mode 100644 index bf20b56..0000000 --- 
a/test-resources/apis/inventory-endpoints +++ /dev/null @@ -1,7 +0,0 @@ -instance-storage/instances -item-storage/items -holdings-storage/holdings -inventory/instances -inventory/items -locations -users diff --git a/test-resources/delete-admin-record-by-id.sh b/test-resources/delete-admin-record-by-id.sh deleted file mode 100755 index d42a154..0000000 --- a/test-resources/delete-admin-record-by-id.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -ID=$1 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -OPTS=`cat apis/harvester-admin-endpoints`; - -# echo "Choose an endpoint..." -select EP in $OPTS -do - break -done - -curl -i -v -w '\n' --http1.1 -X DELETE "$protocol://$host/harvester-admin/${EP}/${ID}" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" diff --git a/test-resources/delete-admin-records.sh b/test-resources/delete-admin-records.sh deleted file mode 100755 index 2f5d9d3..0000000 --- a/test-resources/delete-admin-records.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -ID=$1 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -OPTS=`cat apis/harvester-admin-endpoints`; - -# echo "Choose an endpoint..." -select EP in $OPTS -do - break -done - -curl -i -v -w '\n' --http1.1 -X DELETE "$protocol://$host/harvester-admin/${EP}" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" diff --git a/test-resources/delete-inventory-record.sh b/test-resources/delete-inventory-record.sh deleted file mode 100755 index 3d5fd68..0000000 --- a/test-resources/delete-inventory-record.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -ID=$1 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -if [ -z $DATA ] - then - echo "Usage: . ./delete-inventory-record.sh [uuid]" - return || exit -fi -OPTS=`cat apis/inventory-endpoints`; - -# echo "Choose an endpoint..." 
-select EP in $OPTS -do - break -done - -curl -i -v -w '\n' --http1.1 -X DELETE "$protocol://$host/${EP}/${ID}" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" diff --git a/test-resources/demo/diku/harvestable-diku-900020.json b/test-resources/demo/diku/harvestable-diku-900020.json deleted file mode 100644 index cdf5634..0000000 --- a/test-resources/demo/diku/harvestable-diku-900020.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "acl": "diku", - "allowErrors": "false", - "cacheEnabled": "false", - "currentStatus": "NEW", - "diskRun": "false", - "enabled": "false", - "failedRecordsLogging": "CLEAN_DIRECTORY", - "harvestImmediately": "false", - "id": "900020", - "recordLimit": "600", - "lastUpdated": "2020-01-01T08:10:00Z", - "laxParsing": "false", - "logLevel": "INFO", - "mailLevel": "WARN", - "maxSavedFailedRecordsPerRun": "100", - "maxSavedFailedRecordsTotal": "1000", - "name": "DIKU: Diku to localhost", - "openAccess": "false", - "overwrite": "false", - "retryCount": "2", - "retryWait": "60", - "storeOriginal": "false", - "scheduleString": "10 10 10 6 *", - "storage": { - "entityType": "inventoryStorageEntity", - "id": "900001" - }, - "timeout": "300", - "transformation": { - "entityType": "basicTransformation", - "id": "900015" - }, - "clearRtOnError": "false", - "dateFormat": "yyyy-MM-dd'T'hh:mm:ss'Z'", - "keepPartial": "true", - "metadataPrefix": "marc21", - "oaiSetName": "PALCI_RESHARE", - "url": "https://na01.alma.exlibrisgroup.com/view/oai/01SSHELCO_MILLRSVL/request", - "type": "oaiPmh" -} diff --git a/test-resources/demo/diku/holdings-diku-900011.json b/test-resources/demo/diku/holdings-diku-900011.json deleted file mode 100644 index 0adfd2a..0000000 --- a/test-resources/demo/diku/holdings-diku-900011.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "acl": "diku", - "description": "Holdings and Items, Diku", - "enabled": "true", - "inputFormat": "XML", - "name": "Holdings and Items, Diku", - "outputFormat": "XML", - "script": 
"\r\r\r \r \r\r \r \r \r \r \r\r \r \r \r \r \r \r \r \r\r 46970b40-918e-47a4-a45d-b1677a2d3d46 \r b0f97013-87f5-4bab-87f2-ac4a5191b489 \r\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r 1\">\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r 2b94c631-fca9-4892-a730-03ee529ffe27 \r \r \r \r e65ed344-a691-4f8f-8446-3c0a346c6882 \r 22cb5b81-2056-46ed-8050-c5d8f01caf90 \r 191da404-dec2-4977-9770-1443d9d6c238 \r e65ed344-a691-4f8f-8446-3c0a346c6882 \r 9f2c8d85-97e6-4722-a67c-bb51ccfb7781 \r 04858422-c46f-4041-94fc-f5c5a6b98319 \r e5578a7e-49b9-4859-a86a-21b812215b4f \r d8dc9046-a9f4-4789-adf5-678fd855b016 \r 14f1845d-2fad-4091-80c8-48614eaea40b \r f6dd0721-5d4a-439e-9205-c71aef39d15b \r \r \r \r \r \r \r \r \r Unknown\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r 2b94c631-fca9-4892-a730-03ee529ffe27 \r \r \r 1a54b431-2e4f-452d-9cae-9cee66c9a892 \r 615b8413-82d5-4203-aa6e-e37984cb5ac3 \r d9acad2f-2aac-4b48-9097-e6ab85906b25 \r d9acad2f-2aac-4b48-9097-e6ab85906b25 \r 71fbd940-1027-40a6-8a48-49b44d795e46 \r 71fbd940-1027-40a6-8a48-49b44d795e46 \r 71fbd940-1027-40a6-8a48-49b44d795e46 \r dd0bf600-dbd9-44ab-9ff2-e2a61a6539f1 \r 71fbd940-1027-40a6-8a48-49b44d795e46 \r \r \r \r \r \r \r \r \r Unknown\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r\r", - "id": "900011", - "testData": "", - "testOutput": "", - "type": "XmlTransformStep" -} diff --git a/test-resources/demo/diku/locations-diku-900012.json b/test-resources/demo/diku/locations-diku-900012.json deleted file mode 100644 index f4c3726..0000000 --- a/test-resources/demo/diku/locations-diku-900012.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "acl": "diku", 
- "description": "Maps locations, record identifier type", - "inputFormat": "XML", - "name": "Library codes, Diku, granular locations", - "outputFormat": "XML", - "script": "\r\r\r \r \r \r \r \r\r \r \r 04d081a1-5c52-4b84-8962-949fc5f6773c\r \r\r \r \r \r \r 6aa3da20-8c2c-44a8-a339-16613ba9e42f\r ea261544-b717-4fb5-82fc-c4135fcae2dd\r af25709e-255b-4109-b889-b221dcc7fab6\r 6b8a1218-9afc-4cac-b441-f5914efed2dc\r 794333dd-17e5-4fb9-b78c-5870484f1489\r 7fcd0128-95dd-459d-ae5f-7d40ac8b4d0c\r 39778f72-5055-4725-ab05-54bf3792f311\r f775884d-496d-428f-977b-21a11d5dbbc1\r 4286b967-3394-4326-a2b4-4be83278eec3\r b403bdf3-f1e5-4b93-8110-5abaf472fe90\r 109df58d-7158-4f90-bb1d-392531d31321\r 4b099efe-2845-4f6a-b50a-b1a8a8383325\r a77c18fd-fb45-4efc-b651-6aa5272d4e51\r 4d66b280-af19-4bf6-b0ac-94bec8a1f2ad\r 74cab375-a427-4fe9-a577-68a0294e41e4\r 9ff1e42f-00bc-4e91-af7c-4260025dae0e\r bb2ff7ce-35c8-4dc9-8573-f6742cfd14d4\r f8fc91a6-fcb2-4ee8-94cc-a8d5b66a86f8\r 0778208b-9a6e-4cee-957e-f4963cd8c312\r 4bd45818-159f-425e-98d0-52e5a7cce7c3\r 7929dd29-2d91-4015-8d15-915b636c6e12\r efa5bcfe-f5e0-4a5d-bf2e-520f1c6ac329\r 079135d8-c187-4dd0-8f8b-7b78a1f7aee3\r fa75e5f8-ea13-4608-833b-cd96643a01d9\r 1a67208f-1e8e-4271-9d70-f6419bb26a6b\r 96dcf009-f8ba-40b1-be87-18d7a0936b7a\r a7397c9a-0138-49d6-b9eb-f89381021a52\r 1d773228-9a9f-44d2-979f-7b9a5cd83bf2\r 2e394ecc-0b6c-4bee-923b-7931fccfc7d6\r 6e9f3c9f-8a9a-4823-a073-34d7a4ae5b08\r 1c052320-55b2-4005-921d-341f81eccd34\r f2a30c3b-e22b-4043-be37-51cdb2711c17\r e3916276-bc0d-4150-87bc-cc48585aba64\r c54fd1f6-a121-4516-89d5-1dd48601d669\r b49d08df-5742-49b2-91ff-327e6933950e\r 776a1eab-a4cc-43d6-abcf-6d0b60f8f1a0\r 6ec94c52-b40a-430b-8af1-aa6b6b8fea5e\r 11641849-1544-48ec-aebf-2e86c8b9eaaa\r e8809314-a77c-472b-b7f8-5d0fbb7981e2\r 7bbee8f9-5a03-4e13-a5a8-a61f63d81ec7\r 224459d6-fa5e-4475-91fe-053924a46708\r 4b404be6-bc6d-4531-b6a1-e40d4802d546\r 208d9dd2-a3f0-4ccd-a168-47bcbb614000\r c36f2165-f4d6-463d-9576-aceeecf1594a\r 
227daa37-fc35-47d7-9dd5-8694e96311a7\r c3d58b7e-e464-4daa-b4d5-73e566409371\r 3cd175f1-309c-4ad3-9cbf-361817e9fa73\r f8431974-38ea-42e3-92e6-e30e6590b624\r 7644843c-1357-4c04-8e29-3a3dc9d1bf29\r df0547e6-b5ee-4026-9a6f-84b6acfaa2fe\r 7560d10c-ec53-4c5c-9aee-585732650b4f\r e054a564-d69b-4868-a9bb-35aa314526eb\r 4d11c5e2-98f0-47ab-bc7f-7d8520dd997a\r 147175a8-7795-42ef-8e2a-702927fb8fb7\r d3e086ef-f77d-4344-9104-ec9d1c434802\r e3fc0afb-e0eb-4671-a432-35d6fb5f7f87\r 85779aa4-fe82-4eb0-b669-9d8d6a96fe54\r 7457335b-1b08-487b-a144-33ef0ba2c0d1\r 7b272746-0ef2-4372-bbab-06b3c305df97\r 41788814-7f3c-44e8-8423-bef9d371f482\r a1de6293-5645-413c-b35b-341aa596ae9e\r 1762cc01-c7f5-475d-a6f5-9db94d77fa50\r fea5327c-20e4-4258-8efe-5d2662fb4b01\r c0b90724-e34c-4819-94a9-d39dde77050a\r c314c358-578e-4f1d-ba95-d875b0edec8d\r 607e3781-4f2b-48c5-8a14-ac736b24577a\r 831c39d7-96db-4645-a8d5-ac991393d95d\r db4a297d-6424-447c-8483-9b5dded403a5\r d7d47a5a-279f-49c7-b5e6-44edda4e2e00\r d95ce7c5-1592-4070-a636-b62a4a576868\r 5867ef8d-0d59-434a-a315-15f93d698c16\r 9ad050c1-b402-4b5c-8cd4-b2570e8e08b0\r b6d8fb49-6af7-4f4e-8e9d-4a9a54223fad\r c2e52968-3323-434f-ba72-d7f1826ad2c1\r 56c73ff3-5ca6-411e-a251-8fd833e6e3f9\r 1762b92a-7e05-485f-aae2-b363b1086724\r 98e45a55-8a6b-47e8-a14a-cd3ae2b8e4e9\r 083e288f-74ab-463d-afde-88bd2b65c073\r 33e66d3a-4f36-40da-9eda-6432e90d3fcb\r b2da7e89-aa4c-4ada-8cd9-68d97cfa8984\r a647342f-9dfe-4bf0-9f71-cd29ee7ecf3e\r f00269e6-a4e4-41d6-9984-5fcba2c83fb5\r 149d3182-fcba-4031-b22f-643ab13a1eb7\r 5ba4c8a1-5dc1-467f-b98c-90ac74238b3e\r ccc422f5-e5d7-4739-b7e4-77eb65db5ff1\r 32bf874a-563b-4cdf-abfe-a2fe02803b77\r ffdde491-2317-4d64-b1ae-655cd36d9849\r 6f9951ae-d09d-4456-8881-58c6f6aa3cab\r 555c9c0e-6bfe-422f-af2e-9a6cc31afcce\r 5d8d2476-f04b-467f-b6ab-a05401fd04e7\r 96caaa02-10d2-40e5-8058-6e132374ef42\r 56d8b539-b0a2-45e7-be86-5b6e1c07a062\r a1d0d4d8-d109-4bc1-94cd-aca2567d1447\r ba508636-76ec-4ca3-8b53-1c0df8fb7d0e\r 556ecb69-f91c-43c0-a38a-866a9c504494\r 
1c40af59-51ba-4b25-8ff7-15b15f64aa2f\r 12ee1371-43f0-4c0c-9a10-835459d048a3\r 004c14d3-fb87-40fc-b4db-9e91738b4f1b\r \r \r \r\r \r \r b4578dbc-4dd9-4ac1-9c01-8a13f65aa95e> \r \r\r", - "id": "900012", - "testData": "", - "testOutput": "", - "type": "XmlTransformStep" -} diff --git a/test-resources/demo/diku/oai-2-inst-diku-900010.json b/test-resources/demo/diku/oai-2-inst-diku-900010.json deleted file mode 100644 index 1b98e30..0000000 --- a/test-resources/demo/diku/oai-2-inst-diku-900010.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "description": "DIKU: OAI20-MARC21 XML to FOLIO Instance XML", - "inputFormat": "XML", - "name": "DIKU: OAI20-MARC21 to Instance XML", - "outputFormat": "XML", - "acl": "diku", - "script": "\r\r\r \r \r \r \r \r \r \r \r\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r\r\r \r \r \r \r \r \r \r \r\r \r \r\r \r \r \r\r \r \r MARC\r\r \r \r \r \r 6312d172-f0cf-40f6-b27d-9fa8feaf332f \r 497b5090-3da2-486c-b57f-de5bb3c2e26d \r 497b5090-3da2-486c-b57f-de5bb3c2e26d \r 526aa04d-9289-4511-8866-349299592c18 \r a2c91e87-6bab-44d6-8adb-1fd02481fc4f \r 535e3160-763a-42f9-b0c0-d8ed7df6e2a2 \r 9bce18bd-45bf-4949-8fa8-63163e4b7d7f \r 3be24c14-3551-4180-9292-26a786649c8b \r a2c91e87-6bab-44d6-8adb-1fd02481fc4f \r df5dddff-9c30-4507-8b82-119ff972d4d7 \r a2c91e87-6bab-44d6-8adb-1fd02481fc4f \r a2c91e87-6bab-44d6-8adb-1fd02481fc4f \r c1e95c2b-4efc-48cf-9e71-edb622cf0c22 \r 6312d172-f0cf-40f6-b27d-9fa8feaf332f \r a2c91e87-6bab-44d6-8adb-1fd02481fc4f \r \r \r\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r c858e4f2-2b6b-4385-842b-60732ee14abb \r \r \r \r \r \r 8261054f-be78-422d-bd51-4ed9f33c3422 \r \r \r \r \r \r 913300b2-03ed-469a-8179-c1092c991227 \r \r \r \r \r \r 2e8b3b6c-0e7d-4e48-bca2-b0b23b376af5 \r \r \r \r \r \r b5d8cdc4-9441-487c-90cf-0c7ec97728eb \r \r \r \r \r \r 7e591197-f335-4afb-bc6d-a6d76ca3bace \r \r \r \r \r \r 351ebc1c-3aae-4825-8765-c6d50dbf011f \r \r \r \r \r \r \r \r\r \r \r \r \r \r \r \r \r \r \r 1\">\r ; \r \r \r \r 
\r ce176ace-a53e-4b4d-aa89-725ed7b2edac \r \r \r \r \r 1\">\r ; \r \r \r \r \r 42471af9-7d25-4f3a-bf78-60d29dcf463b \r \r \r \r \r \r 9075b5f8-7d97-49e1-a431-73fdd468d476 \r \r \r \r \r \r \r \r\r \r \r <xsl:variable name=\"dirty-title\">\r <xsl:for-each select=\"marc:datafield[@tag='245'][1]/marc:subfield[@code='a' or @code='b' or @code='h' or @code='n' or @code='p']\">\r <xsl:value-of select=\".\"/>\r <xsl:if test=\"position() != last()\">\r <xsl:text> </xsl:text>\r </xsl:if>\r </xsl:for-each>\r </xsl:variable>\r <xsl:call-template name=\"remove-characters-last\">\r <xsl:with-param name=\"input\" select=\"$dirty-title\" />\r <xsl:with-param name=\"characters\">,-./ :;</xsl:with-param>\r </xsl:call-template>\r \r\r \r \r \r \r \r \r \r \r 1\">\r , \r \r \r \r ,-.\r \r \r \r \r \r 2b94c631-fca9-4892-a730-03ee529ffe2a \r \r true\r \r \r \r 2e48e713-17f3-4c13-a9f8-23845bb210aa \r \r \r e8b311a6-3b21-43f2-a269-dd9310cb2d0a \r \r \r 2b94c631-fca9-4892-a730-03ee529ffe2a \r \r \r \r \r \r \r \r \r \r \r \r \r\r \r \r \r \r \r \r \r ; \r \r \r \r \r \r\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r\r \r \r \r \r \r \r \r \r ,-./ :;\r \r \r \r \r \r \r\r \r \r \r \r \r \r \r 1\">\r --\r \r \r \r ,-.\r \r \r \r \r \r \r \r\r \r \r \r \r \r \r \r \r \r \r \r 86b6e817-e1bc-42fb-bab0-70e7547de6c1 \r 5ba8e385-0e27-462e-a571-ffa1fa34ea54 \r 0e2e11b-450f-45c8-b09b-0f819999966e \r 6a2533a7-4de2-4e64-8466-074c2fa9308c \r \r \r \r \r \r \r \r \r \r\r \r \r \r \r <xsl:call-template name=\"remove-characters-last\">\r <xsl:with-param name=\"input\" select=\"marc:subfield[@code='a']\" />\r <xsl:with-param name=\"characters\">,-./ :;</xsl:with-param>\r </xsl:call-template>\r \r \r : \r \r \r ,-./ :;\r \r \r \r \r \r ,-./ :;\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r\r \r\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r\r\r", - "id": "900010", - "testData": "", - "testOutput": "", - "type": "XmlTransformStep" -} diff --git 
a/test-resources/demo/diku/post-demo-config-diku.sh b/test-resources/demo/diku/post-demo-config-diku.sh deleted file mode 100755 index c3d3f25..0000000 --- a/test-resources/demo/diku/post-demo-config-diku.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" - -DATA=$1 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/storages" -d "@${SCRIPT_DIR}/storage-diku-900001.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/steps" -d "@${SCRIPT_DIR}/oai-2-inst-diku-900010.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/steps" -d "@${SCRIPT_DIR}/holdings-diku-900011.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/steps" -d "@${SCRIPT_DIR}/locations-diku-900012.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/steps" -d "@${SCRIPT_DIR}/xml-2-json-diku-900013.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/transformations" -d "@${SCRIPT_DIR}/transformation-diku-900015.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/harvestables" -d "@${SCRIPT_DIR}/harvestable-diku-900020.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: 
application/json" - diff --git a/test-resources/demo/diku/storage-diku-900001.json b/test-resources/demo/diku/storage-diku-900001.json deleted file mode 100644 index 301180f..0000000 --- a/test-resources/demo/diku/storage-diku-900001.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "bulkSize": "1000", - "currentStatus": "NEW", - "description": "diku : Inventory storage at localhost", - "enabled": "true", - "id": "900001", - "json": { - "folioAuthPath": "bl-users/login", - "folioTenant": "diku", - "folioUsername": "diku_admin", - "folioPassword": "admin", - "inventoryUpsertPath": "shared-inventory-upsert-matchkey", - "marcStoragePath": "marc-records" - }, - "name": "DIKU: FOLIO @ localhost from Vagrant box", - "url": "http://10.0.2.2:9130/", - "type": "inventoryStorage" -} diff --git a/test-resources/demo/diku/transformation-diku-900015.json b/test-resources/demo/diku/transformation-diku-900015.json deleted file mode 100644 index 68aa3e0..0000000 --- a/test-resources/demo/diku/transformation-diku-900015.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "acl": "diku", - "description": "", - "enabled": "true", - "name": "Diku - OAI-PMH to FOLIO Inventory", - "parallel": "false", - "stepAssociations": [ - { - "stepId": "900010" - }, - { - "stepId": "900011" - }, - { - "stepId": "900012" - }, - { - "stepId": "900013" - } - ], - "id": "900015", - "type": "basicTransformation" -} diff --git a/test-resources/demo/diku/xml-2-json-diku-900013.json b/test-resources/demo/diku/xml-2-json-diku-900013.json deleted file mode 100644 index 54311da..0000000 --- a/test-resources/demo/diku/xml-2-json-diku-900013.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "customClass": "com.indexdata.masterkey.localindices.harvest.messaging.InstanceXmlToInstanceJsonTransformerRouter", - "description": "Diku: FOLIO Instance XML to JSON", - "enabled": "true", - "inputFormat": "XML", - "name": "Diku: Instance XML to JSON", - "outputFormat": "JSON", - "script": "", - "id": "900013", - "acl": "diku", - "testData": "", - 
"testOutput": "", - "type": "CustomTransformStep" -} diff --git a/test-resources/demo/job-800020.json b/test-resources/demo/job-800020.json deleted file mode 100644 index 0df42cb..0000000 --- a/test-resources/demo/job-800020.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "harvestableId": "800020" -} diff --git a/test-resources/demo/job-900020.json b/test-resources/demo/job-900020.json deleted file mode 100644 index 0becd2a..0000000 --- a/test-resources/demo/job-900020.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "harvestableId": "900020" -} diff --git a/test-resources/demo/north/harvestable-north-800020.json b/test-resources/demo/north/harvestable-north-800020.json deleted file mode 100644 index 284c77d..0000000 --- a/test-resources/demo/north/harvestable-north-800020.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "acl": "north", - "allowErrors": "false", - "cacheEnabled": "false", - "currentStatus": "NEW", - "diskRun": "false", - "enabled": "false", - "failedRecordsLogging": "CLEAN_DIRECTORY", - "harvestImmediately": "false", - "id": "800020", - "recordLimit": "400", - "lastUpdated": "2020-01-01T08:10:00Z", - "laxParsing": "false", - "logLevel": "INFO", - "mailLevel": "WARN", - "maxSavedFailedRecordsPerRun": "100", - "maxSavedFailedRecordsTotal": "1000", - "name": "NORTH: North to localhost", - "openAccess": "false", - "overwrite": "false", - "retryCount": "2", - "retryWait": "60", - "scheduleString": "10 10 10 6 *", - "storage": { - "entityType": "inventoryStorageEntity", - "id": "800001" - }, - "storeOriginal": "false", - "timeout": "300", - "transformation": { - "entityType": "basicTransformation", - "id": "800015" - }, - "clearRtOnError": "false", - "dateFormat": "yyyy-MM-dd'T'hh:mm:ss'Z'", - "keepPartial": "true", - "metadataPrefix": "marc21", - "oaiSetName": "PALCI_RESHARE", - "url": "https://na01.alma.exlibrisgroup.com/view/oai/01SSHELCO_BLMSBRG/request", - "type": "oaiPmh" -} diff --git a/test-resources/demo/north/holdings-north-800011.json 
b/test-resources/demo/north/holdings-north-800011.json deleted file mode 100644 index 58eeed5..0000000 --- a/test-resources/demo/north/holdings-north-800011.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "acl": "north", - "description": "Holdings and Items, North", - "enabled": "true", - "inputFormat": "XML", - "name": "Holdings and Items, North", - "outputFormat": "XML", - "script": "\r\r\r \r \r\r \r \r \r \r \r\r \r \r \r \r \r \r \r \r\r 46970b40-918e-47a4-a45d-b1677a2d3d46 \r b0f97013-87f5-4bab-87f2-ac4a5191b489 \r\r 03dd64d0-5626-4ecd-8ece-4531e0069f35 \r 95467209-6d7b-468b-94df-0f5d7ad2747d \r 6caca63e-5651-4db6-9247-3205156e9699 \r fc388041-6cd0-4806-8a74-ebe3b9ab4c6e \r\r \r \r \r \r \r \r \r \r\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r\r \r \r \r 1\">\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r 2b94c631-fca9-4892-a730-03ee529ffe27 \r \r \r \r e65ed344-a691-4f8f-8446-3c0a346c6882 \r 22cb5b81-2056-46ed-8050-c5d8f01caf90 \r 191da404-dec2-4977-9770-1443d9d6c238 \r e65ed344-a691-4f8f-8446-3c0a346c6882 \r 9f2c8d85-97e6-4722-a67c-bb51ccfb7781 \r 04858422-c46f-4041-94fc-f5c5a6b98319 \r e5578a7e-49b9-4859-a86a-21b812215b4f \r d8dc9046-a9f4-4789-adf5-678fd855b016 \r 14f1845d-2fad-4091-80c8-48614eaea40b \r f6dd0721-5d4a-439e-9205-c71aef39d15b \r \r \r \r Unknown\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r 2b94c631-fca9-4892-a730-03ee529ffe27 \r \r \r 1a54b431-2e4f-452d-9cae-9cee66c9a892 \r 615b8413-82d5-4203-aa6e-e37984cb5ac3 \r d9acad2f-2aac-4b48-9097-e6ab85906b25 \r d9acad2f-2aac-4b48-9097-e6ab85906b25 \r 71fbd940-1027-40a6-8a48-49b44d795e46 \r 71fbd940-1027-40a6-8a48-49b44d795e46 \r 71fbd940-1027-40a6-8a48-49b44d795e46 \r dd0bf600-dbd9-44ab-9ff2-e2a61a6539f1 \r 71fbd940-1027-40a6-8a48-49b44d795e46 \r \r \r \r Unknown\r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r \r\r", - "id": "800011", - 
"testData": "\r\r \r \r \r
\r oai:alma.01SSHELCO_BLMSBRG:992178863403556\r 2020-11-23T19:21:41Z\r PALCI_RESHARE\r EDS-PBB-Print\r Rapid-ILL-PrintJournals\r
\r \r \r 01764cas a2200433 a 4500\r 20170508101015.0\r hdrafb---buuu\r 800603d19721975miuar p a 0 a0eng d\r 992178863403556\r \r sc 82003713 \r \r \r 2\r \r \r 0094-2049\r \r \r ocm06382947\r \r \r (PBB)217886\r \r \r (PBbS)217886-bloomdb\r \r \r WIS\r WIS\r PIT\r IUL\r DLC\r NST\r DLC\r PBB\r \r \r lc\r \r \r PBBY\r \r \r HF5601\r .N53\r \r \r CPA journal (New York, N.Y. : 1972)\r \r \r C.P.A. journal\r \r \r The CPA journal\r [microform].\r \r \r C.P.A. journal\r \r \r New York, N.Y. :\r New York State Society of Certified Public Accountants,\r c1972-1975.\r \r \r 4 v. :\r ill. ;\r 28 cm.\r \r \r Monthly\r \r \r Vol. 42, no. 1 (Jan. 1972)-v. 45, no. 5 (May 1975).\r \r \r Microfilm.\r Ann Arbor, Mich. :\r University Microfilms,\r 1973.\r 2 microfilm reels ; 35 mm.\r (Current periodical series ; publication no. 3470)\r Filmed with v. 41 1971 of Certified public accountant (New York, N.Y.) and v. 45 no. 6-11 1975 June-Nov. of CPA (New York, N.Y. : 1975) and v. 45 no. 12-v. 46 1975 Dec.-1976 of CPA journal (New York, N.Y. : 1975)\r \r \r Accounting\r Periodicals.\r \r \r New York State Society of Certified Public Accountants.\r \r \r Current periodical series\r \r \r Original\r CPA journal (New York, N.Y. : 1972)\r (OCoLC)1793499\r \r \r Certified public accountant (New York, N.Y.)\r (OCoLC)29333821\r \r \r CPA (New York, N.Y. : 1975)\r June 1975\r (OCoLC)6382996\r \r \r Current periodical series;\r publication no. 3470.\r \r \r InU\r ViBlbV\r \r \r Ser\r \r \r PBBY\r \r \r May 1975\r \r \r bloom\r MICRFLMPER\r Microfilm\r 2259124680003556\r \r \r v. 42-45 no. 5 1972-1975 May\r 2259124680003556\r \r \r 2259124680003556\r 1\r 8\r MICRFLMPER\r ISSUE\r 32404007708866-521441\r MICRFLMPER\r ON_RESERVE: N | RESERVE_CHARGES: 0 | RECALLS_PLACED: 0 | HOLDS_PLACED: 0 | HISTORICAL_BOOKINGS: 0 | SHORT_LOAN_CHARGES: 0 | \r 2359124660003556\r MFPE\r Microfilm CPA journal\r false\r 1\r v. 42-43 1972-73\r v. 
42-43\r 1972-73\r bloom\r bloom\r \r \r 2259124680003556\r 1\r 8\r MICRFLMPER\r ISSUE\r 32404007708874-519785\r MICRFLMPER\r ON_RESERVE: N | RESERVE_CHARGES: 0 | RECALLS_PLACED: 0 | HOLDS_PLACED: 0 | HISTORICAL_BOOKINGS: 0 | SHORT_LOAN_CHARGES: 0 | \r 2359124670003556\r MFPE\r Microfilm CPA journal\r false\r 1\r v. 44-45 no. 5 1974-1975 May\r v. 44-45 no. 5\r test\r 1974-1975 May\r bloom\r bloom\r \r \r \r
\r
\r \r 992178863403556\r \r MARC\r 6312d172-f0cf-40f6-b27d-9fa8feaf332f\r \r \r \r 992178863403556\r \r \r \r sc 82003713 \r c858e4f2-2b6b-4385-842b-60732ee14abb\r \r \r 0094-2049\r 913300b2-03ed-469a-8179-c1092c991227\r \r \r ocm06382947\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r (PBB)217886\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r (PBbS)217886-bloomdb\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r \r \r \r \r \r \r The CPA journal [microform]\r \r \r \r New York State Society of Certified Public Accountants\r 2e48e713-17f3-4c13-a9f8-23845bb210aa\r \r \r \r \r \r \r New York State Society of Certified Public Accountants,\r New York, N.Y. :\r c1972-1975.\r \r \r \r \r \r 4 v\r \r \r \r \r Accounting--Periodicals\r \r \r \r \r The CPA journal\r : \r [microform]\r \r
\r
\r", - "testOutput": "\r\r \r \r \r
\r oai:alma.01SSHELCO_BLMSBRG:992178863403556\r 2020-11-23T19:21:41Z\r PALCI_RESHARE\r EDS-PBB-Print\r Rapid-ILL-PrintJournals\r
\r \r \r 01764cas a2200433 a 4500\r 20170508101015.0\r hdrafb---buuu\r 800603d19721975miuar p a 0 a0eng d\r 992178863403556\r \r sc 82003713 \r \r \r 2\r \r \r 0094-2049\r \r \r ocm06382947\r \r \r (PBB)217886\r \r \r (PBbS)217886-bloomdb\r \r \r WIS\r WIS\r PIT\r IUL\r DLC\r NST\r DLC\r PBB\r \r \r lc\r \r \r PBBY\r \r \r HF5601\r .N53\r \r \r CPA journal (New York, N.Y. : 1972)\r \r \r C.P.A. journal\r \r \r The CPA journal\r [microform].\r \r \r C.P.A. journal\r \r \r New York, N.Y. :\r New York State Society of Certified Public Accountants,\r c1972-1975.\r \r \r 4 v. :\r ill. ;\r 28 cm.\r \r \r Monthly\r \r \r Vol. 42, no. 1 (Jan. 1972)-v. 45, no. 5 (May 1975).\r \r \r Microfilm.\r Ann Arbor, Mich. :\r University Microfilms,\r 1973.\r 2 microfilm reels ; 35 mm.\r (Current periodical series ; publication no. 3470)\r Filmed with v. 41 1971 of Certified public accountant (New York, N.Y.) and v. 45 no. 6-11 1975 June-Nov. of CPA (New York, N.Y. : 1975) and v. 45 no. 12-v. 46 1975 Dec.-1976 of CPA journal (New York, N.Y. : 1975)\r \r \r Accounting\r Periodicals.\r \r \r New York State Society of Certified Public Accountants.\r \r \r Current periodical series\r \r \r Original\r CPA journal (New York, N.Y. : 1972)\r (OCoLC)1793499\r \r \r Certified public accountant (New York, N.Y.)\r (OCoLC)29333821\r \r \r CPA (New York, N.Y. : 1975)\r June 1975\r (OCoLC)6382996\r \r \r Current periodical series;\r publication no. 3470.\r \r \r InU\r ViBlbV\r \r \r Ser\r \r \r PBBY\r \r \r May 1975\r \r \r bloom\r MICRFLMPER\r Microfilm\r 2259124680003556\r \r \r v. 42-45 no. 5 1972-1975 May\r 2259124680003556\r \r \r 2259124680003556\r 1\r 8\r MICRFLMPER\r ISSUE\r 32404007708866-521441\r MICRFLMPER\r ON_RESERVE: N | RESERVE_CHARGES: 0 | RECALLS_PLACED: 0 | HOLDS_PLACED: 0 | HISTORICAL_BOOKINGS: 0 | SHORT_LOAN_CHARGES: 0 | \r 2359124660003556\r MFPE\r Microfilm CPA journal\r false\r 1\r v. 42-43 1972-73\r v. 
42-43\r 1972-73\r bloom\r bloom\r \r \r 2259124680003556\r 1\r 8\r MICRFLMPER\r ISSUE\r 32404007708874-519785\r MICRFLMPER\r ON_RESERVE: N | RESERVE_CHARGES: 0 | RECALLS_PLACED: 0 | HOLDS_PLACED: 0 | HISTORICAL_BOOKINGS: 0 | SHORT_LOAN_CHARGES: 0 | \r 2359124670003556\r MFPE\r Microfilm CPA journal\r false\r 1\r v. 44-45 no. 5 1974-1975 May\r v. 44-45 no. 5\r test\r 1974-1975 May\r bloom\r bloom\r \r \r \r
\r
\r \r 992178863403556\r \r MARC\r 6312d172-f0cf-40f6-b27d-9fa8feaf332f\r \r \r \r 992178863403556\r \r \r \r sc 82003713 \r c858e4f2-2b6b-4385-842b-60732ee14abb\r \r \r 0094-2049\r 913300b2-03ed-469a-8179-c1092c991227\r \r \r ocm06382947\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r (PBB)217886\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r (PBbS)217886-bloomdb\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r \r \r \r \r \r \r The CPA journal [microform]\r \r \r \r New York State Society of Certified Public Accountants\r 2e48e713-17f3-4c13-a9f8-23845bb210aa\r \r \r \r \r \r \r New York State Society of Certified Public Accountants,\r New York, N.Y. :\r c1972-1975.\r \r \r \r \r \r 4 v\r \r \r \r \r Accounting--Periodicals\r \r \r \r \r The CPA journal\r : \r [microform]\r \r \r \r \r \r \r 2259124680003556\r \r \r b0f97013-87f5-4bab-87f2-ac4a5191b489\r MICRFLMPER\r Microfilm\r 6caca63e-5651-4db6-9247-3205156e9699\r \r \r \r 2359124660003556\r 32404007708866-521441\r 2b94c631-fca9-4892-a730-03ee529ffe27\r 191da404-dec2-4977-9770-1443d9d6c238\r \r Unknown\r \r v. 42-43\r 1972-73\r \r \r 2359124670003556\r 32404007708874-519785\r 2b94c631-fca9-4892-a730-03ee529ffe27\r 191da404-dec2-4977-9770-1443d9d6c238\r \r Unknown\r \r v. 44-45 no. 5 test\r 1974-1975 May\r \r \r \r \r \r \r
\r
", - "type": "XmlTransformStep" -} diff --git a/test-resources/demo/north/locations-north-800012.json b/test-resources/demo/north/locations-north-800012.json deleted file mode 100644 index 5cee70a..0000000 --- a/test-resources/demo/north/locations-north-800012.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "acl": "north", - "description": "Maps locations, record identifier type for North", - "inputFormat": "XML", - "name": "Library codes, North", - "outputFormat": "XML", - "script": "\r\r\r \r \r \r \r \r\r \r \r 8039b58b-756e-5c64-8f47-c91a2d8f49bd\r \r\r \r \r \r \r 081dc6b1-3835-5098-ae1a-6339aed82e43\r 22a58c72-2e14-550b-b721-48b9522e6870\r f5e54333-050d-5685-a27e-53298a38a27d\r ead2ba27-7c73-51e4-b416-ee6f29414d52\r 68b4da59-4d52-5e97-9296-f31ec2a8e726\r 25b261c3-20f2-5df9-a608-e9426354eb85\r 7a72c533-99ad-56da-ad72-ea17ccdc1de6\r ed8b375f-21fa-5a02-a03e-f8532abb546e\r 081dc6b1-3835-5098-ae1a-6339aed82e43\r \r \r \r\r \r \r e169e329-80bf-553b-9011-0bc890157298\r \r\r\r", - "id": "800012", - "testData": "\r\r \r \r \r
\r oai:alma.01SSHELCO_BLMSBRG:999623403556\r 2020-11-23T17:08:36Z\r PALCI_RESHARE\r Rapid-ILL-Books\r EDS-PBB-Print\r
\r \r \r 00940cam a2200277 4500\r 690410s1969 nyua j 000 0 eng \r 20170508193206.0\r 999623403556\r \r 69010208 /AC\r \r \r 4.79\r \r \r ocm00003744\r \r \r (PBbS)962-bloomdb\r \r \r (PBB)999623403556\r \r \r DLC\r DLC\r m.c.\r OCL\r PBB\r \r \r [oversize]PBBJ\r [][][][170040]\r \r \r U800\r .G55\r \r \r 355.1/4\r \r \r 399\r G567k\r \r \r Glubok, Shirley.\r \r \r Knights in armor.\r Designed by Gerard Nook.\r \r \r New York,\r Harper & Row\r [1969]\r \r \r 48 p.\r illus.\r 33 cm.\r \r \r Depicts the customs and history of the era of knighthood and chivalry through descriptions of armor surviving as museum pieces and works of art.\r \r \r Armor\r Juvenile literature.\r \r \r Knights and knighthood\r Juvenile literature.\r \r \r [oversize]PBBJ\r [][][][170040]\r \r \r bloom\r JUVENILEOV\r 399\r G567k\r 2280159360003556\r \r \r 2280159360003556\r 1\r 1\r JUVENILEOV\r BOOK\r 32404002242333\r JUVENILEOV\r ON_RESERVE: N | RESERVE_CHARGES: 0 | RECALLS_PLACED: 0 | HOLDS_PLACED: 0 | HISTORICAL_BOOKINGS: 0 | SHORT_LOAN_CHARGES: 0 | \r 2380159350003556\r JUV\r 399 G567k\r false\r 1\r bloom\r bloom\r \r \r \r
\r
\r \r 999623403556\r \r MARC\r 6312d172-f0cf-40f6-b27d-9fa8feaf332f\r \r \r \r 999623403556\r \r \r \r 69010208 /AC\r c858e4f2-2b6b-4385-842b-60732ee14abb\r \r \r ocm00003744\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r (PBbS)962-bloomdb\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r (PBB)999623403556\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r \r \r \r \r U800; .G55\r ce176ace-a53e-4b4d-aa89-725ed7b2edac\r \r \r 355.1/4\r 42471af9-7d25-4f3a-bf78-60d29dcf463b\r \r \r \r Knights in armor\r \r \r \r Glubok, Shirley\r 2b94c631-fca9-4892-a730-03ee529ffe2a\r true\r \r \r \r \r \r \r Harper & Row\r New York,\r [1969]\r \r \r \r \r \r 48 p\r \r \r \r \r Armor--Juvenile literature\r Knights and knighthood--Juvenile literature\r \r \r \r \r Knights in armor\r : \r \r \r \r \r \r \r \r 2280159360003556\r \r \r b0f97013-87f5-4bab-87f2-ac4a5191b489\r JUVENILEOV\r 399 G567k\r \r \r \r 2380159350003556\r 32404002242333\r 2b94c631-fca9-4892-a730-03ee529ffe27\r e65ed344-a691-4f8f-8446-3c0a346c6882\r \r Unknown\r \r \r \r \r \r \r \r
\r
", - "testOutput": "\r\r \r \r \r
\r oai:alma.01SSHELCO_BLMSBRG:999623403556\r 2020-11-23T17:08:36Z\r PALCI_RESHARE\r Rapid-ILL-Books\r EDS-PBB-Print\r
\r \r \r 00940cam a2200277 4500\r 690410s1969 nyua j 000 0 eng \r 20170508193206.0\r 999623403556\r \r 69010208 /AC\r \r \r 4.79\r \r \r ocm00003744\r \r \r (PBbS)962-bloomdb\r \r \r (PBB)999623403556\r \r \r DLC\r DLC\r m.c.\r OCL\r PBB\r \r \r [oversize]PBBJ\r [][][][170040]\r \r \r U800\r .G55\r \r \r 355.1/4\r \r \r 399\r G567k\r \r \r Glubok, Shirley.\r \r \r Knights in armor.\r Designed by Gerard Nook.\r \r \r New York,\r Harper & Row\r [1969]\r \r \r 48 p.\r illus.\r 33 cm.\r \r \r Depicts the customs and history of the era of knighthood and chivalry through descriptions of armor surviving as museum pieces and works of art.\r \r \r Armor\r Juvenile literature.\r \r \r Knights and knighthood\r Juvenile literature.\r \r \r [oversize]PBBJ\r [][][][170040]\r \r \r bloom\r JUVENILEOV\r 399\r G567k\r 2280159360003556\r \r \r 2280159360003556\r 1\r 1\r JUVENILEOV\r BOOK\r 32404002242333\r JUVENILEOV\r ON_RESERVE: N | RESERVE_CHARGES: 0 | RECALLS_PLACED: 0 | HOLDS_PLACED: 0 | HISTORICAL_BOOKINGS: 0 | SHORT_LOAN_CHARGES: 0 | \r 2380159350003556\r JUV\r 399 G567k\r false\r 1\r bloom\r bloom\r \r \r \r
\r
\r e169e329-80bf-553b-9011-0bc890157298\r 999623403556\r \r MARC\r 6312d172-f0cf-40f6-b27d-9fa8feaf332f\r \r \r \r 999623403556\r 8039b58b-756e-5c64-8f47-c91a2d8f49bd\r \r \r 69010208 /AC\r c858e4f2-2b6b-4385-842b-60732ee14abb\r \r \r ocm00003744\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r (PBbS)962-bloomdb\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r (PBB)999623403556\r 7e591197-f335-4afb-bc6d-a6d76ca3bace\r \r \r \r \r \r \r U800; .G55\r ce176ace-a53e-4b4d-aa89-725ed7b2edac\r \r \r 355.1/4\r 42471af9-7d25-4f3a-bf78-60d29dcf463b\r \r \r \r Knights in armor\r \r \r \r Glubok, Shirley\r 2b94c631-fca9-4892-a730-03ee529ffe2a\r true\r \r \r \r \r \r \r Harper & Row\r New York,\r [1969]\r \r \r \r \r \r 48 p\r \r \r \r \r Armor--Juvenile literature\r Knights and knighthood--Juvenile literature\r \r \r \r \r Knights in armor\r : \r \r \r \r \r \r \r \r 2280159360003556\r \r \r b0f97013-87f5-4bab-87f2-ac4a5191b489\r ead2ba27-7c73-51e4-b416-ee6f29414d52\r 399 G567k\r \r \r \r 2380159350003556\r 32404002242333\r 2b94c631-fca9-4892-a730-03ee529ffe27\r e65ed344-a691-4f8f-8446-3c0a346c6882\r \r Unknown\r \r \r \r \r \r \r \r
\r
", - "type": "XmlTransformStep" -} diff --git a/test-resources/demo/north/oai-2-inst-north-800010.json b/test-resources/demo/north/oai-2-inst-north-800010.json deleted file mode 100644 index 8a48a5e..0000000 --- a/test-resources/demo/north/oai-2-inst-north-800010.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "acl": "north", - "description": "NORTH: OAI20-MARC21 XML to FOLIO Instance XML", - "inputFormat": "XML", - "name": "NORTH: OAI20-MARC21 to Instance XML", - "outputFormat": "XML", - "script": "\n\n\n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n \n \n \n \n \n \n \n \n\n \n \n\n \n \n \n \n\n \n \n MARC\n\n \n \n \n \n 6312d172-f0cf-40f6-b27d-9fa8feaf332f \n 497b5090-3da2-486c-b57f-de5bb3c2e26d \n 497b5090-3da2-486c-b57f-de5bb3c2e26d \n 526aa04d-9289-4511-8866-349299592c18 \n a2c91e87-6bab-44d6-8adb-1fd02481fc4f \n 535e3160-763a-42f9-b0c0-d8ed7df6e2a2 \n 9bce18bd-45bf-4949-8fa8-63163e4b7d7f \n 3be24c14-3551-4180-9292-26a786649c8b \n a2c91e87-6bab-44d6-8adb-1fd02481fc4f \n df5dddff-9c30-4507-8b82-119ff972d4d7 \n a2c91e87-6bab-44d6-8adb-1fd02481fc4f \n a2c91e87-6bab-44d6-8adb-1fd02481fc4f \n c1e95c2b-4efc-48cf-9e71-edb622cf0c22 \n 6312d172-f0cf-40f6-b27d-9fa8feaf332f \n a2c91e87-6bab-44d6-8adb-1fd02481fc4f \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n c858e4f2-2b6b-4385-842b-60732ee14abb \n \n \n \n \n \n 8261054f-be78-422d-bd51-4ed9f33c3422 \n \n \n \n \n \n 913300b2-03ed-469a-8179-c1092c991227 \n \n \n \n \n \n 2e8b3b6c-0e7d-4e48-bca2-b0b23b376af5 \n \n \n \n \n \n b5d8cdc4-9441-487c-90cf-0c7ec97728eb \n \n \n \n \n \n 7e591197-f335-4afb-bc6d-a6d76ca3bace \n \n \n \n \n \n 351ebc1c-3aae-4825-8765-c6d50dbf011f \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n 1\">\n ; \n \n \n \n \n ce176ace-a53e-4b4d-aa89-725ed7b2edac \n \n \n \n \n 1\">\n ; \n \n \n \n \n 42471af9-7d25-4f3a-bf78-60d29dcf463b \n \n \n \n \n \n 9075b5f8-7d97-49e1-a431-73fdd468d476 \n \n \n \n \n \n \n \n\n \n \n <xsl:variable 
name=\"dirty-title\">\n <xsl:for-each select=\"marc:datafield[@tag='245'][1]/marc:subfield[@code='a' or @code='b' or @code='h' or @code='n' or @code='p']\">\n <xsl:value-of select=\".\"/>\n <xsl:if test=\"position() != last()\">\n <xsl:text> </xsl:text>\n </xsl:if>\n </xsl:for-each>\n </xsl:variable>\n <xsl:call-template name=\"remove-characters-last\">\n <xsl:with-param name=\"input\" select=\"$dirty-title\" />\n <xsl:with-param name=\"characters\">,-./ :;</xsl:with-param>\n </xsl:call-template>\n \n\n \n \n \n \n \n \n \n \n 1\">\n , \n \n \n \n ,-.\n \n \n \n \n \n 2b94c631-fca9-4892-a730-03ee529ffe2a \n \n true\n \n \n \n 2e48e713-17f3-4c13-a9f8-23845bb210aa \n \n \n e8b311a6-3b21-43f2-a269-dd9310cb2d0a \n \n \n 2b94c631-fca9-4892-a730-03ee529ffe2a \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n ; \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n ,-./ :;\n \n \n \n \n \n \n\n \n \n \n \n \n \n \n 1\">\n --\n \n \n \n ,-.\n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n 86b6e817-e1bc-42fb-bab0-70e7547de6c1 \n 5ba8e385-0e27-462e-a571-ffa1fa34ea54 \n 0e2e11b-450f-45c8-b09b-0f819999966e \n 6a2533a7-4de2-4e64-8466-074c2fa9308c \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n <xsl:call-template name=\"remove-characters-last\">\n <xsl:with-param name=\"input\" select=\"marc:subfield[@code='a']\" />\n <xsl:with-param name=\"characters\">,-./ :;</xsl:with-param>\n </xsl:call-template>\n \n \n : \n \n \n ,-./ :;\n \n \n \n \n \n ,-./ :;\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n", - "id": "800010", - "testData": "", - "testOutput": "", - "type": "XmlTransformStep" -} \ No newline at end of file diff --git a/test-resources/demo/north/post-demo-config-north.sh b/test-resources/demo/north/post-demo-config-north.sh deleted file mode 100755 index 2bb6ed2..0000000 --- 
a/test-resources/demo/north/post-demo-config-north.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" - -DATA=$1 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/storages" -d "@${SCRIPT_DIR}/storage-north-800001.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/steps" -d "@${SCRIPT_DIR}/oai-2-inst-north-800010.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/steps" -d "@${SCRIPT_DIR}/holdings-north-800011.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/steps" -d "@${SCRIPT_DIR}/locations-north-800012.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/steps" -d "@${SCRIPT_DIR}/xml-2-json-north-800013.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/transformations" -d "@${SCRIPT_DIR}/transformation-north-800015.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/harvestables" -d "@${SCRIPT_DIR}/harvestable-north-800020.json" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" - diff --git a/test-resources/demo/north/storage-north-800001.json b/test-resources/demo/north/storage-north-800001.json deleted file mode 
100644 index 7a4f51e..0000000 --- a/test-resources/demo/north/storage-north-800001.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "bulkSize": "1000", - "currentStatus": "NEW", - "description": "North: Inventory storage at localhost", - "enabled": "true", - "id": "800001", - "acl": "north", - "json": { - "folioAuthPath": "bl-users/login", - "folioTenant": "north", - "folioUsername": "north_admin", - "folioPassword": "admin", - "inventoryUpsertPath": "shared-inventory-upsert-matchkey", - "marcStoragePath": "marc-records" - }, - "name": "NORTH: FOLIO @ localhost from Vagrant box", - "url": "http://10.0.2.2:9130/", - "type": "inventoryStorage" -} diff --git a/test-resources/demo/north/transformation-north-800015.json b/test-resources/demo/north/transformation-north-800015.json deleted file mode 100644 index 9a16efd..0000000 --- a/test-resources/demo/north/transformation-north-800015.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "acl": "north", - "description": "", - "enabled": "true", - "name": "North - OAI-PMH to FOLIO Inventory", - "parallel": "false", - "stepAssociations": [ - { - "stepId": "800010" - }, - { - "stepId": "800011" - }, - { - "stepId": "800012" - }, - { - "stepId": "800013" - } - ], - "id": "800015", - "type": "basicTransformation" -} diff --git a/test-resources/demo/north/xml-2-json-north-800013.json b/test-resources/demo/north/xml-2-json-north-800013.json deleted file mode 100644 index f6c7272..0000000 --- a/test-resources/demo/north/xml-2-json-north-800013.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "customClass": "com.indexdata.masterkey.localindices.harvest.messaging.InstanceXmlToInstanceJsonTransformerRouter", - "description": "North: FOLIO Instance XML to JSON", - "enabled": "true", - "inputFormat": "XML", - "name": "North: Instance XML to JSON", - "outputFormat": "JSON", - "script": "", - "id": "800013", - "testData": "", - "testOutput": "", - "type": "CustomTransformStep" -} diff --git a/test-resources/demo/start-job.sh b/test-resources/demo/start-job.sh 
deleted file mode 100755 index c75f64b..0000000 --- a/test-resources/demo/start-job.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -ID=$1 - -if [ -z "$token" ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -if [ -z "$ID" ] - then - echo "Usage: . ./start-job.sh [harvestable id]" - return || exit -fi - -# shellcheck disable=SC2154 -curl -i -w '\n' --http1.1 -X PUT "$protocol://$host/harvester-admin/jobs/run/$ID" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" diff --git a/test-resources/demo/stop-job.sh b/test-resources/demo/stop-job.sh deleted file mode 100755 index 187e9db..0000000 --- a/test-resources/demo/stop-job.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -ID=$1 - -if [ -z "$token" ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -if [ -z "$ID" ] - then - echo "Usage: . ./stop-job.sh [harvestable ID]" - return || exit -fi - -# shellcheck disable=SC2154 -curl -i -w '\n' --http1.1 -X PUT "$protocol://$host/harvester-admin/jobs/stop/$ID" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" diff --git a/test-resources/get-admin-record-by-id.sh b/test-resources/get-admin-record-by-id.sh deleted file mode 100755 index 0ac25ad..0000000 --- a/test-resources/get-admin-record-by-id.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" -ID=$1 -CURL_OPTIONS=$2 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -OPTS=`cat $SCRIPT_DIR/apis/harvester-admin-endpoints`; - -# echo "Choose an endpoint..." 
-select EP in $OPTS -do - break -done - -if [ "$CURL_OPTIONS" = "" ]; then - curl -i -w '\n' --http1.1 "$protocol://$host/harvester-admin/${EP}/${ID}" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -else - curl -s --http1.1 "$protocol://$host/harvester-admin/${EP}/${ID}" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" $CURL_OPTIONS -fi diff --git a/test-resources/get-admin-records.sh b/test-resources/get-admin-records.sh deleted file mode 100755 index 77ea06d..0000000 --- a/test-resources/get-admin-records.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" -QUERY=$1 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -OPTS=`cat $SCRIPT_DIR/apis/harvester-admin-endpoints`; - -# echo "Choose an endpoint..." -select EP in $OPTS -do - break -done - -if [ -z QUERY ] - then - curl -i -w '\n' --http1.1 "$protocol://$host/harvester-admin/${EP}?${QUERY}" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" - else - echo Using query ${QUERY} - curl -i -w '\n' --http1.1 "$protocol://$host/harvester-admin/${EP}?${QUERY}" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -fi diff --git a/test-resources/get-inventory-record-by-id.sh b/test-resources/get-inventory-record-by-id.sh deleted file mode 100755 index 1e37d13..0000000 --- a/test-resources/get-inventory-record-by-id.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" - -ID=$1 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -OPTS=`cat $SCRIPT_DIR/apis/inventory-endpoints`; - -# echo "Choose an endpoint..." 
-select EP in $OPTS -do - break -done - -curl -i -w '\n' --http1.1 "$protocol://$host/${EP}/${ID}" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" diff --git a/test-resources/get-inventory-records.sh b/test-resources/get-inventory-records.sh deleted file mode 100755 index 70bac2b..0000000 --- a/test-resources/get-inventory-records.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" - -QUERY=$1 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -OPTS=`cat $SCRIPT_DIR/apis/inventory-endpoints`; - -# echo "Choose an endpoint..." -select EP in $OPTS -do - break -done - -if [ -z QUERY ] - then - curl -i -w '\n' --http1.1 "$protocol://$host/${EP}?${QUERY}" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" - else - curl -i -w '\n' --http1.1 "$protocol://$host/${EP}" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -fi diff --git a/test-resources/get-script-by-step-id.sh b/test-resources/get-script-by-step-id.sh deleted file mode 100755 index 01f3428..0000000 --- a/test-resources/get-script-by-step-id.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -ID=$1 -CURL_OPTIONS=$2 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." 
- return || exit -fi - - -if [ "$CURL_OPTIONS" = "" ]; then - curl -i -w '\n' --http1.1 "$protocol://$host/harvester-admin/steps/${ID}/script" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -else - curl -s --http1.1 "$protocol://$host/harvester-admin/steps/${ID}/script" -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" $CURL_OPTIONS -fi diff --git a/test-resources/logins/diku@localhost:9130.sh b/test-resources/logins/diku@localhost:9130.sh deleted file mode 100755 index fea61c1..0000000 --- a/test-resources/logins/diku@localhost:9130.sh +++ /dev/null @@ -1,7 +0,0 @@ -# Source this file in scripts to run them against tenant diku on localhost (username diku_admin) -host=localhost:9130 -protocol=http -tenant=diku -username=diku_admin - -. ${BASH_SOURCE%/*}/login.sh diff --git a/test-resources/logins/login.sh b/test-resources/logins/login.sh deleted file mode 100755 index bedb819..0000000 --- a/test-resources/logins/login.sh +++ /dev/null @@ -1,26 +0,0 @@ -echo $banner -echo " for tenant: [$tenant]" -echo " at host: [$host]" -echo - -# Get password -echo -n "Are you sure? Enter password for [$username] to continue, otherwise hit Enter to cancel: " -read -s password -if [ -z $password ]; then - echo - echo - echo " Cancelled" - echo - return || exit 1 -fi - -token=$(curl -s -X POST -D - -H "Content-type: application/json" -H "X-Okapi-Tenant: $tenant" -d "{ \"username\": \"$username\", \"password\": \"$password\"}" "$protocol://$host/authn/login" | grep x-okapi-token | tr -d '\r' | cut -d " " -f2) - -if [ -z $token ]; then - echo - echo - echo " Exiting. 
Could not authenticate (get token) for user [$username]" - echo - return || exit 1 -fi -echo diff --git a/test-resources/logins/north@localhost:9130.sh b/test-resources/logins/north@localhost:9130.sh deleted file mode 100755 index 7e6188d..0000000 --- a/test-resources/logins/north@localhost:9130.sh +++ /dev/null @@ -1,7 +0,0 @@ -# Source this file in scripts to run them against tenant north on localhost (username north_admin) -host=localhost:9130 -protocol=http -tenant=north -username=north_admin - -. ${BASH_SOURCE%/*}/login.sh diff --git a/test-resources/post-admin-record.sh b/test-resources/post-admin-record.sh deleted file mode 100755 index 0b2b701..0000000 --- a/test-resources/post-admin-record.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -DATA=$1 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -if [ -z $DATA ] - then - echo "Usage: . ./post-admin-record.sh [json file name]" - return || exit -fi -OPTS=`cat apis/harvester-admin-endpoints`; - -# echo "Choose an endpoint..." -select EP in $OPTS -do - break -done - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/harvester-admin/${EP}" -d @${DATA} -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" diff --git a/test-resources/post-inventory-record.sh b/test-resources/post-inventory-record.sh deleted file mode 100755 index 6073046..0000000 --- a/test-resources/post-inventory-record.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -DATA=$1 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -if [ -z $DATA ] - then - echo "Usage: . ./put-inventory-record.sh [uuid] [json file name]" - return || exit -fi -OPTS=`cat apis/inventory-endpoints`; - -# echo "Choose an endpoint..." 
-select EP in $OPTS -do - break -done - -curl -i -w '\n' --http1.1 -X POST "$protocol://$host/${EP}" -d @${DATA} -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" diff --git a/test-resources/put-admin-record.sh b/test-resources/put-admin-record.sh deleted file mode 100755 index fa45c98..0000000 --- a/test-resources/put-admin-record.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -ID=$1 -DATA=$2 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -if [ -z $DATA ] - then - echo "Usage: . ./put-admin-record.sh [id] [json file name]" - return || exit -fi -OPTS=`cat apis/harvester-admin-endpoints`; - -# echo "Choose an endpoint..." -select EP in $OPTS -do - break -done - -curl -i -w '\n' --http1.1 -X PUT "$protocol://$host/harvester-admin/${EP}/${ID}" -d @${DATA} -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" diff --git a/test-resources/put-inventory-record.sh b/test-resources/put-inventory-record.sh deleted file mode 100755 index 2776469..0000000 --- a/test-resources/put-inventory-record.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -ID=$1 -DATA=$2 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -if [ -z $DATA ] - then - echo "Usage: . ./put-inventory-record.sh [uuid] [json file name]" - return || exit -fi -OPTS=`cat apis/inventory-endpoints`; - -# echo "Choose an endpoint..." 
-select EP in $OPTS -do - break -done - -curl -i -w '\n' --http1.1 -X PUT "$protocol://$host/${EP}/${ID}" -d @${DATA} -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/json" diff --git a/test-resources/put-script-to-step.sh b/test-resources/put-script-to-step.sh deleted file mode 100755 index ee34793..0000000 --- a/test-resources/put-script-to-step.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -ID=$1 -NAME=$2 -DATA=$3 - -if [ -z $token ] - then - echo "Can't find okapi token-- Make sure to run a login script." - return || exit -fi -if [ -z $DATA ] - then - echo "Usage: . ./put-admin-record.sh [id] [step name (possibly with truncation)] [xslt file name]" - return || exit -fi - - -curl -i -w '\n' --http1.1 -X PUT "$protocol://$host/harvester-admin/steps/${ID}/script?name=${NAME}" --data-binary @${DATA} -H "x-okapi-tenant: $tenant" -H "x-okapi-token: $token" -H "Content-Type: application/xml" diff --git a/test-resources/step-800010.xslt b/test-resources/step-800010.xslt deleted file mode 100644 index 896ccae..0000000 --- a/test-resources/step-800010.xslt +++ /dev/null @@ -1,465 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - MARC - - - - - - 6312d172-f0cf-40f6-b27d-9fa8feaf332f - - 497b5090-3da2-486c-b57f-de5bb3c2e26d - - 497b5090-3da2-486c-b57f-de5bb3c2e26d - - 526aa04d-9289-4511-8866-349299592c18 - - a2c91e87-6bab-44d6-8adb-1fd02481fc4f - - 535e3160-763a-42f9-b0c0-d8ed7df6e2a2 - - 9bce18bd-45bf-4949-8fa8-63163e4b7d7f - - 3be24c14-3551-4180-9292-26a786649c8b - - a2c91e87-6bab-44d6-8adb-1fd02481fc4f - - df5dddff-9c30-4507-8b82-119ff972d4d7 - - a2c91e87-6bab-44d6-8adb-1fd02481fc4f - - a2c91e87-6bab-44d6-8adb-1fd02481fc4f - - c1e95c2b-4efc-48cf-9e71-edb622cf0c22 - - 6312d172-f0cf-40f6-b27d-9fa8feaf332f - - a2c91e87-6bab-44d6-8adb-1fd02481fc4f - - - - - - - - - - - - - - - - - - - - - - - - - c858e4f2-2b6b-4385-842b-60732ee14abb - - - - - - - 
8261054f-be78-422d-bd51-4ed9f33c3422 - - - - - - - 913300b2-03ed-469a-8179-c1092c991227 - - - - - - - 2e8b3b6c-0e7d-4e48-bca2-b0b23b376af5 - - - - - - - b5d8cdc4-9441-487c-90cf-0c7ec97728eb - - - - - - - 7e591197-f335-4afb-bc6d-a6d76ca3bace - - - - - - - 351ebc1c-3aae-4825-8765-c6d50dbf011f - - - - - - - - - - - - - - - - - - - - - ; - - - - - ce176ace-a53e-4b4d-aa89-725ed7b2edac - - - - - - - ; - - - - - 42471af9-7d25-4f3a-bf78-60d29dcf463b - - - - - - - 9075b5f8-7d97-49e1-a431-73fdd468d476 - - - - - - - - - - - - <xsl:variable name="dirty-title"> - <xsl:for-each - select="marc:datafield[@tag='245'][1]/marc:subfield[@code='a' or @code='b' or @code='h' or @code='n' or @code='p']"> - <xsl:value-of select="."/> - <xsl:if test="position() != last()"> - <xsl:text> </xsl:text> - </xsl:if> - </xsl:for-each> - </xsl:variable> - <xsl:call-template name="remove-characters-last"> - <xsl:with-param name="input" select="$dirty-title"/> - <xsl:with-param name="characters">,-./ :;</xsl:with-param> - </xsl:call-template> - - - - - - - - - - - - , - - - - ,-. - - - - - - 2b94c631-fca9-4892-a730-03ee529ffe2a - - - true - - - - 2e48e713-17f3-4c13-a9f8-23845bb210aa - - - - e8b311a6-3b21-43f2-a269-dd9310cb2d0a - - - - 2b94c631-fca9-4892-a730-03ee529ffe2a - - - - - - - - - - - - - - - - - - - - - - ; - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ,-./ :; - - - - - - - - - - - - - - - - -- - - - - ,-. 
- - - - - - - - - - - - - - - - - - - - 86b6e817-e1bc-42fb-bab0-70e7547de6c1 - - 5ba8e385-0e27-462e-a571-ffa1fa34ea54 - - 0e2e11b-450f-45c8-b09b-0f819999966e - - 6a2533a7-4de2-4e64-8466-074c2fa9308c - - - - - - - - - - - - - - - - <xsl:call-template name="remove-characters-last"> - <xsl:with-param name="input" select="marc:subfield[@code='a']"/> - <xsl:with-param name="characters">,-./ :;</xsl:with-param> - </xsl:call-template> - - - : - - - ,-./ :; - - - - - - ,-./ :; - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -