diff --git a/NEWS.md b/NEWS.md
index bccb73fbc..f2bed6a2e 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -1,10 +1,22 @@
-## 2023-xx-xx v5.7.0-SNAPSHOT
+## 2024-xx-xx v5.8.0-SNAPSHOT
+* [MODSOURCE-733](https://issues.folio.org/browse/MODSOURCE-733) Reduce Memory Allocation of Strings
+* [MODSOURCE-506](https://issues.folio.org/browse/MODSOURCE-506) Remove rawRecord field from source record
+* [MODSOURCE-709](https://issues.folio.org/browse/MODSOURCE-709) MARC authority record is not created when using a job profile with a match profile on an absent subfield/field
+* [MODSOURCE-677](https://issues.folio.org/browse/MODSOURCE-677) Import completes with errors when a control field that differs from 001 is used for MARC-to-MARC matching
+* [MODSOURCE-722](https://issues.folio.org/browse/MODSOURCE-722) deleteMarcIndexersOldVersions: relation "marc_records_tracking" does not exist
+* [MODSOURMAN-1106](https://issues.folio.org/browse/MODSOURMAN-1106) The status of Instance is '-' in the Import log after uploading a file; the numbers of updated SRS records and Instances are not displayed in the Summary table
+* [MODSOURCE-717](https://issues.folio.org/browse/MODSOURCE-717) MARC modifications not processed when placed after Holdings Update action in a job profile
+
+## 2023-10-13 v5.7.0
* [MODSOURCE-648](https://issues.folio.org/browse/MODSOURCE-648) Upgrade mod-source-record-storage to Java 17
* [MODSOURCE-601](https://issues.folio.org/browse/MODSOURCE-601) Optimize Insert & Update of marc_records_lb table
* [MODSOURCE-635](https://issues.folio.org/browse/MODSOURCE-635) Delete marc_indexers records associated with "OLD" source records
* [MODSOURCE-636](https://issues.folio.org/browse/MODSOURCE-636) Implement async migration service
* [MODSOURCE-674](https://issues.folio.org/browse/MODSOURCE-674) Ensure only one background job can be triggered to clean up outdated marc indexers
* [MODSOURCE-530](https://issues.folio.org/browse/MODSOURCE-530) Fix duplicate records in incoming file causes problems after overlay process with no error reported
+* [MODSOURCE-690](https://issues.folio.org/browse/MODSOURCE-690) Make changes in SRS post processing handler to update MARC for shared Instance
+* [MODSOURCE-646](https://issues.folio.org/browse/MODSOURCE-646) Make changes to perform MARC To MARC Matching in Local Tenant & Central Tenant
+* [MODSOURCE-667](https://issues.folio.org/browse/MODSOURCE-667) Upgrade folio-kafka-wrapper to 3.0.0 version
### Asynchronous migration job API
| METHOD | URL | DESCRIPTION |
@@ -12,9 +24,6 @@
| POST | /source-storage/migrations/jobs | Initialize asynchronous migration job |
| GET | /source-storage/migrations/jobs/{jobId} | Get asynchronous migration job entity by its id |
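+
+Example of retrieving a migration job by id (host, tenant, and job id are illustrative):
+
+```shell
+curl -H "X-Okapi-Tenant: diku" -H "X-Okapi-Token: $TOKEN" \
+  http://localhost:9130/source-storage/migrations/jobs/67dfac11-1caf-4470-9ad1-d533f6360bdd
+```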
-## 2023-03-xx v5.6.3-SNAPSHOT
-* [MODSOURCE-615](https://issues.folio.org/browse/MODSOURCE-615) Importing 10,000 MARC authority records > Completes with errors due to timeout - Indices added.
-
## 2023-03-18 v5.6.2
* [MODSOURCE-585](https://issues.folio.org/browse/MODSOURCE-585) Data import matching takes incorrect SRS records into consideration
* [MODDATAIMP-786](https://issues.folio.org/browse/MODDATAIMP-786) Update data-import-util library to v1.11.0
diff --git a/descriptors/ModuleDescriptor-template.json b/descriptors/ModuleDescriptor-template.json
index c32fff8e3..39370f10b 100644
--- a/descriptors/ModuleDescriptor-template.json
+++ b/descriptors/ModuleDescriptor-template.json
@@ -54,7 +54,7 @@
},
{
"id": "source-storage-records",
- "version": "3.1",
+ "version": "3.2",
"handlers": [
{
"methods": [
@@ -92,6 +92,15 @@
"source-storage.records.put"
]
},
+ {
+ "methods": [
+ "PUT"
+ ],
+ "pathPattern": "/source-storage/records/{id}/generation",
+ "permissionsRequired": [
+ "source-storage.records.put"
+ ]
+ },
{
"methods": [
"DELETE"
@@ -441,10 +450,6 @@
"name": "DB_MAXPOOLSIZE",
"value": "15"
},
- {
- "name": "test.mode",
- "value": "true"
- },
{
"name": "KAFKA_HOST",
"value": "10.0.2.15"
diff --git a/mod-source-record-storage-client/pom.xml b/mod-source-record-storage-client/pom.xml
index ee9ba74c0..03a38d25b 100644
--- a/mod-source-record-storage-client/pom.xml
+++ b/mod-source-record-storage-client/pom.xml
@@ -6,7 +6,7 @@
     <groupId>org.folio</groupId>
     <artifactId>mod-source-record-storage</artifactId>
-    <version>5.7.0-SNAPSHOT</version>
+    <version>5.8.0-SNAPSHOT</version>
diff --git a/mod-source-record-storage-server/pom.xml b/mod-source-record-storage-server/pom.xml
index 2002530f2..676a1871e 100644
--- a/mod-source-record-storage-server/pom.xml
+++ b/mod-source-record-storage-server/pom.xml
@@ -5,7 +5,7 @@
     <groupId>org.folio</groupId>
     <artifactId>mod-source-record-storage</artifactId>
-    <version>5.7.0-SNAPSHOT</version>
+    <version>5.8.0-SNAPSHOT</version>
@@ -134,7 +134,7 @@
       <groupId>org.folio</groupId>
       <artifactId>folio-liquibase-util</artifactId>
-      <version>1.7.0-SNAPSHOT</version>
+      <version>1.7.0</version>
       <type>jar</type>
@@ -179,7 +179,7 @@
       <groupId>org.folio</groupId>
       <artifactId>data-import-processing-core</artifactId>
-      <version>4.1.0-SNAPSHOT</version>
+      <version>4.2.0-SNAPSHOT</version>
     </dependency>
     <dependency>
       <groupId>io.vertx</groupId>
@@ -188,7 +188,7 @@
       <groupId>org.folio</groupId>
       <artifactId>folio-kafka-wrapper</artifactId>
-      <version>3.0.0-SNAPSHOT</version>
+      <version>3.1.0-SNAPSHOT</version>
     </dependency>
     <dependency>
       <groupId>net.mguenther.kafka</groupId>
@@ -210,6 +210,32 @@
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka-clients</artifactId>
+      <version>${kafkaclients.version}</version>
+      <classifier>test</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka_2.13</artifactId>
+      <version>${kafkaclients.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka_2.13</artifactId>
+      <version>${kafkaclients.version}</version>
+      <classifier>test</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka-tools</artifactId>
+      <version>${kafkaclients.version}</version>
+      <scope>test</scope>
+    </dependency>
       <groupId>org.springframework</groupId>
       <artifactId>spring-test</artifactId>
@@ -256,22 +282,10 @@
1.9.19
42.5.1
postgres:12-alpine
-    <lombok.version>1.18.24</lombok.version>
+    <lombok.version>1.18.30</lombok.version>
/source-storage/stream/records,/source-storage/stream/source-records,/source-storage/stream/marc-record-identifiers
-  <dependencyManagement>
-    <dependencies>
-      <dependency>
-        <groupId>org.apache.logging.log4j</groupId>
-        <artifactId>log4j-bom</artifactId>
-        <version>2.17.2</version>
-        <type>pom</type>
-        <scope>import</scope>
-      </dependency>
-    </dependencies>
-  </dependencyManagement>
-
@@ -455,7 +469,7 @@
       <groupId>org.apache.maven</groupId>
       <artifactId>maven-core</artifactId>
-      <version>3.3.9</version>
+      <version>3.8.1</version>
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/AuthorityDomainKafkaTopic.java b/mod-source-record-storage-server/src/main/java/org/folio/AuthorityDomainKafkaTopic.java
new file mode 100644
index 000000000..4616bdd34
--- /dev/null
+++ b/mod-source-record-storage-server/src/main/java/org/folio/AuthorityDomainKafkaTopic.java
@@ -0,0 +1,26 @@
+package org.folio;
+
+
+import org.folio.kafka.services.KafkaTopic;
+
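+/**
+ * Kafka topic of inventory's authority domain events; moduleName() supplies the
+ * "authorities" prefix that folio-kafka-wrapper combines with the "authority" topic
+ * name when resolving the full tenant-scoped topic.
+ */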
+public enum AuthorityDomainKafkaTopic implements KafkaTopic {
+
+ AUTHORITY("authority");
+
+ private static final String AUTHORITIES_PREFIX = "authorities";
+ private final String topic;
+
+ AuthorityDomainKafkaTopic(String topic) {
+ this.topic = topic;
+ }
+
+ @Override
+ public String moduleName() {
+ return AUTHORITIES_PREFIX;
+ }
+
+ @Override
+ public String topicName() {
+ return topic;
+ }
+}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/config/ApplicationConfig.java b/mod-source-record-storage-server/src/main/java/org/folio/config/ApplicationConfig.java
index 80cb59c71..df1358eba 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/config/ApplicationConfig.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/config/ApplicationConfig.java
@@ -8,10 +8,8 @@
import org.springframework.context.annotation.Configuration;
import org.folio.kafka.KafkaConfig;
-import org.springframework.scheduling.annotation.EnableScheduling;
@Configuration
-@EnableScheduling
@ComponentScan(basePackages = {
"org.folio.client",
"org.folio.rest.impl",
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/consumers/AuthorityDomainKafkaHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/consumers/AuthorityDomainKafkaHandler.java
new file mode 100644
index 000000000..ff6575e05
--- /dev/null
+++ b/mod-source-record-storage-server/src/main/java/org/folio/consumers/AuthorityDomainKafkaHandler.java
@@ -0,0 +1,103 @@
+package org.folio.consumers;
+
+import static org.folio.dao.util.RecordDaoUtil.filterRecordByExternalId;
+
+import io.vertx.core.Future;
+import io.vertx.core.json.JsonObject;
+import io.vertx.kafka.client.consumer.KafkaConsumerRecord;
+import java.util.Collections;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.folio.dao.util.RecordType;
+import org.folio.kafka.AsyncRecordHandler;
+import org.folio.rest.jooq.enums.RecordState;
+import org.folio.services.RecordService;
+import org.folio.services.util.KafkaUtil;
+import org.springframework.stereotype.Component;
+
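+/**
+ * Consumes authority domain DELETE events: a soft delete marks matching MARC authority
+ * records as DELETED, a hard delete removes them by external id.
+ */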
+@Component
+public class AuthorityDomainKafkaHandler implements AsyncRecordHandler<String, String> {
+
+ private static final Logger log = LogManager.getLogger();
+
+ private static final String DOMAIN_EVENT_TYPE_HEADER = "domain-event-type";
+ private static final String DELETE_DOMAIN_EVENT_TYPE = "DELETE";
+ private static final String DELETE_EVENT_SUB_TYPE_FLD = "deleteEventSubType";
+ private static final String TENANT_FLD = "tenant";
+
+ private final RecordService recordService;
+
+ public AuthorityDomainKafkaHandler(RecordService recordService) {
+ this.recordService = recordService;
+ }
+
+ @Override
+  public Future<String> handle(KafkaConsumerRecord<String, String> consumerRecord) {
+ log.trace("handle:: Handling kafka record: '{}'", consumerRecord);
+ String authorityId = consumerRecord.key();
+ if (isUnexpectedDomainEvent(consumerRecord)) {
+ log.trace("handle:: Expected only {} domain type. Skipping authority domain kafka record [ID: '{}']",
+ DELETE_DOMAIN_EVENT_TYPE, authorityId);
+ return Future.succeededFuture(authorityId);
+ }
+
+ var eventPayload = new JsonObject(consumerRecord.value());
+ var tenantId = eventPayload.getString(TENANT_FLD);
+ var eventSubType = EventSubType.valueOf(eventPayload.getString(DELETE_EVENT_SUB_TYPE_FLD));
+
+ logInput(authorityId, eventSubType, tenantId);
+ return (switch (eventSubType) {
+ case SOFT_DELETE -> performSoftDelete(authorityId, tenantId);
+ case HARD_DELETE -> performHardDelete(authorityId, tenantId);
+ }).onFailure(throwable -> logError(authorityId, eventSubType, tenantId));
+ }
+
+  private Future<String> performSoftDelete(String authorityId, String tenantId) {
+ var condition = filterRecordByExternalId(authorityId);
+ return recordService.getRecords(condition, RecordType.MARC_AUTHORITY, Collections.emptyList(), 0, 1, tenantId)
+ .compose(recordCollection -> {
+ if (recordCollection.getRecords().isEmpty()) {
+ log.debug("handle:: No records found [externalId: '{}', tenantId: '{}']", authorityId, tenantId);
+ return Future.succeededFuture();
+ }
+ var matchedId = recordCollection.getRecords().get(0).getMatchedId();
+
+ return recordService.updateRecordsState(matchedId, RecordState.DELETED, RecordType.MARC_AUTHORITY, tenantId);
+ }).map(authorityId);
+ }
+
+  private Future<String> performHardDelete(String authorityId, String tenantId) {
+ return recordService.deleteRecordsByExternalId(authorityId, tenantId).map(authorityId);
+ }
+
+ private void logError(String authorityId, EventSubType subType, String tenantId) {
+ log.error("handle:: Failed to {} records [externalId: '{}', tenantId: '{}']", subType.getValueReadable(),
+ authorityId, tenantId);
+ }
+
+ private void logInput(String authorityId, EventSubType subType, String tenantId) {
+ log.info("handle:: Trying to {} records [externalId: '{}', tenantId '{}']",
+ subType.getValueReadable(), authorityId, tenantId);
+ }
+
+  private boolean isUnexpectedDomainEvent(KafkaConsumerRecord<String, String> consumerRecord) {
+ return !KafkaUtil.headerExists(DOMAIN_EVENT_TYPE_HEADER, DELETE_DOMAIN_EVENT_TYPE, consumerRecord.headers());
+ }
+
+ public enum EventSubType {
+
+ SOFT_DELETE("soft-delete"),
+ HARD_DELETE("hard-delete");
+
+ private final String valueReadable;
+
+ EventSubType(String valueReadable) {
+ this.valueReadable = valueReadable;
+ }
+
+ public String getValueReadable() {
+ return valueReadable;
+ }
+ }
+
+}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/consumers/DataImportKafkaHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/consumers/DataImportKafkaHandler.java
index 57f2dd2f0..2336f5148 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/consumers/DataImportKafkaHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/consumers/DataImportKafkaHandler.java
@@ -4,13 +4,13 @@
import io.vertx.core.Promise;
import io.vertx.core.Vertx;
import io.vertx.core.json.Json;
+import io.vertx.core.json.jackson.DatabindCodec;
import io.vertx.kafka.client.consumer.KafkaConsumerRecord;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.folio.DataImportEventPayload;
import org.folio.dataimport.util.OkapiConnectionParams;
-import org.folio.dbschema.ObjectMapperTool;
import org.folio.kafka.AsyncRecordHandler;
import org.folio.processing.events.EventManager;
import org.folio.processing.exceptions.EventProcessingException;
@@ -29,7 +29,7 @@
@Component
@Qualifier("DataImportKafkaHandler")
-public class DataImportKafkaHandler implements AsyncRecordHandler<String, String> {
+public class DataImportKafkaHandler implements AsyncRecordHandler<String, byte[]> {
private static final Logger LOGGER = LogManager.getLogger();
@@ -48,14 +48,14 @@ public DataImportKafkaHandler(Vertx vertx, JobProfileSnapshotCache profileSnapsh
}
@Override
-  public Future<String> handle(KafkaConsumerRecord<String, String> targetRecord) {
+  public Future<String> handle(KafkaConsumerRecord<String, byte[]> targetRecord) {
LOGGER.trace("handle:: Handling kafka record: {}", targetRecord);
String recordId = extractHeaderValue(RECORD_ID_HEADER, targetRecord.headers());
String chunkId = extractHeaderValue(CHUNK_ID_HEADER, targetRecord.headers());
String userId = extractHeaderValue(USER_ID_HEADER, targetRecord.headers());
try {
Promise promise = Promise.promise();
- Event event = ObjectMapperTool.getMapper().readValue(targetRecord.value(), Event.class);
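+      // DatabindCodec.mapper() can deserialize the consumed byte[] payload directly,
+      // avoiding an intermediate String copy of the whole event (MODSOURCE-733).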
+ Event event = DatabindCodec.mapper().readValue(targetRecord.value(), Event.class);
DataImportEventPayload eventPayload = Json.decodeValue(event.getEventPayload(), DataImportEventPayload.class);
LOGGER.debug("handle:: Data import event payload has been received with event type: '{}' by jobExecutionId: '{}' and recordId: '{}' and chunkId: '{}' and userId: '{}'",
eventPayload.getEventType(), eventPayload.getJobExecutionId(), recordId, chunkId, userId);
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java
index 8ed1d456f..e9cdbe4e7 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java
@@ -4,28 +4,26 @@
import io.vertx.core.Promise;
import io.vertx.core.Vertx;
import io.vertx.core.json.Json;
+import io.vertx.core.json.jackson.DatabindCodec;
import io.vertx.kafka.client.consumer.KafkaConsumerRecord;
import io.vertx.kafka.client.producer.KafkaHeader;
import io.vertx.kafka.client.producer.KafkaProducer;
-import io.vertx.kafka.client.producer.KafkaProducerRecord;
-
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
-
-import org.folio.DataImportEventPayload;
import org.folio.dao.util.ParsedRecordDaoUtil;
import org.folio.dataimport.util.OkapiConnectionParams;
import org.folio.kafka.AsyncRecordHandler;
import org.folio.kafka.KafkaConfig;
import org.folio.kafka.KafkaHeaderUtils;
import org.folio.kafka.KafkaTopicNameHelper;
+import org.folio.kafka.SimpleKafkaProducerManager;
+import org.folio.kafka.services.KafkaProducerRecordBuilder;
import org.folio.rest.jaxrs.model.Event;
import org.folio.rest.jaxrs.model.EventMetadata;
import org.folio.rest.jaxrs.model.Metadata;
import org.folio.rest.jaxrs.model.RecordCollection;
import org.folio.rest.jaxrs.model.RecordsBatchResponse;
import org.folio.services.RecordService;
-
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
@@ -40,11 +38,10 @@
import static org.folio.services.util.KafkaUtil.extractHeaderValue;
@Component
-public class ParsedRecordChunksKafkaHandler implements AsyncRecordHandler<String, String> {
+public class ParsedRecordChunksKafkaHandler implements AsyncRecordHandler<String, byte[]> {
private static final Logger LOGGER = LogManager.getLogger();
public static final String JOB_EXECUTION_ID_HEADER = "jobExecutionId";
- private static final String RECORD_ID_HEADER = "recordId";
private static final String CHUNK_ID_HEADER = "chunkId";
private static final String USER_ID_HEADER = "userId";
private static final AtomicInteger chunkCounter = new AtomicInteger();
@@ -53,6 +50,7 @@ public class ParsedRecordChunksKafkaHandler implements AsyncRecordHandler<String, byte[]> {
-  public Future<String> handle(KafkaConsumerRecord<String, String> targetRecord) {
+  public Future<String> handle(KafkaConsumerRecord<String, byte[]> targetRecord) {
LOGGER.trace("handle:: Handling kafka record: {}", targetRecord);
- Event event = Json.decodeValue(targetRecord.value(), Event.class);
- RecordCollection recordCollection = Json.decodeValue(event.getEventPayload(), RecordCollection.class);
-
-    List<KafkaHeader> kafkaHeaders = targetRecord.headers();
-
- OkapiConnectionParams okapiConnectionParams = new OkapiConnectionParams(KafkaHeaderUtils.kafkaHeadersToMap(kafkaHeaders), vertx);
- String tenantId = okapiConnectionParams.getTenantId();
String jobExecutionId = extractHeaderValue(JOB_EXECUTION_ID_HEADER, targetRecord.headers());
- String recordId = extractHeaderValue(RECORD_ID_HEADER, targetRecord.headers());
String chunkId = extractHeaderValue(CHUNK_ID_HEADER, targetRecord.headers());
String userId = extractHeaderValue(USER_ID_HEADER, targetRecord.headers());
- String key = targetRecord.key();
-
int chunkNumber = chunkCounter.incrementAndGet();
- DataImportEventPayload eventPayload = Json.decodeValue(event.getEventPayload(), DataImportEventPayload.class);
+ String key = targetRecord.key();
try {
- LOGGER.debug("handle:: RecordCollection has been received with event: '{}', jobExecutionId '{}', chunkId: '{}', starting processing... chunkNumber '{}'-'{}' with recordId: '{}'' ",
- eventPayload.getEventType(), jobExecutionId, chunkId, chunkNumber, key, recordId);
+ Event event = DatabindCodec.mapper().readValue(targetRecord.value(), Event.class);
+ RecordCollection recordCollection = Json.decodeValue(event.getEventPayload(), RecordCollection.class);
+
+      List<KafkaHeader> kafkaHeaders = targetRecord.headers();
+ OkapiConnectionParams okapiConnectionParams = new OkapiConnectionParams(KafkaHeaderUtils.kafkaHeadersToMap(kafkaHeaders), vertx);
+ String tenantId = okapiConnectionParams.getTenantId();
+
+ LOGGER.debug("handle:: RecordCollection has been received with event: '{}', jobExecutionId '{}', chunkId: '{}', starting processing... chunkNumber '{}'-'{}'",
+ event.getEventType(), jobExecutionId, chunkId, chunkNumber, key);
setUserMetadata(recordCollection, userId);
return recordService.saveRecords(recordCollection, tenantId)
- .compose(recordsBatchResponse -> sendBackRecordsBatchResponse(recordsBatchResponse, kafkaHeaders, tenantId, chunkNumber, eventPayload.getEventType(), targetRecord));
+ .compose(recordsBatchResponse -> sendBackRecordsBatchResponse(recordsBatchResponse, kafkaHeaders, tenantId, chunkNumber, event.getEventType(), targetRecord));
} catch (Exception e) {
- LOGGER.warn("handle:: RecordCollection processing has failed with errors with event: '{}', jobExecutionId '{}', chunkId: '{}', chunkNumber '{}'-'{}' with recordId: '{}' ",
- eventPayload.getEventType(), jobExecutionId, chunkId, chunkNumber, key, recordId);
+ LOGGER.warn("handle:: RecordCollection processing has failed with errors jobExecutionId '{}', chunkId: '{}', chunkNumber '{}'-'{}'",
+ jobExecutionId, chunkId, chunkNumber, key);
return Future.failedFuture(e);
}
}
-  private Future<String> sendBackRecordsBatchResponse(RecordsBatchResponse recordsBatchResponse, List<KafkaHeader> kafkaHeaders, String tenantId, int chunkNumber, String eventType, KafkaConsumerRecord<String, String> commonRecord) {
+  private Future<String> sendBackRecordsBatchResponse(RecordsBatchResponse recordsBatchResponse, List<KafkaHeader> kafkaHeaders, String tenantId, int chunkNumber, String eventType, KafkaConsumerRecord<String, byte[]> commonRecord) {
Event event;
event = new Event()
.withId(UUID.randomUUID().toString())
@@ -114,31 +110,34 @@ private Future<String> sendBackRecordsBatchResponse(RecordsBatchResponse recordsBatchResponse,
String topicName = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), KafkaTopicNameHelper.getDefaultNameSpace(),
tenantId, DI_PARSED_RECORDS_CHUNK_SAVED.value());
-    KafkaProducerRecord<String, String> targetRecord =
-      KafkaProducerRecord.create(topicName, key, Json.encode(event));
+ var targetRecord =
+      new KafkaProducerRecordBuilder<String, Object>(tenantId)
+ .key(key)
+ .value(event)
+ .topic(topicName)
+ .build();
targetRecord.addHeaders(kafkaHeaders);
     Promise<String> writePromise = Promise.promise();
String producerName = DI_PARSED_RECORDS_CHUNK_SAVED + "_Producer";
-    KafkaProducer<String, String> producer =
-      KafkaProducer.createShared(Vertx.currentContext().owner(), producerName, kafkaConfig.getProducerProps());
+    KafkaProducer<String, String> producer = producerManager.createShared(DI_PARSED_RECORDS_CHUNK_SAVED.value());
- producer.write(targetRecord, war -> {
- producer.end(ear -> producer.close());
- if (war.succeeded()) {
- String recordId = extractHeaderValue(RECORD_ID_HEADER, commonRecord.headers());
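+    // send() returns a Future instead of taking a write callback; eventually() closes
+    // the producer on both the success and failure paths.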
+ producer.send(targetRecord)
+ .mapEmpty()
+ .eventually(x -> producer.close())
+ .onSuccess(res -> {
String chunkId = extractHeaderValue(CHUNK_ID_HEADER, commonRecord.headers());
- LOGGER.debug("sendBackRecordsBatchResponse:: RecordCollection processing has been completed with response sent... event: '{}', chunkId: '{}', chunkNumber '{}'-'{}' with recordId: '{}'",
- eventType, chunkId, chunkNumber, targetRecord.key(), recordId);
+ LOGGER.debug("sendBackRecordsBatchResponse:: RecordCollection processing has been completed with response sent... event: '{}', chunkId: '{}', chunkNumber '{}'-'{}'",
+ eventType, chunkId, chunkNumber, targetRecord.key());
writePromise.complete(targetRecord.key());
- } else {
- Throwable cause = war.cause();
+ })
+ .onFailure(err -> {
+ Throwable cause = err.getCause();
LOGGER.warn("sendBackRecordsBatchResponse:: {} write error {}", producerName, cause);
writePromise.fail(cause);
- }
- });
+ });
return writePromise.future();
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java
index 1ac450dca..0e423b6f4 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java
@@ -63,36 +63,27 @@ public QuickMarcKafkaHandler(Vertx vertx, RecordService recordService, KafkaConf
}
@Override
-  public Future<String> handle(KafkaConsumerRecord<String, String> record) {
- log.trace("handle:: Handling kafka record {}", record);
- var event = Json.decodeValue(record.value(), Event.class);
+  public Future<String> handle(KafkaConsumerRecord<String, String> consumerRecord) {
+ log.trace("handle:: Handling kafka consumerRecord {}", consumerRecord);
- var kafkaHeaders = record.headers();
+ var kafkaHeaders = consumerRecord.headers();
var params = new OkapiConnectionParams(kafkaHeadersToMap(kafkaHeaders), vertx);
- return getEventPayload(event)
+ return getEventPayload(consumerRecord)
.compose(eventPayload -> {
String snapshotId = eventPayload.getOrDefault(SNAPSHOT_ID_KEY, UUID.randomUUID().toString());
return getRecordDto(eventPayload)
- .compose(recordDto -> {
-
- log.info("handle:: recordDto: {}", recordDto);
-
- return recordService.updateSourceRecord(recordDto, snapshotId, params.getTenantId())
- .compose(updatedRecord -> {
-
- log.info("handle:: updatedRecord: {}", updatedRecord);
-
- eventPayload.put(updatedRecord.getRecordType().value(), Json.encode(updatedRecord));
- return sendEvent(eventPayload, QM_SRS_MARC_RECORD_UPDATED, params.getTenantId(), kafkaHeaders)
- .map(aBoolean -> record.key());
- })
- .recover(th -> {
- log.warn("handle:: Failed to handle QM_RECORD_UPDATED event", th);
- eventPayload.put(ERROR_KEY, th.getMessage());
- return sendEvent(eventPayload, QM_ERROR, params.getTenantId(), kafkaHeaders)
- .map(aBoolean -> th.getMessage());
- });
+ .compose(recordDto -> recordService.updateSourceRecord(recordDto, snapshotId, params.getTenantId()))
+ .compose(updatedRecord -> {
+ eventPayload.put(updatedRecord.getRecordType().value(), Json.encode(updatedRecord));
+ return sendEvent(eventPayload, QM_SRS_MARC_RECORD_UPDATED, params.getTenantId(), kafkaHeaders)
+ .map(aBoolean -> consumerRecord.key());
+ })
+ .recover(th -> {
+ log.warn("handle:: Failed to handle QM_RECORD_UPDATED event", th);
+ eventPayload.put(ERROR_KEY, th.getMessage());
+ return sendEvent(eventPayload, QM_ERROR, params.getTenantId(), kafkaHeaders)
+ .map(aBoolean -> th.getMessage());
});
})
.recover(th -> {
@@ -132,8 +123,9 @@ var record = createProducerRecord(eventPayload, eventType.name(), key, tenantId,
}
@SuppressWarnings("unchecked")
-  private Future<HashMap<String, String>> getEventPayload(Event event) {
+  private Future<HashMap<String, String>> getEventPayload(KafkaConsumerRecord<String, String> consumerRecord) {
try {
+ var event = Json.decodeValue(consumerRecord.value(), Event.class);
var eventPayload = Json.decodeValue(event.getEventPayload(), HashMap.class);
return Future.succeededFuture(eventPayload);
} catch (Exception e) {
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java
index 9d499db6f..c3b9e2192 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java
@@ -302,6 +302,15 @@ public interface RecordDao {
*/
   Future<Boolean> deleteRecordsBySnapshotId(String snapshotId, String tenantId);
+ /**
+   * Deletes, in a single transaction, all records associated with the external id
+ *
+ * @param externalId external id
+ * @param tenantId tenant id
+ * @return future with true if succeeded
+ */
+  Future<Boolean> deleteRecordsByExternalId(String externalId, String tenantId);
+
/**
   * Purges 'DELETED' records.
   * Purges a given limited number of 'DELETED' records updated more than the given number of days back.
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java
index 299115117..bea29fc02 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java
@@ -1,5 +1,38 @@
package org.folio.dao;
+import static java.lang.String.format;
+import static java.util.Collections.emptyList;
+import static org.folio.dao.util.AdvisoryLockUtil.acquireLock;
+import static org.folio.dao.util.ErrorRecordDaoUtil.ERROR_RECORD_CONTENT;
+import static org.folio.dao.util.ParsedRecordDaoUtil.PARSED_RECORD_CONTENT;
+import static org.folio.dao.util.RawRecordDaoUtil.RAW_RECORD_CONTENT;
+import static org.folio.dao.util.RecordDaoUtil.RECORD_NOT_FOUND_TEMPLATE;
+import static org.folio.dao.util.RecordDaoUtil.ensureRecordForeignKeys;
+import static org.folio.dao.util.RecordDaoUtil.filterRecordByExternalIdNonNull;
+import static org.folio.dao.util.RecordDaoUtil.filterRecordByState;
+import static org.folio.dao.util.RecordDaoUtil.filterRecordByType;
+import static org.folio.dao.util.RecordDaoUtil.getExternalHrid;
+import static org.folio.dao.util.RecordDaoUtil.getExternalId;
+import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_FOUND_TEMPLATE;
+import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_STARTED_MESSAGE_TEMPLATE;
+import static org.folio.rest.jooq.Tables.ERROR_RECORDS_LB;
+import static org.folio.rest.jooq.Tables.MARC_RECORDS_LB;
+import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING;
+import static org.folio.rest.jooq.Tables.RAW_RECORDS_LB;
+import static org.folio.rest.jooq.Tables.RECORDS_LB;
+import static org.folio.rest.jooq.Tables.SNAPSHOTS_LB;
+import static org.folio.rest.jooq.enums.RecordType.MARC_BIB;
+import static org.folio.rest.util.QueryParamUtil.toRecordType;
+import static org.jooq.impl.DSL.condition;
+import static org.jooq.impl.DSL.countDistinct;
+import static org.jooq.impl.DSL.field;
+import static org.jooq.impl.DSL.max;
+import static org.jooq.impl.DSL.name;
+import static org.jooq.impl.DSL.primaryKey;
+import static org.jooq.impl.DSL.select;
+import static org.jooq.impl.DSL.table;
+import static org.jooq.impl.DSL.trueCondition;
+
import com.google.common.collect.Lists;
import io.github.jklingsporn.vertx.jooq.classic.reactivepg.ReactiveClassicGenericQueryExecutor;
import io.github.jklingsporn.vertx.jooq.shared.internal.QueryResult;
@@ -11,6 +44,24 @@
import io.vertx.reactivex.pgclient.PgPool;
import io.vertx.reactivex.sqlclient.SqlConnection;
import io.vertx.sqlclient.Row;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.Set;
+import java.util.UUID;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import javax.ws.rs.BadRequestException;
+import javax.ws.rs.NotFoundException;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.text.StrSubstitutor;
import org.apache.commons.lang3.StringUtils;
@@ -27,6 +78,7 @@
import org.folio.kafka.exception.DuplicateEventException;
import org.folio.okapi.common.GenericCompositeFuture;
import org.folio.processing.value.ListValue;
+import org.folio.processing.value.MissingValue;
import org.folio.processing.value.Value;
import org.folio.rest.jaxrs.model.AdditionalInfo;
import org.folio.rest.jaxrs.model.ErrorRecord;
@@ -76,57 +128,6 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.NotFoundException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.time.OffsetDateTime;
-import java.time.ZoneOffset;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.Set;
-import java.util.UUID;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-
-import static java.lang.String.format;
-import static org.folio.dao.util.AdvisoryLockUtil.acquireLock;
-import static org.folio.dao.util.ErrorRecordDaoUtil.ERROR_RECORD_CONTENT;
-import static org.folio.dao.util.ParsedRecordDaoUtil.PARSED_RECORD_CONTENT;
-import static org.folio.dao.util.RawRecordDaoUtil.RAW_RECORD_CONTENT;
-import static org.folio.dao.util.RecordDaoUtil.RECORD_NOT_FOUND_TEMPLATE;
-import static org.folio.dao.util.RecordDaoUtil.ensureRecordForeignKeys;
-import static org.folio.dao.util.RecordDaoUtil.filterRecordByExternalIdNonNull;
-import static org.folio.dao.util.RecordDaoUtil.filterRecordByState;
-import static org.folio.dao.util.RecordDaoUtil.filterRecordByType;
-import static org.folio.dao.util.RecordDaoUtil.getExternalHrid;
-import static org.folio.dao.util.RecordDaoUtil.getExternalId;
-import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_FOUND_TEMPLATE;
-import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_STARTED_MESSAGE_TEMPLATE;
-import static org.folio.rest.jooq.Tables.ERROR_RECORDS_LB;
-import static org.folio.rest.jooq.Tables.MARC_RECORDS_LB;
-import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING;
-import static org.folio.rest.jooq.Tables.RAW_RECORDS_LB;
-import static org.folio.rest.jooq.Tables.RECORDS_LB;
-import static org.folio.rest.jooq.Tables.SNAPSHOTS_LB;
-import static org.folio.rest.jooq.enums.RecordType.MARC_BIB;
-import static org.folio.rest.util.QueryParamUtil.toRecordType;
-import static org.jooq.impl.DSL.condition;
-import static org.jooq.impl.DSL.countDistinct;
-import static org.jooq.impl.DSL.field;
-import static org.jooq.impl.DSL.max;
-import static org.jooq.impl.DSL.name;
-import static org.jooq.impl.DSL.primaryKey;
-import static org.jooq.impl.DSL.select;
-import static org.jooq.impl.DSL.table;
-import static org.jooq.impl.DSL.trueCondition;
-
@Component
public class RecordDaoImpl implements RecordDao {
@@ -148,7 +149,7 @@ public class RecordDaoImpl implements RecordDao {
private static final int RECORDS_LIMIT = Integer.parseInt(System.getProperty("RECORDS_READING_LIMIT", "999"));
static final int INDEXERS_DELETION_LOCK_NAMESPACE_ID = "delete_marc_indexers".hashCode();
- public static final String CONTROL_FIELD_CONDITION_TEMPLATE = "\"{partition}\".\"value\" = '{value}'";
+ public static final String CONTROL_FIELD_CONDITION_TEMPLATE = "\"{partition}\".\"value\" in ({value})";
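+  // "in (...)" lets a control-field condition (e.g. a control field other than 001) be
+  // matched against a list of values, as the data-field template below already does
+  // (MODSOURCE-677).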
public static final String DATA_FIELD_CONDITION_TEMPLATE = "\"{partition}\".\"value\" in ({value}) and \"{partition}\".\"ind1\" LIKE '{ind1}' and \"{partition}\".\"ind2\" LIKE '{ind2}' and \"{partition}\".\"subfield_no\" = '{subfield}'";
private static final String VALUE_IN_SINGLE_QUOTES = "'%s'";
private static final String RECORD_NOT_FOUND_BY_ID_TYPE = "Record with %s id: %s was not found";
@@ -263,6 +264,9 @@ public Future<StrippedParsedRecordCollection> getStrippedParsedRecords(
   public Future<List<Record>> getMatchedRecords(MatchField matchedField, TypeConnection typeConnection, boolean externalIdRequired, int offset, int limit, String tenantId) {
Name prt = name(typeConnection.getDbType().getTableName());
Table marcIndexersPartitionTable = table(name(MARC_INDEXERS_PARTITION_PREFIX + matchedField.getTag()));
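+    // A MissingValue means the match profile points at an absent field/subfield, so
+    // nothing can match (MODSOURCE-709); short-circuit with an empty result.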
+ if (matchedField.getValue() instanceof MissingValue)
+ return Future.succeededFuture(emptyList());
+
return getQueryExecutor(tenantId).transaction(txQE -> txQE.query(dsl ->
{
SelectOnConditionStep query = dsl
@@ -324,16 +328,13 @@ public Future> getMatchedRecordsWithoutIndexersVersionUsage(MatchFi
}
private Condition getMatchedFieldCondition(MatchField matchedField, String partition) {
+    Map<String, String> params = new HashMap<>();
+ params.put("partition", partition);
+ params.put("value", getValueInSqlFormat(matchedField.getValue()));
if (matchedField.isControlField()) {
-      Map<String, String> params = new HashMap<>();
- params.put("partition", partition);
- params.put("value", getValueInSqlFormat(matchedField.getValue()));
String sql = StrSubstitutor.replace(CONTROL_FIELD_CONDITION_TEMPLATE, params, "{", "}");
return condition(sql);
} else {
-      Map<String, String> params = new HashMap<>();
- params.put("partition", partition);
- params.put("value", getValueInSqlFormat(matchedField.getValue()));
params.put("ind1", getSqlInd(matchedField.getInd1()));
params.put("ind2", getSqlInd(matchedField.getInd2()));
params.put("subfield", matchedField.getSubfield());
@@ -1105,6 +1106,21 @@ public Future<Boolean> deleteRecordsBySnapshotId(String snapshotId, String tenantId) {
return SnapshotDaoUtil.delete(getQueryExecutor(tenantId), snapshotId);
}
+ @Override
+  public Future<Boolean> deleteRecordsByExternalId(String externalId, String tenantId) {
+ LOG.trace("deleteRecordsByExternalId:: Deleting records by externalId {} for tenant {}", externalId, tenantId);
+ var externalUuid = UUID.fromString(externalId);
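+    // One transaction: first delete the marc_records_lb rows joined to records_lb by id,
+    // then the records_lb rows themselves, both filtered on external_id.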
+ return getQueryExecutor(tenantId).transaction(txQE -> txQE
+ .execute(dsl -> dsl.deleteFrom(MARC_RECORDS_LB)
+ .using(RECORDS_LB)
+ .where(MARC_RECORDS_LB.ID.eq(RECORDS_LB.ID))
+ .and(RECORDS_LB.EXTERNAL_ID.eq(externalUuid)))
+ .compose(u ->
+ txQE.execute(dsl -> dsl.deleteFrom(RECORDS_LB)
+ .where(RECORDS_LB.EXTERNAL_ID.eq(externalUuid)))
+ )).map(u -> true);
+ }
+
@Override
  public Future<Void> deleteRecords(int lastUpdatedDays, int limit, String tenantId) {
LOG.trace("deleteRecords:: Deleting record by last {} days for tenant {}", lastUpdatedDays, tenantId);
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/dao/util/RecordDaoUtil.java b/mod-source-record-storage-server/src/main/java/org/folio/dao/util/RecordDaoUtil.java
index 59c218fc5..5954176d3 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/dao/util/RecordDaoUtil.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/dao/util/RecordDaoUtil.java
@@ -264,7 +264,6 @@ public static SourceRecord toSourceRecord(Record record) {
}
return sourceRecord
.withOrder(record.getOrder())
- .withRawRecord(record.getRawRecord())
.withParsedRecord(record.getParsedRecord())
.withAdditionalInfo(record.getAdditionalInfo())
.withExternalIdsHolder(record.getExternalIdsHolder())
@@ -490,11 +489,21 @@ public static Condition filterRecordBySnapshotId(String snapshotId) {
*/
public static Condition filterRecordByType(String type) {
if (StringUtils.isNotEmpty(type)) {
- return RECORDS_LB.RECORD_TYPE.eq(toRecordType(type));
+ return filterRecordByType(toRecordType(type));
}
return DSL.noCondition();
}
+ /**
+ * Get {@link Condition} to filter by type
+ *
+ * @param recordType type to equal
+ * @return condition
+ */
+ public static Condition filterRecordByType(RecordType recordType) {
+ return RECORDS_LB.RECORD_TYPE.eq(recordType);
+ }
+
/**
* Get {@link Condition} to filter by state
*
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/errorhandlers/ParsedRecordChunksErrorHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/errorhandlers/ParsedRecordChunksErrorHandler.java
index 93a03e0ad..adf1c9d47 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/errorhandlers/ParsedRecordChunksErrorHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/errorhandlers/ParsedRecordChunksErrorHandler.java
@@ -3,6 +3,7 @@
import io.vertx.core.Future;
import io.vertx.core.Vertx;
import io.vertx.core.json.Json;
+import io.vertx.core.json.jackson.DatabindCodec;
import io.vertx.kafka.client.consumer.KafkaConsumerRecord;
import io.vertx.kafka.client.producer.KafkaHeader;
import io.vertx.kafka.client.producer.impl.KafkaHeaderImpl;
@@ -23,6 +24,7 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -38,7 +40,7 @@
 * with status 'Completed with errors' showing the error message instead of a hanging progress bar.
*/
@Component
-public class ParsedRecordChunksErrorHandler implements ProcessRecordErrorHandler<String, String> {
+public class ParsedRecordChunksErrorHandler implements ProcessRecordErrorHandler<String, byte[]> {
private static final Logger LOGGER = LogManager.getLogger();
@@ -53,12 +55,18 @@ public class ParsedRecordChunksErrorHandler implements ProcessRecordErrorHandler
private Vertx vertx;
@Override
-  public void handle(Throwable throwable, KafkaConsumerRecord<String, String> record) {
- LOGGER.trace("handle:: Handling record {}", record);
- Event event = Json.decodeValue(record.value(), Event.class);
- RecordCollection recordCollection = Json.decodeValue(event.getEventPayload(), RecordCollection.class);
+  public void handle(Throwable throwable, KafkaConsumerRecord<String, byte[]> consumerRecord) {
+ LOGGER.trace("handle:: Handling record {}", consumerRecord);
+ Event event;
+ try {
+ event = DatabindCodec.mapper().readValue(consumerRecord.value(), Event.class);
+ } catch (IOException e) {
+ LOGGER.error("Something happened when deserializing record", e);
+ return;
+ }
+ RecordCollection recordCollection = Json.decodeValue(event.getEventPayload(), RecordCollection.class);
-    List<KafkaHeader> kafkaHeaders = record.headers();
+    List<KafkaHeader> kafkaHeaders = consumerRecord.headers();
OkapiConnectionParams okapiConnectionParams = new OkapiConnectionParams(KafkaHeaderUtils.kafkaHeadersToMap(kafkaHeaders), vertx);
String jobExecutionId = okapiConnectionParams.getHeaders().get(JOB_EXECUTION_ID_HEADER);
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/InitAPIImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/InitAPIImpl.java
index 3607fd906..6c4205e9a 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/InitAPIImpl.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/InitAPIImpl.java
@@ -8,25 +8,20 @@
import io.vertx.core.Promise;
import io.vertx.core.Vertx;
import io.vertx.core.spi.VerticleFactory;
+import java.util.List;
+import java.util.OptionalInt;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.folio.config.ApplicationConfig;
+import org.folio.kafka.KafkaConfig;
import org.folio.okapi.common.GenericCompositeFuture;
import org.folio.processing.events.EventManager;
+import org.folio.processing.events.services.handler.EventHandler;
import org.folio.rest.resource.interfaces.InitAPI;
-import org.folio.services.handlers.AuthorityPostProcessingEventHandler;
-import org.folio.services.handlers.HoldingsPostProcessingEventHandler;
-import org.folio.services.handlers.InstancePostProcessingEventHandler;
-import org.folio.services.handlers.actions.MarcAuthorityDeleteEventHandler;
-import org.folio.services.handlers.actions.MarcAuthorityUpdateModifyEventHandler;
-import org.folio.services.handlers.actions.MarcBibUpdateModifyEventHandler;
-import org.folio.services.handlers.actions.MarcHoldingsUpdateModifyEventHandler;
-import org.folio.services.handlers.match.MarcAuthorityMatchEventHandler;
-import org.folio.services.handlers.match.MarcBibliographicMatchEventHandler;
-import org.folio.services.handlers.match.MarcHoldingsMatchEventHandler;
import org.folio.spring.SpringContextUtil;
import org.folio.verticle.MarcIndexersVersionDeletionVerticle;
import org.folio.verticle.SpringVerticleFactory;
+import org.folio.verticle.consumers.AuthorityDomainConsumersVerticle;
import org.folio.verticle.consumers.AuthorityLinkChunkConsumersVerticle;
import org.folio.verticle.consumers.DataImportConsumersVerticle;
import org.folio.verticle.consumers.ParsedRecordChunkConsumersVerticle;
@@ -35,42 +30,16 @@
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.support.AbstractApplicationContext;
-import java.util.List;
-
public class InitAPIImpl implements InitAPI {
private static final String SPRING_CONTEXT = "springContext";
private static final Logger LOGGER = LogManager.getLogger();
@Autowired
- private InstancePostProcessingEventHandler instancePostProcessingEventHandler;
-
- @Autowired
- private HoldingsPostProcessingEventHandler holdingsPostProcessingEventHandler;
-
- @Autowired
- private AuthorityPostProcessingEventHandler authorityPostProcessingEventHandler;
-
- @Autowired
- private MarcBibUpdateModifyEventHandler marcBibUpdateModifyEventHandler;
-
- @Autowired
- private MarcAuthorityUpdateModifyEventHandler marcAuthorityUpdateModifyEventHandler;
-
- @Autowired
- private MarcBibliographicMatchEventHandler marcBibliographicMatchEventHandler;
-
- @Autowired
- private MarcAuthorityMatchEventHandler marcAuthorityMatchEventHandler;
+ private KafkaConfig kafkaConfig;
@Autowired
- private MarcAuthorityDeleteEventHandler marcAuthorityDeleteEventHandler;
-
- @Autowired
- private MarcHoldingsMatchEventHandler marcHoldingsMatchEventHandler;
-
- @Autowired
- private MarcHoldingsUpdateModifyEventHandler marcHoldingsUpdateModifyEventHandler;
+  private List<EventHandler> eventHandlers;
@Value("${srs.kafka.ParsedMarcChunkConsumer.instancesNumber:1}")
private int parsedMarcChunkConsumerInstancesNumber;
@@ -84,6 +53,12 @@ public class InitAPIImpl implements InitAPI {
@Value("${srs.kafka.AuthorityLinkChunkConsumer.instancesNumber:1}")
private int authorityLinkChunkConsumerInstancesNumber;
+ @Value("${srs.kafka.AuthorityDomainConsumer.instancesNumber:1}")
+ private int authorityDomainConsumerInstancesNumber;
+
+ @Value("${srs.kafka.DataImportConsumerVerticle.maxDistributionNum:100}")
+ private int maxDistributionNumber;
+
@Override
  public void init(Vertx vertx, Context context, Handler<AsyncResult<Boolean>> handler) {
try {
@@ -93,9 +68,11 @@ public void init(Vertx vertx, Context context, Handler<AsyncResult<Boolean>> handler) {
VerticleFactory verticleFactory = springContext.getBean(SpringVerticleFactory.class);
vertx.registerVerticleFactory(verticleFactory);
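+      // Register EventManager's Kafka event publisher so processing results are published
+      // through Kafka; maxDistributionNumber is read from the
+      // srs.kafka.DataImportConsumerVerticle.maxDistributionNum property.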
+ EventManager.registerKafkaEventPublisher(kafkaConfig, vertx, maxDistributionNumber);
+
registerEventHandlers();
deployMarcIndexersVersionDeletionVerticle(vertx, verticleFactory);
- deployConsumerVerticles(vertx).onComplete(ar -> {
+ deployConsumerVerticles(vertx, verticleFactory).onComplete(ar -> {
if (ar.succeeded()) {
handler.handle(Future.succeededFuture(true));
} else {
@@ -109,56 +86,49 @@ public void init(Vertx vertx, Context context, Handler<AsyncResult<Boolean>> handler) {
}
private void registerEventHandlers() {
- EventManager.registerEventHandler(instancePostProcessingEventHandler);
- EventManager.registerEventHandler(holdingsPostProcessingEventHandler);
- EventManager.registerEventHandler(authorityPostProcessingEventHandler);
- EventManager.registerEventHandler(marcBibUpdateModifyEventHandler);
- EventManager.registerEventHandler(marcAuthorityUpdateModifyEventHandler);
- EventManager.registerEventHandler(marcBibliographicMatchEventHandler);
- EventManager.registerEventHandler(marcAuthorityMatchEventHandler);
- EventManager.registerEventHandler(marcAuthorityDeleteEventHandler);
- EventManager.registerEventHandler(marcHoldingsMatchEventHandler) ;
- EventManager.registerEventHandler(marcHoldingsUpdateModifyEventHandler);
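+    // Every EventHandler bean Spring discovers is registered in bulk, replacing the
+    // individually autowired handler fields removed above.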
+ eventHandlers.forEach(EventManager::registerEventHandler);
}
-  private Future<CompositeFuture> deployConsumerVerticles(Vertx vertx) {
- //TODO: get rid of this workaround with global spring context
- ParsedRecordChunkConsumersVerticle.setSpringGlobalContext(vertx.getOrCreateContext().get(SPRING_CONTEXT));
- DataImportConsumersVerticle.setSpringGlobalContext(vertx.getOrCreateContext().get(SPRING_CONTEXT));
- QuickMarcConsumersVerticle.setSpringGlobalContext(vertx.getOrCreateContext().get(SPRING_CONTEXT));
- AuthorityLinkChunkConsumersVerticle.setSpringGlobalContext(vertx.getOrCreateContext().get(SPRING_CONTEXT));
-
+  private Future<CompositeFuture> deployConsumerVerticles(Vertx vertx, VerticleFactory verticleFactory) {
    Promise<String> deployConsumer1 = Promise.promise();
    Promise<String> deployConsumer2 = Promise.promise();
    Promise<String> deployConsumer3 = Promise.promise();
    Promise<String> deployConsumer4 = Promise.promise();
-
- vertx.deployVerticle(ParsedRecordChunkConsumersVerticle.class.getCanonicalName(),
- new DeploymentOptions().setWorker(true).setInstances(parsedMarcChunkConsumerInstancesNumber), deployConsumer1);
-
- vertx.deployVerticle(DataImportConsumersVerticle.class.getCanonicalName(),
- new DeploymentOptions().setWorker(true).setInstances(dataImportConsumerInstancesNumber), deployConsumer2);
-
- vertx.deployVerticle(QuickMarcConsumersVerticle.class.getCanonicalName(),
- new DeploymentOptions().setWorker(true).setInstances(quickMarcConsumerInstancesNumber), deployConsumer3);
-
- vertx.deployVerticle(AuthorityLinkChunkConsumersVerticle.class.getCanonicalName(),
- new DeploymentOptions().setWorker(true).setInstances(authorityLinkChunkConsumerInstancesNumber), deployConsumer4);
+    Promise<String> deployConsumer5 = Promise.promise();
+
+ deployVerticle(vertx, verticleFactory, AuthorityLinkChunkConsumersVerticle.class,
+ OptionalInt.of(authorityLinkChunkConsumerInstancesNumber), deployConsumer1);
+ deployVerticle(vertx, verticleFactory, AuthorityDomainConsumersVerticle.class,
+ OptionalInt.of(authorityDomainConsumerInstancesNumber), deployConsumer2);
+ deployVerticle(vertx, verticleFactory, DataImportConsumersVerticle.class,
+ OptionalInt.of(dataImportConsumerInstancesNumber), deployConsumer3);
+ deployVerticle(vertx, verticleFactory, ParsedRecordChunkConsumersVerticle.class,
+ OptionalInt.of(parsedMarcChunkConsumerInstancesNumber), deployConsumer4);
+ deployVerticle(vertx, verticleFactory, QuickMarcConsumersVerticle.class,
+ OptionalInt.of(quickMarcConsumerInstancesNumber), deployConsumer5);
return GenericCompositeFuture.all(List.of(
deployConsumer1.future(),
deployConsumer2.future(),
deployConsumer3.future(),
- deployConsumer4.future()));
+ deployConsumer4.future(),
+ deployConsumer5.future()
+ ));
}
  private String getVerticleName(VerticleFactory verticleFactory, Class<?> clazz) {
return verticleFactory.prefix() + ":" + clazz.getName();
}
- private void deployMarcIndexersVersionDeletionVerticle(Vertx vertx, VerticleFactory verticleFactory){
- vertx.deployVerticle(getVerticleName(verticleFactory, MarcIndexersVersionDeletionVerticle.class),
+ private void deployMarcIndexersVersionDeletionVerticle(Vertx vertx, VerticleFactory verticleFactory) {
+    vertx.deployVerticle(getVerticleName(verticleFactory, (Class<?>) MarcIndexersVersionDeletionVerticle.class),
new DeploymentOptions().setWorker(true));
}
+  private void deployVerticle(Vertx vertx, VerticleFactory verticleFactory, Class<?> verticleClass,
+                              OptionalInt instancesNumber, Promise<String> promise) {
+ vertx.deployVerticle(getVerticleName(verticleFactory, verticleClass),
+ new DeploymentOptions().setWorker(true).setInstances(instancesNumber.orElse(1)), promise);
+ }
+
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageRecordsImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageRecordsImpl.java
index 8469b3d19..70bd30db8 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageRecordsImpl.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageRecordsImpl.java
@@ -98,6 +98,21 @@ public void putSourceStorageRecordsById(String id, Record entity, Map<String, String> okapiHeaders,
+  @Override
+  public void putSourceStorageRecordsGenerationById(String matchedId, Record entity, Map<String, String> okapiHeaders,
+      Handler<AsyncResult<Response>> asyncResultHandler, Context vertxContext) {
+ vertxContext.runOnContext(v -> {
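+      // Backs the new PUT /source-storage/records/{id}/generation endpoint from the
+      // module descriptor: the incoming record is saved as a new generation of the
+      // record identified by the matched id.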
+ try {
+ recordService.updateRecordGeneration(matchedId, entity, tenantId)
+ .map(updated -> PutSourceStorageRecordsGenerationByIdResponse.respond200WithApplicationJson(entity))
+ .map(Response.class::cast).otherwise(ExceptionHelper::mapExceptionToResponse)
+ .onComplete(asyncResultHandler);
+ } catch (Exception e) {
+ LOG.warn("putSourceStorageRecordsGenerationById:: Failed to update record generation by matchedId {}", matchedId, e);
+ asyncResultHandler.handle(Future.succeededFuture(ExceptionHelper.mapExceptionToResponse(e)));
+ }
+ });
+ }
@Override
  public void deleteSourceStorageRecordsById(String id, Map<String, String> okapiHeaders,
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java
index fd39a9b3b..8b7cf1899 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java
@@ -89,6 +89,16 @@ public interface RecordService {
*/
  Future<Record> updateRecord(Record record, String tenantId);
+ /**
+ * Updates record generation with given matched id
+ *
+ * @param matchedId matched id
+ * @param record record to update
+ * @param tenantId tenant id
+ * @return future with updated Record generation
+ */
+  Future<Record> updateRecordGeneration(String matchedId, Record record, String tenantId);
+
/**
* Searches for {@link SourceRecord} by {@link Condition} and ordered by order fields with offset and limit
*
@@ -202,6 +212,15 @@ public interface RecordService {
*/
  Future<Boolean> deleteRecordsBySnapshotId(String snapshotId, String tenantId);
+ /**
+ * Deletes records by external id
+ *
+ * @param externalId external id
+ * @param tenantId tenant id
+   * @return future that completes when the records are deleted
+   */
+  Future<Void> deleteRecordsByExternalId(String externalId, String tenantId);
+
/**
* Creates new updated Record with incremented generation linked to a new Snapshot, and sets OLD status to the "old" Record,
* no data is deleted as a result of the update
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java
index 65f0da721..505ffed11 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java
@@ -16,12 +16,10 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
-import java.util.EnumMap;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
-import java.util.function.BiFunction;
import java.util.stream.Collectors;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.NotFoundException;
@@ -75,6 +73,9 @@ public class RecordServiceImpl implements RecordService {
private final RecordDao recordDao;
private static final String DUPLICATE_CONSTRAINT = "idx_records_matched_id_gen";
private static final String DUPLICATE_RECORD_MSG = "Incoming file may contain duplicates";
+ private static final String MATCHED_ID_NOT_EQUAL_TO_999_FIELD = "Matched id (%s) not equal to 999ff$s (%s) field";
+ private static final String RECORD_WITH_GIVEN_MATCHED_ID_NOT_FOUND = "Record with given matched id (%s) not found";
+  public static final String UPDATE_RECORD_DUPLICATE_EXCEPTION = "Incoming record could be a duplicate: the incoming record generation should not equal the matched record generation, and the job should be started after the previous record generation was created";
public static final char SUBFIELD_S = 's';
public static final char INDICATOR = 'f';
@@ -154,6 +155,25 @@ public Future<Record> updateRecord(Record record, String tenantId) {
return recordDao.updateRecord(ensureRecordForeignKeys(record), tenantId);
}
+ @Override
+  public Future<Record> updateRecordGeneration(String matchedId, Record record, String tenantId) {
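+    // The incoming record must carry the addressed matched id in its 999 ff $s field;
+    // otherwise the generation update is rejected as a bad request.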
+ String marcField999s = getFieldFromMarcRecord(record, TAG_999, INDICATOR, INDICATOR, SUBFIELD_S);
+ if (!matchedId.equals(marcField999s)) {
+ return Future.failedFuture(new BadRequestException(format(MATCHED_ID_NOT_EQUAL_TO_999_FIELD, matchedId, marcField999s)));
+ }
+ record.setId(UUID.randomUUID().toString());
+
+ return recordDao.getRecordById(matchedId, tenantId)
+ .map(r -> r.orElseThrow(() -> new NotFoundException(format(RECORD_WITH_GIVEN_MATCHED_ID_NOT_FOUND, matchedId))))
+ .compose(v -> saveRecord(record, tenantId))
+ .recover(throwable -> {
+ if (throwable instanceof DuplicateRecordException) {
+ return Future.failedFuture(new BadRequestException(UPDATE_RECORD_DUPLICATE_EXCEPTION));
+ }
+ return Future.failedFuture(throwable);
+ });
+ }
+
@Override
  public Future<SourceRecordCollection> getSourceRecords(Condition condition, RecordType recordType, Collection<OrderField<?>> orderFields,
int offset, int limit, String tenantId) {
@@ -236,6 +256,11 @@ public Future<Boolean> deleteRecordsBySnapshotId(String snapshotId, String tenantId) {
return recordDao.deleteRecordsBySnapshotId(snapshotId, tenantId);
}
+ @Override
+  public Future<Void> deleteRecordsByExternalId(String externalId, String tenantId) {
+ return recordDao.deleteRecordsByExternalId(externalId, tenantId).map(b -> null);
+ }
+
@Override
  public Future<Record> updateSourceRecord(ParsedRecordDto parsedRecordDto, String snapshotId, String tenantId) {
String newRecordId = UUID.randomUUID().toString();
@@ -292,7 +317,12 @@ private Future<Record> setMatchedIdForRecord(Record record, String tenantId) {
// Set matched id same as record id
promise.complete(record.withMatchedId(record.getId()));
}
- return promise.future().onSuccess(r -> addFieldToMarcRecord(r, TAG_999, SUBFIELD_S, r.getMatchedId()));
+
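+    // EDIFACT records have no MARC structure, so the 999 ff $s matched-id field is only
+    // added for MARC record types.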
+ return promise.future().onSuccess(r -> {
+ if (record.getRecordType() != null && !record.getRecordType().equals(Record.RecordType.EDIFACT)) {
+ addFieldToMarcRecord(r, TAG_999, SUBFIELD_S, r.getMatchedId());
+ }
+ });
}
  private void setMatchedIdFromExistingSourceRecord(Record record, String tenantId, Promise<Record> promise, String externalId, IdType idType) {
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/SnapshotService.java b/mod-source-record-storage-server/src/main/java/org/folio/services/SnapshotService.java
index 591aaa608..6f096d283 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/SnapshotService.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/SnapshotService.java
@@ -15,7 +15,7 @@ public interface SnapshotService {
/**
* Searches for {@link Snapshot} by {@link Condition} and ordered by collection of {@link OrderField}
* with offset and limit
- *
+ *
* @param condition query where condition
* @param orderFields fields to order by
* @param offset starting index in a list of results
@@ -62,4 +62,14 @@ Future<SnapshotCollection> getSnapshots(Condition condition, Collection<OrderField<?>> orderFields, int offset, int limit, String tenantId);
   Future<Boolean> deleteSnapshot(String id, String tenantId);
-}
\ No newline at end of file
+ /**
+ * Copy {@link Snapshot} to other tenant
+ *
+ * @param snapshotId Snapshot id
+ * @param sourceTenantId source tenant id
+ * @param targetTenantId target tenant id
+ * @return future with copied Snapshot
+ */
+  Future<Snapshot> copySnapshotToOtherTenant(String snapshotId, String sourceTenantId, String targetTenantId);
+
+}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/SnapshotServiceImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/services/SnapshotServiceImpl.java
index 9088f7a0a..91e737639 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/SnapshotServiceImpl.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/SnapshotServiceImpl.java
@@ -13,6 +13,11 @@
import io.vertx.core.Future;
+import javax.ws.rs.NotFoundException;
+
+import static java.lang.String.format;
+import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_FOUND_TEMPLATE;
+
@Service
public class SnapshotServiceImpl implements SnapshotService {
@@ -49,4 +54,11 @@ public Future deleteSnapshot(String id, String tenantId) {
return snapshotDao.deleteSnapshot(id, tenantId);
}
-}
\ No newline at end of file
+ @Override
+  public Future<Snapshot> copySnapshotToOtherTenant(String snapshotId, String sourceTenantId, String targetTenantId) {
+ return snapshotDao.getSnapshotById(snapshotId, sourceTenantId)
+ .map(optionalSnapshot -> optionalSnapshot
+ .orElseThrow(() -> new NotFoundException(format(SNAPSHOT_NOT_FOUND_TEMPLATE, snapshotId))))
+ .compose(sourceSnapshot -> snapshotDao.saveSnapshot(sourceSnapshot, targetTenantId));
+ }
+}
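
The copy flow reads the snapshot from the source tenant's schema and writes it unchanged into the target tenant's. A minimal sketch under the assumption of a reduced DAO interface; `SnapshotDao` below is hypothetical and a `String` stands in for the Snapshot entity:

```java
// Sketch of the copy-snapshot flow; SnapshotDao is a stand-in, not the module's real DAO.
import io.vertx.core.Future;
import java.util.Optional;

interface SnapshotDao {
  Future<Optional<String>> getSnapshotById(String snapshotId, String tenantId);
  Future<String> saveSnapshot(String snapshot, String tenantId);
}

class CopySnapshotSketch {
  static Future<String> copy(SnapshotDao dao, String snapshotId, String sourceTenant, String targetTenant) {
    return dao.getSnapshotById(snapshotId, sourceTenant)
      .map(opt -> opt.orElseThrow(() -> new IllegalStateException("Snapshot not found: " + snapshotId)))
      // The real implementation saves the source snapshot unchanged into the target tenant's schema.
      .compose(snapshot -> dao.saveSnapshot(snapshot, targetTenant));
  }
}
```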
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/TenantDataProvider.java b/mod-source-record-storage-server/src/main/java/org/folio/services/TenantDataProvider.java
index 2711e4c45..662887872 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/TenantDataProvider.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/TenantDataProvider.java
@@ -9,9 +9,9 @@
*/
public interface TenantDataProvider {
/**
- * Gets all module tenants.
+ * Gets all module tenants where the given database table exists.
*
* @return tenant ids
*/
- Future<List<String>> getModuleTenants();
+ Future<List<String>> getModuleTenants(String table);
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/TenantDataProviderImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/services/TenantDataProviderImpl.java
index 68d57b7e5..f1320e27d 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/TenantDataProviderImpl.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/TenantDataProviderImpl.java
@@ -1,10 +1,10 @@
package org.folio.services;
import io.vertx.core.Future;
-import io.vertx.core.Promise;
import io.vertx.core.Vertx;
import io.vertx.sqlclient.Row;
-import io.vertx.sqlclient.RowSet;
+import io.vertx.sqlclient.Tuple;
+
import org.folio.rest.persist.PostgresClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@@ -15,6 +15,7 @@
@Service
public class TenantDataProviderImpl implements TenantDataProvider {
+ private static final String SUFFIX = "_mod_source_record_storage";
private Vertx vertx;
@Autowired
@@ -23,22 +24,21 @@ public TenantDataProviderImpl(Vertx vertx) {
}
@Override
- public Future<List<String>> getModuleTenants() {
+ public Future<List<String>> getModuleTenants(String table) {
PostgresClient pgClient = PostgresClient.getInstance(vertx);
- Promise<RowSet<Row>> promise = Promise.promise();
- String tenantQuery = "select nspname from pg_catalog.pg_namespace where nspname LIKE '%_mod_source_record_storage';";
- pgClient.select(tenantQuery, promise);
- return promise.future()
- .map(rowSet -> StreamSupport.stream(rowSet.spliterator(), false)
- .map(this::mapToTenant)
- .collect(Collectors.toList())
- );
+ String tenantQuery = """
+ select schemaname from pg_catalog.pg_tables
+ where schemaname LIKE $1 and tablename = $2
+ """;
+ return pgClient.execute(tenantQuery, Tuple.of("%" + SUFFIX, table))
+ .map(rowSet -> StreamSupport.stream(rowSet.spliterator(), false)
+ .map(this::mapToTenant)
+ .collect(Collectors.toList()));
}
private String mapToTenant(Row row) {
- String nsTenant = row.getString("nspname");
- String suffix = "_mod_source_record_storage";
- int tenantNameLength = nsTenant.length() - suffix.length();
- return nsTenant.substring(0, tenantNameLength);
+ String schemaname = row.getString("schemaname");
+ int tenantNameLength = schemaname.length() - SUFFIX.length();
+ return schemaname.substring(0, tenantNameLength);
}
}
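
The rewritten query is a prepared statement against pg_catalog.pg_tables, so a tenant is only returned when the given table actually exists in its schema (the verticle further below passes marc_records_tracking). A runnable sketch of the same lookup, assuming a plain vertx-pg-client Pool instead of FOLIO's PostgresClient:

```java
// Standalone sketch of the parameterized tenant lookup; table and suffix mirror the diff above.
import io.vertx.core.Future;
import io.vertx.sqlclient.Pool;
import io.vertx.sqlclient.Tuple;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

class TenantQuerySketch {
  private static final String SUFFIX = "_mod_source_record_storage";

  static Future<List<String>> getModuleTenants(Pool pool, String table) {
    String sql = """
      select schemaname from pg_catalog.pg_tables
      where schemaname LIKE $1 and tablename = $2
      """;
    return pool.preparedQuery(sql)
      .execute(Tuple.of("%" + SUFFIX, table))
      .map(rows -> StreamSupport.stream(rows.spliterator(), false)
        .map(row -> row.getString("schemaname"))
        // Strip the schema suffix to recover the bare tenant id.
        .map(schema -> schema.substring(0, schema.length() - SUFFIX.length()))
        .collect(Collectors.toList()));
  }
}
```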
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/caches/ConsortiumConfigurationCache.java b/mod-source-record-storage-server/src/main/java/org/folio/services/caches/ConsortiumConfigurationCache.java
new file mode 100644
index 000000000..645532b94
--- /dev/null
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/caches/ConsortiumConfigurationCache.java
@@ -0,0 +1,84 @@
+package org.folio.services.caches;
+
+import com.github.benmanes.caffeine.cache.AsyncCache;
+import com.github.benmanes.caffeine.cache.Caffeine;
+import io.vertx.core.Future;
+import io.vertx.core.Vertx;
+import io.vertx.core.http.HttpMethod;
+import io.vertx.core.json.JsonArray;
+import org.apache.http.HttpStatus;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.folio.dataimport.util.OkapiConnectionParams;
+import org.folio.dataimport.util.RestUtil;
+import org.folio.services.entities.ConsortiumConfiguration;
+import org.folio.services.exceptions.CacheLoadingException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Cache for {@link ConsortiumConfiguration}
+ */
+@Component
+public class ConsortiumConfigurationCache {
+ private static final Logger LOGGER = LogManager.getLogger();
+ private static final String USER_TENANTS_ENDPOINT = "/user-tenants?limit=1";
+
+ @Value("${srs.consortium-configuration-cache.expiration.time.seconds:3600}")
+ private long cacheExpirationTime;
+  private AsyncCache<String, Optional<ConsortiumConfiguration>> cache;
+
+ @Autowired
+ public ConsortiumConfigurationCache(Vertx vertx) {
+ cache = Caffeine.newBuilder()
+ .expireAfterAccess(cacheExpirationTime, TimeUnit.SECONDS)
+ .executor(task -> vertx.runOnContext(v -> task.run()))
+ .buildAsync();
+ }
+
+ /**
+ * Searches for {@link ConsortiumConfiguration} cache by tenant id
+ *
+ * @param params okapi connection parameters
+ * @return future with optional {@link ConsortiumConfiguration}
+ */
+  public Future<Optional<ConsortiumConfiguration>> get(OkapiConnectionParams params) {
+ try {
+ return Future.fromCompletionStage(cache.get(params.getTenantId(), (key, executor) -> loadConsortiumConfiguration(params)));
+ } catch (Exception e) {
+ LOGGER.warn("get:: Error loading ConsortiumConfiguration by id: '{}'", params.getTenantId(), e);
+ return Future.failedFuture(e);
+ }
+ }
+
+  private CompletableFuture<? extends Optional<ConsortiumConfiguration>> loadConsortiumConfiguration(OkapiConnectionParams params) {
+ LOGGER.debug("loadConsortiumConfiguration:: Trying to load consortiumConfiguration by tenantId '{}' for cache, okapi url: {}, tenantId: {}", params.getTenantId(), params.getOkapiUrl(), params.getTenantId());
+
+ return RestUtil.doRequest(params, USER_TENANTS_ENDPOINT, HttpMethod.GET, null)
+ .toCompletionStage()
+ .toCompletableFuture()
+ .thenCompose(httpResponse -> {
+ if (httpResponse.getResponse().statusCode() == HttpStatus.SC_OK) {
+ JsonArray userTenants = httpResponse.getJson().getJsonArray("userTenants");
+ if (userTenants.isEmpty()) {
+ LOGGER.debug("loadConsortiumConfiguration:: consortiumConfiguration was not found for tenantId '{}'", params.getTenantId());
+ return CompletableFuture.completedFuture(Optional.empty());
+ }
+ String centralTenantId = userTenants.getJsonObject(0).getString("centralTenantId");
+ String consortiumId = userTenants.getJsonObject(0).getString("consortiumId");
+ LOGGER.debug("loadConsortiumConfiguration:: Found centralTenantId: '{}' and consortiumId: '{}' was loaded for tenantId: '{}'", centralTenantId, consortiumId, params.getTenantId());
+ return CompletableFuture.completedFuture(Optional.of(new ConsortiumConfiguration(centralTenantId, consortiumId)));
+ } else {
+ String message = String.format("Error loading consortiumConfiguration by tenantId: '%s', status code: %s, response message: %s",
+ params.getTenantId(), httpResponse.getResponse().statusCode(), httpResponse.getBody());
+ LOGGER.warn(String.format("loadConsortiumConfiguration:: %s", message));
+ return CompletableFuture.failedFuture(new CacheLoadingException(message));
+ }
+ });
+ }
+}
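
The cache delegates loading to a Caffeine AsyncCache whose callbacks are pinned to the Vert.x context, so no work hops onto ForkJoinPool threads. A minimal self-contained sketch of that wiring; the loader below is a placeholder, not the real /user-tenants call:

```java
// Minimal sketch of an AsyncCache whose callbacks run on the Vert.x event loop.
import com.github.benmanes.caffeine.cache.AsyncCache;
import com.github.benmanes.caffeine.cache.Caffeine;
import io.vertx.core.Future;
import io.vertx.core.Vertx;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

class VertxAsyncCacheSketch {
  private final AsyncCache<String, Optional<String>> cache;

  VertxAsyncCacheSketch(Vertx vertx) {
    cache = Caffeine.newBuilder()
      .expireAfterAccess(3600, TimeUnit.SECONDS)
      // Run cache callbacks on the Vert.x context instead of ForkJoinPool.
      .executor(task -> vertx.runOnContext(v -> task.run()))
      .buildAsync();
  }

  Future<Optional<String>> get(String tenantId) {
    return Future.fromCompletionStage(
      cache.get(tenantId, (key, executor) -> load(key)));
  }

  private CompletableFuture<Optional<String>> load(String tenantId) {
    // Placeholder loader; the real cache queries /user-tenants?limit=1 via Okapi.
    return CompletableFuture.completedFuture(Optional.empty());
  }
}
```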
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/cleanup/RecordCleanupService.java b/mod-source-record-storage-server/src/main/java/org/folio/services/cleanup/RecordCleanupService.java
deleted file mode 100644
index 99a7062cc..000000000
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/cleanup/RecordCleanupService.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package org.folio.services.cleanup;
-
-import io.vertx.core.Vertx;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.folio.dao.RecordDao;
-import org.folio.dao.util.TenantUtil;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.scheduling.annotation.Scheduled;
-import org.springframework.stereotype.Service;
-
-@Service
-public class RecordCleanupService {
- private static final Logger LOGGER = LogManager.getLogger();
- private final Vertx vertx;
- private final RecordDao recordDao;
- private final int lastUpdatedDays;
- private final int limit;
-
- public RecordCleanupService(@Value("${srs.cleanup.last.updated.days:7}") int lastUpdatedDays,
- @Value("${srs.cleanup.limit:100}") int limit,
- @Autowired Vertx vertx,
- @Autowired RecordDao recordDao) {
- this.vertx = vertx;
- this.recordDao = recordDao;
- this.lastUpdatedDays = lastUpdatedDays;
- this.limit = limit;
- }
-
- /**
- * The method is getting run by the Spring Framework's Scheduler, at 12 am (midnight) every day by default.
- * The execution starts automatically after the ApplicationContext setup.
- * The schedule is defined by the cron expression, which allows defining time to run in a fixed format.
- */
- @Scheduled(cron = "${srs.cleanup.cron.expression:0 0 0 * * ?}")
- public void cleanup() {
- LOGGER.info("cleanup:: Starting records cleanup job");
- TenantUtil.getModuleTenants(vertx)
- .onFailure(throwable -> LOGGER.warn("cleanup:: Failed to retrieve tenants available for the module, cause: {}", throwable.getMessage()))
- .onSuccess(tenants -> {
- for (String tenantId : tenants) {
- recordDao.deleteRecords(lastUpdatedDays, limit, tenantId)
- .onFailure(throwable -> LOGGER.warn("cleanup:: Failed to delete records, tenant: {}, cause: {}", tenantId, throwable.getMessage()))
- .onSuccess(ar -> LOGGER.info("cleanup:: Records has been successfully deleted, tenant: {}", tenantId));
- }
- });
- }
-}
-
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/entities/ConsortiumConfiguration.java b/mod-source-record-storage-server/src/main/java/org/folio/services/entities/ConsortiumConfiguration.java
new file mode 100644
index 000000000..ea8899828
--- /dev/null
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/entities/ConsortiumConfiguration.java
@@ -0,0 +1,31 @@
+package org.folio.services.entities;
+
+/**
+ * Entity that contains configuration data for the consortium process.
+ * Consists of centralTenantId and consortiumId.
+ */
+public class ConsortiumConfiguration {
+ private String centralTenantId;
+ private String consortiumId;
+
+ public ConsortiumConfiguration(String centralTenantId, String consortiumId) {
+ this.centralTenantId = centralTenantId;
+ this.consortiumId = consortiumId;
+ }
+
+ public String getCentralTenantId() {
+ return centralTenantId;
+ }
+
+ public void setCentralTenantId(String centralTenantId) {
+ this.centralTenantId = centralTenantId;
+ }
+
+ public String getConsortiumId() {
+ return consortiumId;
+ }
+
+ public void setConsortiumId(String consortiumId) {
+ this.consortiumId = consortiumId;
+ }
+}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java
index 3b16ce42f..091d3314d 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java
@@ -23,7 +23,9 @@
import org.folio.rest.jaxrs.model.EntityType;
import org.folio.rest.jaxrs.model.ExternalIdsHolder;
import org.folio.rest.jaxrs.model.Record;
+import org.folio.rest.jaxrs.model.Snapshot;
import org.folio.services.RecordService;
+import org.folio.services.SnapshotService;
import org.folio.services.caches.MappingParametersSnapshotCache;
import org.folio.services.exceptions.PostProcessingException;
import org.folio.services.util.TypeConnection;
@@ -72,14 +74,19 @@ public abstract class AbstractPostProcessingEventHandler implements EventHandler
private static final String FAILED_UPDATE_STATE_MSG = "Error during update records state to OLD";
private static final String ID_FIELD = "id";
public static final String POST_PROCESSING_INDICATOR = "POST_PROCESSING";
+ public static final String CENTRAL_TENANT_INSTANCE_UPDATED_FLAG = "CENTRAL_TENANT_INSTANCE_UPDATED";
+ public static final String CENTRAL_TENANT_ID = "CENTRAL_TENANT_ID";
private final KafkaConfig kafkaConfig;
private final MappingParametersSnapshotCache mappingParamsCache;
private final Vertx vertx;
private final RecordService recordService;
+ private final SnapshotService snapshotService;
- protected AbstractPostProcessingEventHandler(RecordService recordService, KafkaConfig kafkaConfig,
+
+ protected AbstractPostProcessingEventHandler(RecordService recordService, SnapshotService snapshotService, KafkaConfig kafkaConfig,
MappingParametersSnapshotCache mappingParamsCache, Vertx vertx) {
this.recordService = recordService;
+ this.snapshotService = snapshotService;
this.kafkaConfig = kafkaConfig;
this.mappingParamsCache = mappingParamsCache;
this.vertx = vertx;
@@ -95,7 +102,12 @@ public CompletableFuture handle(DataImportEventPayload d
.compose(parametersOptional -> parametersOptional
.map(mappingParams -> prepareRecord(dataImportEventPayload, mappingParams))
.orElse(Future.failedFuture(format(MAPPING_PARAMS_NOT_FOUND_MSG, jobExecutionId))))
- .compose(record -> saveRecord(record, dataImportEventPayload.getTenant()))
+ .compose(record -> {
+ if (centralTenantOperationExists(dataImportEventPayload)) {
+ return saveRecordForCentralTenant(dataImportEventPayload, record, jobExecutionId);
+ }
+ return saveRecord(record, dataImportEventPayload.getTenant());
+ })
.onSuccess(record -> {
sendReplyEvent(dataImportEventPayload, record);
sendAdditionalEvent(dataImportEventPayload, record);
@@ -124,6 +136,7 @@ public boolean isEligible(DataImportEventPayload dataImportEventPayload) {
}
protected abstract void sendAdditionalEvent(DataImportEventPayload dataImportEventPayload, Record record);
+
protected abstract String getNextEventType(DataImportEventPayload dataImportEventPayload);
protected String getEventKey() {
@@ -316,4 +329,22 @@ private void sendReplyEvent(DataImportEventPayload dataImportEventPayload, Recor
}
}
+ private static boolean centralTenantOperationExists(DataImportEventPayload dataImportEventPayload) {
+ return dataImportEventPayload.getContext().get(CENTRAL_TENANT_INSTANCE_UPDATED_FLAG) != null &&
+ dataImportEventPayload.getContext().get(CENTRAL_TENANT_INSTANCE_UPDATED_FLAG).equals("true");
+ }
+
+  private Future<Record> saveRecordForCentralTenant(DataImportEventPayload dataImportEventPayload, Record
+ record, String jobExecutionId) {
+ String centralTenantId = dataImportEventPayload.getContext().get(CENTRAL_TENANT_ID);
+ dataImportEventPayload.getContext().remove(CENTRAL_TENANT_INSTANCE_UPDATED_FLAG);
+ LOG.info("handle:: Processing AbstractPostProcessingEventHandler - saving record by jobExecutionId: {} for the central tenantId: {}", jobExecutionId, centralTenantId);
+ if (centralTenantId != null) {
+ return snapshotService.copySnapshotToOtherTenant(record.getSnapshotId(), dataImportEventPayload.getTenant(), centralTenantId)
+ .compose(f -> saveRecord(record, centralTenantId));
+ }
+ else {
+ return saveRecord(record, dataImportEventPayload.getTenant());
+ }
+ }
}
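
A compact illustration of the gating logic above: the CENTRAL_TENANT_INSTANCE_UPDATED flag selects the save tenant and is consumed in the process. The payload map and tenant ids below are illustrative stand-ins for DataImportEventPayload:

```java
// Simplified sketch of the central-tenant gating added to the post-processing handler.
import java.util.HashMap;
import java.util.Map;

class CentralTenantGateSketch {
  static final String CENTRAL_TENANT_INSTANCE_UPDATED_FLAG = "CENTRAL_TENANT_INSTANCE_UPDATED";
  static final String CENTRAL_TENANT_ID = "CENTRAL_TENANT_ID";

  static String resolveSaveTenant(Map<String, String> context, String localTenant) {
    boolean centralUpdate = "true".equals(context.get(CENTRAL_TENANT_INSTANCE_UPDATED_FLAG));
    context.remove(CENTRAL_TENANT_INSTANCE_UPDATED_FLAG);
    String centralTenantId = context.get(CENTRAL_TENANT_ID);
    // Save to the central tenant only when the flag was set and its id is present.
    return (centralUpdate && centralTenantId != null) ? centralTenantId : localTenant;
  }

  public static void main(String[] args) {
    Map<String, String> ctx = new HashMap<>();
    ctx.put(CENTRAL_TENANT_INSTANCE_UPDATED_FLAG, "true");
    ctx.put(CENTRAL_TENANT_ID, "consortium");
    System.out.println(resolveSaveTenant(ctx, "diku"));            // consortium
    System.out.println(resolveSaveTenant(new HashMap<>(), "diku")); // diku
  }
}
```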
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AuthorityPostProcessingEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AuthorityPostProcessingEventHandler.java
index 5b69ec3c9..3c36c2fda 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AuthorityPostProcessingEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AuthorityPostProcessingEventHandler.java
@@ -14,6 +14,7 @@
import io.vertx.core.json.JsonObject;
import io.vertx.kafka.client.producer.KafkaHeader;
import org.folio.services.RecordService;
+import org.folio.services.SnapshotService;
import org.springframework.stereotype.Component;
import org.folio.DataImportEventPayload;
@@ -30,10 +31,10 @@ public class AuthorityPostProcessingEventHandler extends AbstractPostProcessingE
private final KafkaConfig kafkaConfig;
- public AuthorityPostProcessingEventHandler(RecordService recordService, KafkaConfig kafkaConfig,
+ public AuthorityPostProcessingEventHandler(RecordService recordService, SnapshotService snapshotService, KafkaConfig kafkaConfig,
MappingParametersSnapshotCache mappingParamsCache,
Vertx vertx) {
- super(recordService, kafkaConfig, mappingParamsCache, vertx);
+ super(recordService, snapshotService, kafkaConfig, mappingParamsCache, vertx);
this.kafkaConfig = kafkaConfig;
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/HoldingsPostProcessingEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/HoldingsPostProcessingEventHandler.java
index 0f8f1720f..9db769e41 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/HoldingsPostProcessingEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/HoldingsPostProcessingEventHandler.java
@@ -6,6 +6,7 @@
import io.vertx.core.json.JsonObject;
import org.folio.services.RecordService;
+import org.folio.services.SnapshotService;
import org.folio.services.caches.MappingParametersSnapshotCache;
import org.springframework.stereotype.Component;
@@ -19,9 +20,9 @@
@Component
public class HoldingsPostProcessingEventHandler extends AbstractPostProcessingEventHandler {
- public HoldingsPostProcessingEventHandler(RecordService recordService, KafkaConfig kafkaConfig,
+ public HoldingsPostProcessingEventHandler(RecordService recordService, SnapshotService snapshotService, KafkaConfig kafkaConfig,
MappingParametersSnapshotCache mappingParametersCache, Vertx vertx) {
- super(recordService, kafkaConfig, mappingParametersCache, vertx);
+ super(recordService, snapshotService, kafkaConfig, mappingParametersCache, vertx);
}
@Override
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/InstancePostProcessingEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/InstancePostProcessingEventHandler.java
index fdff94bc0..ac64687bd 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/InstancePostProcessingEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/InstancePostProcessingEventHandler.java
@@ -15,6 +15,7 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.folio.services.RecordService;
+import org.folio.services.SnapshotService;
import org.folio.services.caches.MappingParametersSnapshotCache;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -36,9 +37,9 @@ public class InstancePostProcessingEventHandler extends AbstractPostProcessingEv
private final KafkaConfig kafkaConfig;
@Autowired
- public InstancePostProcessingEventHandler(RecordService recordService, KafkaConfig kafkaConfig,
+ public InstancePostProcessingEventHandler(RecordService recordService, SnapshotService snapshotService, KafkaConfig kafkaConfig,
MappingParametersSnapshotCache mappingParametersCache, Vertx vertx) {
- super(recordService, kafkaConfig, mappingParametersCache, vertx);
+ super(recordService, snapshotService, kafkaConfig, mappingParametersCache, vertx);
this.kafkaConfig = kafkaConfig;
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java
index 602468658..f3c8f29be 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java
@@ -23,6 +23,7 @@
import org.folio.rest.jaxrs.model.ProfileSnapshotWrapper;
import org.folio.rest.jaxrs.model.Record;
import org.folio.services.RecordService;
+import org.folio.services.SnapshotService;
import org.folio.services.caches.MappingParametersSnapshotCache;
import org.folio.services.util.RestUtil;
@@ -39,12 +40,14 @@
import static org.folio.ActionProfile.Action.MODIFY;
import static org.folio.ActionProfile.Action.UPDATE;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.ACTION_PROFILE;
+import static org.folio.services.handlers.match.AbstractMarcMatchEventHandler.CENTRAL_TENANT_ID;
import static org.folio.services.util.AdditionalFieldsUtil.HR_ID_FROM_FIELD;
import static org.folio.services.util.AdditionalFieldsUtil.addControlledFieldToMarcRecord;
import static org.folio.services.util.AdditionalFieldsUtil.fill035FieldInMarcRecordIfNotExists;
import static org.folio.services.util.AdditionalFieldsUtil.getValueFromControlledField;
import static org.folio.services.util.AdditionalFieldsUtil.remove003FieldIfNeeded;
import static org.folio.services.util.AdditionalFieldsUtil.remove035WithActualHrId;
+import static org.folio.services.util.AdditionalFieldsUtil.updateLatestTransactionDate;
public abstract class AbstractUpdateModifyEventHandler implements EventHandler {
@@ -55,12 +58,14 @@ public abstract class AbstractUpdateModifyEventHandler implements EventHandler {
private static final String MAPPING_PARAMETERS_NOT_FOUND_MSG = "MappingParameters snapshot was not found by jobExecutionId '%s'";
protected RecordService recordService;
+ protected SnapshotService snapshotService;
protected MappingParametersSnapshotCache mappingParametersCache;
protected Vertx vertx;
- protected AbstractUpdateModifyEventHandler(
- RecordService recordService, MappingParametersSnapshotCache mappingParametersCache, Vertx vertx) {
+ protected AbstractUpdateModifyEventHandler(RecordService recordService, SnapshotService snapshotService,
+ MappingParametersSnapshotCache mappingParametersCache, Vertx vertx) {
this.recordService = recordService;
+ this.snapshotService = snapshotService;
this.mappingParametersCache = mappingParametersCache;
this.vertx = vertx;
}
@@ -87,31 +92,38 @@ public CompletableFuture handle(DataImportEventPayload p
mappingParametersCache.get(payload.getJobExecutionId(), okapiParams)
.map(mapMappingParametersOrFail(format(MAPPING_PARAMETERS_NOT_FOUND_MSG, payload.getJobExecutionId())))
- .compose(mappingParameters -> modifyRecord(payload, mappingProfile, mappingParameters))
- .onSuccess(v -> prepareModificationResult(payload, marcMappingOption))
- .map(v -> Json.decodeValue(payloadContext.get(modifiedEntityType().value()), Record.class))
- .onSuccess(changedRecord -> {
- if (isHridFillingNeeded() || isUpdateOption(marcMappingOption)) {
- addControlledFieldToMarcRecord(changedRecord, HR_ID_FROM_FIELD, hrId, true);
-
- String changed001 = getValueFromControlledField(changedRecord, HR_ID_FROM_FIELD);
- if (StringUtils.isNotBlank(incoming001) && !incoming001.equals(changed001)) {
- fill035FieldInMarcRecordIfNotExists(changedRecord, incoming001);
- }
-
- remove035WithActualHrId(changedRecord, hrId);
- remove003FieldIfNeeded(changedRecord, hrId);
+ .compose(mappingParameters ->
+ modifyRecord(payload, mappingProfile, mappingParameters)
+ .onSuccess(v -> prepareModificationResult(payload, marcMappingOption))
+ .map(v -> Json.decodeValue(payloadContext.get(modifiedEntityType().value()), Record.class))
+ .onSuccess(changedRecord -> {
+ if (isHridFillingNeeded() || isUpdateOption(marcMappingOption)) {
+ addControlledFieldToMarcRecord(changedRecord, HR_ID_FROM_FIELD, hrId, true);
+
+ String changed001 = getValueFromControlledField(changedRecord, HR_ID_FROM_FIELD);
+ if (StringUtils.isNotBlank(incoming001) && !incoming001.equals(changed001)) {
+ fill035FieldInMarcRecordIfNotExists(changedRecord, incoming001);
+ }
+
+ remove035WithActualHrId(changedRecord, hrId);
+ remove003FieldIfNeeded(changedRecord, hrId);
+ }
+
+ increaseGeneration(changedRecord);
+ setUpdatedBy(changedRecord, userId);
+ updateLatestTransactionDate(changedRecord, mappingParameters);
+ payloadContext.put(modifiedEntityType().value(), Json.encode(changedRecord));
+ })
+ )
+ .compose(changedRecord -> {
+ String centralTenantId = payload.getContext().get(CENTRAL_TENANT_ID);
+ if (centralTenantId != null) {
+ return snapshotService.copySnapshotToOtherTenant(changedRecord.getSnapshotId(), payload.getTenant(), centralTenantId)
+ .compose(snapshot -> recordService.saveRecord(changedRecord, centralTenantId));
}
-
- increaseGeneration(changedRecord);
- setUpdatedBy(changedRecord, userId);
- payloadContext.put(modifiedEntityType().value(), Json.encode(changedRecord));
- })
- .compose(changedRecord -> recordService.saveRecord(changedRecord, payload.getTenant()))
- .onSuccess(savedRecord -> {
- payload.setEventType(getNextEventType());
- future.complete(payload);
+ return recordService.saveRecord(changedRecord, payload.getTenant());
})
+ .onSuccess(savedRecord -> submitSuccessfulEventType(payload, future, marcMappingOption))
.onFailure(throwable -> {
LOG.warn("handle:: Error while MARC record modifying", throwable);
future.completeExceptionally(throwable);
@@ -123,6 +135,11 @@ public CompletableFuture handle(DataImportEventPayload p
return future;
}
+ protected void submitSuccessfulEventType(DataImportEventPayload payload, CompletableFuture<DataImportEventPayload> future, MappingDetail.MarcMappingOption marcMappingOption) {
+ payload.setEventType(getUpdateEventType());
+ future.complete(payload);
+ }
+
@Override
public boolean isEligible(DataImportEventPayload payload) {
if (payload.getCurrentNode() != null && ACTION_PROFILE == payload.getCurrentNode().getContentType()) {
@@ -134,11 +151,11 @@ public boolean isEligible(DataImportEventPayload payload) {
protected abstract boolean isHridFillingNeeded();
- protected abstract String getNextEventType();
+ protected abstract String getUpdateEventType();
protected abstract EntityType modifiedEntityType();
- private MappingDetail.MarcMappingOption getMarcMappingOption(MappingProfile mappingProfile) {
+ protected MappingDetail.MarcMappingOption getMarcMappingOption(MappingProfile mappingProfile) {
return mappingProfile.getMappingDetails().getMarcMappingOption();
}
@@ -194,7 +211,7 @@ private boolean isUpdateOption(MappingDetail.MarcMappingOption marcMappingOption
return marcMappingOption == MappingDetail.MarcMappingOption.UPDATE;
}
- private MappingProfile retrieveMappingProfile(DataImportEventPayload dataImportEventPayload) {
+ protected MappingProfile retrieveMappingProfile(DataImportEventPayload dataImportEventPayload) {
ProfileSnapshotWrapper mappingProfileWrapper = dataImportEventPayload.getCurrentNode().getChildSnapshotWrappers().get(0);
return new JsonObject((Map) mappingProfileWrapper.getContent()).mapTo(MappingProfile.class);
}
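
The ordering in the new save branch matters: the snapshot is copied into the central tenant before the record that references it is saved there. A sketch of that decision under hypothetical reduced service interfaces:

```java
// Sketch of the save-target decision; Snapshots/Records stand in for SnapshotService/RecordService.
import io.vertx.core.Future;

interface Snapshots { Future<Void> copyToTenant(String snapshotId, String from, String to); }
interface Records { Future<String> save(String record, String tenantId); }

class CentralSaveSketch {
  static Future<String> save(Snapshots snapshots, Records records, String record,
                             String snapshotId, String localTenant, String centralTenantId) {
    if (centralTenantId != null) {
      // The snapshot must exist in the central tenant before the record referencing it is saved.
      return snapshots.copyToTenant(snapshotId, localTenant, centralTenantId)
        .compose(v -> records.save(record, centralTenantId));
    }
    return records.save(record, localTenant);
  }
}
```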
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcAuthorityUpdateModifyEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcAuthorityUpdateModifyEventHandler.java
index 2fd44b10d..2a2c4b123 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcAuthorityUpdateModifyEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcAuthorityUpdateModifyEventHandler.java
@@ -6,6 +6,7 @@
import io.vertx.core.Vertx;
+import org.folio.services.SnapshotService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -18,9 +19,10 @@ public class MarcAuthorityUpdateModifyEventHandler extends AbstractUpdateModifyE
@Autowired
public MarcAuthorityUpdateModifyEventHandler(RecordService recordService,
+ SnapshotService snapshotService,
MappingParametersSnapshotCache mappingParametersCache,
Vertx vertx) {
- super(recordService, mappingParametersCache, vertx);
+ super(recordService, snapshotService, mappingParametersCache, vertx);
}
@Override
@@ -39,7 +41,7 @@ public String getPostProcessingInitializationEventType() {
}
@Override
- protected String getNextEventType() {
+ protected String getUpdateEventType() {
return DI_SRS_MARC_AUTHORITY_RECORD_UPDATED.value();
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcBibUpdateModifyEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcBibUpdateModifyEventHandler.java
index 4ece55fe4..15f395639 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcBibUpdateModifyEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcBibUpdateModifyEventHandler.java
@@ -2,18 +2,29 @@
import static java.util.Objects.isNull;
import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.folio.ActionProfile.Action.MODIFY;
+import static org.folio.ActionProfile.Action.UPDATE;
+import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER;
+import static org.folio.dataimport.util.RestUtil.OKAPI_TOKEN_HEADER;
+import static org.folio.dataimport.util.RestUtil.OKAPI_URL_HEADER;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING;
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_UPDATED;
import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC;
+import static org.folio.services.handlers.match.AbstractMarcMatchEventHandler.CENTRAL_TENANT_ID;
import static org.folio.services.util.AdditionalFieldsUtil.isSubfieldExist;
import io.vertx.core.Future;
import io.vertx.core.Promise;
import io.vertx.core.Vertx;
+
import java.io.IOException;
import java.util.Collections;
import java.util.List;
+import java.util.Map;
import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.folio.DataImportEventPayload;
@@ -31,6 +42,7 @@
import org.folio.rest.jaxrs.model.MappingDetail;
import org.folio.rest.jaxrs.model.Record;
import org.folio.services.RecordService;
+import org.folio.services.SnapshotService;
import org.folio.services.caches.LinkingRulesCache;
import org.folio.services.caches.MappingParametersSnapshotCache;
import org.springframework.beans.factory.annotation.Autowired;
@@ -48,10 +60,11 @@ public class MarcBibUpdateModifyEventHandler extends AbstractUpdateModifyEventHa
@Autowired
public MarcBibUpdateModifyEventHandler(RecordService recordService,
+ SnapshotService snapshotService,
MappingParametersSnapshotCache mappingParametersCache,
Vertx vertx, InstanceLinkClient instanceLinkClient,
LinkingRulesCache linkingRulesCache) {
- super(recordService, mappingParametersCache, vertx);
+ super(recordService, snapshotService, mappingParametersCache, vertx);
this.instanceLinkClient = instanceLinkClient;
this.linkingRulesCache = linkingRulesCache;
}
@@ -72,7 +85,11 @@ protected boolean isHridFillingNeeded() {
}
@Override
- protected String getNextEventType() {
+ protected String getUpdateEventType() {
+ return DI_SRS_MARC_BIB_RECORD_UPDATED.value();
+ }
+
+ protected String getModifyEventType() {
return DI_SRS_MARC_BIB_RECORD_MODIFIED.value();
}
@@ -81,6 +98,17 @@ protected EntityType modifiedEntityType() {
return MARC_BIBLIOGRAPHIC;
}
+ @Override
+ protected void submitSuccessfulEventType(DataImportEventPayload payload, CompletableFuture<DataImportEventPayload> future, MappingDetail.MarcMappingOption marcMappingOption) {
+ if (marcMappingOption.value().equals(MODIFY.value())) {
+ payload.setEventType(getModifyEventType());
+ }
+ if (marcMappingOption.value().equals(UPDATE.value())) {
+ payload.setEventType(getUpdateEventType());
+ }
+ future.complete(payload);
+ }
+
@Override
protected Future modifyRecord(DataImportEventPayload dataImportEventPayload, MappingProfile mappingProfile,
MappingParameters mappingParameters) {
@@ -97,11 +125,20 @@ protected Future modifyRecord(DataImportEventPayload dataImportEventPayloa
}
var instanceId = matchedRecord.getExternalIdsHolder().getInstanceId();
var okapiParams = getOkapiParams(dataImportEventPayload);
+ var centralTenantId = dataImportEventPayload.getContext().get(CENTRAL_TENANT_ID);
+ if (centralTenantId != null) {
+ okapiParams = new OkapiConnectionParams(Map.of(
+ OKAPI_URL_HEADER, okapiParams.getOkapiUrl(),
+ OKAPI_TENANT_HEADER, centralTenantId,
+ OKAPI_TOKEN_HEADER, okapiParams.getToken()
+ ), vertx);
+ }
- return linkingRulesCache.get(okapiParams)
- .compose(linkingRuleDtos -> loadInstanceLink(matchedRecord, instanceId, okapiParams)
+ OkapiConnectionParams finalOkapiParams = okapiParams;
+ return linkingRulesCache.get(finalOkapiParams)
+ .compose(linkingRuleDtos -> loadInstanceLink(matchedRecord, instanceId, finalOkapiParams)
.compose(links -> modifyMarcBibRecord(dataImportEventPayload, mappingProfile, mappingParameters, links, linkingRuleDtos.orElse(Collections.emptyList())))
- .compose(links -> updateInstanceLinks(instanceId, links, okapiParams)));
+ .compose(links -> updateInstanceLinks(instanceId, links, finalOkapiParams)));
}
 private Future<Optional<InstanceLinkDtoCollection>> loadInstanceLink(Record oldRecord, String instanceId,
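
When the match happened on the central tenant, the subsequent linking-rules and instance-link calls in modifyRecord above must carry the central tenant header. A standalone sketch of that header swap; the header names and plain map are simplified stand-ins for OkapiConnectionParams:

```java
// Sketch of rebuilding connection parameters for the central tenant.
import java.util.HashMap;
import java.util.Map;

class CentralOkapiParamsSketch {
  static final String OKAPI_URL_HEADER = "x-okapi-url";
  static final String OKAPI_TENANT_HEADER = "x-okapi-tenant";
  static final String OKAPI_TOKEN_HEADER = "x-okapi-token";

  static Map<String, String> forTenant(Map<String, String> params, String centralTenantId) {
    if (centralTenantId == null) {
      return params; // no consortium match; keep the local tenant's headers
    }
    Map<String, String> central = new HashMap<>(params);
    // Same URL and token, but downstream calls now hit the central tenant.
    central.put(OKAPI_TENANT_HEADER, centralTenantId);
    return central;
  }
}
```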
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcHoldingsUpdateModifyEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcHoldingsUpdateModifyEventHandler.java
index 3340ac2d4..1abe83e39 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcHoldingsUpdateModifyEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/MarcHoldingsUpdateModifyEventHandler.java
@@ -5,6 +5,7 @@
import static org.folio.rest.jaxrs.model.EntityType.MARC_HOLDINGS;
import io.vertx.core.Vertx;
+import org.folio.services.SnapshotService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -17,9 +18,10 @@ public class MarcHoldingsUpdateModifyEventHandler extends AbstractUpdateModifyEv
@Autowired
public MarcHoldingsUpdateModifyEventHandler(RecordService recordService,
+ SnapshotService snapshotService,
MappingParametersSnapshotCache mappingParametersCache,
Vertx vertx) {
- super(recordService, mappingParametersCache, vertx);
+ super(recordService, snapshotService, mappingParametersCache, vertx);
}
@Override
@@ -38,7 +40,7 @@ public String getPostProcessingInitializationEventType() {
}
@Override
- protected String getNextEventType() {
+ protected String getUpdateEventType() {
return DI_SRS_MARC_HOLDINGS_RECORD_UPDATED.value();
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/AbstractMarcMatchEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/AbstractMarcMatchEventHandler.java
index 6389ede3f..d7b222c46 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/AbstractMarcMatchEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/AbstractMarcMatchEventHandler.java
@@ -1,8 +1,10 @@
package org.folio.services.handlers.match;
import io.vertx.core.Future;
+import io.vertx.core.Vertx;
import io.vertx.core.json.Json;
import io.vertx.core.json.JsonObject;
+import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -21,6 +23,8 @@
import org.folio.rest.jaxrs.model.ProfileSnapshotWrapper;
import org.folio.rest.jaxrs.model.Record;
import org.folio.rest.jaxrs.model.RecordCollection;
+import org.folio.services.caches.ConsortiumConfigurationCache;
+import org.folio.services.util.RestUtil;
import org.folio.services.util.TypeConnection;
import org.jooq.Condition;
@@ -30,6 +34,7 @@
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
+import java.util.stream.Stream;
import static java.lang.String.format;
import static org.apache.commons.lang3.StringUtils.isEmpty;
@@ -50,52 +55,95 @@ public abstract class AbstractMarcMatchEventHandler implements EventHandler {
private static final String CANNOT_FIND_RECORDS_ERROR_MESSAGE = "Can`t find records matching specified conditions";
private static final String MATCH_DETAIL_IS_NOT_VALID = "Match detail is not valid: %s";
private static final String USER_ID_HEADER = "userId";
+ public static final String CENTRAL_TENANT_ID = "CENTRAL_TENANT_ID";
private final TypeConnection typeConnection;
private final RecordDao recordDao;
private final DataImportEventTypes matchedEventType;
private final DataImportEventTypes notMatchedEventType;
+ private final ConsortiumConfigurationCache consortiumConfigurationCache;
+ private final Vertx vertx;
- protected AbstractMarcMatchEventHandler(TypeConnection typeConnection, RecordDao recordDao, DataImportEventTypes matchedEventType, DataImportEventTypes notMatchedEventType) {
+ protected AbstractMarcMatchEventHandler(TypeConnection typeConnection, RecordDao recordDao, DataImportEventTypes matchedEventType,
+ DataImportEventTypes notMatchedEventType, ConsortiumConfigurationCache consortiumConfigurationCache,
+ Vertx vertx) {
this.typeConnection = typeConnection;
this.recordDao = recordDao;
this.matchedEventType = matchedEventType;
this.notMatchedEventType = notMatchedEventType;
+ this.consortiumConfigurationCache = consortiumConfigurationCache;
+ this.vertx = vertx;
}
@Override
 public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload payload) {
- CompletableFuture<DataImportEventPayload> future = new CompletableFuture<>();
- HashMap<String, String> context = payload.getContext();
+ try {
+ HashMap<String, String> context = payload.getContext();
- if (context == null || context.isEmpty() || isEmpty(payload.getContext().get(typeConnection.getMarcType().value())) || Objects.isNull(payload.getCurrentNode()) || Objects.isNull(payload.getEventsChain())) {
- LOG.warn(PAYLOAD_HAS_NO_DATA_MSG);
- future.completeExceptionally(new EventProcessingException(PAYLOAD_HAS_NO_DATA_MSG));
- return future;
- }
- payload.getEventsChain().add(payload.getEventType());
- payload.setAdditionalProperty(USER_ID_HEADER, context.get(USER_ID_HEADER));
-
- String record = context.get(typeConnection.getMarcType().value());
- MatchDetail matchDetail = retrieveMatchDetail(payload);
- if (isValidMatchDetail(matchDetail)) {
- MatchField matchField = prepareMatchField(record, matchDetail);
- if (matchField.isDefaultField()) {
- processDefaultMatchField(matchField, payload.getTenant())
- .onSuccess(recordCollection -> processSucceededResult(recordCollection.getRecords(), payload, future))
- .onFailure(throwable -> future.completeExceptionally(new MatchingException(throwable)));
- } else {
- recordDao.getMatchedRecords(matchField, typeConnection, isNonNullExternalIdRequired(), 0, 2, payload.getTenant())
- .onSuccess(recordList -> processSucceededResult(recordList, payload, future))
- .onFailure(throwable -> future.completeExceptionally(new MatchingException(throwable)));
+ if (MapUtils.isEmpty(context) || isEmpty(payload.getContext().get(typeConnection.getMarcType().value())) || Objects.isNull(payload.getCurrentNode()) || Objects.isNull(payload.getEventsChain())) {
+ LOG.warn(PAYLOAD_HAS_NO_DATA_MSG);
+ return CompletableFuture.failedFuture(new EventProcessingException(PAYLOAD_HAS_NO_DATA_MSG));
+ }
+ payload.getEventsChain().add(payload.getEventType());
+ payload.setAdditionalProperty(USER_ID_HEADER, context.get(USER_ID_HEADER));
+
+ String record = context.get(typeConnection.getMarcType().value());
+ MatchDetail matchDetail = retrieveMatchDetail(payload);
+ if (isValidMatchDetail(matchDetail)) {
+ MatchField matchField = prepareMatchField(record, matchDetail);
+ return retrieveMarcRecords(matchField, payload.getTenant())
+ .compose(localMatchedRecords -> {
+ if (isConsortiumAvailable()) {
+ return matchCentralTenantIfNeededAndCombineWithLocalMatchedRecords(payload, matchField, localMatchedRecords);
+ }
+ return Future.succeededFuture(localMatchedRecords);
+ })
+ .compose(recordList -> processSucceededResult(recordList, payload))
+ .recover(throwable -> Future.failedFuture(mapToMatchException(throwable)))
+ .toCompletionStage().toCompletableFuture();
}
- } else {
constructError(payload, format(MATCH_DETAIL_IS_NOT_VALID, matchDetail));
- future.complete(payload);
+ return CompletableFuture.completedFuture(payload);
+ } catch (Exception e) {
+ LOG.warn("handle:: Error while processing event for MARC record matching", e);
+ return CompletableFuture.failedFuture(e);
}
- return future;
}
+ private Future<List<Record>> matchCentralTenantIfNeededAndCombineWithLocalMatchedRecords(DataImportEventPayload payload, MatchField matchField,
+ List<Record> recordList) {
+ return consortiumConfigurationCache.get(RestUtil.retrieveOkapiConnectionParams(payload, vertx))
+ .compose(consortiumConfigurationOptional -> {
+ if (consortiumConfigurationOptional.isPresent() && !consortiumConfigurationOptional.get().getCentralTenantId().equals(payload.getTenant())) {
+ LOG.debug("matchCentralTenantIfNeededAndCombineWithLocalMatchedRecords:: Matching on centralTenant with id: {}",
+ consortiumConfigurationOptional.get().getCentralTenantId());
+ return retrieveMarcRecords(matchField, consortiumConfigurationOptional.get().getCentralTenantId())
+ .onSuccess(result -> {
+ if (!result.isEmpty()) {
+ payload.getContext().put(CENTRAL_TENANT_ID, consortiumConfigurationOptional.get().getCentralTenantId());
+ }
+ })
+ .map(centralTenantResult -> Stream.concat(recordList.stream(), centralTenantResult.stream()).toList());
+ }
+ return Future.succeededFuture(recordList);
+ });
+ }
+
+ private static Throwable mapToMatchException(Throwable throwable) {
+ return throwable instanceof MatchingException ? throwable : new MatchingException(throwable);
+ }
+
+ private Future<List<Record>> retrieveMarcRecords(MatchField matchField, String tenant) {
+ if (matchField.isDefaultField()) {
+ LOG.debug("retrieveMarcRecords:: Process default field matching, matchField {}, tenant {}", matchField, tenant);
+ return processDefaultMatchField(matchField, tenant).map(RecordCollection::getRecords);
+ }
+ LOG.debug("retrieveMarcRecords:: Process matched field matching, matchField {}, tenant {}", matchField, tenant);
+ return recordDao.getMatchedRecords(matchField, typeConnection, isNonNullExternalIdRequired(), 0, 2, tenant);
+ }
+
+ abstract boolean isConsortiumAvailable();
+
/* Creates a {@link MatchField} from the given {@link MatchDetail} */
private MatchField prepareMatchField(String record, MatchDetail matchDetail) {
 List<Field> matchDetailFields = matchDetail.getExistingMatchExpression().getFields();
@@ -178,18 +226,20 @@ private MatchDetail retrieveMatchDetail(DataImportEventPayload dataImportEventPa
/**
* Prepares {@link DataImportEventPayload} for the further processing based on the number of retrieved records in {@link RecordCollection}
*/
- private Future<DataImportEventPayload> processSucceededResult(List<Record> records, DataImportEventPayload payload, CompletableFuture<DataImportEventPayload> future) {
+ private Future<DataImportEventPayload> processSucceededResult(List<Record> records, DataImportEventPayload payload) {
if (records.size() == 1) {
payload.setEventType(matchedEventType.toString());
payload.getContext().put(getMatchedMarcKey(), Json.encode(records.get(0)));
- future.complete(payload);
- } else if (records.size() > 1) {
+ LOG.debug("processSucceededResult:: Matched 1 record for tenant with id {}", payload.getTenant());
+ return Future.succeededFuture(payload);
+ }
+ if (records.size() > 1) {
constructError(payload, FOUND_MULTIPLE_RECORDS_ERROR_MESSAGE);
- future.completeExceptionally(new MatchingException(FOUND_MULTIPLE_RECORDS_ERROR_MESSAGE));
- } else {
- constructError(payload, CANNOT_FIND_RECORDS_ERROR_MESSAGE);
- future.complete(payload);
+ LOG.warn("processSucceededResult:: Matched multiple record for tenant with id {}", payload.getTenant());
+ return Future.failedFuture(new MatchingException(FOUND_MULTIPLE_RECORDS_ERROR_MESSAGE));
}
+ constructError(payload, CANNOT_FIND_RECORDS_ERROR_MESSAGE);
+ return Future.succeededFuture(payload);
}
/* Logic for processing errors */
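
Matching now runs locally first and, for consortium-enabled types, again on the central tenant, with the two result lists concatenated before the single-match check. A small sketch of the combination rule; plain strings stand in for Record:

```java
// Sketch of combining local and central-tenant match results.
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;

class CombineMatchesSketch {
  static List<String> combine(List<String> localMatches, Optional<String> centralTenantId,
                              String localTenant, List<String> centralMatches) {
    // Only consult the central tenant when one exists and we are not already on it.
    if (centralTenantId.isPresent() && !centralTenantId.get().equals(localTenant)) {
      return Stream.concat(localMatches.stream(), centralMatches.stream()).toList();
    }
    return localMatches;
  }

  public static void main(String[] args) {
    System.out.println(combine(List.of("local-1"), Optional.of("consortium"), "diku", List.of("central-1")));
    // [local-1, central-1]; more than one combined match fails the import with MatchingException
  }
}
```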
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcAuthorityMatchEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcAuthorityMatchEventHandler.java
index c76d4c8cd..fdf593a71 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcAuthorityMatchEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcAuthorityMatchEventHandler.java
@@ -3,6 +3,8 @@
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_MATCHED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_NOT_MATCHED;
+import io.vertx.core.Vertx;
+import org.folio.services.caches.ConsortiumConfigurationCache;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -16,9 +18,9 @@
public class MarcAuthorityMatchEventHandler extends AbstractMarcMatchEventHandler {
@Autowired
- public MarcAuthorityMatchEventHandler(RecordDao recordDao) {
+ public MarcAuthorityMatchEventHandler(RecordDao recordDao, ConsortiumConfigurationCache consortiumConfigurationCache, Vertx vertx) {
super(TypeConnection.MARC_AUTHORITY, recordDao, DI_SRS_MARC_AUTHORITY_RECORD_MATCHED,
- DI_SRS_MARC_AUTHORITY_RECORD_NOT_MATCHED);
+ DI_SRS_MARC_AUTHORITY_RECORD_NOT_MATCHED, consortiumConfigurationCache, vertx);
}
@Override
@@ -26,4 +28,8 @@ public boolean isPostProcessingNeeded() {
return false;
}
+ @Override
+ boolean isConsortiumAvailable() {
+ return false;
+ }
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcBibliographicMatchEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcBibliographicMatchEventHandler.java
index 18e1ea78d..a1b10db73 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcBibliographicMatchEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcBibliographicMatchEventHandler.java
@@ -4,6 +4,8 @@
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MATCHED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_NOT_MATCHED;
+import io.vertx.core.Vertx;
+import org.folio.services.caches.ConsortiumConfigurationCache;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -17,8 +19,9 @@
public class MarcBibliographicMatchEventHandler extends AbstractMarcMatchEventHandler {
@Autowired
- public MarcBibliographicMatchEventHandler(RecordDao recordDao) {
- super(TypeConnection.MARC_BIB, recordDao, DI_SRS_MARC_BIB_RECORD_MATCHED, DI_SRS_MARC_BIB_RECORD_NOT_MATCHED);
+ public MarcBibliographicMatchEventHandler(RecordDao recordDao, ConsortiumConfigurationCache consortiumConfigurationCache, Vertx vertx) {
+ super(TypeConnection.MARC_BIB, recordDao, DI_SRS_MARC_BIB_RECORD_MATCHED,
+ DI_SRS_MARC_BIB_RECORD_NOT_MATCHED, consortiumConfigurationCache, vertx);
}
@Override
@@ -31,6 +34,11 @@ public String getPostProcessingInitializationEventType() {
return DI_SRS_MARC_BIB_RECORD_MATCHED_READY_FOR_POST_PROCESSING.value();
}
+ @Override
+ boolean isConsortiumAvailable() {
+ return true;
+ }
+
@Override
protected boolean isNonNullExternalIdRequired() {
return true;
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcHoldingsMatchEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcHoldingsMatchEventHandler.java
index e941f0c56..0bdd6a98f 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcHoldingsMatchEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/match/MarcHoldingsMatchEventHandler.java
@@ -3,6 +3,8 @@
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_MATCHED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_NOT_MATCHED;
+import io.vertx.core.Vertx;
+import org.folio.services.caches.ConsortiumConfigurationCache;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -16,9 +18,9 @@
public class MarcHoldingsMatchEventHandler extends AbstractMarcMatchEventHandler {
@Autowired
- public MarcHoldingsMatchEventHandler(RecordDao recordDao) {
+ public MarcHoldingsMatchEventHandler(RecordDao recordDao, ConsortiumConfigurationCache consortiumConfigurationCache, Vertx vertx) {
super(TypeConnection.MARC_HOLDINGS, recordDao, DI_SRS_MARC_HOLDINGS_RECORD_MATCHED,
- DI_SRS_MARC_HOLDINGS_RECORD_NOT_MATCHED);
+ DI_SRS_MARC_HOLDINGS_RECORD_NOT_MATCHED, consortiumConfigurationCache, vertx);
}
@Override
@@ -26,4 +28,8 @@ public boolean isPostProcessingNeeded() {
return false;
}
+ @Override
+ boolean isConsortiumAvailable() {
+ return false;
+ }
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java
index 0f91792c7..e2a4bde97 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java
@@ -1,25 +1,25 @@
package org.folio.services.util;
-import java.util.List;
-import java.util.UUID;
-
import io.vertx.core.Future;
import io.vertx.core.Promise;
import io.vertx.core.Vertx;
-import io.vertx.core.json.Json;
import io.vertx.kafka.client.producer.KafkaHeader;
import io.vertx.kafka.client.producer.KafkaProducer;
import io.vertx.kafka.client.producer.KafkaProducerRecord;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
-
import org.folio.kafka.KafkaConfig;
import org.folio.kafka.KafkaTopicNameHelper;
+import org.folio.kafka.SimpleKafkaProducerManager;
+import org.folio.kafka.services.KafkaProducerRecordBuilder;
import org.folio.processing.events.utils.PomReaderUtil;
import org.folio.rest.jaxrs.model.Event;
import org.folio.rest.jaxrs.model.EventMetadata;
import org.folio.rest.tools.utils.ModuleName;
+import java.util.List;
+import java.util.UUID;
+
public final class EventHandlingUtil {
private static final Logger LOGGER = LogManager.getLogger();
@@ -41,9 +41,8 @@ private EventHandlingUtil() { }
*/
 public static Future<Boolean> sendEventToKafka(String tenantId, String eventPayload, String eventType,
 List<KafkaHeader> kafkaHeaders, KafkaConfig kafkaConfig, String key) {
- KafkaProducerRecord<String, String> producerRecord;
-
- producerRecord = createProducerRecord(eventPayload, eventType, key, tenantId, kafkaHeaders, kafkaConfig);
+ KafkaProducerRecord<String, String> producerRecord =
+ createProducerRecord(eventPayload, eventType, key, tenantId, kafkaHeaders, kafkaConfig);
 Promise<Boolean> promise = Promise.promise();
@@ -52,17 +51,18 @@ public static Future sendEventToKafka(String tenantId, String eventPayl
String producerName = eventType + "_Producer";
var producer = createProducer(eventType, kafkaConfig);
- producer.write(producerRecord, war -> {
- producer.end(ear -> producer.close());
- if (war.succeeded()) {
+ producer.send(producerRecord)
+ .mapEmpty()
+ .eventually(x -> producer.close())
+ .onSuccess(res -> {
LOGGER.info("sendEventToKafka:: Event with type {} and recordId {} with chunkId: {} was sent to kafka", eventType, recordId, chunkId);
promise.complete(true);
- } else {
- Throwable cause = war.cause();
- LOGGER.warn("sendEventToKafka:: {} write error for event {} with recordId {} with chunkId: {}, cause:", producerName, eventType, recordId, chunkId, cause);
+ })
+ .onFailure(err -> {
+ Throwable cause = err.getCause();
+ LOGGER.warn("sendEventToKafka:: {} write error for event {} with recordId {} with chunkId: {}, cause:", producerName, eventType, recordId, chunkId, cause);
promise.fail(cause);
- }
- });
+ });
return promise.future();
}
@@ -80,8 +80,12 @@ public static KafkaProducerRecord createProducerRecord(String ev
.withPublishedBy(constructModuleName()));
String topicName = createTopicName(eventType, tenantId, kafkaConfig);
+ var producerRecord = new KafkaProducerRecordBuilder<String, Object>(tenantId)
+ .key(key)
+ .value(event)
+ .topic(topicName)
+ .build();
- KafkaProducerRecord<String, String> producerRecord = KafkaProducerRecord.create(topicName, key, Json.encode(event));
producerRecord.addHeaders(kafkaHeaders);
return producerRecord;
}
@@ -90,6 +94,7 @@ public static String constructModuleName() {
return PomReaderUtil.INSTANCE.constructModuleVersionAndVersion(ModuleName.getModuleName(),
ModuleName.getModuleVersion());
}
+
public static String createTopicName(String eventType, String tenantId, KafkaConfig kafkaConfig) {
return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), KafkaTopicNameHelper.getDefaultNameSpace(),
tenantId, eventType);
@@ -100,8 +105,7 @@ public static String createSubscriptionPattern(String env, String eventType) {
}
 public static KafkaProducer<String, String> createProducer(String eventType, KafkaConfig kafkaConfig) {
- String producerName = eventType + "_Producer";
- return KafkaProducer.createShared(Vertx.currentContext().owner(), producerName, kafkaConfig.getProducerProps());
+ return new SimpleKafkaProducerManager(Vertx.currentContext().owner(), kafkaConfig).createShared(eventType);
}
 private static String extractRecordId(List<KafkaHeader> kafkaHeaders) {
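
The producer is now closed via the Future chain regardless of outcome, replacing the callback-style write/end/close sequence. A minimal sketch of the same send-then-close pattern with vertx-kafka-client; topic, key, and value are illustrative:

```java
// Sketch of the send-then-close pattern adopted in sendEventToKafka above.
import io.vertx.core.Future;
import io.vertx.kafka.client.producer.KafkaProducer;
import io.vertx.kafka.client.producer.KafkaProducerRecord;

class ProducerSendSketch {
  static Future<Void> send(KafkaProducer<String, String> producer, String topic, String key, String payload) {
    KafkaProducerRecord<String, String> producerRecord = KafkaProducerRecord.create(topic, key, payload);
    return producer.send(producerRecord)
      .<Void>mapEmpty()
      // Close the producer whether the send succeeded or failed.
      .eventually(v -> producer.close());
  }
}
```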
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/util/KafkaUtil.java b/mod-source-record-storage-server/src/main/java/org/folio/services/util/KafkaUtil.java
index 48fe3b7fb..69ca83a72 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/util/KafkaUtil.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/util/KafkaUtil.java
@@ -21,4 +21,17 @@ public static String extractHeaderValue(String key, List headers) {
.map(header -> header.value().toString())
.orElse(null);
}
+
+ /**
+ * Checks whether a specified key-value pair exists within a list of Kafka headers.
+ *
+ * @param key The key to search for within the Kafka headers.
+ * @param value The value associated with the key to search for within the Kafka headers.
+ * @param headers The list of KafkaHeader objects to search through.
+ * @return {@code true} if the key-value pair is found in the headers, {@code false} otherwise.
+ */
+ public static boolean headerExists(String key, String value, List<KafkaHeader> headers) {
+ return headers.stream()
+ .anyMatch(kafkaHeader -> key.equals(kafkaHeader.key()) && value.equals(kafkaHeader.value().toString()));
+ }
}
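
A usage sketch for the new headerExists helper; the header list is built with the vertx-kafka-client KafkaHeader factory, and the key/value pair is illustrative:

```java
// Usage sketch for a headerExists-style check over Kafka headers.
import io.vertx.kafka.client.producer.KafkaHeader;
import java.util.List;

class HeaderExistsSketch {
  static boolean headerExists(String key, String value, List<KafkaHeader> headers) {
    return headers.stream()
      .anyMatch(h -> key.equals(h.key()) && value.equals(h.value().toString()));
  }

  public static void main(String[] args) {
    List<KafkaHeader> headers = List.of(KafkaHeader.header("x-okapi-tenant", "diku"));
    System.out.println(headerExists("x-okapi-tenant", "diku", headers)); // true
  }
}
```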
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/verticle/MarcIndexersVersionDeletionVerticle.java b/mod-source-record-storage-server/src/main/java/org/folio/verticle/MarcIndexersVersionDeletionVerticle.java
index a649a70bf..9cf82724e 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/verticle/MarcIndexersVersionDeletionVerticle.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/verticle/MarcIndexersVersionDeletionVerticle.java
@@ -57,7 +57,7 @@ public void start(Promise startFuture) {
Future deleteOldMarcIndexerVersions() {
LOGGER.info("Performing marc_indexers old versions deletion...");
long startTime = System.nanoTime();
- return tenantDataProvider.getModuleTenants()
+ return tenantDataProvider.getModuleTenants("marc_records_tracking")
.onFailure(ar ->
LOGGER.error("could not get the list of tenants to delete marc indexer versions", ar.getCause()))
.compose(ar -> {
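
The extra table-name argument is the MODSOURCE-722 fix: the cleanup job should only consider tenants whose schema actually contains `marc_records_tracking`. A hedged sketch of the existence probe such a filter presumably boils down to; the helper below is hypothetical, not the actual `TenantDataProvider` code:

```java
// Hypothetical helper: builds an information_schema probe that reports whether
// a tenant schema contains the given table. The real filtering lives in
// TenantDataProvider; names here are the caller's responsibility.
static String tableExistsQuery(String schema, String table) {
  return "SELECT EXISTS (SELECT 1 FROM information_schema.tables"
    + " WHERE table_schema = '" + schema + "' AND table_name = '" + table + "')";
}
```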
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/AbstractConsumerVerticle.java b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/AbstractConsumerVerticle.java
new file mode 100644
index 000000000..eb2d8658e
--- /dev/null
+++ b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/AbstractConsumerVerticle.java
@@ -0,0 +1,107 @@
+package org.folio.verticle.consumers;
+
+import static org.folio.kafka.KafkaTopicNameHelper.createSubscriptionDefinition;
+import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace;
+import static org.folio.services.util.EventHandlingUtil.constructModuleName;
+import static org.folio.services.util.EventHandlingUtil.createSubscriptionPattern;
+
+import io.vertx.core.AbstractVerticle;
+import io.vertx.core.Future;
+import io.vertx.core.Promise;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import org.folio.kafka.AsyncRecordHandler;
+import org.folio.kafka.GlobalLoadSensor;
+import org.folio.kafka.KafkaConfig;
+import org.folio.kafka.KafkaConsumerWrapper;
+import org.folio.kafka.ProcessRecordErrorHandler;
+import org.folio.kafka.SubscriptionDefinition;
+import org.folio.okapi.common.GenericCompositeFuture;
+
+public abstract class AbstractConsumerVerticle extends AbstractVerticle {
+
+ private final List<KafkaConsumerWrapper<String, String>> consumers = new ArrayList<>();
+
+ private final KafkaConfig kafkaConfig;
+
+ protected AbstractConsumerVerticle(KafkaConfig kafkaConfig) {
+ this.kafkaConfig = kafkaConfig;
+ }
+
+ @Override
+ public void start(Promise<Void> startPromise) {
+ KafkaConfig config;
+ if (getDeserializerClass() != null) {
+ config = kafkaConfig.toBuilder()
+ .consumerValueDeserializerClass(getDeserializerClass())
+ .build();
+ } else {
+ config = kafkaConfig;
+ }
+ eventTypes().forEach(eventType -> {
+ SubscriptionDefinition subscriptionDefinition = getSubscriptionDefinition(eventType);
+ consumers.add(KafkaConsumerWrapper.<String, String>builder()
+ .context(context)
+ .vertx(vertx)
+ .kafkaConfig(config)
+ .loadLimit(loadLimit())
+ .globalLoadSensor(new GlobalLoadSensor())
+ .subscriptionDefinition(subscriptionDefinition)
+ .processRecordErrorHandler(processRecordErrorHandler())
+ .build());
+ });
+
+ List<Future<Void>> futures = new ArrayList<>();
+ consumers.forEach(consumer -> futures.add(consumer.start(recordHandler(), getConsumerName())));
+
+ GenericCompositeFuture.all(futures).onComplete(ar -> startPromise.complete());
+ }
+
+ @Override
+ public void stop(Promise<Void> stopPromise) {
+ List<Future<Void>> futures = new ArrayList<>();
+ consumers.forEach(consumerWrapper -> futures.add(consumerWrapper.stop()));
+
+ GenericCompositeFuture.join(futures).onComplete(ar -> stopPromise.complete());
+ }
+
+ protected abstract int loadLimit();
+
+ protected Optional<String> namespace() {
+ return Optional.of(getDefaultNameSpace());
+ }
+
+ protected abstract AsyncRecordHandler<String, String> recordHandler();
+
+ protected ProcessRecordErrorHandler<String, String> processRecordErrorHandler() {
+ return null;
+ }
+
+ protected abstract List<String> eventTypes();
+
+ private SubscriptionDefinition getSubscriptionDefinition(String eventType) {
+ SubscriptionDefinition subscriptionDefinition;
+ var namespace = namespace();
+ if (namespace.isPresent()) {
+ subscriptionDefinition = createSubscriptionDefinition(kafkaConfig.getEnvId(), namespace.get(), eventType);
+ } else {
+ subscriptionDefinition = SubscriptionDefinition.builder()
+ .eventType(eventType)
+ .subscriptionPattern(createSubscriptionPattern(kafkaConfig.getEnvId(), eventType))
+ .build();
+ }
+ return subscriptionDefinition;
+ }
+
+ private String getConsumerName() {
+ return constructModuleName() + "_" + getClass().getSimpleName();
+ }
+
+ /**
+ * Returns the custom value deserializer class name for this Kafka consumer, or {@code null} to use the default.
+ */
+ public String getDeserializerClass() {
+ return null;
+ }
+}
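
The concrete verticles below all plug into this template the same way; for orientation before reading them, a minimal hypothetical subclass looks roughly like this (class, qualifier, and event names are invented for illustration):

```java
import static org.springframework.beans.factory.config.BeanDefinition.SCOPE_PROTOTYPE;

import java.util.List;
import org.folio.kafka.AsyncRecordHandler;
import org.folio.kafka.KafkaConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

// Hypothetical subclass of AbstractConsumerVerticle; every name is illustrative.
@Component
@Scope(SCOPE_PROTOTYPE)
public class ExampleConsumersVerticle extends AbstractConsumerVerticle {

  private final AsyncRecordHandler<String, String> handler;

  @Autowired
  protected ExampleConsumersVerticle(KafkaConfig kafkaConfig,
                                     @Qualifier("exampleKafkaHandler")
                                     AsyncRecordHandler<String, String> handler) {
    super(kafkaConfig);
    this.handler = handler;
  }

  @Override
  protected int loadLimit() {
    return 5; // illustrative default
  }

  @Override
  protected AsyncRecordHandler<String, String> recordHandler() {
    return handler;
  }

  @Override
  protected List<String> eventTypes() {
    return List.of("EXAMPLE_EVENT_TYPE");
  }
}
```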
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/AuthorityDomainConsumersVerticle.java b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/AuthorityDomainConsumersVerticle.java
new file mode 100644
index 000000000..d45f3f708
--- /dev/null
+++ b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/AuthorityDomainConsumersVerticle.java
@@ -0,0 +1,51 @@
+package org.folio.verticle.consumers;
+
+import static org.springframework.beans.factory.config.BeanDefinition.SCOPE_PROTOTYPE;
+
+import java.util.List;
+import java.util.Optional;
+import org.folio.AuthorityDomainKafkaTopic;
+import org.folio.consumers.AuthorityDomainKafkaHandler;
+import org.folio.kafka.AsyncRecordHandler;
+import org.folio.kafka.KafkaConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Scope;
+import org.springframework.stereotype.Component;
+
+@Component
+@Scope(SCOPE_PROTOTYPE)
+public class AuthorityDomainConsumersVerticle extends AbstractConsumerVerticle {
+
+ private final AuthorityDomainKafkaHandler authorityDomainKafkaHandler;
+
+ @Value("${srs.kafka.AuthorityDomainConsumer.loadLimit:10}")
+ private int authorityDomainLoadLimit;
+
+ @Autowired
+ protected AuthorityDomainConsumersVerticle(KafkaConfig kafkaConfig,
+ AuthorityDomainKafkaHandler authorityDomainKafkaHandler) {
+ super(kafkaConfig);
+ this.authorityDomainKafkaHandler = authorityDomainKafkaHandler;
+ }
+
+ @Override
+ protected int loadLimit() {
+ return authorityDomainLoadLimit;
+ }
+
+ @Override
+ protected Optional namespace() {
+ return Optional.empty();
+ }
+
+ @Override
+ protected AsyncRecordHandler<String, String> recordHandler() {
+ return authorityDomainKafkaHandler;
+ }
+
+ @Override
+ protected List<String> eventTypes() {
+ return List.of(AuthorityDomainKafkaTopic.AUTHORITY.moduleTopicName());
+ }
+}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/AuthorityLinkChunkConsumersVerticle.java b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/AuthorityLinkChunkConsumersVerticle.java
index 8f5cc4ac9..8094b3841 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/AuthorityLinkChunkConsumersVerticle.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/AuthorityLinkChunkConsumersVerticle.java
@@ -1,73 +1,50 @@
package org.folio.verticle.consumers;
import static org.folio.EntityLinksKafkaTopic.INSTANCE_AUTHORITY;
-import static org.folio.services.util.EventHandlingUtil.constructModuleName;
-import static org.folio.services.util.EventHandlingUtil.createSubscriptionPattern;
+import static org.springframework.beans.factory.config.BeanDefinition.SCOPE_PROTOTYPE;
-import io.vertx.core.AbstractVerticle;
-import io.vertx.core.Promise;
+import java.util.List;
+import java.util.Optional;
import org.folio.consumers.AuthorityLinkChunkKafkaHandler;
-import org.folio.kafka.GlobalLoadSensor;
+import org.folio.kafka.AsyncRecordHandler;
import org.folio.kafka.KafkaConfig;
-import org.folio.kafka.KafkaConsumerWrapper;
-import org.folio.kafka.SubscriptionDefinition;
-import org.folio.spring.SpringContextUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.support.AbstractApplicationContext;
+import org.springframework.context.annotation.Scope;
+import org.springframework.stereotype.Component;
-public class AuthorityLinkChunkConsumersVerticle extends AbstractVerticle {
- private static AbstractApplicationContext springGlobalContext;
+@Component
+@Scope(SCOPE_PROTOTYPE)
+public class AuthorityLinkChunkConsumersVerticle extends AbstractConsumerVerticle {
- private static final GlobalLoadSensor globalLoadSensor = new GlobalLoadSensor();
- public static final String AUTHORITY_INSTANCE_LINKS_TOPIC = INSTANCE_AUTHORITY.moduleTopicName();
-
- @Autowired
- private AuthorityLinkChunkKafkaHandler kafkaHandler;
-
- @Autowired
- private KafkaConfig kafkaConfig;
+ private final AuthorityLinkChunkKafkaHandler kafkaHandler;
@Value("${srs.kafka.AuthorityLinkChunkConsumer.loadLimit:2}")
private int loadLimit;
- private KafkaConsumerWrapper<String, String> consumer;
-
- /**
- * @deprecated need to be replaced with spring global context
- * */
- @Deprecated(forRemoval = false)
- public static void setSpringGlobalContext(AbstractApplicationContext springGlobalContext) {
- AuthorityLinkChunkConsumersVerticle.springGlobalContext = springGlobalContext;
+ @Autowired
+ public AuthorityLinkChunkConsumersVerticle(KafkaConfig kafkaConfig, AuthorityLinkChunkKafkaHandler kafkaHandler) {
+ super(kafkaConfig);
+ this.kafkaHandler = kafkaHandler;
}
@Override
- public void start(Promise startPromise) {
- context.put("springContext", springGlobalContext);
-
- SpringContextUtil.autowireDependencies(this, context);
-
- SubscriptionDefinition subscriptionDefinition = SubscriptionDefinition.builder()
- .eventType(AUTHORITY_INSTANCE_LINKS_TOPIC)
- .subscriptionPattern(createSubscriptionPattern(kafkaConfig.getEnvId(), AUTHORITY_INSTANCE_LINKS_TOPIC))
- .build();
-
- consumer = KafkaConsumerWrapper.<String, String>builder()
- .context(context)
- .vertx(vertx)
- .kafkaConfig(kafkaConfig)
- .loadLimit(loadLimit)
- .globalLoadSensor(globalLoadSensor)
- .subscriptionDefinition(subscriptionDefinition)
- .build();
+ protected int loadLimit() {
+ return loadLimit;
+ }
- consumer.start(kafkaHandler, constructModuleName() + "_" + getClass().getSimpleName())
- .onComplete(ar -> startPromise.complete());
+ @Override
+ protected Optional<String> namespace() {
+ return Optional.empty();
}
@Override
- public void stop(Promise stopPromise) {
- consumer.stop().onComplete(ar -> stopPromise.complete());
+ protected AsyncRecordHandler<String, String> recordHandler() {
+ return kafkaHandler;
}
+ @Override
+ protected List<String> eventTypes() {
+ return List.of(INSTANCE_AUTHORITY.moduleTopicName());
+ }
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/DataImportConsumersVerticle.java b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/DataImportConsumersVerticle.java
index 64491dc2b..cf09ac98e 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/DataImportConsumersVerticle.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/DataImportConsumersVerticle.java
@@ -1,35 +1,19 @@
package org.folio.verticle.consumers;
-import io.vertx.core.AbstractVerticle;
-import io.vertx.core.Future;
-import io.vertx.core.Promise;
-import org.folio.kafka.AsyncRecordHandler;
-import org.folio.kafka.GlobalLoadSensor;
-import org.folio.kafka.KafkaConfig;
-import org.folio.kafka.KafkaConsumerWrapper;
-import org.folio.kafka.KafkaTopicNameHelper;
-import org.folio.kafka.SubscriptionDefinition;
-import org.folio.okapi.common.GenericCompositeFuture;
-import org.folio.processing.events.EventManager;
-import org.folio.spring.SpringContextUtil;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.support.AbstractApplicationContext;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INCOMING_MARC_BIB_RECORD_PARSED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_AUTHORITY_CREATED_READY_FOR_POST_PROCESSING;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_AUTHORITY_UPDATED_READY_FOR_POST_PROCESSING;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_HOLDING_CREATED;
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_HOLDING_MATCHED;
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_HOLDING_UPDATED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING;
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_MATCHED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_ITEM_CREATED;
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_ITEM_MATCHED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_ITEM_UPDATED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_MARC_FOR_DELETE_RECEIVED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_MARC_FOR_UPDATE_RECEIVED;
@@ -40,28 +24,38 @@
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_NOT_MATCHED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_MATCHED;
-import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_MATCHED;
-import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_HOLDING_MATCHED;
-import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_ITEM_MATCHED;
-
-import static org.folio.services.util.EventHandlingUtil.constructModuleName;
+import static org.springframework.beans.factory.config.BeanDefinition.SCOPE_PROTOTYPE;
-public class DataImportConsumersVerticle extends AbstractVerticle {
- private static AbstractApplicationContext springGlobalContext;
+import java.util.Arrays;
+import java.util.List;
+import org.folio.kafka.AsyncRecordHandler;
+import org.folio.kafka.KafkaConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Scope;
+import org.springframework.stereotype.Component;
- private static final GlobalLoadSensor globalLoadSensor = new GlobalLoadSensor();
+@Component
+@Scope(SCOPE_PROTOTYPE)
+public class DataImportConsumersVerticle extends AbstractConsumerVerticle {
- private final List<String> events = Arrays.asList(
+ private static final List<String> EVENTS = Arrays.asList(
DI_INVENTORY_AUTHORITY_CREATED_READY_FOR_POST_PROCESSING.value(),
DI_INVENTORY_AUTHORITY_UPDATED_READY_FOR_POST_PROCESSING.value(),
DI_INVENTORY_HOLDING_CREATED.value(),
+ DI_INVENTORY_HOLDING_UPDATED.value(),
+ DI_INVENTORY_HOLDING_MATCHED.value(),
DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING.value(),
DI_INVENTORY_INSTANCE_CREATED.value(),
DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING.value(),
+ DI_INVENTORY_INSTANCE_MATCHED.value(),
DI_INVENTORY_INSTANCE_UPDATED.value(),
DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING.value(),
DI_INVENTORY_ITEM_CREATED.value(),
+ DI_INVENTORY_ITEM_MATCHED.value(),
DI_INVENTORY_ITEM_UPDATED.value(),
+ DI_MARC_FOR_DELETE_RECEIVED.value(),
DI_MARC_FOR_UPDATE_RECEIVED.value(),
DI_SRS_MARC_AUTHORITY_RECORD_CREATED.value(),
DI_SRS_MARC_AUTHORITY_RECORD_MATCHED.value(),
@@ -70,70 +64,40 @@ public class DataImportConsumersVerticle extends AbstractVerticle {
DI_SRS_MARC_BIB_RECORD_MODIFIED.value(),
DI_SRS_MARC_BIB_RECORD_NOT_MATCHED.value(),
DI_SRS_MARC_HOLDINGS_RECORD_MATCHED.value(),
- DI_MARC_FOR_DELETE_RECEIVED.value(),
- DI_INVENTORY_INSTANCE_MATCHED.value(),
- DI_INVENTORY_HOLDING_MATCHED.value(),
- DI_INVENTORY_ITEM_MATCHED.value()
+ DI_INCOMING_MARC_BIB_RECORD_PARSED.value()
);
- @Autowired
- @Qualifier("DataImportKafkaHandler")
- private AsyncRecordHandler<String, String> dataImportKafkaHandler;
-
- @Autowired
- private KafkaConfig kafkaConfig;
+ private final AsyncRecordHandler<String, String> dataImportKafkaHandler;
@Value("${srs.kafka.DataImportConsumer.loadLimit:5}")
private int loadLimit;
- @Value("${srs.kafka.DataImportConsumerVerticle.maxDistributionNum:100}")
- private int maxDistributionNumber;
-
- private List<KafkaConsumerWrapper<String, String>> consumerWrappersList = new ArrayList<>(events.size());
+ @Autowired
+ public DataImportConsumersVerticle(KafkaConfig kafkaConfig,
+ @Qualifier("DataImportKafkaHandler")
+ AsyncRecordHandler<String, String> dataImportKafkaHandler) {
+ super(kafkaConfig);
+ this.dataImportKafkaHandler = dataImportKafkaHandler;
+ }
@Override
- public void start(Promise startPromise) {
- context.put("springContext", springGlobalContext);
-
- SpringContextUtil.autowireDependencies(this, context);
-
- EventManager.registerKafkaEventPublisher(kafkaConfig, vertx, maxDistributionNumber);
-
- events.forEach(event -> {
- SubscriptionDefinition subscriptionDefinition = KafkaTopicNameHelper
- .createSubscriptionDefinition(kafkaConfig.getEnvId(),
- KafkaTopicNameHelper.getDefaultNameSpace(),
- event);
- consumerWrappersList.add(KafkaConsumerWrapper.<String, String>builder()
- .context(context)
- .vertx(vertx)
- .kafkaConfig(kafkaConfig)
- .loadLimit(loadLimit)
- .globalLoadSensor(globalLoadSensor)
- .subscriptionDefinition(subscriptionDefinition)
- .build());
- });
-
- List<Future<Void>> futures = new ArrayList<>();
- consumerWrappersList.forEach(consumerWrapper ->
- futures.add(consumerWrapper.start(dataImportKafkaHandler, constructModuleName() + "_" + getClass().getSimpleName())));
-
- GenericCompositeFuture.all(futures).onComplete(ar -> startPromise.complete());
+ protected int loadLimit() {
+ return loadLimit;
}
@Override
- public void stop(Promise stopPromise) {
- List<Future<Void>> futures = new ArrayList<>();
- consumerWrappersList.forEach(consumerWrapper ->
- futures.add(consumerWrapper.stop()));
+ protected AsyncRecordHandler<String, String> recordHandler() {
+ return dataImportKafkaHandler;
+ }
- GenericCompositeFuture.join(futures).onComplete(ar -> stopPromise.complete());
+ @Override
+ protected List<String> eventTypes() {
+ return EVENTS;
}
- //TODO: get rid of this workaround with global spring context
- @Deprecated
- public static void setSpringGlobalContext(AbstractApplicationContext springGlobalContext) {
- DataImportConsumersVerticle.springGlobalContext = springGlobalContext;
+ @Override
+ public String getDeserializerClass() {
+ return "org.apache.kafka.common.serialization.ByteArrayDeserializer";
}
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/ParsedRecordChunkConsumersVerticle.java b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/ParsedRecordChunkConsumersVerticle.java
index 366feb25e..05c1dccaa 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/ParsedRecordChunkConsumersVerticle.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/ParsedRecordChunkConsumersVerticle.java
@@ -1,81 +1,63 @@
package org.folio.verticle.consumers;
-import io.vertx.core.AbstractVerticle;
-import io.vertx.core.Promise;
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_RAW_RECORDS_CHUNK_PARSED;
+import static org.springframework.beans.factory.config.BeanDefinition.SCOPE_PROTOTYPE;
+
+import java.util.List;
import org.folio.kafka.AsyncRecordHandler;
-import org.folio.kafka.GlobalLoadSensor;
import org.folio.kafka.KafkaConfig;
-import org.folio.kafka.KafkaConsumerWrapper;
-import org.folio.kafka.KafkaTopicNameHelper;
-import org.folio.kafka.SubscriptionDefinition;
import org.folio.kafka.ProcessRecordErrorHandler;
-import org.folio.spring.SpringContextUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.support.AbstractApplicationContext;
+import org.springframework.context.annotation.Scope;
+import org.springframework.stereotype.Component;
-import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_RAW_RECORDS_CHUNK_PARSED;
-import static org.folio.services.util.EventHandlingUtil.constructModuleName;
+@Component
+@Scope(SCOPE_PROTOTYPE)
+public class ParsedRecordChunkConsumersVerticle extends AbstractConsumerVerticle {
-public class ParsedRecordChunkConsumersVerticle extends AbstractVerticle {
- //TODO: get rid of this workaround with global spring context
- private static AbstractApplicationContext springGlobalContext;
+ private final AsyncRecordHandler<String, String> parsedRecordChunksKafkaHandler;
- private static final GlobalLoadSensor globalLoadSensor = new GlobalLoadSensor();
-
- @Autowired
- @Qualifier("parsedRecordChunksKafkaHandler")
- private AsyncRecordHandler<String, String> parsedRecordChunksKafkaHandler;
-
- @Autowired
- @Qualifier("parsedRecordChunksErrorHandler")
- private ProcessRecordErrorHandler<String, String> parsedRecordChunksErrorHandler;
-
- @Autowired
- private KafkaConfig kafkaConfig;
+ private final ProcessRecordErrorHandler<String, String> parsedRecordChunksErrorHandler;
@Value("${srs.kafka.ParsedMarcChunkConsumer.loadLimit:5}")
private int loadLimit;
- private KafkaConsumerWrapper<String, String> consumerWrapper;
+ @Autowired
+ protected ParsedRecordChunkConsumersVerticle(KafkaConfig kafkaConfig,
+ @Qualifier("parsedRecordChunksKafkaHandler")
+ AsyncRecordHandler<String, String> parsedRecordChunksKafkaHandler,
+ @Qualifier("parsedRecordChunksErrorHandler")
+ ProcessRecordErrorHandler<String, String> parsedRecordChunksErrorHandler) {
+ super(kafkaConfig);
+ this.parsedRecordChunksKafkaHandler = parsedRecordChunksKafkaHandler;
+ this.parsedRecordChunksErrorHandler = parsedRecordChunksErrorHandler;
+ }
@Override
- public void start(Promise startPromise) {
- context.put("springContext", springGlobalContext);
-
- SpringContextUtil.autowireDependencies(this, context);
-
- SubscriptionDefinition subscriptionDefinition = KafkaTopicNameHelper.createSubscriptionDefinition(kafkaConfig.getEnvId(),
- KafkaTopicNameHelper.getDefaultNameSpace(), DI_RAW_RECORDS_CHUNK_PARSED.value());
+ protected ProcessRecordErrorHandler<String, String> processRecordErrorHandler() {
+ return parsedRecordChunksErrorHandler;
+ }
- consumerWrapper = KafkaConsumerWrapper.<String, String>builder()
- .context(context)
- .vertx(vertx)
- .kafkaConfig(kafkaConfig)
- .loadLimit(loadLimit)
- .globalLoadSensor(globalLoadSensor)
- .subscriptionDefinition(subscriptionDefinition)
- .processRecordErrorHandler(parsedRecordChunksErrorHandler)
- .build();
+ @Override
+ protected int loadLimit() {
+ return loadLimit;
+ }
- consumerWrapper.start(parsedRecordChunksKafkaHandler, constructModuleName() + "_" + getClass().getSimpleName()).onComplete(sar -> {
- if (sar.succeeded()) {
- startPromise.complete();
- } else {
- startPromise.fail(sar.cause());
- }
- });
+ @Override
+ protected AsyncRecordHandler<String, String> recordHandler() {
+ return parsedRecordChunksKafkaHandler;
}
@Override
- public void stop(Promise stopPromise) {
- consumerWrapper.stop().onComplete(ar -> stopPromise.complete());
+ protected List<String> eventTypes() {
+ return List.of(DI_RAW_RECORDS_CHUNK_PARSED.value());
}
- @Deprecated
- public static void setSpringGlobalContext(AbstractApplicationContext springGlobalContext) {
- ParsedRecordChunkConsumersVerticle.springGlobalContext = springGlobalContext;
+ @Override
+ public String getDeserializerClass() {
+ return "org.apache.kafka.common.serialization.ByteArrayDeserializer";
}
}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/QuickMarcConsumersVerticle.java b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/QuickMarcConsumersVerticle.java
index 5aeaf9ed2..7e3e61764 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/QuickMarcConsumersVerticle.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/verticle/consumers/QuickMarcConsumersVerticle.java
@@ -1,74 +1,44 @@
package org.folio.verticle.consumers;
-import io.vertx.core.AbstractVerticle;
-import io.vertx.core.Promise;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.support.AbstractApplicationContext;
+import static org.springframework.beans.factory.config.BeanDefinition.SCOPE_PROTOTYPE;
+import java.util.List;
+import org.folio.consumers.QuickMarcKafkaHandler;
import org.folio.dao.util.QMEventTypes;
-import org.folio.kafka.GlobalLoadSensor;
+import org.folio.kafka.AsyncRecordHandler;
import org.folio.kafka.KafkaConfig;
-import org.folio.kafka.KafkaConsumerWrapper;
-import org.folio.kafka.KafkaTopicNameHelper;
-import org.folio.kafka.SubscriptionDefinition;
-import org.folio.consumers.QuickMarcKafkaHandler;
-import org.folio.spring.SpringContextUtil;
-
-import static org.folio.services.util.EventHandlingUtil.constructModuleName;
-
-public class QuickMarcConsumersVerticle extends AbstractVerticle {
-
- private static final GlobalLoadSensor globalLoadSensor = new GlobalLoadSensor();
-
- private static AbstractApplicationContext springGlobalContext;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Scope;
+import org.springframework.stereotype.Component;
- @Autowired
- private QuickMarcKafkaHandler kafkaHandler;
+@Component
+@Scope(SCOPE_PROTOTYPE)
+public class QuickMarcConsumersVerticle extends AbstractConsumerVerticle {
- @Autowired
- private KafkaConfig kafkaConfig;
+ private final QuickMarcKafkaHandler kafkaHandler;
@Value("${srs.kafka.QuickMarcConsumer.loadLimit:5}")
private int loadLimit;
- @Value("${srs.kafka.QuickMarcConsumerVerticle.maxDistributionNum:100}")
- private int maxDistributionNumber;
-
- private KafkaConsumerWrapper<String, String> consumer;
-
- //TODO: get rid of this workaround with global spring context
- @Deprecated
- public static void setSpringGlobalContext(AbstractApplicationContext springGlobalContext) {
- QuickMarcConsumersVerticle.springGlobalContext = springGlobalContext;
+ @Autowired
+ protected QuickMarcConsumersVerticle(KafkaConfig kafkaConfig, QuickMarcKafkaHandler kafkaHandler) {
+ super(kafkaConfig);
+ this.kafkaHandler = kafkaHandler;
}
@Override
- public void start(Promise startPromise) {
- context.put("springContext", springGlobalContext);
-
- SpringContextUtil.autowireDependencies(this, context);
-
- SubscriptionDefinition subscriptionDefinition = KafkaTopicNameHelper
- .createSubscriptionDefinition(kafkaConfig.getEnvId(),
- KafkaTopicNameHelper.getDefaultNameSpace(),
- QMEventTypes.QM_RECORD_UPDATED.name());
-
- consumer = KafkaConsumerWrapper.<String, String>builder()
- .context(context)
- .vertx(vertx)
- .kafkaConfig(kafkaConfig)
- .loadLimit(loadLimit)
- .globalLoadSensor(globalLoadSensor)
- .subscriptionDefinition(subscriptionDefinition)
- .build();
+ protected int loadLimit() {
+ return loadLimit;
+ }
- consumer.start(kafkaHandler, constructModuleName() + "_" + getClass().getSimpleName())
- .onComplete(ar -> startPromise.complete());
+ @Override
+ protected AsyncRecordHandler<String, String> recordHandler() {
+ return kafkaHandler;
}
@Override
- public void stop(Promise stopPromise) {
- consumer.stop().onComplete(ar -> stopPromise.complete());
+ protected List<String> eventTypes() {
+ return List.of(QMEventTypes.QM_RECORD_UPDATED.name());
}
}
diff --git a/mod-source-record-storage-server/src/main/resources/liquibase/tenant/changelog.xml b/mod-source-record-storage-server/src/main/resources/liquibase/tenant/changelog.xml
index fe598d861..b498662ef 100644
--- a/mod-source-record-storage-server/src/main/resources/liquibase/tenant/changelog.xml
+++ b/mod-source-record-storage-server/src/main/resources/liquibase/tenant/changelog.xml
@@ -68,5 +68,7 @@
+  <include file="scripts/v-5.7.0/2023-08-29--16-00-define-version-column-default-value.xml" relativeToChangelogFile="true"/>
+  <include file="scripts/v-5.7.0/2023-10-11--16-00-recreate-indexes-on-marc-indexers.xml" relativeToChangelogFile="true"/>
diff --git a/mod-source-record-storage-server/src/main/resources/liquibase/tenant/scripts/v-5.7.0/2023-08-29--16-00-define-version-column-default-value.xml b/mod-source-record-storage-server/src/main/resources/liquibase/tenant/scripts/v-5.7.0/2023-08-29--16-00-define-version-column-default-value.xml
new file mode 100644
index 000000000..1c23578c8
--- /dev/null
+++ b/mod-source-record-storage-server/src/main/resources/liquibase/tenant/scripts/v-5.7.0/2023-08-29--16-00-define-version-column-default-value.xml
@@ -0,0 +1,16 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/mod-source-record-storage-server/src/main/resources/liquibase/tenant/scripts/v-5.7.0/2023-10-11--16-00-recreate-indexes-on-marc-indexers.xml b/mod-source-record-storage-server/src/main/resources/liquibase/tenant/scripts/v-5.7.0/2023-10-11--16-00-recreate-indexes-on-marc-indexers.xml
new file mode 100644
index 000000000..13d4c03b2
--- /dev/null
+++ b/mod-source-record-storage-server/src/main/resources/liquibase/tenant/scripts/v-5.7.0/2023-10-11--16-00-recreate-indexes-on-marc-indexers.xml
@@ -0,0 +1,49 @@
+
+
+
+
+
+ SELECT COUNT(id) FROM ${database.defaultSchemaName}.raw_records_lb;
+
+
+
+
+ do $$ declare
+ index integer;
+ suffix text;
+ begin
+ for index in 0 .. 999 loop
+ suffix = lpad(index::text, 3, '0');
+ execute 'drop index if exists ${database.defaultSchemaName}.idx_marc_indexers_marc_id_' || suffix || ';';
+ raise info 'Starting to create index "${database.defaultSchemaName}.idx_marc_indexers_marc_id_version_%"', suffix;
+ execute 'create index if not exists idx_marc_indexers_marc_id_version_' || suffix || ' on ${database.defaultSchemaName}.marc_indexers_' || suffix || '(marc_id, version);';
+ raise info 'Index "${database.defaultSchemaName}.idx_marc_indexers_marc_id_version_%" has been created', suffix;
+ end loop;
+ end; $$;
+
+
+
+ do $$ declare
+ index integer;
+ suffix text;
+ begin
+ for index in 0 .. 999 loop
+ suffix = lpad(index::text, 3, '0');
+ raise info 'Starting to create not-null constraint on "${database.defaultSchemaName}.marc_indexers_%.version" column', suffix;
+ execute 'ALTER TABLE ${database.defaultSchemaName}.marc_indexers_' || suffix || ' ALTER COLUMN version SET NOT NULL;';
+ raise info 'Constraint has been created on "${database.defaultSchemaName}.marc_indexers_%.version" column', suffix;
+ end loop;
+ end;
+ $$;
+
+
+
+ CREATE INDEX IF NOT EXISTS idx_marc_records_tracking_dirty ON ${database.defaultSchemaName}.marc_records_tracking USING btree (is_dirty);
+
+
+
+
diff --git a/mod-source-record-storage-server/src/main/resources/vertx-default-jul-logging.properties b/mod-source-record-storage-server/src/main/resources/vertx-default-jul-logging.properties
deleted file mode 100644
index b2a56574d..000000000
--- a/mod-source-record-storage-server/src/main/resources/vertx-default-jul-logging.properties
+++ /dev/null
@@ -1,5 +0,0 @@
-handlers = java.util.logging.ConsoleHandler
-.level = ALL
-
-logger.cql2pgjson.level = ERROR
-logger.cql2pgjson.name = org.folio.cql2pgjson.CQL2PgJSON
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/TestMocks.java b/mod-source-record-storage-server/src/test/java/org/folio/TestMocks.java
index b4d2efa96..3f2542af9 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/TestMocks.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/TestMocks.java
@@ -30,24 +30,21 @@
public class TestMocks {
private static final String SOURCE_RECORDS_FOLDER_PATH = "src/test/resources/mock/sourceRecords";
-
private static final String SNAPSHOT_PATH_TEMPLATE = "src/test/resources/mock/snapshots/%s.json";
private static final String RECORD_PATH_TEMPLATE = "src/test/resources/mock/records/%s.json";
+ private static final String RAW_RECORD_PATH_TEMPLATE = "src/test/resources/mock/rawRecords/%s.json";
private static final String ERROR_RECORD_PATH_TEMPLATE = "src/test/resources/mock/errorRecords/%s.json";
private static List snapshots;
private static List records;
- private static List rawRecords;
-
private static List parsedRecords;
private static List errorRecords;
static {
List sourceRecords = readSourceRecords();
- rawRecords = sourceRecords.stream().map(TestMocks::toRawRecord).collect(Collectors.toList());
parsedRecords = sourceRecords.stream().map(TestMocks::toParsedRecord).collect(Collectors.toList());
errorRecords = readErrorRecords(sourceRecords);
records = readRecords(sourceRecords);
@@ -102,14 +99,6 @@ public static ErrorRecord getErrorRecord(int index) {
return clone(errorRecords.get(index));
}
- public static List getRawRecords() {
- return new ArrayList<>(rawRecords.stream().map(TestMocks::clone).collect(Collectors.toList()));
- }
-
- public static RawRecord getRawRecord(int index) {
- return clone(rawRecords.get(index));
- }
-
public static List getParsedRecords() {
return new ArrayList<>(parsedRecords.stream().map(TestMocks::clone).collect(Collectors.toList()));
}
@@ -130,10 +119,6 @@ public static Optional getErrorRecord(String id) {
return errorRecords.stream().map(TestMocks::clone).filter(er -> er.getId().equals(id)).findAny();
}
- public static Optional getRawRecord(String id) {
- return rawRecords.stream().map(TestMocks::clone).filter(rr -> rr.getId().equals(id)).findAny();
- }
-
public static Optional getParsedRecord(String id) {
return parsedRecords.stream().map(TestMocks::clone).filter(pr -> pr.getId().equals(id)).findAny();
}
@@ -145,18 +130,13 @@ public static ParsedRecord normalizeContent(ParsedRecord parsedRecord) {
return parsedRecord;
}
- private static RawRecord toRawRecord(SourceRecord sourceRecord) {
- return sourceRecord.getRawRecord();
- }
-
private static ParsedRecord toParsedRecord(SourceRecord sourceRecord) {
return sourceRecord.getParsedRecord();
}
private static List readSourceRecords() {
File sourceRecordsDirectory = new File(SOURCE_RECORDS_FOLDER_PATH);
- String[] extensions = new String[]{ "json" };
- return FileUtils.listFiles(sourceRecordsDirectory, extensions, false).stream()
+ return FileUtils.listFiles(sourceRecordsDirectory, new String[]{"json"}, false).stream()
.map(TestMocks::readSourceRecord)
.filter(sr -> sr.isPresent())
.map(sr -> sr.get())
@@ -209,18 +189,15 @@ private static Optional readRecord(SourceRecord sourceRecord) {
if (file.exists()) {
try {
Record record = new ObjectMapper().readValue(file, Record.class)
- .withRawRecord(sourceRecord.getRawRecord())
+ .withRawRecord(readRawRecord(sourceRecord.getRecordId()))
.withParsedRecord(sourceRecord.getParsedRecord())
.withExternalIdsHolder(sourceRecord.getExternalIdsHolder())
.withAdditionalInfo(sourceRecord.getAdditionalInfo());
if (Objects.nonNull(sourceRecord.getMetadata())) {
record.withMetadata(sourceRecord.getMetadata());
}
- Optional errorRecord = errorRecords.stream()
- .filter(er -> er.getId().equals(record.getId())).findAny();
- if (errorRecord.isPresent()) {
- record.withErrorRecord(errorRecord.get());
- }
+ errorRecords.stream()
+ .filter(er -> er.getId().equals(record.getId())).findAny().ifPresent(record::withErrorRecord);
return Optional.of(record);
} catch (IOException e) {
e.printStackTrace();
@@ -229,6 +206,10 @@ private static Optional readRecord(SourceRecord sourceRecord) {
return Optional.empty();
}
+ private static RawRecord readRawRecord(String recordId) throws IOException {
+ return new ObjectMapper().readValue(new File(format(RAW_RECORD_PATH_TEMPLATE, recordId)), RawRecord.class);
+ }
+
private static List readErrorRecords(List sourceRecords) {
return sourceRecords.stream()
.map(TestMocks::readErrorRecord)
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java
new file mode 100644
index 000000000..03151fd0a
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java
@@ -0,0 +1,158 @@
+package org.folio.consumers;
+
+import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import io.vertx.core.json.Json;
+import io.vertx.core.json.JsonObject;
+import io.vertx.ext.unit.Async;
+import io.vertx.ext.unit.TestContext;
+import io.vertx.ext.unit.junit.VertxUnitRunner;
+import io.vertx.kafka.client.consumer.impl.KafkaConsumerRecordImpl;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.UUID;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.common.header.internals.RecordHeader;
+import org.folio.TestUtil;
+import org.folio.dao.RecordDao;
+import org.folio.dao.RecordDaoImpl;
+import org.folio.dao.util.IdType;
+import org.folio.dao.util.SnapshotDaoUtil;
+import org.folio.rest.jaxrs.model.ExternalIdsHolder;
+import org.folio.rest.jaxrs.model.ParsedRecord;
+import org.folio.rest.jaxrs.model.RawRecord;
+import org.folio.rest.jaxrs.model.Record;
+import org.folio.rest.jaxrs.model.Snapshot;
+import org.folio.rest.jaxrs.model.SourceRecord;
+import org.folio.rest.jooq.enums.RecordState;
+import org.folio.services.AbstractLBServiceTest;
+import org.folio.services.RecordService;
+import org.folio.services.RecordServiceImpl;
+import org.jetbrains.annotations.NotNull;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+@RunWith(VertxUnitRunner.class)
+public class AuthorityDomainKafkaHandlerTest extends AbstractLBServiceTest {
+
+ private static final String recordId = UUID.randomUUID().toString();
+ private static RawRecord rawRecord;
+ private static ParsedRecord parsedRecord;
+
+ private RecordDao recordDao;
+ private RecordService recordService;
+ private Record record;
+ private AuthorityDomainKafkaHandler handler;
+
+ @BeforeClass
+ public static void setUpClass() throws IOException {
+ rawRecord = new RawRecord().withId(recordId)
+ .withContent(
+ new ObjectMapper().readValue(TestUtil.readFileFromPath(RAW_MARC_RECORD_CONTENT_SAMPLE_PATH), String.class));
+ parsedRecord = new ParsedRecord().withId(recordId)
+ .withContent(
+ new ObjectMapper().readValue(TestUtil.readFileFromPath(PARSED_MARC_RECORD_CONTENT_SAMPLE_PATH),
+ JsonObject.class)
+ .encode());
+ }
+
+ @Before
+ public void setUp(TestContext context) {
+ recordDao = new RecordDaoImpl(postgresClientFactory);
+ recordService = new RecordServiceImpl(recordDao);
+ handler = new AuthorityDomainKafkaHandler(recordService);
+ Async async = context.async();
+ Snapshot snapshot = new Snapshot()
+ .withJobExecutionId(UUID.randomUUID().toString())
+ .withProcessingStartedDate(new Date())
+ .withStatus(Snapshot.Status.COMMITTED);
+ record = new Record()
+ .withId(recordId)
+ .withSnapshotId(snapshot.getJobExecutionId())
+ .withGeneration(0)
+ .withMatchedId(recordId)
+ .withExternalIdsHolder(new ExternalIdsHolder().withAuthorityId(recordId))
+ .withRecordType(MARC_AUTHORITY)
+ .withRawRecord(rawRecord)
+ .withParsedRecord(parsedRecord);
+ SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot)
+ .compose(savedSnapshot -> recordService.saveRecord(record, TENANT_ID))
+ .onSuccess(ar -> async.complete())
+ .onFailure(context::fail);
+ }
+
+ @After
+ public void cleanUp(TestContext context) {
+ Async async = context.async();
+ SnapshotDaoUtil.deleteAll(postgresClientFactory.getQueryExecutor(TENANT_ID)).onComplete(delete -> {
+ if (delete.failed()) {
+ context.fail(delete.cause());
+ }
+ async.complete();
+ });
+ }
+
+ @Test
+ public void shouldSoftDeleteMarcAuthorityRecordOnSoftDeleteDomainEvent(TestContext context) {
+ Async async = context.async();
+
+ var payload = new HashMap<String, String>();
+ payload.put("deleteEventSubType", "SOFT_DELETE");
+ payload.put("tenant", TENANT_ID);
+
+ handler.handle(new KafkaConsumerRecordImpl<>(getConsumerRecord(payload)))
+ .onComplete(ar -> {
+ if (ar.failed()) {
+ context.fail(ar.cause());
+ }
+ recordService.getSourceRecordById(record.getId(), IdType.RECORD, RecordState.DELETED, TENANT_ID)
+ .onComplete(result -> {
+ if (result.failed()) {
+ context.fail(result.cause());
+ }
+ context.assertTrue(result.result().isPresent());
+ SourceRecord updatedRecord = result.result().get();
+ context.assertTrue(updatedRecord.getDeleted());
+ async.complete();
+ });
+ });
+ }
+
+ @Test
+ public void shouldHardDeleteMarcAuthorityRecordOnHardDeleteDomainEvent(TestContext context) {
+ Async async = context.async();
+
+ var payload = new HashMap<String, String>();
+ payload.put("deleteEventSubType", "HARD_DELETE");
+ payload.put("tenant", TENANT_ID);
+
+ handler.handle(new KafkaConsumerRecordImpl<>(getConsumerRecord(payload)))
+ .onComplete(ar -> {
+ if (ar.failed()) {
+ context.fail(ar.cause());
+ }
+ recordService.getSourceRecordById(record.getId(), IdType.RECORD, RecordState.ACTUAL, TENANT_ID)
+ .onComplete(result -> {
+ if (result.failed()) {
+ context.fail(result.cause());
+ }
+ context.assertFalse(result.result().isPresent());
+ async.complete();
+ });
+ });
+ }
+
+ @NotNull
+ private ConsumerRecord<String, String> getConsumerRecord(HashMap<String, String> payload) {
+ ConsumerRecord<String, String> consumerRecord = new ConsumerRecord<>("topic", 1, 1, recordId, Json.encode(payload));
+ consumerRecord.headers().add(new RecordHeader("domain-event-type", "DELETE".getBytes(StandardCharsets.UTF_8)));
+ return consumerRecord;
+ }
+
+}
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java b/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java
index 4fd3d5515..78e3b2432 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java
@@ -14,6 +14,7 @@
import org.folio.dao.util.AdvisoryLockUtil;
import org.folio.dao.util.MatchField;
import org.folio.dao.util.SnapshotDaoUtil;
+import org.folio.processing.value.MissingValue;
import org.folio.processing.value.StringValue;
import org.folio.rest.jaxrs.model.ExternalIdsHolder;
import org.folio.rest.jaxrs.model.MarcRecordSearchRequest;
@@ -113,6 +114,20 @@ public void shouldReturnRecordOnGetMatchedRecordsWhenThereIsNoTrackingRecordAndF
});
}
+ @Test
+ public void shouldReturnEmptyListIfValueFieldIsEmpty(TestContext context) {
+ var async = context.async();
+ var matchField = new MatchField("010", "1", "", "a", MissingValue.getInstance());
+
+ var future = recordDao.getMatchedRecords(matchField, TypeConnection.MARC_BIB, true, 0, 10, TENANT_ID);
+
+ future.onComplete(ar -> {
+ context.assertTrue(ar.succeeded());
+ context.assertEquals(0, ar.result().size());
+ async.complete();
+ });
+ }
+
@Test
public void shouldReturnIdOnStreamMarcRecordIdsWhenThereIsNoTrackingRecordAndFallbackQueryEnabled(TestContext context) {
Async async = context.async();
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/dao/util/EdifactUtilTest.java b/mod-source-record-storage-server/src/test/java/org/folio/dao/util/EdifactUtilTest.java
index ddd6c2f92..c9e24cb85 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/dao/util/EdifactUtilTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/dao/util/EdifactUtilTest.java
@@ -1,9 +1,8 @@
package org.folio.dao.util;
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.xlate.edi.stream.EDIStreamException;
+import org.folio.TestUtil;
import org.folio.rest.jaxrs.model.SourceRecord;
import org.junit.Before;
import org.junit.Test;
@@ -21,6 +20,7 @@ public class EdifactUtilTest {
private static final String SOURCE_RECORD_PATH = "src/test/resources/mock/sourceRecords/e4cfe577-4015-46d8-a54d-7c9b34796955.json";
private static final String SOURCE_RECORD_WITH_IGNORED_CODES_PATH = "src/test/resources/mock/sourceRecords/4ca9d8ac-9de5-432a-83ee-15832f09e868.json";
+ private static final String RAW_EDIFACT_RECORD_CONTENT_SAMPLE_PATH = "src/test/resources/rawEdifactRecordContent.sample";
private static final String INVALID_RAW_EDIFACT_RECORD =
"UNA:+.? '" +
@@ -51,14 +51,15 @@ public class EdifactUtilTest {
private SourceRecord sourceRecord;
@Before
- public void readSourceRecord() throws JsonParseException, JsonMappingException, IOException {
+ public void readSourceRecord() throws IOException {
File file = new File(SOURCE_RECORD_PATH);
sourceRecord = new ObjectMapper().readValue(file, SourceRecord.class);
}
@Test
public void shouldFormatEdifact() throws IOException, EDIStreamException {
- String rawEdifact = sourceRecord.getRawRecord().getContent();
+ String rawEdifact = new ObjectMapper().readValue(
+ TestUtil.readFileFromPath(RAW_EDIFACT_RECORD_CONTENT_SAMPLE_PATH), String.class);
String formattedEdifact = EdifactUtil.formatEdifact(rawEdifact);
assertNotNull(formattedEdifact);
assertEquals(sourceRecord.getParsedRecord().getFormattedContent(), formattedEdifact);
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/dao/util/MarcUtilTest.java b/mod-source-record-storage-server/src/test/java/org/folio/dao/util/MarcUtilTest.java
index 997a0f897..a569dbc9c 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/dao/util/MarcUtilTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/dao/util/MarcUtilTest.java
@@ -1,16 +1,8 @@
package org.folio.dao.util;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-
-import java.io.File;
-import java.io.IOException;
-
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.tomakehurst.wiremock.common.Json;
-
+import org.folio.rest.jaxrs.model.RawRecord;
import org.folio.rest.jaxrs.model.SourceRecord;
import org.junit.Before;
import org.junit.Test;
@@ -18,22 +10,29 @@
import org.junit.runners.BlockJUnit4ClassRunner;
import org.marc4j.MarcException;
+import java.io.File;
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
@RunWith(BlockJUnit4ClassRunner.class)
public class MarcUtilTest {
private static final String SOURCE_RECORD_PATH = "src/test/resources/mock/sourceRecords/d3cd3e1e-a18c-4f7c-b053-9aa50343394e.json";
+ private static final String RAW_RECORD_PATH = "src/test/resources/mock/rawRecords/d3cd3e1e-a18c-4f7c-b053-9aa50343394e.json";
private SourceRecord sourceRecord;
@Before
- public void readSourceRecord() throws JsonParseException, JsonMappingException, IOException {
+ public void readSourceRecord() throws IOException {
File file = new File(SOURCE_RECORD_PATH);
sourceRecord = new ObjectMapper().readValue(file, SourceRecord.class);
}
@Test
public void shouldConvertRawMarcToMarcJson() throws IOException, MarcException {
- String rawMarc = sourceRecord.getRawRecord().getContent();
+ String rawMarc = new ObjectMapper().readValue(new File(RAW_RECORD_PATH), RawRecord.class).getContent();
String marcJson = MarcUtil.rawMarcToMarcJson(rawMarc);
assertNotNull(marcJson);
assertEquals(rawMarc, MarcUtil.marcJsonToRawMarc(marcJson));
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/rest/impl/RecordApiTest.java b/mod-source-record-storage-server/src/test/java/org/folio/rest/impl/RecordApiTest.java
index 1cfa3e26e..3761a3ef9 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/rest/impl/RecordApiTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/rest/impl/RecordApiTest.java
@@ -4,6 +4,7 @@
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.everyItem;
import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@@ -12,7 +13,6 @@
import java.util.UUID;
import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
import io.restassured.RestAssured;
import io.restassured.response.Response;
import io.vertx.core.json.JsonArray;
@@ -50,12 +50,14 @@ public class RecordApiTest extends AbstractRestVerticleTest {
private static final String SIXTH_UUID = UUID.randomUUID().toString();
private static final String SEVENTH_UUID = UUID.randomUUID().toString();
private static final String EIGHTH_UUID = UUID.randomUUID().toString();
+ private static final String GENERATION = "generation";
private static RawRecord rawMarcRecord;
private static ParsedRecord parsedMarcRecord;
private static RawRecord rawEdifactRecord;
private static ParsedRecord parsedEdifactRecord;
+ private static ParsedRecord parsedMarcRecordWith999ff$s;
static {
try {
@@ -67,6 +69,13 @@ public class RecordApiTest extends AbstractRestVerticleTest {
.withContent(new ObjectMapper().readValue(TestUtil.readFileFromPath(RAW_EDIFACT_RECORD_CONTENT_SAMPLE_PATH), String.class));
parsedEdifactRecord = new ParsedRecord()
.withContent(new ObjectMapper().readValue(TestUtil.readFileFromPath(PARSED_EDIFACT_RECORD_CONTENT_SAMPLE_PATH), JsonObject.class).encode());
+ parsedMarcRecordWith999ff$s = new ParsedRecord().withId(FIRST_UUID)
+ .withContent(new JsonObject().put("leader", "01542ccm a2200361 4500")
+ .put("fields", new JsonArray().add(new JsonObject().put("999", new JsonObject()
+ .put("subfields",
+ new JsonArray().add(new JsonObject().put("s", FIRST_UUID)))
+ .put("ind1", "f")
+ .put("ind2", "f")))).encode());
} catch (IOException e) {
e.printStackTrace();
}
@@ -589,6 +598,120 @@ public void shouldUpdateErrorRecordOnPut(TestContext testContext) {
async.complete();
}
+ @Test
+ public void shouldSendBadRequestWhen999ff$sIsNullDuringUpdateRecordGeneration(TestContext testContext) {
+ postSnapshots(testContext, snapshot_1);
+
+ Async async = testContext.async();
+ Response createResponse = RestAssured.given()
+ .spec(spec)
+ .body(record_1)
+ .when()
+ .post(SOURCE_STORAGE_RECORDS_PATH);
+ assertThat(createResponse.statusCode(), is(HttpStatus.SC_CREATED));
+ Record createdRecord = createResponse.body().as(Record.class);
+
+ RestAssured.given()
+ .spec(spec)
+ .body(createdRecord)
+ .when()
+ .put(SOURCE_STORAGE_RECORDS_PATH + "/" + createdRecord.getId() + "/" + GENERATION)
+ .then()
+ .statusCode(HttpStatus.SC_BAD_REQUEST);
+ async.complete();
+ }
+
+ @Test
+ public void shouldSendBadRequestWhenMatchedIfNotEqualTo999ff$sDuringUpdateRecordGeneration(TestContext testContext) {
+ postSnapshots(testContext, snapshot_1);
+
+ Async async = testContext.async();
+ Response createResponse = RestAssured.given()
+ .spec(spec)
+ .body(record_1.withParsedRecord(parsedMarcRecordWith999ff$s))
+ .when()
+ .post(SOURCE_STORAGE_RECORDS_PATH);
+ assertThat(createResponse.statusCode(), is(HttpStatus.SC_CREATED));
+ Record createdRecord = createResponse.body().as(Record.class);
+
+ RestAssured.given()
+ .spec(spec)
+ .body(createdRecord)
+ .when()
+ .put(SOURCE_STORAGE_RECORDS_PATH + "/" + UUID.randomUUID() + "/" + GENERATION)
+ .then()
+ .statusCode(HttpStatus.SC_BAD_REQUEST);
+ async.complete();
+ }
+
+ @Test
+ public void shouldSendNotFoundWhenUpdateRecordGenerationForNonExistingRecord(TestContext testContext) {
+ Async async = testContext.async();
+ RestAssured.given()
+ .spec(spec)
+ .body(record_1.withParsedRecord(parsedMarcRecordWith999ff$s))
+ .when()
+ .put(SOURCE_STORAGE_RECORDS_PATH + "/" + record_1.getMatchedId() + "/" + GENERATION)
+ .then()
+ .statusCode(HttpStatus.SC_NOT_FOUND);
+ async.complete();
+ }
+
+ @Test
+ public void shouldSendBadRequestWhenUpdateRecordGenerationWithDuplicate(TestContext testContext) {
+ postSnapshots(testContext, snapshot_1);
+
+ Async async = testContext.async();
+ Response createResponse = RestAssured.given()
+ .spec(spec)
+ .body(record_1.withParsedRecord(parsedMarcRecordWith999ff$s))
+ .when()
+ .post(SOURCE_STORAGE_RECORDS_PATH);
+ assertThat(createResponse.statusCode(), is(HttpStatus.SC_CREATED));
+ Record createdRecord = createResponse.body().as(Record.class);
+
+ postSnapshots(testContext, snapshot_2);
+ Record recordForUpdate = createdRecord.withSnapshotId(snapshot_2.getJobExecutionId());
+
+ RestAssured.given()
+ .spec(spec)
+ .body(recordForUpdate)
+ .when()
+ .put(SOURCE_STORAGE_RECORDS_PATH + "/" + createdRecord.getMatchedId() + "/" + GENERATION)
+ .then()
+ .statusCode(HttpStatus.SC_BAD_REQUEST);
+ async.complete();
+ }
+
+ @Test
+ public void shouldUpdateRecordGeneration(TestContext testContext) {
+ postSnapshots(testContext, snapshot_1);
+
+ Async async = testContext.async();
+ Response createResponse = RestAssured.given()
+ .spec(spec)
+ .body(record_1.withParsedRecord(parsedMarcRecordWith999ff$s))
+ .when()
+ .post(SOURCE_STORAGE_RECORDS_PATH);
+ assertThat(createResponse.statusCode(), is(HttpStatus.SC_CREATED));
+ Record createdRecord = createResponse.body().as(Record.class);
+
+ postSnapshots(testContext, snapshot_2);
+ Record recordForUpdate = createdRecord.withSnapshotId(snapshot_2.getJobExecutionId()).withGeneration(null);
+
+ RestAssured.given()
+ .spec(spec)
+ .body(recordForUpdate)
+ .when()
+ .put(SOURCE_STORAGE_RECORDS_PATH + "/" + createdRecord.getMatchedId() + "/" + GENERATION)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .body("id", not(createdRecord.getId()))
+ .body("matchedId", is(recordForUpdate.getMatchedId()))
+ .body("generation", is(1));
+ async.complete();
+ }
+
@Test
public void shouldReturnNotFoundOnGetByIdWhenRecordDoesNotExist() {
RestAssured.given()
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/rest/impl/TestMarcRecordsApiTest.java b/mod-source-record-storage-server/src/test/java/org/folio/rest/impl/TestMarcRecordsApiTest.java
index dca7d1b28..0b350dfa3 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/rest/impl/TestMarcRecordsApiTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/rest/impl/TestMarcRecordsApiTest.java
@@ -1,16 +1,18 @@
package org.folio.rest.impl;
-import java.util.Collections;
-
+import com.fasterxml.jackson.databind.ObjectMapper;
+import io.restassured.RestAssured;
+import io.vertx.ext.unit.junit.VertxUnitRunner;
import org.apache.http.HttpStatus;
-import org.folio.TestMocks;
+import org.folio.TestUtil;
import org.folio.rest.jaxrs.model.RawRecord;
import org.folio.rest.jaxrs.model.TestMarcRecordsCollection;
import org.junit.Test;
import org.junit.runner.RunWith;
-import io.restassured.RestAssured;
-import io.vertx.ext.unit.junit.VertxUnitRunner;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
@RunWith(VertxUnitRunner.class)
public class TestMarcRecordsApiTest extends AbstractRestVerticleTest {
@@ -18,11 +20,14 @@ public class TestMarcRecordsApiTest extends AbstractRestVerticleTest {
private static final String POPULATE_TEST_MARK_RECORDS_PATH = "/source-storage/populate-test-marc-records";
@Test
- public void shouldReturnNoContentOnPostRecordCollectionPassedInBody() {
+ public void shouldReturnNoContentOnPostRecordCollectionPassedInBody() throws IOException {
+ RawRecord rawRecord = new RawRecord().withContent(
+ new ObjectMapper().readValue(TestUtil.readFileFromPath(RAW_MARC_RECORD_CONTENT_SAMPLE_PATH), String.class));
+
RestAssured.given()
.spec(spec)
.body(new TestMarcRecordsCollection()
- .withRawRecords(TestMocks.getRawRecords()))
+ .withRawRecords(List.of(rawRecord)))
.when()
.post(POPULATE_TEST_MARK_RECORDS_PATH)
.then()
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/AbstractLBServiceTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/AbstractLBServiceTest.java
index 95ca7103c..752d0e4fa 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/AbstractLBServiceTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/AbstractLBServiceTest.java
@@ -18,16 +18,24 @@
import org.folio.rest.RestVerticle;
import org.folio.rest.client.TenantClient;
import org.folio.rest.jaxrs.model.Metadata;
+import org.folio.rest.jaxrs.model.Record;
import org.folio.rest.jaxrs.model.TenantAttributes;
import org.folio.rest.jaxrs.model.TenantJob;
import org.folio.rest.persist.PostgresClient;
import org.folio.rest.tools.utils.Envs;
import org.folio.rest.tools.utils.NetworkUtils;
+import org.folio.services.util.AdditionalFieldsUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith;
import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig;
+import static org.folio.services.util.AdditionalFieldsUtil.TAG_005;
+import static org.junit.Assert.assertEquals;
+
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
public abstract class AbstractLBServiceTest {
@@ -151,4 +159,20 @@ void compareMetadata(TestContext context, Metadata expected, Metadata actual) {
context.assertNotNull(actual.getUpdatedDate());
}
+ protected String get005FieldExpectedDate() {
+ return AdditionalFieldsUtil.dateTime005Formatter
+ .format(ZonedDateTime.ofInstant(Instant.now(), ZoneId.systemDefault()));
+ }
+
+ protected void validate005Field(TestContext testContext, String expectedDate, Record record) {
+ String actualDate = AdditionalFieldsUtil.getValueFromControlledField(record, TAG_005);
+ testContext.assertEquals(expectedDate.substring(0, 10),
+ actualDate.substring(0, 10));
+ }
+
+ protected void validate005Field(String expectedDate, Record record) {
+ String actualDate = AdditionalFieldsUtil.getValueFromControlledField(record, TAG_005);
+ assertEquals(expectedDate.substring(0, 10),
+ actualDate.substring(0, 10));
+ }
}
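
These two `validate005Field` overloads, together with `get005FieldExpectedDate()`, compare only the first ten characters of the 005 control field, so an update test cannot fail merely because minutes or seconds ticked over between the write and the assertion. A standalone sketch of the idea, assuming the conventional MARC 21 005 layout `yyyyMMddHHmmss.S` (the real pattern lives in `AdditionalFieldsUtil.dateTime005Formatter` and is not shown in this diff):

```java
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

class Field005Check {

  // Assumed pattern; the tests use AdditionalFieldsUtil.dateTime005Formatter instead.
  static final DateTimeFormatter F005 = DateTimeFormatter.ofPattern("yyyyMMddHHmmss.S");

  // Chars 0-9 cover year through hour, so sub-hour clock drift is tolerated.
  static boolean sameDateAndHour(String expected, String actual) {
    return expected.substring(0, 10).equals(actual.substring(0, 10));
  }

  public static void main(String[] args) {
    String stamp = F005.format(ZonedDateTime.now()); // e.g. "20240215103012.4"
    System.out.println(sameDateAndHour(stamp, stamp)); // true
  }
}
```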
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java
index 487579bb6..86b9c126b 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java
@@ -73,7 +73,7 @@ public void setUp(TestContext context) {
MockitoAnnotations.initMocks(this);
recordDao = new RecordDaoImpl(postgresClientFactory);
- handler = new MarcAuthorityMatchEventHandler(recordDao);
+ handler = new MarcAuthorityMatchEventHandler(recordDao, null, vertx);
Async async = context.async();
Snapshot existingRecordSnapshot = new Snapshot()
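
The only change here is the handler's constructor: it now takes a `Vertx` instance and a second argument passed as `null`. Judging from the parallel change in `MarcBibliographicMatchEventHandlerTest` further down, that slot appears to be the consortium configuration cache; a consortium-aware setup might then look like the line below (the parameter type is an assumption, not confirmed by this diff):

```java
// Assumed signature, mirroring MarcBibliographicMatchEventHandler below.
handler = new MarcAuthorityMatchEventHandler(recordDao, new ConsortiumConfigurationCache(vertx), vertx);
```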
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java
index ea5680b23..5e1a621b5 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java
@@ -5,15 +5,13 @@
import static org.folio.ActionProfile.Action.MODIFY;
import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_UPDATED;
-import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED;
import static org.folio.rest.jaxrs.model.EntityType.MARC_AUTHORITY;
-import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC;
import static org.folio.rest.jaxrs.model.MappingDetail.MarcMappingOption.UPDATE;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.ACTION_PROFILE;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.JOB_PROFILE;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.MAPPING_PROFILE;
import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB;
-import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeader;
+import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeaderAndDate;
import java.io.IOException;
import java.util.Collections;
@@ -69,7 +67,6 @@
import org.folio.rest.jaxrs.model.Snapshot;
import org.folio.services.caches.MappingParametersSnapshotCache;
import org.folio.services.handlers.actions.MarcAuthorityUpdateModifyEventHandler;
-import org.folio.services.handlers.actions.MarcBibUpdateModifyEventHandler;
@RunWith(VertxUnitRunner.class)
public class MarcAuthorityUpdateModifyEventHandlerTest extends AbstractLBServiceTest {
@@ -163,7 +160,7 @@ public void setUp(TestContext context) {
recordDao = new RecordDaoImpl(postgresClientFactory);
recordService = new RecordServiceImpl(recordDao);
- modifyRecordEventHandler = new MarcAuthorityUpdateModifyEventHandler(recordService, new MappingParametersSnapshotCache(vertx), vertx);
+ modifyRecordEventHandler = new MarcAuthorityUpdateModifyEventHandler(recordService, null, new MappingParametersSnapshotCache(vertx), vertx);
Snapshot snapshot = new Snapshot()
.withJobExecutionId(UUID.randomUUID().toString())
@@ -201,6 +198,7 @@ public void shouldModifyMarcRecord(TestContext context) {
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String expectedParsedContent = "{\"leader\":\"00107nam 22000491a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"999\":{\"subfields\":[{\"s\":\"eae222e8-70fd-4422-852c-60d22bae36b8\"}],\"ind1\":\"f\",\"ind2\":\"f\"}}]}";
HashMap payloadContext = new HashMap<>();
record.getParsedRecord().setContent(Json.encode(record.getParsedRecord().getContent()));
@@ -233,10 +231,11 @@ public void shouldModifyMarcRecord(TestContext context) {
context.assertEquals(DI_SRS_MARC_AUTHORITY_RECORD_UPDATED.value(), eventPayload.getEventType());
Record actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_AUTHORITY.value()), Record.class);
- context.assertEquals(getParsedContentWithoutLeader(expectedParsedContent),
- getParsedContentWithoutLeader(actualRecord.getParsedRecord().getContent().toString()));
+ context.assertEquals(getParsedContentWithoutLeaderAndDate(expectedParsedContent),
+ getParsedContentWithoutLeaderAndDate(actualRecord.getParsedRecord().getContent().toString()));
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
context.assertEquals(userId, actualRecord.getMetadata().getUpdatedByUserId());
+ validate005Field(context, expectedDate, actualRecord);
async.complete();
});
}
@@ -246,6 +245,7 @@ public void shouldUpdateMatchedMarcRecordWithFieldFromIncomingRecord(TestContext
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String incomingParsedContent = "{\"leader\":\"01314nam 22003851a 4500\",\"fields\":[{\"001\":\"ybp7406512\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
String expectedParsedContent = "{\"leader\":\"00134nam 22000611a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"035\":{\"subfields\":[{\"a\":\"ybp7406512\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"999\":{\"subfields\":[{\"s\":\"eae222e8-70fd-4422-852c-60d22bae36b8\"}],\"ind1\":\"f\",\"ind2\":\"f\"}}]}";
Record incomingRecord = new Record().withParsedRecord(new ParsedRecord().withContent(incomingParsedContent));
@@ -280,10 +280,11 @@ public void shouldUpdateMatchedMarcRecordWithFieldFromIncomingRecord(TestContext
context.assertEquals(DI_SRS_MARC_AUTHORITY_RECORD_UPDATED.value(), eventPayload.getEventType());
Record actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_AUTHORITY.value()), Record.class);
- context.assertEquals(getParsedContentWithoutLeader(expectedParsedContent),
- getParsedContentWithoutLeader(actualRecord.getParsedRecord().getContent().toString()));
+ context.assertEquals(getParsedContentWithoutLeaderAndDate(expectedParsedContent),
+ getParsedContentWithoutLeaderAndDate(actualRecord.getParsedRecord().getContent().toString()));
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
context.assertEquals(dataImportEventPayload.getJobExecutionId(), actualRecord.getSnapshotId());
+ validate005Field(context, expectedDate, actualRecord);
async.complete();
});
}
@@ -293,6 +294,7 @@ public void shouldModifyMarcRecordAndRemove003Field(TestContext context) {
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String incomingParsedContent = "{\"leader\":\"01314nam 22003851a 4500\",\"fields\":[{\"001\":\"ybp7406512\"},{\"003\":\"OCLC\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
String expectedParsedContent = "{\"leader\":\"00134nam 22000611a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"035\":{\"subfields\":[{\"a\":\"ybp7406512\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"999\":{\"subfields\":[{\"s\":\"eae222e8-70fd-4422-852c-60d22bae36b8\"}],\"ind1\":\"f\",\"ind2\":\"f\"}}]}";
Record incomingRecord = new Record().withParsedRecord(new ParsedRecord().withContent(incomingParsedContent));
@@ -327,9 +329,10 @@ public void shouldModifyMarcRecordAndRemove003Field(TestContext context) {
context.assertEquals(DI_SRS_MARC_AUTHORITY_RECORD_UPDATED.value(), eventPayload.getEventType());
Record actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_AUTHORITY.value()), Record.class);
- context.assertEquals(getParsedContentWithoutLeader(expectedParsedContent),
- getParsedContentWithoutLeader(actualRecord.getParsedRecord().getContent().toString()));
+ context.assertEquals(getParsedContentWithoutLeaderAndDate(expectedParsedContent),
+ getParsedContentWithoutLeaderAndDate(actualRecord.getParsedRecord().getContent().toString()));
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
+ validate005Field(context, expectedDate, actualRecord);
async.complete();
});
}
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java
index 42e9930c5..2fa088007 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java
@@ -12,12 +12,14 @@
import static org.folio.ActionProfile.Action.MODIFY;
import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED;
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_UPDATED;
import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC;
import static org.folio.rest.jaxrs.model.MappingDetail.MarcMappingOption.UPDATE;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.ACTION_PROFILE;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.JOB_PROFILE;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.MAPPING_PROFILE;
import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB;
+import static org.folio.services.util.AdditionalFieldsUtil.TAG_005;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -27,11 +29,13 @@
import com.github.tomakehurst.wiremock.matching.UrlPathPattern;
import io.github.jklingsporn.vertx.jooq.classic.reactivepg.ReactiveClassicGenericQueryExecutor;
import io.vertx.core.json.Json;
+import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.unit.Async;
import io.vertx.ext.unit.TestContext;
import io.vertx.ext.unit.junit.RunTestOnContext;
import io.vertx.ext.unit.junit.VertxUnitRunner;
+
import java.io.IOException;
import java.util.Collections;
import java.util.Date;
@@ -42,6 +46,7 @@
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
+
import org.folio.ActionProfile;
import org.folio.DataImportEventPayload;
import org.folio.InstanceLinkDtoCollection;
@@ -53,8 +58,11 @@
import org.folio.client.InstanceLinkClient;
import org.folio.dao.RecordDao;
import org.folio.dao.RecordDaoImpl;
+import org.folio.dao.SnapshotDao;
+import org.folio.dao.SnapshotDaoImpl;
import org.folio.dao.util.SnapshotDaoUtil;
import org.folio.processing.mapping.defaultmapper.processor.parameters.MappingParameters;
+import org.folio.rest.client.TenantClient;
import org.folio.rest.jaxrs.model.Data;
import org.folio.rest.jaxrs.model.ExternalIdsHolder;
import org.folio.rest.jaxrs.model.MappingDetail;
@@ -68,6 +76,8 @@
import org.folio.rest.jaxrs.model.RawRecord;
import org.folio.rest.jaxrs.model.Record;
import org.folio.rest.jaxrs.model.Snapshot;
+import org.folio.rest.jaxrs.model.TenantAttributes;
+import org.folio.rest.jaxrs.model.TenantJob;
import org.folio.services.caches.LinkingRulesCache;
import org.folio.services.caches.MappingParametersSnapshotCache;
import org.folio.services.exceptions.DuplicateRecordException;
@@ -92,6 +102,7 @@ public class MarcBibUpdateModifyEventHandlerTest extends AbstractLBServiceTest {
private static final UrlPathPattern URL_PATH_PATTERN =
new UrlPathPattern(new RegexPattern(INSTANCE_LINKS_URL + "/.*"), true);
private static final String LINKING_RULES_URL = "/linking-rules/instance-authority";
+ private static final String CENTRAL_TENANT_ID = "centralTenantId";
private static final String SECOND_PARSED_CONTENT =
"{\"leader\":\"02326cam a2200301Ki 4500\",\"fields\":[{\"001\":\"ybp7406411\"}," +
"{\"100\":{\"ind1\":\"1\",\"ind2\":\" \",\"subfields\":[{\"a\":\"Chin, Staceyann Test,\"},{\"e\":\"author.\"},{\"0\":\"http://id.loc.gov/authorities/names/n2008052404\"},{\"9\":\"5a56ffa8-e274-40ca-8620-34a23b5b45dd\"}]}}]}";
@@ -105,10 +116,13 @@ public class MarcBibUpdateModifyEventHandlerTest extends AbstractLBServiceTest {
@Rule
public RunTestOnContext rule = new RunTestOnContext();
private RecordDao recordDao;
+ private SnapshotDao snapshotDao;
private RecordService recordService;
+ private SnapshotService snapshotService;
private MarcBibUpdateModifyEventHandler modifyRecordEventHandler;
private Snapshot snapshotForRecordUpdate;
private Record record;
+ private Snapshot snapshot;
private JobProfile jobProfile = new JobProfile()
.withId(UUID.randomUUID().toString())
.withName("Modify MARC Bibs")
@@ -183,12 +197,36 @@ public class MarcBibUpdateModifyEventHandlerTest extends AbstractLBServiceTest {
.withMarcMappingOption(MappingDetail.MarcMappingOption.MODIFY));
@BeforeClass
- public static void setUpClass() throws IOException {
+ public static void setUpBeforeClass(TestContext context) throws IOException {
rawRecord = new RawRecord().withId(recordId)
.withContent(
new ObjectMapper().readValue(TestUtil.readFileFromPath(RAW_MARC_RECORD_CONTENT_SAMPLE_PATH), String.class));
parsedRecord = new ParsedRecord().withId(recordId)
.withContent(PARSED_CONTENT);
+ Async async = context.async();
+ TenantClient tenantClient = new TenantClient(OKAPI_URL, CENTRAL_TENANT_ID, TOKEN);
+ try {
+ tenantClient.postTenant(new TenantAttributes().withModuleTo("3.2.0"), res2 -> {
+        if (res2.result().statusCode() == 204) {
+          // schema already provisioned; release the async so setup can finish
+          async.complete();
+          return;
+        }
+ if (res2.result().statusCode() == 201) {
+ tenantClient.getTenantByOperationId(res2.result().bodyAsJson(TenantJob.class).getId(), 60000, context.asyncAssertSuccess(res3 -> {
+ context.assertTrue(res3.bodyAsJson(TenantJob.class).getComplete());
+ String error = res3.bodyAsJson(TenantJob.class).getError();
+ if (error != null) {
+ context.assertTrue(error.contains("EventDescriptor was not registered for eventType"));
+ }
+ }));
+ } else {
+ context.assertEquals("Failed to make post tenant. Received status code 400", res2.result().bodyAsString());
+ }
+ async.complete();
+ });
+ } catch (Exception e) {
+ e.printStackTrace();
+ async.complete();
+ }
}
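
This `setUpBeforeClass` bootstraps a second database schema for `CENTRAL_TENANT_ID` through the tenant API before any test runs (the same block appears in `MarcBibliographicMatchEventHandlerTest`). Reduced to its essentials: a 201 means an asynchronous upgrade job was started and is polled via `getTenantByOperationId`, while a 204 means the schema is already provisioned:

```java
// Condensed sketch of the bootstrap above; constants are the test's own.
TenantClient tenantClient = new TenantClient(OKAPI_URL, CENTRAL_TENANT_ID, TOKEN);
tenantClient.postTenant(new TenantAttributes().withModuleTo("3.2.0"), res -> {
  int status = res.result().statusCode();
  if (status == 201) {
    // poll getTenantByOperationId(...) until the returned TenantJob is complete
  } else if (status != 204) { // 204 = schema already present
    context.fail("unexpected tenant init status " + status);
  }
  async.complete();
});
```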
@Before
@@ -198,14 +236,21 @@ public void setUp(TestContext context) {
.withMappingParams(Json.encode(new MappingParameters()))))));
recordDao = new RecordDaoImpl(postgresClientFactory);
+ snapshotDao = new SnapshotDaoImpl(postgresClientFactory);
recordService = new RecordServiceImpl(recordDao);
+ snapshotService = new SnapshotServiceImpl(snapshotDao);
InstanceLinkClient instanceLinkClient = new InstanceLinkClient();
LinkingRulesCache linkingRulesCache = new LinkingRulesCache(instanceLinkClient, vertx);
modifyRecordEventHandler =
- new MarcBibUpdateModifyEventHandler(recordService, new MappingParametersSnapshotCache(vertx), vertx,
+ new MarcBibUpdateModifyEventHandler(recordService, snapshotService, new MappingParametersSnapshotCache(vertx), vertx,
instanceLinkClient, linkingRulesCache);
- Snapshot snapshot = new Snapshot()
+ snapshot = new Snapshot()
+ .withJobExecutionId(UUID.randomUUID().toString())
+ .withProcessingStartedDate(new Date())
+ .withStatus(Snapshot.Status.COMMITTED);
+
+ Snapshot snapshot_2 = new Snapshot()
.withJobExecutionId(UUID.randomUUID().toString())
.withProcessingStartedDate(new Date())
.withStatus(Snapshot.Status.COMMITTED);
@@ -225,10 +270,25 @@ record = new Record()
.withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString()))
.withMetadata(new Metadata());
- ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID);
- SnapshotDaoUtil.save(queryExecutor, snapshot)
+ Record record_2 = new Record()
+ .withId(UUID.randomUUID().toString())
+ .withSnapshotId(snapshot_2.getJobExecutionId())
+ .withGeneration(0)
+ .withMatchedId(UUID.randomUUID().toString())
+ .withRecordType(MARC_BIB)
+ .withRawRecord(rawRecord)
+ .withParsedRecord(parsedRecord)
+ .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString()))
+ .withMetadata(new Metadata());
+
+ ReactiveClassicGenericQueryExecutor queryExecutorLocalTenant = postgresClientFactory.getQueryExecutor(TENANT_ID);
+ ReactiveClassicGenericQueryExecutor queryExecutorCentralTenant = postgresClientFactory.getQueryExecutor(CENTRAL_TENANT_ID);
+
+ SnapshotDaoUtil.save(queryExecutorLocalTenant, snapshot)
.compose(v -> recordService.saveRecord(record, TENANT_ID))
- .compose(v -> SnapshotDaoUtil.save(queryExecutor, snapshotForRecordUpdate))
+ .compose(v -> SnapshotDaoUtil.save(queryExecutorLocalTenant, snapshotForRecordUpdate))
+ .compose(v -> SnapshotDaoUtil.save(queryExecutorCentralTenant, snapshot_2))
+ .compose(v -> recordService.saveRecord(record_2, CENTRAL_TENANT_ID))
.onComplete(context.asyncAssertSuccess());
}
@@ -236,7 +296,20 @@ record = new Record()
public void tearDown(TestContext context) {
wireMockServer.resetRequests();
SnapshotDaoUtil.deleteAll(postgresClientFactory.getQueryExecutor(TENANT_ID))
- .onComplete(context.asyncAssertSuccess());
+      .compose(v -> SnapshotDaoUtil.deleteAll(postgresClientFactory.getQueryExecutor(CENTRAL_TENANT_ID)))
+      .onComplete(context.asyncAssertSuccess());
}
@Test
@@ -244,6 +317,7 @@ public void shouldModifyMarcRecord(TestContext context) {
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String expectedParsedContent =
"{\"leader\":\"00107nam 22000491a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"999\":{\"subfields\":[{\"s\":\"eae222e8-70fd-4422-852c-60d22bae36b8\"}],\"ind1\":\"f\",\"ind2\":\"f\"}}]}";
HashMap payloadContext = new HashMap<>();
@@ -278,19 +352,80 @@ public void shouldModifyMarcRecord(TestContext context) {
Record actualRecord =
Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
- context.assertEquals(getParsedContentWithoutLeader(expectedParsedContent),
- getParsedContentWithoutLeader(actualRecord.getParsedRecord().getContent().toString()));
+ context.assertEquals(getParsedContentWithoutLeaderAndDate(expectedParsedContent),
+ getParsedContentWithoutLeaderAndDate(actualRecord.getParsedRecord().getContent().toString()));
+ validate005Field(context, expectedDate, actualRecord);
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
context.assertEquals(userId, actualRecord.getMetadata().getUpdatedByUserId());
async.complete();
});
}
+ @Test
+ public void shouldModifyMarcRecordOnCentralTenantIfCentralTenantIdIsInContext(TestContext context) {
+ // given
+ Async async = context.async();
+
+ String expectedDate = get005FieldExpectedDate();
+ String expectedParsedContent =
+ "{\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"ind1\":\" \",\"ind2\":\" \",\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"eae222e8-70fd-4422-852c-60d22bae36b8\"}]}}]}";
+ HashMap payloadContext = new HashMap<>();
+ record.getParsedRecord().setContent(Json.encode(record.getParsedRecord().getContent()));
+ payloadContext.put(MARC_BIBLIOGRAPHIC.value(), Json.encode(record));
+ payloadContext.put("CENTRAL_TENANT_ID", CENTRAL_TENANT_ID);
+
+ mappingProfile.getMappingDetails().withMarcMappingOption(MappingDetail.MarcMappingOption.MODIFY);
+ profileSnapshotWrapper.getChildSnapshotWrappers().get(0)
+ .withChildSnapshotWrappers(Collections.singletonList(new ProfileSnapshotWrapper()
+ .withProfileId(mappingProfile.getId())
+ .withContentType(MAPPING_PROFILE)
+ .withContent(JsonObject.mapFrom(mappingProfile).getMap())));
+
+ DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
+ .withTenant(TENANT_ID)
+ .withOkapiUrl(wireMockServer.baseUrl())
+ .withToken(TOKEN)
+ .withJobExecutionId(record.getSnapshotId())
+ .withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+ .withContext(payloadContext)
+ .withProfileSnapshot(profileSnapshotWrapper)
+ .withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0))
+ .withAdditionalProperty(USER_ID_HEADER, userId);
+
+ // when
+ CompletableFuture future = modifyRecordEventHandler.handle(dataImportEventPayload);
+
+ // then
+ future.whenComplete((eventPayload, throwable) -> {
+ context.assertNull(throwable);
+ context.assertEquals(DI_SRS_MARC_BIB_RECORD_MODIFIED.value(), eventPayload.getEventType());
+
+ Record actualRecord =
+ Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
+ recordService.getRecordById(actualRecord.getId(), CENTRAL_TENANT_ID)
+ .onComplete(ar -> {
+ context.assertTrue(ar.succeeded());
+ context.assertTrue(ar.result().isPresent());
+ context.assertEquals(getParsedContentWithoutLeaderAndDate(Json.encode(ar.result().get().getParsedRecord().getContent())),
+ getParsedContentWithoutLeaderAndDate(expectedParsedContent));
+ validate005Field(context, expectedDate, actualRecord);
+ snapshotService.getSnapshotById(snapshot.getJobExecutionId(), CENTRAL_TENANT_ID)
+ .onComplete(ar2 -> {
+ context.assertTrue(ar2.succeeded());
+ context.assertTrue(ar2.result().isPresent());
+ context.assertEquals(ar2.result().get().getJobExecutionId(), snapshot.getJobExecutionId());
+ async.complete();
+ });
+ });
+ });
+ }
+
@Test
public void shouldUpdateMatchedMarcRecordWithFieldFromIncomingRecord(TestContext context) {
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String incomingParsedContent =
"{\"leader\":\"01314nam 22003851a 4500\",\"fields\":[{\"001\":\"ybp7406512\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
String expectedParsedContent =
@@ -327,14 +462,15 @@ public void shouldUpdateMatchedMarcRecordWithFieldFromIncomingRecord(TestContext
// then
future.whenComplete((eventPayload, throwable) -> {
context.assertNull(throwable);
- context.assertEquals(DI_SRS_MARC_BIB_RECORD_MODIFIED.value(), eventPayload.getEventType());
+ context.assertEquals(DI_SRS_MARC_BIB_RECORD_UPDATED.value(), eventPayload.getEventType());
Record actualRecord =
Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
- context.assertEquals(getParsedContentWithoutLeader(expectedParsedContent),
- getParsedContentWithoutLeader(actualRecord.getParsedRecord().getContent().toString()));
+ context.assertEquals(getParsedContentWithoutLeaderAndDate(expectedParsedContent),
+ getParsedContentWithoutLeaderAndDate(actualRecord.getParsedRecord().getContent().toString()));
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
context.assertEquals(dataImportEventPayload.getJobExecutionId(), actualRecord.getSnapshotId());
+ validate005Field(context, expectedDate, actualRecord);
async.complete();
});
}
@@ -344,10 +480,11 @@ public void shouldUpdateMarcRecordAndCreate035FieldAndRemove003Field(TestContext
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String incomingParsedContent =
"{\"leader\":\"01314nam 22003851a 4500\",\"fields\":[{\"001\":\"2300089\"},{\"003\":\"LTSCA\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
String expectedParsedContent =
- "{\"leader\":\"00138nam 22000611a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"035\":{\"subfields\":[{\"a\":\"(LTSCA)2300089\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
+ "{\"leader\":\"00167nam 22000731a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"035\":{\"subfields\":[{\"a\":\"(LTSCA)2300089\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
var instanceId = UUID.randomUUID().toString();
Record incomingRecord = new Record()
.withParsedRecord(new ParsedRecord().withContent(incomingParsedContent))
@@ -380,12 +517,13 @@ public void shouldUpdateMarcRecordAndCreate035FieldAndRemove003Field(TestContext
// then
future.whenComplete((eventPayload, throwable) -> {
context.assertNull(throwable);
- context.assertEquals(DI_SRS_MARC_BIB_RECORD_MODIFIED.value(), eventPayload.getEventType());
+ context.assertEquals(DI_SRS_MARC_BIB_RECORD_UPDATED.value(), eventPayload.getEventType());
Record actualRecord =
Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
- context.assertEquals(expectedParsedContent, actualRecord.getParsedRecord().getContent().toString());
+ context.assertEquals(expectedParsedContent, getParsedContentWithoutDate(actualRecord.getParsedRecord().getContent().toString()));
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
+ validate005Field(context, expectedDate, actualRecord);
async.complete();
});
}
@@ -396,10 +534,11 @@ public void shouldUpdateMarcRecordAndCreate035FieldAndRemove035withDuplicateHrId
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String incomingParsedContent =
"{\"leader\":\"01314nam 22003851a 4500\",\"fields\":[{\"001\":\"2300089\"},{\"003\":\"LTSCA\"},{\"035\":{\"subfields\":[{\"a\":\"ybp7406411\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
String expectedParsedContent =
- "{\"leader\":\"00138nam 22000611a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"035\":{\"subfields\":[{\"a\":\"(LTSCA)2300089\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
+ "{\"leader\":\"00167nam 22000731a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"035\":{\"subfields\":[{\"a\":\"(LTSCA)2300089\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
var instanceId = UUID.randomUUID().toString();
Record incomingRecord = new Record()
.withParsedRecord(new ParsedRecord().withContent(incomingParsedContent))
@@ -432,12 +571,13 @@ public void shouldUpdateMarcRecordAndCreate035FieldAndRemove035withDuplicateHrId
// then
future.whenComplete((eventPayload, throwable) -> {
context.assertNull(throwable);
- context.assertEquals(DI_SRS_MARC_BIB_RECORD_MODIFIED.value(), eventPayload.getEventType());
+ context.assertEquals(DI_SRS_MARC_BIB_RECORD_UPDATED.value(), eventPayload.getEventType());
Record actualRecord =
Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
- context.assertEquals(expectedParsedContent, actualRecord.getParsedRecord().getContent().toString());
+ context.assertEquals(expectedParsedContent, getParsedContentWithoutDate(actualRecord.getParsedRecord().getContent().toString()));
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
+ validate005Field(context, expectedDate, actualRecord);
async.complete();
});
}
@@ -447,6 +587,7 @@ public void shouldModifyMarcBibRecordAndNotRemove003Field(TestContext context) {
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String incomingParsedContent =
"{\"leader\":\"05490cam a2200877Ia 4500\",\"fields\":[{\"001\":\"ocn297303223\"},{\"003\":\"OCoLC\"},{\"005\":\"20210226180151.2\"},{\"006\":\"m o d \"},{\"007\":\"cr unu---uuaua\"},{\"008\":\"090107s1954 dcua ob 100 0 eng d\"},{\"010\":{\"subfields\":[{\"a\":\" 55000367 \"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"015\":{\"subfields\":[{\"a\":\"B67-25185 \"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"016\":{\"subfields\":[{\"a\":\"000002550474\"},{\"2\":\"AU\"}],\"ind1\":\"7\",\"ind2\":\" \"}},{\"019\":{\"subfields\":[{\"a\":\"780330352\"},{\"a\":\"1057910652\"},{\"a\":\"1078369885\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"020\":{\"subfields\":[{\"a\":\"9780841221574\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"020\":{\"subfields\":[{\"a\":\"084122157X\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"020\":{\"subfields\":[{\"a\":\"0841200122\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"020\":{\"subfields\":[{\"a\":\"9780841200128\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"020\":{\"subfields\":[{\"z\":\"9780841200128\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"020\":{\"subfields\":[{\"z\":\"0841200122\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"024\":{\"subfields\":[{\"a\":\"9780841200128\"}],\"ind1\":\"3\",\"ind2\":\" \"}},{\"029\":{\"subfields\":[{\"a\":\"AU@\"},{\"b\":\"000048638076\"}],\"ind1\":\"1\",\"ind2\":\" \"}},{\"035\":{\"subfields\":[{\"a\":\"(OCoLC)on297303223\"},{\"z\":\"(OCoLC)780330352\"},{\"z\":\"(OCoLC)1057910652\"},{\"z\":\"(OCoLC)1078369885\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"035\":{\"subfields\":[{\"a\":\"10.1021/ba-1954-0011\"}],\"ind1\":\"9\",\"ind2\":\" \"}},{\"037\":{\"subfields\":[{\"b\":\"00001081\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"040\":{\"subfields\":[{\"a\":\"OCLCE\"},{\"b\":\"eng\"},{\"e\":\"pn\"},{\"c\":\"OCLCE\"},{\"d\":\"MUU\"},{\"d\":\"OCLCQ\"},{\"d\":\"COO\"},{\"d\":\"OCLCF\"},{\"d\":\"OCLCA\"},{\"d\":\"AU@\"},{\"d\":\"OCLCQ\"},{\"d\":\"OCL\"},{\"d\":\"ACY\"},{\"d\":\"OCLCQ\"},{\"d\":\"OCLCA\"},{\"d\":\"YOU\"},{\"d\":\"CASSC\"},{\"d\":\"OCLCA\"},{\"d\":\"MERER\"},{\"d\":\"OCLCO\"},{\"d\":\"CUY\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"049\":{\"subfields\":[{\"a\":\"AUMM\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"050\":{\"subfields\":[{\"a\":\"QD1\"},{\"b\":\".A355 no. 11\"}],\"ind1\":\" \",\"ind2\":\"4\"}},{\"060\":{\"subfields\":[{\"a\":\"QU 188\"},{\"b\":\"N285 1954\"}],\"ind1\":\" \",\"ind2\":\"4\"}},{\"072\":{\"subfields\":[{\"a\":\"SCI\"},{\"x\":\"013050\"},{\"2\":\"bisacsh\"}],\"ind1\":\" \",\"ind2\":\"7\"}},{\"082\":{\"subfields\":[{\"a\":\"541.3452\"},{\"a\":\"541.375*\"}],\"ind1\":\"0\",\"ind2\":\"4\"}},{\"083\":{\"subfields\":[{\"z\":\"2\"},{\"a\":\"4947\"},{\"2\":\"22\"}],\"ind1\":\"0\",\"ind2\":\" \"}},{\"084\":{\"subfields\":[{\"a\":\"SCI007000\"},{\"2\":\"bisacsh\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"092\":{\"subfields\":[{\"a\":\"551.46 ǂ2 23/eng/2012\"}],\"ind1\":\"0\",\"ind2\":\" \"}},{\"111\":{\"subfields\":[{\"a\":\"Symposium on Natural Plant Hydrocolloids\"},{\"d\":\"(1952 :\"},{\"c\":\"Atlantic City, N.J.)\"}],\"ind1\":\"2\",\"ind2\":\" \"}},{\"245\":{\"subfields\":[{\"a\":\"Natural plant hydrocolloids :\"},{\"b\":\"a collection of papers comprising the Symposium on Natural Plant Hydrocolloids, presented before the Divisions of Colloid Chemistry and Agricultural and Food Chemistry at the 122nd meeting of the American Chemical Society, Atlantic City, N.J., September 1952.\"}],\"ind1\":\"1\",\"ind2\":\"0\"}},{\"260\":{\"subfields\":[{\"a\":\"Washington, D.C. 
:\"},{\"b\":\"American Chemical Society,\"},{\"c\":\"1954.\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"300\":{\"subfields\":[{\"a\":\"1 online resource (iii, 103 pages) :\"},{\"b\":\"illustrations.\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"336\":{\"subfields\":[{\"a\":\"text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"337\":{\"subfields\":[{\"a\":\"computer\"},{\"b\":\"c\"},{\"2\":\"rdamedia\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"338\":{\"subfields\":[{\"a\":\"online resource\"},{\"b\":\"cr\"},{\"2\":\"rdacarrier\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"490\":{\"subfields\":[{\"a\":\"Advances in chemistry series,\"},{\"x\":\"0065-2393 ;\"},{\"v\":\"no. 11\"}],\"ind1\":\"1\",\"ind2\":\" \"}},{\"504\":{\"subfields\":[{\"a\":\"Includes bibliographical references.\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"505\":{\"subfields\":[{\"t\":\"Introductory Remarks /\"},{\"r\":\"STOLOFF, LEONARD /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch001 --\"},{\"t\":\"Calcium Pectinates, Their Preparation and Uses /\"},{\"r\":\"WOODMANSEE, CLINTON W.; BAKER, GEORCE L. /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch002 --\"},{\"t\":\"Factors Influencing Gelation with Pectin /\"},{\"r\":\"OWENS, HARRY S.; SWENSON, HAROLD A.; SCHULTZ, THOMAS H. /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch003 --\"},{\"t\":\"Agar Since 1943 /\"},{\"r\":\"SELBY, HORACE H. /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch004 --\"},{\"t\":\"Technology of Gum Arabic /\"},{\"r\":\"MANTELL, CHARLES L. /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch005 --\"},{\"t\":\"Chemistry, Properties, and Application Of Gum Karaya /\"},{\"r\":\"GOLDSTEIN, ARTHUR M. /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch006 --\"},{\"t\":\"History, Production, and Uses of Tragacanth /\"},{\"r\":\"BEACH, D. C. /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch007 --\"},{\"t\":\"Guar Gum, Locust Bean Gum, and Others /\"},{\"r\":\"WHISTLER, ROY L. /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch008 --\"},{\"t\":\"Some Properties of Locust Bean Gum /\"},{\"r\":\"DEUEL, HANS; NEUKOM, HANS /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch009 --\"},{\"t\":\"Observations on Pectic Substances /\"},{\"r\":\"DEUEL, HANS; SOLMS, JÜRG /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch010 --\"},{\"t\":\"Algin in Review /\"},{\"r\":\"STEINER, ARNOLD B.; McNEELY, WILLIAM H. /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch011 --\"},{\"t\":\"Alginates from Common British Brown Marine Algae /\"},{\"r\":\"BLACK, W. A . P.; WOODWARD, F. N. /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch012 --\"},{\"t\":\"Irish Moss Extractives /\"},{\"r\":\"STOLOFF, LEONARD /\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch013 --\"},{\"t\":\"Effect of Different Ions on Gel Strength Of Red Seaweed Extracts /\"},{\"r\":\"MARSHALL, S. M.; ORR, A. P. 
/\"},{\"u\":\"http://dx.doi.org/10.1021/ba-1954-0011.ch014\"}],\"ind1\":\"0\",\"ind2\":\"0\"}},{\"506\":{\"subfields\":[{\"a\":\"Online full text is restricted to subscribers.\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"583\":{\"subfields\":[{\"a\":\"committed to retain\"},{\"c\":\"20160630\"},{\"d\":\"20310630\"},{\"f\":\"EAST\"},{\"u\":\"https://eastlibraries.org/retained-materials\"},{\"5\":\"DCHS\"}],\"ind1\":\"1\",\"ind2\":\" \"}},{\"588\":{\"subfields\":[{\"a\":\"Print version record.\"}],\"ind1\":\"0\",\"ind2\":\" \"}},{\"650\":{\"subfields\":[{\"a\":\"Biocolloids\"},{\"v\":\"Congresses.\"}],\"ind1\":\" \",\"ind2\":\"0\"}},{\"650\":{\"subfields\":[{\"a\":\"SCIENCE\"},{\"x\":\"Chemistry\"},{\"x\":\"Physical & Theoretical.\"},{\"2\":\"bisacsh\"}],\"ind1\":\" \",\"ind2\":\"7\"}},{\"650\":{\"subfields\":[{\"a\":\"Biocolloids.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst00831997\"}],\"ind1\":\" \",\"ind2\":\"7\"}},{\"650\":{\"subfields\":[{\"a\":\"Colloids\"},{\"x\":\"chemistry.\"}],\"ind1\":\"1\",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Colloids\"},{\"x\":\"economics.\"}],\"ind1\":\"1\",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Pectins\"},{\"x\":\"chemistry.\"}],\"ind1\":\" \",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Agar\"},{\"x\":\"chemistry.\"}],\"ind1\":\" \",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Gum Arabic\"},{\"x\":\"chemistry.\"}],\"ind1\":\" \",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Karaya Gum\"},{\"x\":\"chemistry.\"}],\"ind1\":\" \",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Tragacanth\"},{\"x\":\"history.\"}],\"ind1\":\" \",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Tragacanth\"},{\"x\":\"chemistry.\"}],\"ind1\":\" \",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Plant Gums\"},{\"x\":\"chemistry.\"}],\"ind1\":\" \",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Alginates\"},{\"x\":\"chemistry.\"}],\"ind1\":\" \",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Phaeophyta\"},{\"x\":\"chemistry.\"}],\"ind1\":\" \",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Chondrus\"},{\"x\":\"chemistry.\"}],\"ind1\":\" \",\"ind2\":\"2\"}},{\"650\":{\"subfields\":[{\"a\":\"Rhodophyta\"},{\"x\":\"chemistry.\"}],\"ind1\":\" \",\"ind2\":\"2\"}},{\"655\":{\"subfields\":[{\"a\":\"Electronic books.\"}],\"ind1\":\" \",\"ind2\":\"4\"}},{\"655\":{\"subfields\":[{\"a\":\"Conference papers and proceedings.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst01423772\"}],\"ind1\":\" \",\"ind2\":\"7\"}},{\"710\":{\"subfields\":[{\"a\":\"American Chemical Society.\"},{\"b\":\"Division of Agricultural and Food Chemistry.\"},{\"0\":\"http://id.loc.gov/authorities/names/n79007703\"}],\"ind1\":\"2\",\"ind2\":\" \"}},{\"710\":{\"subfields\":[{\"a\":\"American Chemical Society.\"},{\"b\":\"Division of Colloid and Surface Chemistry.\"},{\"0\":\"http://id.loc.gov/authorities/names/n80109319\"}],\"ind1\":\"2\",\"ind2\":\" \"}},{\"710\":{\"subfields\":[{\"a\":\"American Chemical Society.\"},{\"b\":\"Meeting\"},{\"n\":\"(122nd :\"},{\"d\":\"1952 :\"},{\"c\":\"Atlantic City, N.J.)\"}],\"ind1\":\"2\",\"ind2\":\" \"}},{\"776\":{\"subfields\":[{\"i\":\"Print version:\"},{\"a\":\"American Chemical Society. Division of Colloid and Surface Chemistry.\"},{\"t\":\"Natural plant hydrocolloids.\"},{\"d\":\"Washington, D.C. 
: American Chemical Society, 1954\"},{\"w\":\"(DLC) 55000367\"},{\"w\":\"(OCoLC)280432\"}],\"ind1\":\"0\",\"ind2\":\"8\"}},{\"830\":{\"subfields\":[{\"a\":\"Advances in chemistry series ;\"},{\"v\":\"11.\"}],\"ind1\":\" \",\"ind2\":\"0\"}},{\"850\":{\"subfields\":[{\"a\":\"AAP\"},{\"a\":\"CU\"},{\"a\":\"DLC\"},{\"a\":\"MiU \"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"856\":{\"subfields\":[{\"u\":\"http://silk.library.umass.edu/login?url=https://pubs.acs.org/doi/book/10.1021/ba-1954-0011\"},{\"z\":\"UMass: Link to resource\"}],\"ind1\":\"4\",\"ind2\":\"0\"}},{\"891\":{\"subfields\":[{\"9\":\"853\"},{\"8\":\"1\"},{\"a\":\"(year/year)\"}],\"ind1\":\"3\",\"ind2\":\"3\"}},{\"938\":{\"subfields\":[{\"a\":\"ebrary\"},{\"b\":\"EBRY\"},{\"n\":\"ebr10728707\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
String expectedParsedContent =
@@ -481,7 +622,9 @@ public void shouldModifyMarcBibRecordAndNotRemove003Field(TestContext context) {
var actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
- verifyRecords(context, expectedParsedContent, actualRecord);
+ verifyRecords(context, getParsedContentWithoutDate(expectedParsedContent),
+ getParsedContentWithoutDate(actualRecord.getParsedRecord().getContent().toString()));
+ validate005Field(context, expectedDate, actualRecord);
async.complete();
});
}
@@ -586,7 +729,7 @@ public void shouldNotUpdateLinksWhenIncomingZeroSubfieldIsSameAsExisting(TestCon
"{\"leader\":\"02340cam a2200301Ki 4500\",\"fields\":[{\"001\":\"ybp7406411\"}," +
"{\"100\":{\"subfields\":[{\"a\":\"Chin, Staceyann Test,\"},{\"e\":\"author updated.\"},{\"0\":\"http://id.loc.gov/authorities/names/n2008052404\"},{\"9\":\"5a56ffa8-e274-40ca-8620-34a23b5b45dd\"}],\"ind1\":\"1\",\"ind2\":\" \"}}]}";
String expectedParsedContent =
- "{\"leader\":\"00191cam a2200049Ki 4500\",\"fields\":[{\"001\":\"ybp7406411\"}," +
+ "{\"leader\":\"00220cam a2200061Ki 4500\",\"fields\":[{\"001\":\"ybp7406411\"}," +
"{\"100\":{\"subfields\":[{\"a\":\"Chin, Staceyann Test,\"},{\"e\":\"author updated.\"},{\"0\":\"http://id.loc.gov/authorities/names/n2008052404\"},{\"9\":\"5a56ffa8-e274-40ca-8620-34a23b5b45dd\"}],\"ind1\":\"1\",\"ind2\":\" \"}}]}";
verifyBibRecordUpdate(incomingParsedContent, expectedParsedContent, 1, 0, context);
@@ -599,7 +742,7 @@ public void shouldUpdateLinksWhenIncomingZeroSubfieldIsNull(TestContext context)
"{\"leader\":\"02340cam a2200301Ki 4500\",\"fields\":[{\"001\":\"ybp7406411\"}," +
"{\"100\":{\"subfields\":[{\"a\":\"Chin, Staceyann Test,\"},{\"e\":\"author updated.\"}],\"ind1\":\"1\",\"ind2\":\" \"}}]}";
String expectedParsedContent =
- "{\"leader\":\"00104cam a2200049Ki 4500\",\"fields\":[{\"001\":\"ybp7406411\"}," +
+ "{\"leader\":\"00133cam a2200061Ki 4500\",\"fields\":[{\"001\":\"ybp7406411\"}," +
"{\"100\":{\"subfields\":[{\"a\":\"Chin, Staceyann Test,\"},{\"e\":\"author updated.\"}],\"ind1\":\"1\",\"ind2\":\" \"}}]}";
verifyBibRecordUpdate(incomingParsedContent, expectedParsedContent, 1, 1, context);
@@ -612,7 +755,7 @@ public void shouldUnlinkBibFieldWhenIncomingZeroSubfieldIsDifferent(TestContext
"{\"leader\":\"02340cam a2200301Ki 4500\",\"fields\":[{\"001\":\"ybp7406411\"}," +
"{\"100\":{\"subfields\":[{\"a\":\"Chin, Staceyann Test,\"},{\"e\":\"author updated.\"},{\"0\":\"test different 0 subfield\"},{\"9\":\"5a56ffa8-e274-40ca-8620-34a23b5b45dd\"}],\"ind1\":\"1\",\"ind2\":\" \"}}]}";
String expectedParsedContent =
- "{\"leader\":\"00131cam a2200049Ki 4500\",\"fields\":[{\"001\":\"ybp7406411\"}," +
+ "{\"leader\":\"00160cam a2200061Ki 4500\",\"fields\":[{\"001\":\"ybp7406411\"}," +
"{\"100\":{\"subfields\":[{\"a\":\"Chin, Staceyann Test,\"},{\"e\":\"author updated.\"},{\"0\":\"test different 0 subfield\"}],\"ind1\":\"1\",\"ind2\":\" \"}}]}";
verifyBibRecordUpdate(incomingParsedContent, expectedParsedContent, 1, 1, context);
@@ -700,6 +843,7 @@ public void shouldReturnExceptionForDuplicateRecord(TestContext context) {
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String incomingParsedContent =
"{\"leader\":\"01314nam 22003851a 4500\",\"fields\":[{\"001\":\"ybp7406512\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
String expectedParsedContent =
@@ -752,14 +896,15 @@ public void shouldReturnExceptionForDuplicateRecord(TestContext context) {
// then
future1.whenComplete((eventPayload, throwable) -> {
context.assertNull(throwable);
- context.assertEquals(DI_SRS_MARC_BIB_RECORD_MODIFIED.value(), eventPayload.getEventType());
+ context.assertEquals(DI_SRS_MARC_BIB_RECORD_UPDATED.value(), eventPayload.getEventType());
Record actualRecord =
Json.decodeValue(dataImportEventPayloadOriginalRecord.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
- context.assertEquals(getParsedContentWithoutLeader(expectedParsedContent),
- getParsedContentWithoutLeader(actualRecord.getParsedRecord().getContent().toString()));
+ context.assertEquals(getParsedContentWithoutLeaderAndDate(expectedParsedContent),
+ getParsedContentWithoutLeaderAndDate(actualRecord.getParsedRecord().getContent().toString()));
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
context.assertEquals(dataImportEventPayloadOriginalRecord.getJobExecutionId(), actualRecord.getSnapshotId());
+ validate005Field(context, expectedDate, actualRecord);
});
future2.whenComplete((eventPayload, throwable) -> {
@@ -797,6 +942,7 @@ private void verifyBibRecordUpdate(String incomingParsedContent, String expected
wireMockServer.stubFor(put(URL_PATH_PATTERN).willReturn(aResponse().withStatus(202)));
// given
+ String expectedDate = get005FieldExpectedDate();
Async async = context.async();
Snapshot snapshotForRecordUpdate = new Snapshot().withJobExecutionId(UUID.randomUUID().toString())
.withStatus(Snapshot.Status.PARSING_IN_PROGRESS);
@@ -856,17 +1002,19 @@ private void verifyBibRecordUpdate(String incomingParsedContent, String expected
.whenComplete((eventPayload, throwable) -> {
var actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
- context.assertEquals(DI_SRS_MARC_BIB_RECORD_MODIFIED.value(), eventPayload.getEventType());
+ context.assertEquals(DI_SRS_MARC_BIB_RECORD_UPDATED.value(), eventPayload.getEventType());
context.assertNull(throwable);
- verifyRecords(context, expectedParsedContent, actualRecord);
+ verifyRecords(context, getParsedContentWithoutDate(expectedParsedContent),
+ getParsedContentWithoutDate(actualRecord.getParsedRecord().getContent().toString()));
+ validate005Field(context, expectedDate, actualRecord);
verifyGetAndPut(context, getRequestCount, putRequestCount);
async.complete();
});
});
}
- private void verifyGetAndPut(TestContext context, int getRequestCount, int putRequestCount){
+ private void verifyGetAndPut(TestContext context, int getRequestCount, int putRequestCount) {
try {
wireMockServer.verify(getRequestCount, getRequestedFor(URL_PATH_PATTERN));
wireMockServer.verify(putRequestCount, putRequestedFor(URL_PATH_PATTERN));
@@ -875,18 +1023,40 @@ private void verifyGetAndPut(TestContext context, int getRequestCount, int putRe
}
}
- private void verifyRecords(TestContext context, String expectedParsedContent, Record actualRecord){
+ private void verifyRecords(TestContext context, String expectedParsedContent, String actualParsedContent) {
try {
context.assertEquals(
mapper.readTree(expectedParsedContent),
- mapper.readTree(actualRecord.getParsedRecord().getContent().toString()));
+ mapper.readTree(actualParsedContent));
} catch (JsonProcessingException e) {
context.fail(e);
}
}
- public static String getParsedContentWithoutLeader(String parsedContent) {
+
+ public static String getParsedContentWithoutLeaderAndDate(String parsedContent) {
JsonObject parsedContentAsJson = new JsonObject(parsedContent);
parsedContentAsJson.remove("leader");
+ remove005FieldFromRecord(parsedContentAsJson);
+
return parsedContentAsJson.encode();
}
+
+ public static String getParsedContentWithoutDate(String parsedContent) {
+ JsonObject parsedContentAsJson = new JsonObject(parsedContent);
+ remove005FieldFromRecord(parsedContentAsJson);
+
+ return parsedContentAsJson.encode();
+ }
+
+ private static JsonObject remove005FieldFromRecord(JsonObject recordJson) {
+ JsonArray fieldsArray = recordJson.getJsonArray("fields");
+ for (int i = 0; i < fieldsArray.size(); i++) {
+ JsonObject fieldObject = fieldsArray.getJsonObject(i);
+ if (fieldObject.containsKey(TAG_005)) {
+ fieldsArray.remove(i);
+ break;
+ }
+ }
+ return recordJson;
+ }
}
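
Two patterns in this file are worth noting. First, assertions normalize parsed content before comparing, because the leader's record-length digits and the 005 timestamp legitimately differ on every write; the timestamp is then still verified separately against its expected date prefix:

```java
// Typical assertion shape used throughout these tests.
context.assertEquals(
  getParsedContentWithoutLeaderAndDate(expectedParsedContent),
  getParsedContentWithoutLeaderAndDate(actualRecord.getParsedRecord().getContent().toString()));
validate005Field(context, expectedDate, actualRecord); // compares the leading chars of 005
```

Second, `shouldModifyMarcRecordOnCentralTenantIfCentralTenantIdIsInContext` pins down the cross-tenant contract: when the payload context carries a `CENTRAL_TENANT_ID` entry, the handler persists the modified record, and a copy of the job's snapshot, under that tenant's schema instead of the event's own tenant. Note that `remove005FieldFromRecord` removes only the first 005 it finds, which suffices because MARC 21 defines 005 as non-repeatable.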
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibliographicMatchEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibliographicMatchEventHandlerTest.java
index c0887f550..acfa1ec1f 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibliographicMatchEventHandlerTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibliographicMatchEventHandlerTest.java
@@ -1,9 +1,13 @@
package org.folio.services;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.github.tomakehurst.wiremock.client.WireMock;
+import com.github.tomakehurst.wiremock.matching.RegexPattern;
+import com.github.tomakehurst.wiremock.matching.UrlPathPattern;
import com.google.common.collect.Lists;
import io.vertx.core.Future;
import io.vertx.core.json.Json;
+import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.unit.Async;
import io.vertx.ext.unit.TestContext;
@@ -16,6 +20,8 @@
import org.folio.dao.RecordDao;
import org.folio.dao.RecordDaoImpl;
import org.folio.dao.util.SnapshotDaoUtil;
+import org.folio.processing.exceptions.MatchingException;
+import org.folio.rest.client.TenantClient;
import org.folio.rest.jaxrs.model.EntityType;
import org.folio.rest.jaxrs.model.ExternalIdsHolder;
import org.folio.rest.jaxrs.model.Field;
@@ -24,7 +30,12 @@
import org.folio.rest.jaxrs.model.ProfileSnapshotWrapper;
import org.folio.rest.jaxrs.model.RawRecord;
import org.folio.rest.jaxrs.model.Record;
+import org.folio.rest.jaxrs.model.RecordCollection;
import org.folio.rest.jaxrs.model.Snapshot;
+import org.folio.rest.jaxrs.model.TenantAttributes;
+import org.folio.rest.jaxrs.model.TenantJob;
+import org.folio.services.caches.ConsortiumConfigurationCache;
+import org.folio.services.entities.ConsortiumConfiguration;
import org.folio.services.handlers.match.MarcBibliographicMatchEventHandler;
import org.junit.After;
import org.junit.Assert;
@@ -32,6 +43,7 @@
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
+import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import java.io.IOException;
@@ -39,8 +51,10 @@
import java.util.Date;
import java.util.HashMap;
import java.util.List;
+import java.util.Optional;
import java.util.UUID;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
import static java.util.Collections.singletonList;
import static org.folio.MatchDetail.MatchCriterion.EXACTLY_MATCHES;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MATCHED;
@@ -54,12 +68,15 @@
@RunWith(VertxUnitRunner.class)
public class MarcBibliographicMatchEventHandlerTest extends AbstractLBServiceTest {
- private static final String PARSED_CONTENT_WITH_ADDITIONAL_FIELDS = "{\"leader\":\"01589ccm a2200373 4500\",\"fields\":[{ \"001\": \"12345\" }, {\"035\": {\"ind1\": \" \", \"ind2\": \" \", \"subfields\": [{\"a\": \"nin00009530412\"}]}}, {\"035\": {\"ind1\": \" \", \"ind2\": \" \", \"subfields\": [{\"a\": \"12345\"}]}}, {\"024\": {\"ind1\": \"8\", \"ind2\": \"0\", \"subfields\": [{\"a\": \"test123\"}]}}, {\"024\": {\"ind1\": \"1\", \"ind2\": \"1\", \"subfields\": [{\"a\": \"test45\"}]}}, {\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Neue Ausgabe sämtlicher Werke,\"}]}},{\"948\":{\"ind1\":\"\",\"ind2\":\"\",\"subfields\":[{\"a\":\"acf4f6e2-115c-4509-9d4c-536c758ef917\"},{\"b\":\"681394b4-10d8-4cb1-a618-0f9bd6152119\"},{\"d\":\"12345\"},{\"e\":\"lts\"},{\"x\":\"addfast\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"acf4f6e2-115c-4509-9d4c-536c758ef917\"}, {\"i\":\"681394b4-10d8-4cb1-a618-0f9bd6152119\"}]}}]}";
+ private static final String PARSED_CONTENT_WITH_ADDITIONAL_FIELDS = "{\"leader\":\"01589ccm a2200373 4500\",\"fields\":[{ \"001\": \"12345\" }, {\"007\": \"12569\"},{\"035\": {\"ind1\": \" \", \"ind2\": \" \", \"subfields\": [{\"a\": \"nin00009530412\"}]}}, {\"035\": {\"ind1\": \" \", \"ind2\": \" \", \"subfields\": [{\"a\": \"12345\"}]}}, {\"024\": {\"ind1\": \"8\", \"ind2\": \"0\", \"subfields\": [{\"a\": \"test123\"}]}}, {\"024\": {\"ind1\": \"1\", \"ind2\": \"1\", \"subfields\": [{\"a\": \"test45\"}]}}, {\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Neue Ausgabe sämtlicher Werke,\"}]}},{\"948\":{\"ind1\":\"\",\"ind2\":\"\",\"subfields\":[{\"a\":\"acf4f6e2-115c-4509-9d4c-536c758ef917\"},{\"b\":\"681394b4-10d8-4cb1-a618-0f9bd6152119\"},{\"d\":\"12345\"},{\"e\":\"lts\"},{\"x\":\"addfast\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"acf4f6e2-115c-4509-9d4c-536c758ef917\"}, {\"i\":\"681394b4-10d8-4cb1-a618-0f9bd6152119\"}]}}]}";
private static final String PARSED_CONTENT_WITH_NO_999_FIELD = "{\"leader\": \"01589ccm a2200373 4500\", \"fields\": [{\"001\": \"12345\"}, {\"035\": {\"ind1\": \" \", \"ind2\": \" \", \"subfields\": [{\"a\": \"nin00009530412\"}]}}, {\"245\": {\"ind1\": \"1\", \"ind2\": \"0\", \"subfields\": [{\"a\": \"Neue Ausgabe sämtlicher Werke,\"}]}}, {\"948\": {\"ind1\": \"\", \"ind2\": \"\", \"subfields\": [{\"a\": \"acf4f6e2-115c-4509-9d4c-536c758ef917\"}, {\"b\": \"681394b4-10d8-4cb1-a618-0f9bd6152119\"}, {\"d\": \"12345\"}, {\"e\": \"lts\"}, {\"x\": \"addfast\"}]}}]}";
private static final String PARSED_CONTENT_WITH_MULTIPLE_035_FIELD = "{\"leader\": \"01589ccm a2200373 4500\", \"fields\": [{\"001\": \"12345\"}, {\"035\": {\"ind1\": \" \", \"ind2\": \" \", \"subfields\": [{\"a\": \"12345\"}]}}, {\"035\": {\"ind1\": \" \", \"ind2\": \" \", \"subfields\": [{\"a\": \"nin00009530412\"}]}}, {\"245\": {\"ind1\": \"1\", \"ind2\": \"0\", \"subfields\": [{\"a\": \"Neue Ausgabe sämtlicher Werke,\"}]}}, {\"948\": {\"ind1\": \"\", \"ind2\": \"\", \"subfields\": [{\"a\": \"acf4f6e2-115c-4509-9d4c-536c758ef917\"}, {\"b\": \"681394b4-10d8-4cb1-a618-0f9bd6152119\"}, {\"d\": \"12345\"}, {\"e\": \"lts\"}, {\"x\": \"addfast\"}]}}]}";
+ private static final String PARSED_CONTENT_WITH_MULTIPLE_007_FIELD = "{\"leader\": \"01589ccm a2200373 4500\", \"fields\": [{\"001\": \"12345\"},{\"007\": \"12569\"},{\"007\": \"364345\"} , {\"245\": {\"ind1\": \"1\", \"ind2\": \"0\", \"subfields\": [{\"a\": \"Neue Ausgabe sämtlicher Werke,\"}]}}, {\"948\": {\"ind1\": \"\", \"ind2\": \"\", \"subfields\": [{\"a\": \"acf4f6e2-115c-4509-9d4c-536c758ef917\"}, {\"b\": \"681394b4-10d8-4cb1-a618-0f9bd6152119\"}, {\"d\": \"12345\"}, {\"e\": \"lts\"}, {\"x\": \"addfast\"}]}}]}";
private static final String PARSED_CONTENT_WITH_MULTIPLE_024_FIELD = "{\"leader\": \"01589ccm a2200373 4500\", \"fields\": [{\"001\": \"12345\"}, {\"024\": {\"ind1\": \"1\", \"ind2\": \"2\", \"subfields\": [{\"a\": \"test\"}]}}, {\"024\": {\"ind1\": \"3\", \"ind2\": \" \", \"subfields\": [{\"a\": \"test45\"}]}}, {\"245\": {\"ind1\": \"1\", \"ind2\": \"0\", \"subfields\": [{\"a\": \"Neue Ausgabe sämtlicher Werke,\"}]}}, {\"948\": {\"ind1\": \"\", \"ind2\": \"\", \"subfields\": [{\"a\": \"acf4f6e2-115c-4509-9d4c-536c758ef917\"}, {\"b\": \"681394b4-10d8-4cb1-a618-0f9bd6152119\"}, {\"d\": \"12345\"}, {\"e\": \"lts\"}, {\"x\": \"addfast\"}]}}]}";
-
private static final String MATCHED_MARC_BIB_KEY = "MATCHED_MARC_BIBLIOGRAPHIC";
+ private static final String USER_TENANTS = "/user-tenants";
+ private static final String CENTRAL_TENANT_ID = "centralTenantId";
+ private static final String CONSORTIUM_ID = "consortiumId";
private RecordDao recordDao;
private MarcBibliographicMatchEventHandler handler;
private static String rawRecordContent;
@@ -67,16 +84,42 @@ public class MarcBibliographicMatchEventHandlerTest extends AbstractLBServiceTes
private Record existingRecord;
@BeforeClass
- public static void setUpClass() throws IOException {
+ public static void setUpBeforeClass(TestContext context) throws IOException {
rawRecordContent = new ObjectMapper().readValue(TestUtil.readFileFromPath(RAW_MARC_RECORD_CONTENT_SAMPLE_PATH), String.class);
+ Async async = context.async();
+ TenantClient tenantClient = new TenantClient(OKAPI_URL, CENTRAL_TENANT_ID, TOKEN);
+ try {
+ tenantClient.postTenant(new TenantAttributes().withModuleTo("3.2.0"), res2 -> {
+        if (res2.result().statusCode() == 204) {
+          // schema already provisioned; release the async so setup can finish
+          async.complete();
+          return;
+        }
+ if (res2.result().statusCode() == 201) {
+ tenantClient.getTenantByOperationId(res2.result().bodyAsJson(TenantJob.class).getId(), 60000, context.asyncAssertSuccess(res3 -> {
+ context.assertTrue(res3.bodyAsJson(TenantJob.class).getComplete());
+ String error = res3.bodyAsJson(TenantJob.class).getError();
+ if (error != null) {
+ context.assertTrue(error.contains("EventDescriptor was not registered for eventType"));
+ }
+ }));
+ } else {
+ context.assertEquals("Failed to make post tenant. Received status code 400", res2.result().bodyAsString());
+ }
+ async.complete();
+ });
+ } catch (Exception e) {
+ e.printStackTrace();
+ async.complete();
+ }
}
@Before
public void setUp(TestContext context) {
MockitoAnnotations.initMocks(this);
+ wireMockServer.stubFor(get(new UrlPathPattern(new RegexPattern(USER_TENANTS), true))
+ .willReturn(WireMock.ok().withBody(Json.encode(new JsonObject().put("userTenants", new JsonArray())))));
recordDao = new RecordDaoImpl(postgresClientFactory);
- handler = new MarcBibliographicMatchEventHandler(recordDao);
+ handler = new MarcBibliographicMatchEventHandler(recordDao, new ConsortiumConfigurationCache(vertx), vertx);
Async async = context.async();
Snapshot existingRecordSnapshot = new Snapshot()
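
The `/user-tenants` stub above is what keeps most of these tests single-tenant: an empty `userTenants` array makes the `ConsortiumConfigurationCache` treat the tenant as standalone, so the handler does not widen matching to a central tenant. For contrast, a consortium-member response might be stubbed as below (the field names are an assumption based on this file's `CENTRAL_TENANT_ID` and `CONSORTIUM_ID` constants, not confirmed by the diff):

```java
// Hypothetical consortium-member payload shape.
wireMockServer.stubFor(get(new UrlPathPattern(new RegexPattern(USER_TENANTS), true))
  .willReturn(WireMock.ok().withBody(new JsonObject()
    .put("userTenants", new JsonArray().add(new JsonObject()
      .put("centralTenantId", CENTRAL_TENANT_ID)
      .put("consortiumId", CONSORTIUM_ID)))
    .encode())));
```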
@@ -116,10 +159,16 @@ public void setUp(TestContext context) {
SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshots).onComplete(save -> {
if (save.failed()) {
context.fail(save.cause());
+ async.complete();
+ } else {
+ SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(CENTRAL_TENANT_ID), snapshots).onComplete(saveCentralTenant -> {
+          if (saveCentralTenant.failed()) {
+            context.fail(saveCentralTenant.cause());
+ }
+ async.complete();
+ });
}
- async.complete();
});
-
}
@After
@@ -128,8 +177,15 @@ public void cleanUp(TestContext context) {
SnapshotDaoUtil.deleteAll(postgresClientFactory.getQueryExecutor(TENANT_ID)).onComplete(delete -> {
if (delete.failed()) {
context.fail(delete.cause());
+ async.complete();
+ } else {
+ SnapshotDaoUtil.deleteAll(postgresClientFactory.getQueryExecutor(CENTRAL_TENANT_ID)).onComplete(deleteCentralTenant -> {
+          if (deleteCentralTenant.failed()) {
+            context.fail(deleteCentralTenant.cause());
+ }
+ async.complete();
+ });
}
- async.complete();
});
}
@@ -143,6 +199,8 @@ public void shouldMatchByMatchedIdField(TestContext context) {
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
.withContext(payloadContext)
.withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
.withCurrentNode(new ProfileSnapshotWrapper()
.withId(UUID.randomUUID().toString())
.withContentType(MATCH_PROFILE)
@@ -191,6 +249,8 @@ public void shouldMatchByMultiple035fields(TestContext context) {
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
.withContext(payloadContext)
.withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
.withCurrentNode(new ProfileSnapshotWrapper()
.withId(UUID.randomUUID().toString())
.withContentType(MATCH_PROFILE)
@@ -228,6 +288,56 @@ public void shouldMatchByMultiple035fields(TestContext context) {
}));
}
+ @Test
+ public void shouldMatchByMultipleControlledFields(TestContext context) {
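+    // Control-field (007) matching compares whole field values, so indicators
+    // and subfield are intentionally left blank in both match expressions below.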
+ Async async = context.async();
+
+    HashMap<String, String> payloadContext = new HashMap<>();
+ incomingRecord.getParsedRecord().setContent(PARSED_CONTENT_WITH_MULTIPLE_007_FIELD);
+ payloadContext.put(EntityType.MARC_BIBLIOGRAPHIC.value(), Json.encode(incomingRecord));
+
+ DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
+ .withContext(payloadContext)
+ .withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
+ .withCurrentNode(new ProfileSnapshotWrapper()
+ .withId(UUID.randomUUID().toString())
+ .withContentType(MATCH_PROFILE)
+ .withContent(new MatchProfile()
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withMatchDetails(singletonList(new MatchDetail()
+ .withMatchCriterion(EXACTLY_MATCHES)
+ .withExistingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("007"),
+ new Field().withLabel("indicator1").withValue(""),
+ new Field().withLabel("indicator2").withValue(""),
+ new Field().withLabel("recordSubfield").withValue(""))))
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("007"),
+ new Field().withLabel("indicator1").withValue(""),
+ new Field().withLabel("indicator2").withValue(""),
+ new Field().withLabel("recordSubfield").withValue(""))))))));
+
+ recordDao.saveRecord(existingRecord, TENANT_ID)
+ .onComplete(context.asyncAssertSuccess())
+ .onSuccess(record -> handler.handle(dataImportEventPayload)
+ .whenComplete((updatedEventPayload, throwable) -> {
+ context.assertNull(throwable);
+ context.assertEquals(1, updatedEventPayload.getEventsChain().size());
+ context.assertEquals(updatedEventPayload.getEventType(), DI_SRS_MARC_BIB_RECORD_MATCHED.value());
+ context.assertEquals(new JsonObject(updatedEventPayload.getContext().get(MATCHED_MARC_BIB_KEY)).mapTo(Record.class), record);
+ async.complete();
+ }));
+ }
+
@Test
public void shouldMatchByMultiple024fieldsWithWildcardsInd(TestContext context) {
Async async = context.async();
@@ -239,6 +349,8 @@ public void shouldMatchByMultiple024fieldsWithWildcardsInd(TestContext context)
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
.withContext(payloadContext)
.withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
.withCurrentNode(new ProfileSnapshotWrapper()
.withId(UUID.randomUUID().toString())
.withContentType(MATCH_PROFILE)
@@ -286,6 +398,8 @@ public void shouldMatchByInstanceIdField(TestContext context) {
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
.withContext(payloadContext)
.withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
.withCurrentNode(new ProfileSnapshotWrapper()
.withId(UUID.randomUUID().toString())
.withContentType(MATCH_PROFILE)
@@ -333,6 +447,8 @@ public void shouldMatchByInstanceHridField(TestContext context) {
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
.withContext(payloadContext)
.withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
.withCurrentNode(new ProfileSnapshotWrapper()
.withId(UUID.randomUUID().toString())
.withContentType(MATCH_PROFILE)
@@ -380,6 +496,8 @@ public void shouldNotMatchByMatchedIdField(TestContext context) {
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
.withContext(payloadContext)
.withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
.withCurrentNode(new ProfileSnapshotWrapper()
.withId(UUID.randomUUID().toString())
.withContentType(MATCH_PROFILE)
@@ -426,6 +544,8 @@ public void shouldNotMatchByMatchedIdFieldIfNotMatch(TestContext context) {
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
.withContext(payloadContext)
.withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
.withCurrentNode(new ProfileSnapshotWrapper()
.withId(UUID.randomUUID().toString())
.withContentType(MATCH_PROFILE)
@@ -475,6 +595,8 @@ public void shouldNotMatchRecordBy035aFieldIfRecordExternalIdIsNull(TestContext
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
.withContext(payloadContext)
.withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
.withCurrentNode(new ProfileSnapshotWrapper()
.withId(UUID.randomUUID().toString())
.withContentType(MATCH_PROFILE)
@@ -526,6 +648,8 @@ public void shouldReturnTrueWhenHandlerIsEligibleForProfile() {
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
.withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
.withEventType("DI_SRS_MARC_BIB_RECORD_CREATED")
.withContext(new HashMap<>())
.withCurrentNode(profileSnapshotWrapper);
@@ -551,6 +675,8 @@ public void shouldReturnFalseWhenHandlerIsNotEligibleForProfile() {
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
.withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
.withEventType("DI_SRS_MARC_BIB_RECORD_CREATED")
.withContext(new HashMap<>())
.withCurrentNode(profileSnapshotWrapper);
@@ -576,6 +702,8 @@ public void shouldReturnFalseWhenNotMatchProfileForProfile() {
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
.withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
.withEventType("DI_SRS_MARC_BIB_RECORD_CREATED")
.withContext(new HashMap<>())
.withCurrentNode(profileSnapshotWrapper);
@@ -584,4 +712,318 @@ public void shouldReturnFalseWhenNotMatchProfileForProfile() {
Assert.assertFalse(isEligible);
}
+
+ @Test
+ public void shouldNotMatchByMatchedIdFieldAtLocalTenantAndMatchAtCentralTenant(TestContext context) {
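+    // The existing record lives only in the central tenant: after a miss in the
+    // local tenant the handler is expected to consult /user-tenants, retry the
+    // match centrally, and put CENTRAL_TENANT_ID into the payload context.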
+ Async async = context.async();
+
+    HashMap<String, String> payloadContext = new HashMap<>();
+ payloadContext.put(EntityType.MARC_BIBLIOGRAPHIC.value(), Json.encode(incomingRecord));
+
+ JsonObject centralTenantIdResponse = new JsonObject()
+ .put("userTenants", new JsonArray().add(new JsonObject().put("centralTenantId", CENTRAL_TENANT_ID).put("consortiumId", CONSORTIUM_ID)));
+
+ wireMockServer.stubFor(get(new UrlPathPattern(new RegexPattern(USER_TENANTS), true))
+ .willReturn(WireMock.ok().withBody(centralTenantIdResponse.encode())));
+
+ DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
+ .withContext(payloadContext)
+ .withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
+ .withCurrentNode(new ProfileSnapshotWrapper()
+ .withId(UUID.randomUUID().toString())
+ .withContentType(MATCH_PROFILE)
+ .withContent(new MatchProfile()
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withMatchDetails(singletonList(new MatchDetail()
+ .withMatchCriterion(EXACTLY_MATCHES)
+ .withExistingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("999"),
+ new Field().withLabel("indicator1").withValue("f"),
+ new Field().withLabel("indicator2").withValue("f"),
+ new Field().withLabel("recordSubfield").withValue("s"))))
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("948"),
+ new Field().withLabel("indicator1").withValue(""),
+ new Field().withLabel("indicator2").withValue(""),
+ new Field().withLabel("recordSubfield").withValue("a"))))))));
+
+ recordDao.saveRecord(existingRecord, CENTRAL_TENANT_ID)
+ .onComplete(context.asyncAssertSuccess())
+ .onSuccess(record -> handler.handle(dataImportEventPayload)
+ .whenComplete((updatedEventPayload, throwable) -> {
+ context.assertNull(throwable);
+ context.assertEquals(1, updatedEventPayload.getEventsChain().size());
+ context.assertEquals(updatedEventPayload.getEventType(), DI_SRS_MARC_BIB_RECORD_MATCHED.value());
+ context.assertEquals(new JsonObject(updatedEventPayload.getContext().get(MATCHED_MARC_BIB_KEY)).mapTo(Record.class), record);
+ context.assertEquals(updatedEventPayload.getContext().get("CENTRAL_TENANT_ID"), CENTRAL_TENANT_ID);
+ async.complete();
+ }));
+ }
+
+ @Test
+ public void shouldMatchByMatchedIdFieldAtLocalTenantAndNotMatchAtCentralTenant(TestContext context) {
+ Async async = context.async();
+
+    HashMap<String, String> payloadContext = new HashMap<>();
+ payloadContext.put(EntityType.MARC_BIBLIOGRAPHIC.value(), Json.encode(incomingRecord));
+
+ JsonObject centralTenantIdResponse = new JsonObject()
+ .put("userTenants", new JsonArray().add(new JsonObject().put("centralTenantId", CENTRAL_TENANT_ID).put("consortiumId", CONSORTIUM_ID)));
+
+ wireMockServer.stubFor(get(new UrlPathPattern(new RegexPattern(USER_TENANTS), true))
+ .willReturn(WireMock.ok().withBody(centralTenantIdResponse.encode())));
+
+ DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
+ .withContext(payloadContext)
+ .withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
+ .withCurrentNode(new ProfileSnapshotWrapper()
+ .withId(UUID.randomUUID().toString())
+ .withContentType(MATCH_PROFILE)
+ .withContent(new MatchProfile()
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withMatchDetails(singletonList(new MatchDetail()
+ .withMatchCriterion(EXACTLY_MATCHES)
+ .withExistingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("999"),
+ new Field().withLabel("indicator1").withValue("f"),
+ new Field().withLabel("indicator2").withValue("f"),
+ new Field().withLabel("recordSubfield").withValue("s"))))
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("948"),
+ new Field().withLabel("indicator1").withValue(""),
+ new Field().withLabel("indicator2").withValue(""),
+ new Field().withLabel("recordSubfield").withValue("a"))))))));
+
+ recordDao.saveRecord(existingRecord, TENANT_ID)
+ .onComplete(context.asyncAssertSuccess())
+ .onSuccess(record -> handler.handle(dataImportEventPayload)
+ .whenComplete((updatedEventPayload, throwable) -> {
+ context.assertNull(throwable);
+ context.assertEquals(1, updatedEventPayload.getEventsChain().size());
+ context.assertEquals(updatedEventPayload.getEventType(), DI_SRS_MARC_BIB_RECORD_MATCHED.value());
+ context.assertEquals(new JsonObject(updatedEventPayload.getContext().get(MATCHED_MARC_BIB_KEY)).mapTo(Record.class), record);
+ async.complete();
+ }));
+ }
+
+ @Test
+ public void shouldNotMatchByMatchedIdFieldAtCentralTenantIfItSameAsLocalTenant(TestContext context) {
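+    // When the payload tenant is already the central tenant, the handler must not
+    // perform a second cross-tenant lookup; the mocked DAO verifies a single call.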
+ Async async = context.async();
+
+ recordDao = Mockito.mock(RecordDao.class);
+    List<Record> records = new ArrayList<>();
+ records.add(new Record());
+ Mockito.doAnswer(invocationOnMock -> Future.succeededFuture(new RecordCollection().withTotalRecords(1).withRecords(records)))
+ .when(recordDao).getRecords(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.anyInt(), Mockito.anyInt(), Mockito.any());
+
+ ConsortiumConfigurationCache consortiumConfigurationCache = Mockito.mock(ConsortiumConfigurationCache.class);
+
+ Mockito.doAnswer(invocationOnMock -> Future.succeededFuture(Optional.of(new ConsortiumConfiguration(CENTRAL_TENANT_ID, CONSORTIUM_ID))))
+ .when(consortiumConfigurationCache).get(Mockito.any());
+
+ handler = new MarcBibliographicMatchEventHandler(recordDao, consortiumConfigurationCache, vertx);
+
+    HashMap<String, String> payloadContext = new HashMap<>();
+ payloadContext.put(EntityType.MARC_BIBLIOGRAPHIC.value(), Json.encode(incomingRecord));
+
+ JsonObject centralTenantIdResponse = new JsonObject()
+ .put("userTenants", new JsonArray().add(new JsonObject().put("centralTenantId", CENTRAL_TENANT_ID).put("consortiumId", CONSORTIUM_ID)));
+
+ wireMockServer.stubFor(get(new UrlPathPattern(new RegexPattern(USER_TENANTS), true))
+ .willReturn(WireMock.ok().withBody(centralTenantIdResponse.encode())));
+
+ DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
+ .withContext(payloadContext)
+ .withTenant(CENTRAL_TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
+ .withCurrentNode(new ProfileSnapshotWrapper()
+ .withId(UUID.randomUUID().toString())
+ .withContentType(MATCH_PROFILE)
+ .withContent(new MatchProfile()
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withMatchDetails(singletonList(new MatchDetail()
+ .withMatchCriterion(EXACTLY_MATCHES)
+ .withExistingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("999"),
+ new Field().withLabel("indicator1").withValue("f"),
+ new Field().withLabel("indicator2").withValue("f"),
+ new Field().withLabel("recordSubfield").withValue("s"))))
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("948"),
+ new Field().withLabel("indicator1").withValue(""),
+ new Field().withLabel("indicator2").withValue(""),
+ new Field().withLabel("recordSubfield").withValue("a"))))))));
+
+ handler.handle(dataImportEventPayload)
+ .whenComplete((updatedEventPayload, throwable) -> {
+ Mockito.verify(recordDao, Mockito.times(1))
+ .getRecords(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.anyInt(), Mockito.anyInt(), Mockito.any());
+ context.assertNull(throwable);
+ context.assertEquals(1, updatedEventPayload.getEventsChain().size());
+ context.assertEquals(updatedEventPayload.getEventType(), DI_SRS_MARC_BIB_RECORD_MATCHED.value());
+ async.complete();
+ });
+ }
+
+ @Test
+ public void shouldFailWhenMatchedByMatchedIdFieldAtLocalTenantAndMatchedAtCentralTenant(TestContext context) {
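+    // A match in both the local and the central tenant is ambiguous, so the
+    // handler is expected to complete exceptionally with a MatchingException.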
+ Async async = context.async();
+
+    HashMap<String, String> payloadContext = new HashMap<>();
+ payloadContext.put(EntityType.MARC_BIBLIOGRAPHIC.value(), Json.encode(incomingRecord));
+
+ JsonObject centralTenantIdResponse = new JsonObject()
+ .put("userTenants", new JsonArray().add(new JsonObject().put("centralTenantId", CENTRAL_TENANT_ID).put("consortiumId", CONSORTIUM_ID)));
+
+ wireMockServer.stubFor(get(new UrlPathPattern(new RegexPattern(USER_TENANTS), true))
+ .willReturn(WireMock.ok().withBody(centralTenantIdResponse.encode())));
+
+ DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
+ .withContext(payloadContext)
+ .withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
+ .withCurrentNode(new ProfileSnapshotWrapper()
+ .withId(UUID.randomUUID().toString())
+ .withContentType(MATCH_PROFILE)
+ .withContent(new MatchProfile()
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withMatchDetails(singletonList(new MatchDetail()
+ .withMatchCriterion(EXACTLY_MATCHES)
+ .withExistingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("999"),
+ new Field().withLabel("indicator1").withValue("f"),
+ new Field().withLabel("indicator2").withValue("f"),
+ new Field().withLabel("recordSubfield").withValue("s"))))
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("948"),
+ new Field().withLabel("indicator1").withValue(""),
+ new Field().withLabel("indicator2").withValue(""),
+ new Field().withLabel("recordSubfield").withValue("a"))))))));
+
+ recordDao.saveRecord(existingRecord, TENANT_ID)
+ .onComplete(context.asyncAssertSuccess())
+ .onSuccess(record -> recordDao.saveRecord(existingRecord, CENTRAL_TENANT_ID)
+ .onSuccess(v -> handler.handle(dataImportEventPayload)
+ .whenComplete((updatedEventPayload, throwable) -> {
+ context.assertNotNull(throwable);
+ context.assertTrue(throwable instanceof MatchingException);
+ async.complete();
+ })));
+ }
+
+ @Test
+ public void shouldNotMatch035FieldAtLocalTenantAndMatchAtCentralTenant(TestContext context) {
+ Async async = context.async();
+
+    HashMap<String, String> payloadContext = new HashMap<>();
+ payloadContext.put(EntityType.MARC_BIBLIOGRAPHIC.value(), Json.encode(incomingRecord));
+
+ JsonObject centralTenantIdResponse = new JsonObject()
+ .put("userTenants", new JsonArray().add(new JsonObject().put("centralTenantId", CENTRAL_TENANT_ID).put("consortiumId", CONSORTIUM_ID)));
+
+ wireMockServer.stubFor(get(new UrlPathPattern(new RegexPattern(USER_TENANTS), true))
+ .willReturn(WireMock.ok().withBody(centralTenantIdResponse.encode())));
+
+ DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
+ .withContext(payloadContext)
+ .withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
+ .withCurrentNode(new ProfileSnapshotWrapper()
+ .withId(UUID.randomUUID().toString())
+ .withContentType(MATCH_PROFILE)
+ .withContent(new MatchProfile()
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withMatchDetails(singletonList(new MatchDetail()
+ .withMatchCriterion(EXACTLY_MATCHES)
+ .withExistingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("035"),
+ new Field().withLabel("indicator1").withValue(""),
+ new Field().withLabel("indicator2").withValue(""),
+ new Field().withLabel("recordSubfield").withValue("a"))))
+ .withExistingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC)
+ .withIncomingMatchExpression(new MatchExpression()
+ .withDataValueType(VALUE_FROM_RECORD)
+ .withFields(Lists.newArrayList(
+ new Field().withLabel("field").withValue("035"),
+ new Field().withLabel("indicator1").withValue(""),
+ new Field().withLabel("indicator2").withValue(""),
+ new Field().withLabel("recordSubfield").withValue("a"))))))));
+
+ recordDao.saveRecord(existingRecord, CENTRAL_TENANT_ID)
+ .onComplete(context.asyncAssertSuccess())
+ .onSuccess(record -> handler.handle(dataImportEventPayload)
+ .whenComplete((updatedEventPayload, throwable) -> {
+ context.assertNull(throwable);
+ context.assertEquals(1, updatedEventPayload.getEventsChain().size());
+ context.assertEquals(updatedEventPayload.getEventType(), DI_SRS_MARC_BIB_RECORD_MATCHED.value());
+ context.assertEquals(new JsonObject(updatedEventPayload.getContext().get(MATCHED_MARC_BIB_KEY)).mapTo(Record.class), record);
+ async.complete();
+ }));
+ }
+
+ @Test
+ public void shouldReturnFailedFutureIfFailedToDeserializeMatchProfile(TestContext context) {
+ Async async = context.async();
+
+    HashMap<String, String> payloadContext = new HashMap<>();
+ payloadContext.put(EntityType.MARC_BIBLIOGRAPHIC.value(), Json.encode(incomingRecord));
+
+ JsonObject invalidMatchProfileJson = new JsonObject()
+ .put("invalidField", "val");
+
+ DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
+ .withContext(payloadContext)
+ .withTenant(TENANT_ID)
+ .withToken(TOKEN)
+ .withOkapiUrl(wireMockServer.baseUrl())
+ .withCurrentNode(new ProfileSnapshotWrapper()
+ .withId(UUID.randomUUID().toString())
+ .withContentType(MATCH_PROFILE)
+ .withContent(invalidMatchProfileJson.getMap()));
+
+ handler.handle(dataImportEventPayload).whenComplete((resultPayload, e) -> {
+ context.assertNotNull(e);
+ async.complete();
+ });
+ }
+
}
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java
index f8a689092..388f82f94 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java
@@ -75,7 +75,7 @@ public void setUp(TestContext context) {
MockitoAnnotations.initMocks(this);
recordDao = new RecordDaoImpl(postgresClientFactory);
- handler = new MarcHoldingsMatchEventHandler(recordDao);
+ handler = new MarcHoldingsMatchEventHandler(recordDao, null, vertx);
Async async = context.async();
Snapshot existingRecordSnapshot = new Snapshot()
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java
index 42c7d8b69..cfac747a4 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java
@@ -12,7 +12,7 @@
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.JOB_PROFILE;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.MAPPING_PROFILE;
import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB;
-import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeader;
+import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeaderAndDate;
import java.io.IOException;
import java.util.Collections;
@@ -161,7 +161,7 @@ public void setUp(TestContext context) {
recordDao = new RecordDaoImpl(postgresClientFactory);
recordService = new RecordServiceImpl(recordDao);
- modifyRecordEventHandler = new MarcHoldingsUpdateModifyEventHandler(recordService, new MappingParametersSnapshotCache(vertx), vertx);
+ modifyRecordEventHandler = new MarcHoldingsUpdateModifyEventHandler(recordService, null, new MappingParametersSnapshotCache(vertx), vertx);
Snapshot snapshot = new Snapshot()
.withJobExecutionId(UUID.randomUUID().toString())
@@ -199,6 +199,7 @@ public void shouldModifyMarcRecord(TestContext context) {
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String expectedParsedContent = "{\"leader\":\"00107nam 22000491a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"999\":{\"subfields\":[{\"s\":\"eae222e8-70fd-4422-852c-60d22bae36b8\"}],\"ind1\":\"f\",\"ind2\":\"f\"}}]}";
HashMap payloadContext = new HashMap<>();
record.getParsedRecord().setContent(Json.encode(record.getParsedRecord().getContent()));
@@ -231,9 +232,10 @@ public void shouldModifyMarcRecord(TestContext context) {
context.assertEquals(DI_SRS_MARC_HOLDINGS_RECORD_UPDATED.value(), eventPayload.getEventType());
Record actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_HOLDINGS.value()), Record.class);
- context.assertEquals(getParsedContentWithoutLeader(expectedParsedContent), getParsedContentWithoutLeader(actualRecord.getParsedRecord().getContent().toString()));
+ context.assertEquals(getParsedContentWithoutLeaderAndDate(expectedParsedContent), getParsedContentWithoutLeaderAndDate(actualRecord.getParsedRecord().getContent().toString()));
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
context.assertEquals(userId, actualRecord.getMetadata().getUpdatedByUserId());
+ validate005Field(context, expectedDate, actualRecord);
async.complete();
});
}
@@ -243,6 +245,7 @@ public void shouldUpdateMatchedMarcRecordWithFieldFromIncomingRecord(TestContext
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String incomingParsedContent = "{\"leader\":\"01314nam 22003851a 4500\",\"fields\":[{\"001\":\"ybp7406512\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
String expectedParsedContent = "{\"leader\":\"00134nam 22000611a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"035\":{\"subfields\":[{\"a\":\"ybp7406512\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"999\":{\"subfields\":[{\"s\":\"eae222e8-70fd-4422-852c-60d22bae36b8\"}],\"ind1\":\"f\",\"ind2\":\"f\"}}]}";
Record incomingRecord = new Record().withParsedRecord(new ParsedRecord().withContent(incomingParsedContent));
@@ -277,10 +280,11 @@ public void shouldUpdateMatchedMarcRecordWithFieldFromIncomingRecord(TestContext
context.assertEquals(DI_SRS_MARC_HOLDINGS_RECORD_UPDATED.value(), eventPayload.getEventType());
Record actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_HOLDINGS.value()), Record.class);
- context.assertEquals(getParsedContentWithoutLeader(expectedParsedContent),
- getParsedContentWithoutLeader(actualRecord.getParsedRecord().getContent().toString()));
+ context.assertEquals(getParsedContentWithoutLeaderAndDate(expectedParsedContent),
+ getParsedContentWithoutLeaderAndDate(actualRecord.getParsedRecord().getContent().toString()));
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
context.assertEquals(dataImportEventPayload.getJobExecutionId(), actualRecord.getSnapshotId());
+ validate005Field(context, expectedDate, actualRecord);
async.complete();
});
}
@@ -290,6 +294,7 @@ public void shouldModifyMarcRecordAndRemove003Field(TestContext context) {
// given
Async async = context.async();
+ String expectedDate = get005FieldExpectedDate();
String incomingParsedContent = "{\"leader\":\"01314nam 22003851a 4500\",\"fields\":[{\"001\":\"ybp7406512\"},{\"003\":\"OCLC\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
String expectedParsedContent = "{\"leader\":\"00134nam 22000611a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"035\":{\"subfields\":[{\"a\":\"ybp7406512\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}},{\"999\":{\"subfields\":[{\"s\":\"eae222e8-70fd-4422-852c-60d22bae36b8\"}],\"ind1\":\"f\",\"ind2\":\"f\"}}]}";
Record incomingRecord = new Record().withParsedRecord(new ParsedRecord().withContent(incomingParsedContent));
@@ -324,9 +329,10 @@ public void shouldModifyMarcRecordAndRemove003Field(TestContext context) {
context.assertEquals(DI_SRS_MARC_HOLDINGS_RECORD_UPDATED.value(), eventPayload.getEventType());
Record actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_HOLDINGS.value()), Record.class);
- context.assertEquals(getParsedContentWithoutLeader(expectedParsedContent),
- getParsedContentWithoutLeader(actualRecord.getParsedRecord().getContent().toString()));
+ context.assertEquals(getParsedContentWithoutLeaderAndDate(expectedParsedContent),
+ getParsedContentWithoutLeaderAndDate(actualRecord.getParsedRecord().getContent().toString()));
context.assertEquals(Record.State.ACTUAL, actualRecord.getState());
+ validate005Field(context, expectedDate, actualRecord);
async.complete();
});
}
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordCleanupServiceTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordCleanupServiceTest.java
deleted file mode 100644
index 72d381e33..000000000
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordCleanupServiceTest.java
+++ /dev/null
@@ -1,199 +0,0 @@
-package org.folio.services;
-
-import io.github.jklingsporn.vertx.jooq.classic.reactivepg.ReactiveClassicGenericQueryExecutor;
-import io.vertx.core.CompositeFuture;
-import io.vertx.core.Future;
-import io.vertx.core.Promise;
-import io.vertx.ext.unit.Async;
-import io.vertx.ext.unit.TestContext;
-import io.vertx.ext.unit.junit.VertxUnitRunner;
-import org.folio.TestMocks;
-import org.folio.dao.RecordDao;
-import org.folio.dao.RecordDaoImpl;
-import org.folio.dao.util.ErrorRecordDaoUtil;
-import org.folio.dao.util.ParsedRecordDaoUtil;
-import org.folio.dao.util.RawRecordDaoUtil;
-import org.folio.dao.util.RecordDaoUtil;
-import org.folio.dao.util.RecordType;
-import org.folio.dao.util.SnapshotDaoUtil;
-import org.folio.rest.jaxrs.model.ExternalIdsHolder;
-import org.folio.rest.jaxrs.model.Record;
-import org.folio.rest.jaxrs.model.Snapshot;
-import org.folio.services.cleanup.RecordCleanupService;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-
-import java.util.UUID;
-
-import static org.folio.rest.jaxrs.model.Record.State.DELETED;
-
-/**
- * The test creates a several related records and stores them to DB
- * The records are:
- * DELETED record generation1 <-- DELETED record generation0 (references to DELETED record)
- */
-@RunWith(VertxUnitRunner.class)
-public class RecordCleanupServiceTest extends AbstractLBServiceTest {
- private final RecordDao recordDao = new RecordDaoImpl(postgresClientFactory);
- private final RecordService recordService = new RecordServiceImpl(recordDao);
- private final Snapshot snapshot;
- private final Record deletedRecordGen1;
- private final Record deletedRecordGen0;
-
- public RecordCleanupServiceTest() {
- this.snapshot = TestMocks.getSnapshot(0);
- String deletedRecordGen1Id = UUID.randomUUID().toString();
- this.deletedRecordGen1 = new Record()
- .withId(deletedRecordGen1Id)
- .withState(DELETED)
- .withMatchedId(deletedRecordGen1Id)
- .withSnapshotId(snapshot.getJobExecutionId())
- .withGeneration(1)
- .withRecordType(Record.RecordType.MARC_BIB)
- .withDeleted(true)
- .withOrder(1)
- .withExternalIdsHolder(new ExternalIdsHolder())
- .withLeaderRecordStatus("n")
- .withRawRecord(TestMocks.getRecord(0).getRawRecord().withId(deletedRecordGen1Id))
- .withParsedRecord(TestMocks.getRecord(0).getParsedRecord().withId(deletedRecordGen1Id))
- .withErrorRecord(TestMocks.getRecord(0).getErrorRecord().withId(deletedRecordGen1Id));
- String deletedRecordGen0Id = UUID.randomUUID().toString();
- this.deletedRecordGen0 = new Record()
- .withId(deletedRecordGen0Id)
- .withState(DELETED)
- .withMatchedId(deletedRecordGen1Id)
- .withSnapshotId(snapshot.getJobExecutionId())
- .withGeneration(0)
- .withRecordType(Record.RecordType.MARC_BIB)
- .withDeleted(false)
- .withOrder(0)
- .withExternalIdsHolder(new ExternalIdsHolder())
- .withLeaderRecordStatus("n")
- .withRawRecord(TestMocks.getRecord(0).getRawRecord().withId(deletedRecordGen0Id))
- .withParsedRecord(TestMocks.getRecord(0).getParsedRecord().withId(deletedRecordGen0Id))
- .withErrorRecord(TestMocks.getRecord(0).getErrorRecord().withId(deletedRecordGen0Id));
- }
-
- @Before
- public void before(TestContext context) {
- Async async = context.async();
- ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID);
- SnapshotDaoUtil.save(queryExecutor, snapshot)
- .compose(ar -> recordService.saveRecord(deletedRecordGen1, TENANT_ID))
- .compose(ar -> recordService.saveRecord(deletedRecordGen0, TENANT_ID))
- .onComplete(ar -> async.complete())
- .onFailure(throwable -> context.fail(throwable));
- }
-
- @After
- public void after(TestContext testContext) throws InterruptedException {
- Async async = testContext.async();
- ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID);
- SnapshotDaoUtil.delete(queryExecutor, snapshot.getJobExecutionId())
- .compose(ar -> recordService.deleteRecordsBySnapshotId(snapshot.getJobExecutionId(), TENANT_ID))
- .onComplete(ar -> async.complete());
- }
-
- /*
- The test verifies whether the records are purged from DB when cleanup is done;
- If the 'limit' = 0, it means to delete all the records
- */
- @Test
- public void shouldPurgeRecords_limitIs0(TestContext context) {
- // given
- Async async = context.async();
- RecordCleanupService service = new RecordCleanupService(0, 0, vertx, recordDao);
- // when
- service.cleanup();
- // then
- vertx.setTimer(1_000, timerHandler -> CompositeFuture.all(
- verifyRecordIsPurged(deletedRecordGen1.getId(), context),
- verifyRecordIsPurged(deletedRecordGen0.getId(), context)
- )
- .onSuccess(ar -> async.complete())
- .onFailure(context::fail)
- );
- }
-
- /*
- The test verifies whether the DELETED record generation1 is purged, and its related record generation0 is stay in DB when cleanup is done.
- The record generation0 is present in DB after cleanup because 'limit' = 1, and the record was created later than record of generation1.
- */
- @Test
- public void shouldPurgeRecord_limitIs1(TestContext context) {
- // given
- Async async = context.async();
- RecordCleanupService service = new RecordCleanupService(0, 1, vertx, recordDao);
- // when
- service.cleanup();
- // then
- vertx.setTimer(1_000, timerHandler -> CompositeFuture.all(
- verifyRecordIsPurged(deletedRecordGen1.getId(), context),
- verifyRecordIsPresent(deletedRecordGen0.getId(), context)
- )
- .onSuccess(ar -> async.complete())
- .onFailure(context::fail)
- );
- }
-
- /*
- The test verifies whether the records are stay in DB when cleanup is done, because 'lastUpdatedDays' = 10 means
- the only records updated more than 10 are getting purged.
- */
- @Test
- public void shouldNotPurgeRecords_lastUpdatedDaysIs10(TestContext context) {
- // given
- Async async = context.async();
- RecordCleanupService service = new RecordCleanupService(10, 2, vertx, recordDao);
- // when
- service.cleanup();
- // then
- vertx.setTimer(1_000, timerHandler -> CompositeFuture.all(
- verifyRecordIsPresent(deletedRecordGen1.getId(), context),
- verifyRecordIsPresent(deletedRecordGen0.getId(), context)
- )
- .onSuccess(ar -> async.complete())
- .onFailure(context::fail)
- );
- }
-
-  private Future<Void> verifyRecordIsPurged(String recordId, TestContext testContext) {
-    Promise<Void> promise = Promise.promise();
- ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID);
- Future.succeededFuture()
- // verification
- .compose(ar -> RecordDaoUtil.findById(queryExecutor, recordId))
- .onSuccess(optionalRecord -> testContext.assertTrue(optionalRecord.isEmpty()))
- .compose(ar -> ParsedRecordDaoUtil.findById(queryExecutor, recordId, RecordType.MARC_BIB))
- .onSuccess(optionalParsedRecord -> testContext.assertTrue(optionalParsedRecord.isEmpty()))
- .compose(ar -> RawRecordDaoUtil.findById(queryExecutor, recordId))
- .onSuccess(optionalRawRecord -> testContext.assertTrue(optionalRawRecord.isEmpty()))
- .compose(ar -> ErrorRecordDaoUtil.findById(queryExecutor, recordId))
- .onSuccess(optionalErrorRecord -> testContext.assertTrue(optionalErrorRecord.isEmpty()))
- // handling complete
- .onSuccess(ar -> promise.complete())
- .onFailure(ar -> promise.fail(ar));
- return promise.future();
- }
-
-  private Future<Void> verifyRecordIsPresent(String recordId, TestContext testContext) {
-    Promise<Void> promise = Promise.promise();
- ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID);
- Future.succeededFuture()
- // verification
- .compose(ar -> RecordDaoUtil.findById(queryExecutor, recordId))
- .onSuccess(optionalRecord -> testContext.assertTrue(optionalRecord.isPresent()))
- .compose(ar -> ParsedRecordDaoUtil.findById(queryExecutor, recordId, RecordType.MARC_BIB))
- .onSuccess(optionalParsedRecord -> testContext.assertTrue(optionalParsedRecord.isPresent()))
- .compose(ar -> RawRecordDaoUtil.findById(queryExecutor, recordId))
- .onSuccess(optionalRawRecord -> testContext.assertTrue(optionalRawRecord.isPresent()))
- .compose(ar -> ErrorRecordDaoUtil.findById(queryExecutor, recordId))
- .onSuccess(optionalErrorRecord -> testContext.assertTrue(optionalErrorRecord.isPresent()))
- // handling complete
- .onSuccess(ar -> promise.complete())
- .onFailure(ar -> promise.fail(ar));
- return promise.future();
- }
-}
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java
index 4a257e562..a1da2b8e1 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java
@@ -35,6 +35,7 @@
import org.folio.rest.jaxrs.model.Record.State;
import org.folio.rest.jaxrs.model.RecordCollection;
import org.folio.rest.jaxrs.model.RecordsBatchResponse;
+import org.folio.rest.jaxrs.model.Snapshot;
import org.folio.rest.jaxrs.model.SourceRecord;
import org.folio.rest.jaxrs.model.SourceRecordCollection;
import org.folio.rest.jaxrs.model.StrippedParsedRecord;
@@ -49,11 +50,14 @@
import org.junit.Test;
import org.junit.runner.RunWith;
+import javax.ws.rs.BadRequestException;
+import javax.ws.rs.NotFoundException;
import java.io.IOException;
import java.time.OffsetDateTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
@@ -381,6 +385,226 @@ public void shouldSaveMarcBibRecordWithMatchedIdFrom999field(TestContext context
});
}
+ @Test
+ public void shouldFailDuringUpdateRecordGenerationIfIncomingMatchedIdNotEqualToMatchedIdFrom999field(TestContext context) {
+ String matchedId = UUID.randomUUID().toString();
+ String marc999 = UUID.randomUUID().toString();
+ Record original = TestMocks.getMarcBibRecord();
+ ParsedRecord parsedRecord = new ParsedRecord().withId(marc999)
+ .withContent(new JsonObject().put("leader", "01542ccm a2200361 4500")
+ .put("fields", new JsonArray().add(new JsonObject().put("999", new JsonObject()
+ .put("subfields",
+ new JsonArray().add(new JsonObject().put("s", marc999)))
+ .put("ind1", "f")
+ .put("ind2", "f")))).encode());
+ Record record = new Record()
+ .withId(UUID.randomUUID().toString())
+ .withSnapshotId(original.getSnapshotId())
+ .withRecordType(original.getRecordType())
+ .withState(State.ACTUAL)
+ .withOrder(original.getOrder())
+ .withRawRecord(original.getRawRecord())
+ .withParsedRecord(parsedRecord)
+ .withAdditionalInfo(original.getAdditionalInfo())
+ .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString()))
+ .withMetadata(original.getMetadata());
+ Async async = context.async();
+
+ recordService.updateRecordGeneration(matchedId, record, TENANT_ID).onComplete(save -> {
+ context.assertTrue(save.failed());
+ context.assertTrue(save.cause() instanceof BadRequestException);
+ recordDao.getRecordByMatchedId(matchedId, TENANT_ID).onComplete(get -> {
+ if (get.failed()) {
+ context.fail(get.cause());
+ }
+ context.assertTrue(get.result().isEmpty());
+ async.complete();
+ });
+ });
+ }
+
+ @Test
+  public void shouldFailDuringUpdateRecordGenerationIfRecordWithIdAsIncomingMatchedIdDoesNotExist(TestContext context) {
+ String matchedId = UUID.randomUUID().toString();
+ Record original = TestMocks.getMarcBibRecord();
+ ParsedRecord parsedRecord = new ParsedRecord().withId(matchedId)
+ .withContent(new JsonObject().put("leader", "01542ccm a2200361 4500")
+ .put("fields", new JsonArray().add(new JsonObject().put("999", new JsonObject()
+ .put("subfields",
+ new JsonArray().add(new JsonObject().put("s", matchedId)))
+ .put("ind1", "f")
+ .put("ind2", "f")))).encode());
+ Record record = new Record()
+ .withId(UUID.randomUUID().toString())
+ .withSnapshotId(original.getSnapshotId())
+ .withRecordType(original.getRecordType())
+ .withState(State.ACTUAL)
+ .withOrder(original.getOrder())
+ .withRawRecord(original.getRawRecord())
+ .withParsedRecord(parsedRecord)
+ .withAdditionalInfo(original.getAdditionalInfo())
+ .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString()))
+ .withMetadata(original.getMetadata());
+ Async async = context.async();
+
+ recordService.updateRecordGeneration(matchedId, record, TENANT_ID).onComplete(save -> {
+ context.assertTrue(save.failed());
+ context.assertTrue(save.cause() instanceof NotFoundException);
+ recordDao.getRecordByMatchedId(matchedId, TENANT_ID).onComplete(get -> {
+ if (get.failed()) {
+ context.fail(get.cause());
+ }
+ context.assertTrue(get.result().isEmpty());
+ async.complete();
+ });
+ });
+ }
+
+ @Test
+ public void shouldFailUpdateRecordGenerationIfDuplicateError(TestContext context) {
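+    // recordToUpdateGeneration is forced to generation 0, which should collide with
+    // the generation of the record saved above and surface as a BadRequestException.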
+ String matchedId = UUID.randomUUID().toString();
+ Record original = TestMocks.getMarcBibRecord();
+
+ Record record1 = new Record()
+ .withId(matchedId)
+ .withSnapshotId(original.getSnapshotId())
+ .withRecordType(original.getRecordType())
+ .withState(State.ACTUAL)
+ .withOrder(original.getOrder())
+ .withRawRecord(rawRecord)
+ .withParsedRecord(marcRecord)
+ .withAdditionalInfo(original.getAdditionalInfo())
+ .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString()))
+ .withMetadata(original.getMetadata());
+
+ Snapshot snapshot = new Snapshot().withJobExecutionId(UUID.randomUUID().toString())
+ .withProcessingStartedDate(new Date())
+ .withStatus(Snapshot.Status.PROCESSING_IN_PROGRESS);
+
+ ParsedRecord parsedRecord = new ParsedRecord().withId(matchedId)
+ .withContent(new JsonObject().put("leader", "01542ccm a2200361 4500")
+ .put("fields", new JsonArray().add(new JsonObject().put("999", new JsonObject()
+ .put("subfields",
+ new JsonArray().add(new JsonObject().put("s", matchedId)))
+ .put("ind1", "f")
+ .put("ind2", "f")))).encode());
+ Record recordToUpdateGeneration = new Record()
+ .withId(UUID.randomUUID().toString())
+ .withSnapshotId(snapshot.getJobExecutionId())
+ .withRecordType(original.getRecordType())
+ .withState(State.ACTUAL)
+ .withGeneration(0)
+ .withOrder(original.getOrder())
+ .withRawRecord(original.getRawRecord())
+ .withParsedRecord(parsedRecord)
+ .withAdditionalInfo(original.getAdditionalInfo())
+ .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString()))
+ .withMetadata(original.getMetadata());
+ Async async = context.async();
+
+ recordService.saveRecord(record1, TENANT_ID).onComplete(record1Saved -> {
+ if (record1Saved.failed()) {
+ context.fail(record1Saved.cause());
+ }
+ context.assertNotNull(record1Saved.result().getRawRecord());
+ context.assertNotNull(record1Saved.result().getParsedRecord());
+ context.assertEquals(record1Saved.result().getState(), State.ACTUAL);
+ compareRecords(context, record1, record1Saved.result());
+
+ SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot).onComplete(snapshotSaved -> {
+ if (snapshotSaved.failed()) {
+ context.fail(snapshotSaved.cause());
+ }
+ recordService.updateRecordGeneration(matchedId, recordToUpdateGeneration, TENANT_ID).onComplete(recordToUpdateGenerationSaved -> {
+ context.assertTrue(recordToUpdateGenerationSaved.failed());
+ context.assertTrue(recordToUpdateGenerationSaved.cause() instanceof BadRequestException);
+ async.complete();
+ });
+ });
+ });
+ }
+
+ @Test
+ public void shouldUpdateRecordGeneration(TestContext context) {
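+    // Happy path: the generation update keeps the matched id, bumps the generation
+    // to 1, and moves the previously ACTUAL record to the OLD state.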
+ String matchedId = UUID.randomUUID().toString();
+ Record original = TestMocks.getMarcBibRecord();
+
+ Record record1 = new Record()
+ .withId(matchedId)
+ .withSnapshotId(original.getSnapshotId())
+ .withRecordType(original.getRecordType())
+ .withState(State.ACTUAL)
+ .withOrder(original.getOrder())
+ .withRawRecord(rawRecord)
+ .withParsedRecord(marcRecord)
+ .withAdditionalInfo(original.getAdditionalInfo())
+ .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString()))
+ .withMetadata(original.getMetadata());
+
+ Snapshot snapshot = new Snapshot().withJobExecutionId(UUID.randomUUID().toString())
+ .withProcessingStartedDate(new Date())
+ .withStatus(Snapshot.Status.PROCESSING_IN_PROGRESS);
+
+ ParsedRecord parsedRecord = new ParsedRecord().withId(matchedId)
+ .withContent(new JsonObject().put("leader", "01542ccm a2200361 4500")
+ .put("fields", new JsonArray().add(new JsonObject().put("999", new JsonObject()
+ .put("subfields",
+ new JsonArray().add(new JsonObject().put("s", matchedId)))
+ .put("ind1", "f")
+ .put("ind2", "f")))).encode());
+ Record recordToUpdateGeneration = new Record()
+ .withId(UUID.randomUUID().toString())
+ .withSnapshotId(snapshot.getJobExecutionId())
+ .withRecordType(original.getRecordType())
+ .withState(State.ACTUAL)
+ .withOrder(original.getOrder())
+ .withRawRecord(original.getRawRecord())
+ .withParsedRecord(parsedRecord)
+ .withAdditionalInfo(original.getAdditionalInfo())
+ .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString()))
+ .withMetadata(original.getMetadata());
+ Async async = context.async();
+
+ recordService.saveRecord(record1, TENANT_ID).onComplete(record1Saved -> {
+ if (record1Saved.failed()) {
+ context.fail(record1Saved.cause());
+ }
+ context.assertNotNull(record1Saved.result().getRawRecord());
+ context.assertNotNull(record1Saved.result().getParsedRecord());
+ context.assertEquals(record1Saved.result().getState(), State.ACTUAL);
+ compareRecords(context, record1, record1Saved.result());
+
+ SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot).onComplete(snapshotSaved -> {
+ if (snapshotSaved.failed()) {
+ context.fail(snapshotSaved.cause());
+ }
+ recordService.updateRecordGeneration(matchedId, recordToUpdateGeneration, TENANT_ID).onComplete(recordToUpdateGenerationSaved -> {
+ context.assertTrue(recordToUpdateGenerationSaved.succeeded());
+ context.assertEquals(recordToUpdateGenerationSaved.result().getMatchedId(), matchedId);
+ context.assertEquals(recordToUpdateGenerationSaved.result().getGeneration(), 1);
+ recordDao.getRecordByMatchedId(matchedId, TENANT_ID).onComplete(get -> {
+ if (get.failed()) {
+ context.fail(get.cause());
+ }
+ context.assertTrue(get.result().isPresent());
+ context.assertEquals(get.result().get().getGeneration(), 1);
+ context.assertEquals(get.result().get().getMatchedId(), matchedId);
+ context.assertNotEquals(get.result().get().getId(), matchedId);
+ context.assertEquals(get.result().get().getState(), State.ACTUAL);
+ recordDao.getRecordById(matchedId, TENANT_ID).onComplete(getRecord1 -> {
+ if (getRecord1.failed()) {
+              context.fail(getRecord1.cause());
+ }
+ context.assertTrue(getRecord1.result().isPresent());
+ context.assertEquals(getRecord1.result().get().getState(), State.OLD);
+ async.complete();
+ });
+ });
+ });
+ });
+ });
+ }
+
@Test
public void shouldSaveMarcBibRecordWithMatchedIdFromRecordId(TestContext context) {
Record original = TestMocks.getMarcBibRecord();
@@ -419,6 +643,29 @@ public void shouldSaveMarcBibRecordWithMatchedIdFromRecordId(TestContext context
});
}
+ @Test
+ public void shouldSaveEdifactRecordAndNotSet999Field(TestContext context) {
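+    // EDIFACT records carry no MARC 999 field: saving must not inject one,
+    // and the matched id falls back to the record id.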
+ Async async = context.async();
+ Record record = TestMocks.getRecords(Record.RecordType.EDIFACT);
+
+ recordService.saveRecord(record, TENANT_ID).onComplete(save -> {
+ if (save.failed()) {
+ context.fail(save.cause());
+ }
+ recordDao.getRecordById(record.getId(), TENANT_ID).onComplete(get -> {
+ if (get.failed()) {
+ context.fail(get.cause());
+ }
+ context.assertTrue(get.result().isPresent());
+ context.assertNotNull(get.result().get().getRawRecord());
+ context.assertNotNull(get.result().get().getParsedRecord());
+ context.assertEquals(record.getId(), get.result().get().getMatchedId());
+ context.assertNull(getFieldFromMarcRecord(get.result().get(), TAG_999, INDICATOR, INDICATOR, SUBFIELD_S));
+ async.complete();
+ });
+ });
+ }
+
@Test
public void shouldSaveMarcBibRecordWithMatchedIdFromExistingSourceRecord(TestContext context) throws IOException {
Async async = context.async();
@@ -1504,7 +1751,6 @@ private void getMarcSourceRecordById(TestContext context, Record expected) {
context.fail(get.cause());
}
context.assertTrue(get.result().isPresent());
- context.assertNotNull(get.result().get().getRawRecord());
context.assertNotNull(get.result().get().getParsedRecord());
compareSourceRecords(context, RecordDaoUtil.toSourceRecord(expected), get.result().get());
async.complete();
@@ -1771,15 +2017,8 @@ private void compareSourceRecords(TestContext context, SourceRecord expected, So
context.assertEquals(expected.getSnapshotId(), actual.getSnapshotId());
context.assertEquals(expected.getRecordType(), actual.getRecordType());
context.assertEquals(expected.getOrder(), actual.getOrder());
- if (Objects.nonNull(expected.getRawRecord())) {
- compareRawRecords(context, expected.getRawRecord(), actual.getRawRecord());
- } else {
- context.assertNull(actual.getRawRecord());
- }
if (Objects.nonNull(expected.getParsedRecord())) {
compareParsedRecords(context, expected.getParsedRecord(), actual.getParsedRecord());
- } else {
- context.assertNull(actual.getRawRecord());
}
if (Objects.nonNull(expected.getAdditionalInfo())) {
compareAdditionalInfo(context, expected.getAdditionalInfo(), actual.getAdditionalInfo());
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/SnapshotServiceTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/SnapshotServiceTest.java
index 22547286a..d54a43442 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/SnapshotServiceTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/SnapshotServiceTest.java
@@ -1,11 +1,9 @@
package org.folio.services;
-import static org.folio.rest.jooq.Tables.SNAPSHOTS_LB;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Objects;
-
+import io.vertx.core.Future;
+import io.vertx.ext.unit.Async;
+import io.vertx.ext.unit.TestContext;
+import io.vertx.ext.unit.junit.VertxUnitRunner;
import org.folio.TestMocks;
import org.folio.dao.SnapshotDao;
import org.folio.dao.SnapshotDaoImpl;
@@ -21,10 +19,22 @@
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
-import io.vertx.ext.unit.Async;
-import io.vertx.ext.unit.TestContext;
-import io.vertx.ext.unit.junit.VertxUnitRunner;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+
+import static org.folio.rest.jooq.Tables.SNAPSHOTS_LB;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
@RunWith(VertxUnitRunner.class)
public class SnapshotServiceTest extends AbstractLBServiceTest {
@@ -33,6 +43,12 @@ public class SnapshotServiceTest extends AbstractLBServiceTest {
private SnapshotService snapshotService;
+ @Mock
+ private SnapshotDao mockedSnapshotDao;
+
+ @InjectMocks
+ private SnapshotServiceImpl snapshotServiceForMocks;
+
@Before
public void setUp(TestContext context) {
snapshotDao = new SnapshotDaoImpl(postgresClientFactory);
@@ -220,6 +236,26 @@ public void shouldNotDeleteSnapshot(TestContext context) {
});
}
+ @Test
+ public void shouldCopySnapshotToAnotherTenant(TestContext context) {
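+    // Uses a mocked SnapshotDao to check that the snapshot read from the source
+    // tenant is saved exactly once under the target ("centralTenantId") tenant.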
+ Async async = context.async();
+ MockitoAnnotations.openMocks(this);
+ Snapshot expected = TestMocks.getSnapshot(0);
+
+ doAnswer(invocationOnMock -> Future.succeededFuture(Optional.of(expected))).when(mockedSnapshotDao).getSnapshotById(anyString(), anyString());
+
+ doAnswer(invocationOnMock -> Future.succeededFuture(expected)).when(mockedSnapshotDao).saveSnapshot(any(), anyString());
+
+ snapshotServiceForMocks.copySnapshotToOtherTenant(expected.getJobExecutionId(), TENANT_ID, "centralTenantId").onComplete(get -> {
+ if (get.failed()) {
+ context.fail(get.cause());
+ }
+ compareSnapshots(context, expected, get.result());
+ verify(mockedSnapshotDao, times(1)).saveSnapshot(any(Snapshot.class), eq("centralTenantId"));
+ async.complete();
+ });
+ }
+
private void compareSnapshots(TestContext context, Snapshot expected, Snapshot actual) {
context.assertEquals(expected.getJobExecutionId(), actual.getJobExecutionId());
context.assertEquals(expected.getStatus(), actual.getStatus());
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/caches/ConsortiumConfigurationCacheTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/caches/ConsortiumConfigurationCacheTest.java
new file mode 100644
index 000000000..f6e677bb4
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/caches/ConsortiumConfigurationCacheTest.java
@@ -0,0 +1,108 @@
+package org.folio.services.caches;
+
+import com.github.tomakehurst.wiremock.client.WireMock;
+import com.github.tomakehurst.wiremock.common.Slf4jNotifier;
+import com.github.tomakehurst.wiremock.core.WireMockConfiguration;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+import com.github.tomakehurst.wiremock.matching.RegexPattern;
+import com.github.tomakehurst.wiremock.matching.UrlPathPattern;
+import io.vertx.core.Future;
+import io.vertx.core.Vertx;
+import io.vertx.core.json.Json;
+import io.vertx.core.json.JsonArray;
+import io.vertx.core.json.JsonObject;
+import io.vertx.ext.unit.Async;
+import io.vertx.ext.unit.TestContext;
+import io.vertx.ext.unit.junit.RunTestOnContext;
+import io.vertx.ext.unit.junit.VertxUnitRunner;
+import org.folio.dataimport.util.OkapiConnectionParams;
+import org.folio.dataimport.util.RestUtil;
+import org.folio.services.entities.ConsortiumConfiguration;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import java.util.Map;
+import java.util.Optional;
+
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+
+@RunWith(VertxUnitRunner.class)
+public class ConsortiumConfigurationCacheTest {
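+  // Covers resolving the consortium configuration from /user-tenants: a populated
+  // response, an empty response (no consortium), and a server error.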
+ private static final String TENANT_ID = "diku";
+ private static final String CENTRAL_TENANT_ID = "centralTenantId";
+ private static final String CONSORTIUM_ID = "consortiumId";
+ private static final String USER_TENANTS_ENDPOINT = "/user-tenants";
+ private final Vertx vertx = Vertx.vertx();
+ private final ConsortiumConfigurationCache consortiumConfigurationCache = new ConsortiumConfigurationCache(vertx);
+ private OkapiConnectionParams params;
+ private final JsonObject consortiumConfiguration = new JsonObject()
+ .put("userTenants", new JsonArray().add(new JsonObject().put("centralTenantId", CENTRAL_TENANT_ID).put("consortiumId", CONSORTIUM_ID)));
+
+ @Rule
+ public RunTestOnContext rule = new RunTestOnContext();
+
+ @Rule
+ public WireMockRule mockServer = new WireMockRule(
+ WireMockConfiguration.wireMockConfig()
+ .dynamicPort()
+ .notifier(new Slf4jNotifier(true)));
+
+ @Before
+ public void setUp() {
+ WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(USER_TENANTS_ENDPOINT), true))
+ .willReturn(WireMock.ok().withBody(consortiumConfiguration.encode())));
+
+ this.params = new OkapiConnectionParams(Map.of(
+ RestUtil.OKAPI_TENANT_HEADER, TENANT_ID,
+ RestUtil.OKAPI_TOKEN_HEADER, "token",
+ RestUtil.OKAPI_URL_HEADER, mockServer.baseUrl()
+ ), vertx);
+ }
+
+ @Test
+ public void shouldReturnConsortiumConfiguration(TestContext context) {
+ Async async = context.async();
+
+ Future<Optional<ConsortiumConfiguration>> optionalFuture = consortiumConfigurationCache.get(this.params);
+
+ optionalFuture.onComplete(ar -> {
+ context.assertTrue(ar.succeeded());
+ context.assertTrue(ar.result().isPresent());
+ ConsortiumConfiguration actualConsortiumConfiguration = ar.result().get();
+ context.assertEquals(actualConsortiumConfiguration.getCentralTenantId(), CENTRAL_TENANT_ID);
+ context.assertEquals(actualConsortiumConfiguration.getConsortiumId(), CONSORTIUM_ID);
+ async.complete();
+ });
+ }
+
+ @Test
+ public void shouldReturnEmptyOptionalWhenGetNotFoundOnConfigurationLoading(TestContext context) {
+ Async async = context.async();
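+ // Override the default stub: an empty userTenants array means the tenant does not belong to a consortium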
+ WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(USER_TENANTS_ENDPOINT), true))
+ .willReturn(WireMock.ok().withBody(Json.encode(new JsonObject().put("userTenants", new JsonArray())))));
+
+ Future<Optional<ConsortiumConfiguration>> optionalFuture = consortiumConfigurationCache.get(this.params);
+
+ optionalFuture.onComplete(ar -> {
+ context.assertTrue(ar.succeeded());
+ context.assertTrue(ar.result().isEmpty());
+ async.complete();
+ });
+ }
+
+ @Test
+ public void shouldReturnFailedFutureWhenGetServerErrorOnConfigurationLoading(TestContext context) {
+ Async async = context.async();
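+ // Override the default stub to simulate a server error from /user-tenants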
+ WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(USER_TENANTS_ENDPOINT), true))
+ .willReturn(WireMock.serverError()));
+
+ Future<Optional<ConsortiumConfiguration>> optionalFuture = consortiumConfigurationCache.get(this.params);
+
+ optionalFuture.onComplete(ar -> {
+ context.assertTrue(ar.failed());
+ async.complete();
+ });
+ }
+}
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AbstractPostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AbstractPostProcessingEventHandlerTest.java
index edd9a56c4..e9a7b75c7 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AbstractPostProcessingEventHandlerTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AbstractPostProcessingEventHandlerTest.java
@@ -25,10 +25,13 @@
import io.vertx.core.json.JsonObject;
import io.vertx.ext.unit.Async;
import io.vertx.ext.unit.TestContext;
+import org.folio.dao.SnapshotDao;
import org.folio.processing.mapping.defaultmapper.processor.parameters.MappingParameters;
import org.folio.rest.jaxrs.model.MappingMetadataDto;
import org.folio.services.RecordService;
import org.folio.services.RecordServiceImpl;
+import org.folio.services.SnapshotService;
+import org.folio.services.SnapshotServiceImpl;
import org.folio.services.caches.MappingParametersSnapshotCache;
import org.junit.After;
import org.junit.Before;
@@ -64,6 +67,10 @@ public abstract class AbstractPostProcessingEventHandlerTest extends AbstractLBS
protected Record record;
protected RecordDao recordDao;
protected RecordService recordService;
+ protected SnapshotDao snapshotDao;
+ protected SnapshotService snapshotService;
+
protected MappingParametersSnapshotCache mappingParametersCache;
protected AbstractPostProcessingEventHandler handler;
@@ -96,7 +103,8 @@ public void setUp(TestContext context) {
mappingParametersCache = new MappingParametersSnapshotCache(vertx);
recordDao = new RecordDaoImpl(postgresClientFactory);
recordService = new RecordServiceImpl(recordDao);
- handler = createHandler(recordService, kafkaConfig);
+ snapshotService = new SnapshotServiceImpl(snapshotDao);
+ handler = createHandler(recordService, snapshotService, kafkaConfig);
Async async = context.async();
Snapshot snapshot1 = new Snapshot()
@@ -132,7 +140,7 @@ public void setUp(TestContext context) {
protected abstract Record.RecordType getMarcType();
- protected abstract AbstractPostProcessingEventHandler createHandler(RecordService recordService, KafkaConfig kafkaConfig);
+ protected abstract AbstractPostProcessingEventHandler createHandler(RecordService recordService, SnapshotService snapshotService, KafkaConfig kafkaConfig);
@After
public void cleanUp(TestContext context) {
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java
index 774e02730..e04c3651e 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java
@@ -32,6 +32,7 @@
import io.vertx.ext.unit.junit.RunTestOnContext;
import io.vertx.ext.unit.junit.VertxUnitRunner;
import org.folio.services.RecordService;
+import org.folio.services.SnapshotService;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
@@ -65,8 +66,8 @@ protected Record.RecordType getMarcType() {
}
@Override
- protected AbstractPostProcessingEventHandler createHandler(RecordService recordService, KafkaConfig kafkaConfig) {
- return new AuthorityPostProcessingEventHandler(recordService, kafkaConfig, mappingParametersCache, vertx);
+ protected AbstractPostProcessingEventHandler createHandler(RecordService recordService, SnapshotService snapshotService, KafkaConfig kafkaConfig) {
+ return new AuthorityPostProcessingEventHandler(recordService, snapshotService, kafkaConfig, mappingParametersCache, vertx);
}
@Test
@@ -277,8 +278,7 @@ public void shouldSetAuthorityIdToParsedRecordWhenContentHasField999(TestContext
public void shouldUpdateField005WhenThisFiledIsNotProtected(TestContext context) throws IOException {
Async async = context.async();
- String expectedDate = AdditionalFieldsUtil.dateTime005Formatter
- .format(ZonedDateTime.ofInstant(Instant.now(), ZoneId.systemDefault()));
+ String expectedDate = get005FieldExpectedDate();
String recordId = UUID.randomUUID().toString();
RawRecord rawRecord = new RawRecord().withId(recordId)
@@ -321,9 +321,7 @@ public void shouldUpdateField005WhenThisFiledIsNotProtected(TestContext context)
context.assertTrue(getAr.result().isPresent());
Record updatedRecord = getAr.result().get();
- String actualDate = AdditionalFieldsUtil.getValueFromControlledField(updatedRecord, TAG_005);
- Assert.assertEquals(expectedDate.substring(0, 10),
- actualDate.substring(0, 10));
+ validate005Field(context, expectedDate, updatedRecord);
async.complete();
});
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java
index 99abd31f5..6384883f1 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java
@@ -32,6 +32,7 @@
import io.vertx.ext.unit.junit.VertxUnitRunner;
import org.folio.rest.jaxrs.model.MappingMetadataDto;
import org.folio.services.RecordService;
+import org.folio.services.SnapshotService;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
@@ -62,8 +63,8 @@ protected Record.RecordType getMarcType() {
}
@Override
- protected AbstractPostProcessingEventHandler createHandler(RecordService recordService, KafkaConfig kafkaConfig) {
- return new HoldingsPostProcessingEventHandler(recordService, kafkaConfig, mappingParametersCache, vertx);
+ protected AbstractPostProcessingEventHandler createHandler(RecordService recordService, SnapshotService snapshotService, KafkaConfig kafkaConfig) {
+ return new HoldingsPostProcessingEventHandler(recordService, snapshotService, kafkaConfig, mappingParametersCache, vertx);
}
@Test
@@ -235,8 +236,7 @@ public void shouldSetHoldingsIdToParsedRecordWhenContentHasField999(TestContext
public void shouldUpdateField005WhenThisFiledIsNotProtected(TestContext context) throws IOException {
Async async = context.async();
- String expectedDate = AdditionalFieldsUtil.dateTime005Formatter
- .format(ZonedDateTime.ofInstant(Instant.now(), ZoneId.systemDefault()));
+ String expectedDate = get005FieldExpectedDate();
String recordId = UUID.randomUUID().toString();
RawRecord rawRecord = new RawRecord().withId(recordId)
@@ -280,9 +280,7 @@ public void shouldUpdateField005WhenThisFiledIsNotProtected(TestContext context)
context.assertTrue(getAr.result().isPresent());
Record updatedRecord = getAr.result().get();
- String actualDate = AdditionalFieldsUtil.getValueFromControlledField(updatedRecord, TAG_005);
- Assert.assertEquals(expectedDate.substring(0, 10),
- actualDate.substring(0, 10));
+ validate005Field(context, expectedDate, updatedRecord);
async.complete();
});
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java
index 47707c4bb..b8e333f68 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java
@@ -26,13 +26,20 @@
import org.folio.rest.jaxrs.model.ProfileSnapshotWrapper;
import org.folio.rest.jaxrs.model.RawRecord;
import org.folio.rest.jaxrs.model.Record;
+import org.folio.rest.jaxrs.model.RecordCollection;
+import org.folio.rest.jaxrs.model.Snapshot;
import org.folio.services.RecordService;
+import org.folio.services.RecordServiceImpl;
+import org.folio.services.SnapshotService;
+import org.folio.services.SnapshotServiceImpl;
import org.folio.services.exceptions.DuplicateRecordException;
import org.folio.services.util.AdditionalFieldsUtil;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
import java.io.IOException;
import java.time.Instant;
@@ -40,14 +47,15 @@
import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.List;
+import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import static com.github.tomakehurst.wiremock.client.WireMock.get;
-import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_ORDER_CREATED_READY_FOR_POST_PROCESSING;
import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING;
import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING;
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_ORDER_CREATED_READY_FOR_POST_PROCESSING;
import static org.folio.rest.jaxrs.model.EntityType.INSTANCE;
import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.ACTION_PROFILE;
@@ -55,10 +63,27 @@
import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB;
import static org.folio.services.handlers.InstancePostProcessingEventHandler.POST_PROCESSING_RESULT_EVENT;
import static org.folio.services.util.AdditionalFieldsUtil.TAG_005;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
@RunWith(VertxUnitRunner.class)
public class InstancePostProcessingEventHandlerTest extends AbstractPostProcessingEventHandlerTest {
+ public static final String CENTRAL_TENANT_INSTANCE_UPDATED_FLAG = "CENTRAL_TENANT_INSTANCE_UPDATED";
+ public static final String CENTRAL_TENANT_ID = "CENTRAL_TENANT_ID";
+
+ @Mock
+ private RecordServiceImpl mockedRecordService;
+
+ @Mock
+ private SnapshotServiceImpl mockedSnapshotService;
+
+ @Mock
+ private RecordCollection recordCollection;
+
@Rule
public RunTestOnContext rule = new RunTestOnContext();
@@ -68,8 +93,8 @@ protected Record.RecordType getMarcType() {
}
@Override
- protected AbstractPostProcessingEventHandler createHandler(RecordService recordService, KafkaConfig kafkaConfig) {
- return new InstancePostProcessingEventHandler(recordService, kafkaConfig, mappingParametersCache, vertx);
+ protected AbstractPostProcessingEventHandler createHandler(RecordService recordService, SnapshotService snapshotService, KafkaConfig kafkaConfig) {
+ return new InstancePostProcessingEventHandler(recordService, snapshotService, kafkaConfig, mappingParametersCache, vertx);
}
@Test
@@ -164,6 +189,61 @@ public void shouldSetInstanceIdToRecord(TestContext context) {
});
}
+ @Test
+ public void shouldProceedIfConsortiumTrackExists(TestContext context) {
+ MockitoAnnotations.openMocks(this);
+
+ Async async = context.async();
+
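+ // Stub the mocked services: record lookup, snapshot copy to the central tenant, and record/parsed-record updates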
+ doAnswer(invocationOnMock -> Future.succeededFuture(Optional.of(record))).when(mockedRecordService).getRecordById(anyString(), anyString());
+
+ doAnswer(invocationOnMock -> Future.succeededFuture(new Snapshot())).when(mockedSnapshotService).copySnapshotToOtherTenant(anyString(), anyString(), anyString());
+
+ doAnswer(invocationOnMock -> Future.succeededFuture(record.getParsedRecord())).when(mockedRecordService).updateParsedRecord(any(), anyString());
+
+ doAnswer(invocationOnMock -> Future.succeededFuture(recordCollection)).when(mockedRecordService).getRecords(any(), any(), any(), anyInt(), anyInt(), anyString());
+
+ doAnswer(invocationOnMock -> List.of(record)).when(recordCollection).getRecords();
+
+ doAnswer(invocationOnMock -> Future.succeededFuture(record)).when(mockedRecordService).updateRecord(any(), anyString());
+
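+ // Create the handler under test with the mocked record and snapshot services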
+ InstancePostProcessingEventHandler handler = new InstancePostProcessingEventHandler(mockedRecordService, mockedSnapshotService, kafkaConfig, mappingParametersCache, vertx);
+
+ String expectedInstanceId = UUID.randomUUID().toString();
+ String expectedHrId = UUID.randomUUID().toString();
+ String expectedCentralTenantId = "centralTenantId";
+
+ JsonObject instance = createExternalEntity(expectedInstanceId, expectedHrId);
+
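+ // Payload carries the CENTRAL_TENANT_INSTANCE_UPDATED flag, signalling that the shared instance was already updated in the central tenant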
+ HashMap<String, String> payloadContext = new HashMap<>();
+ payloadContext.put(INSTANCE.value(), instance.encode());
+ payloadContext.put(MARC_BIBLIOGRAPHIC.value(), Json.encode(record));
+ payloadContext.put("recordId", record.getId());
+ payloadContext.put(CENTRAL_TENANT_INSTANCE_UPDATED_FLAG, "true");
+ payloadContext.put(CENTRAL_TENANT_ID, expectedCentralTenantId);
+
+ DataImportEventPayload dataImportEventPayload =
+ createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING);
+
+ CompletableFuture<DataImportEventPayload> future = new CompletableFuture<>();
+
+ handler.handle(dataImportEventPayload)
+ .thenApply(future::complete)
+ .exceptionally(future::completeExceptionally);
+
+ future.whenComplete((payload, e) -> {
+ if (e != null) {
+ context.fail(e);
+ }
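+ // The handler is expected to clear the updated-instance flag and keep the central tenant id in the context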
+ verify(mockedRecordService, times(1)).updateParsedRecord(any(), anyString());
+ context.assertNull(payload.getContext().get(CENTRAL_TENANT_INSTANCE_UPDATED_FLAG));
+ context.assertEquals(expectedCentralTenantId, payload.getContext().get(CENTRAL_TENANT_ID));
+ async.complete();
+ });
+ }
+
@Test
public void shouldSaveRecordWhenRecordDoesntExist(TestContext context) throws IOException {
Async async = context.async();
@@ -416,7 +496,7 @@ public void checkGeneration035FiledAfterUpdateMarcBib(TestContext context) throw
context.assertEquals(Record.State.ACTUAL, savedIncomingRecord.getState());
context.assertNotNull(savedIncomingRecord.getGeneration());
context.assertTrue(existingRec.getGeneration() < savedIncomingRecord.getGeneration());
- context.assertFalse(((String)savedIncomingRecord.getParsedRecord().getContent()).contains("(LTSA)in00000000040"));
+ context.assertFalse(((String) savedIncomingRecord.getParsedRecord().getContent()).contains("(LTSA)in00000000040"));
async.complete();
});
@@ -430,8 +510,8 @@ public void shouldSetInstanceIdToParsedRecordWhenContentHasField999(TestContext
var expectedHrid = "in0002";
record.withParsedRecord(new ParsedRecord()
- .withId(recordId)
- .withContent(PARSED_CONTENT_WITH_999_FIELD))
+ .withId(recordId)
+ .withContent(PARSED_CONTENT_WITH_999_FIELD))
.withExternalIdsHolder(new ExternalIdsHolder().withInstanceHrid("in0001").withInstanceId(expectedInstanceId));
HashMap<String, String> payloadContext = new HashMap<>();
@@ -481,8 +561,7 @@ public void shouldSetInstanceIdToParsedRecordWhenContentHasField999(TestContext
public void shouldUpdateField005WhenThisFiledIsNotProtected(TestContext context) throws IOException {
Async async = context.async();
- String expectedDate = AdditionalFieldsUtil.dateTime005Formatter
- .format(ZonedDateTime.ofInstant(Instant.now(), ZoneId.systemDefault()));
+ String expectedDate = get005FieldExpectedDate();
String recordId = UUID.randomUUID().toString();
RawRecord rawRecord = new RawRecord().withId(recordId)
@@ -526,9 +605,7 @@ public void shouldUpdateField005WhenThisFiledIsNotProtected(TestContext context)
context.assertTrue(getAr.result().isPresent());
Record updatedRecord = getAr.result().get();
- String actualDate = AdditionalFieldsUtil.getValueFromControlledField(updatedRecord, TAG_005);
- Assert.assertEquals(expectedDate.substring(0, 10),
- actualDate.substring(0, 10));
+ validate005Field(context, expectedDate, updatedRecord);
async.complete();
});
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java b/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java
index f7c2cdfe9..65164d01f 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java
@@ -3,7 +3,7 @@
import static com.github.tomakehurst.wiremock.client.WireMock.get;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Collections.singletonList;
-import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeader;
+import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeaderAndDate;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@@ -173,6 +173,7 @@ public void shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNod
WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(PROFILE_SNAPSHOT_URL + "/.*"), true))
.willReturn(WireMock.ok().withBody(Json.encode(profileSnapshotWrapper))));
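+ // Expected 005 value is captured before processing and compared with the modified record's 005 field below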
+ String expectedDate = get005FieldExpectedDate();
String expectedParsedContent =
"{\"leader\":\"00107nam 22000491a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
@@ -213,9 +214,11 @@ public void shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNod
Record actualRecord =
Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
- assertEquals(getParsedContentWithoutLeader(expectedParsedContent), getParsedContentWithoutLeader(actualRecord.getParsedRecord().getContent().toString()));
+ assertEquals(getParsedContentWithoutLeaderAndDate(expectedParsedContent),
+ getParsedContentWithoutLeaderAndDate(actualRecord.getParsedRecord().getContent().toString()));
assertEquals(Record.State.ACTUAL, actualRecord.getState());
assertEquals(dataImportEventPayload.getJobExecutionId(), actualRecord.getSnapshotId());
+ validate005Field(expectedDate, actualRecord);
assertNotNull(observedRecords.get(0).getHeaders().lastHeader(RECORD_ID_HEADER));
}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/0f0fe962-d502-4a4f-9e74-7732bec94ee8.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/0f0fe962-d502-4a4f-9e74-7732bec94ee8.json
new file mode 100644
index 000000000..cb9a3b8bb
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/0f0fe962-d502-4a4f-9e74-7732bec94ee8.json
@@ -0,0 +1,4 @@
+{
+ "id": "0f0fe962-d502-4a4f-9e74-7732bec94ee8",
+ "content": "{\"leader\":\"02042nma a2200349 c 4500\",\"fields\":[{\"001\":\"inst000000000019\"},{\"003\":\"DE-601\"},{\"005\":\"20180122150003.0\"},{\"007\":\"cu\\\\uuu---uuuuu\"},{\"008\":\"180122s2017\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9781450351430\"},{\"9\":\"978-1-4503-5143-0\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1011273942\"}]}},{\"035\":\"1011273942\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"110\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ACM Workshop on Millimeter Wave Networks and Sensing Systems\"},{\"n\":\"1.\"},{\"d\":\"2017\"},{\"c\":\"Snowbird, Utah\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"}]}},{\"245\":{\"ind1\":\"0\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"MobiCom'17\"},{\"n\":\"5\"},{\"p\":\"mmNets'17, October 16, 2017, Snowbird, UT, USA / general chairs: Haitham Hassanieh (University of Illinois at Urbana Champaign, USA), Xinyu Zhang (University of California San Diego, USA)\"}]}},{\"246\":{\"ind1\":\"3\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"1st First ACM Workshop Millimeter Wave Networks Sensing Systems\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Computermedien\"},{\"b\":\"c\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Computerdisk\"},{\"b\":\"cd\"},{\"2\":\"rdacarrier\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Hassanieh, Haitham\"},{\"e\":\"VeranstalterIn\"},{\"4\":\"orm\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Zhang, Xinyu\"},{\"e\":\"VeranstalterIn\"},{\"4\":\"orm\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"MobiCom\"},{\"n\":\"23.\"},{\"d\":\"2017\"},{\"c\":\"Snowbird, Utah\"},{\"e\":\"VeranstalterIn\"},{\"4\":\"orm\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Association for Computing Machinery\"},{\"b\":\"Special Interest Group on Mobility of Systems Users, Data, and Computing\"},{\"e\":\"SponsorIn\"},{\"4\":\"spn\"},{\"0\":\"(DE-601)499677137\"},{\"0\":\"(DE-588)10113390-X\"}]}},{\"711\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ACM Workshop on Millimeter Wave Networks and Sensing Systems\"},{\"n\":\"1\"},{\"d\":\"2017.10.16\"},{\"c\":\"Snowbird, Utah\"}]}},{\"711\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"mmNets\"},{\"n\":\"1\"},{\"d\":\"2017.10.16\"},{\"c\":\"Snowbird, Utah\"}]}},{\"711\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Annual International Conference on Mobile Computing and Networking (ACM MobiCom)\"},{\"n\":\"23\"},{\"d\":\"2017.10.16-20\"},{\"c\":\"Snowbird, Utah\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"w\":\"(DE-601)1011270897\"},{\"t\":\"MobiCom'17, MobiCom'17, proceedings and co-located workshops of the 23rd Annual International Conference on Mobile Computing and Networking : October 16-20, 2017, Snowbird, UT, USA, MobiCom, Snowbird, Utah. 
- New York, NY : ACM, Association for Computing Machinery\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"TIB/UB Hannover <89>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"TIB/UB Hannover <89>\"},{\"a\":\"70\"},{\"b\":\"1743063695\"},{\"c\":\"01\"},{\"x\":\"0089\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"0f0fe962-d502-4a4f-9e74-7732bec94ee8\"},{\"i\":\"6b4ae089-e1ee-431f-af83-e1133f8e3da0\"}]}}]}"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/3187432f-9434-40a8-8782-35a111a1491e.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/3187432f-9434-40a8-8782-35a111a1491e.json
new file mode 100644
index 000000000..93cbc0c42
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/3187432f-9434-40a8-8782-35a111a1491e.json
@@ -0,0 +1,4 @@
+{
+ "id": "3187432f-9434-40a8-8782-35a111a1491e",
+ "content": "{\"leader\":\"01463nja a2200313 c 4500\",\"fields\":[{\"001\":\"inst000000000007\"},{\"003\":\"DE-601\"},{\"005\":\"20180118183625.0\"},{\"007\":\"su\\\\uuuuuuuuuuu\"},{\"008\":\"180118s2017\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\ger\\\\d\"},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1011162431\"}]}},{\"035\":\"1011162431\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ger\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Bach, Johann Sebastian\"},{\"e\":\"KomponistIn\"},{\"4\":\"cmp\"},{\"0\":\"(DE-601)134579348\"},{\"0\":\"(DE-588)11850553X\"}]}},{\"240\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"0\":\"(DE-601)701589477\"},{\"0\":\"(DE-588)300007736\"},{\"a\":\"Ich habe genung\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Cantatas for bass\"},{\"n\":\"4\"},{\"p\":\"Ich habe genug : BWV 82 / Johann Sebastian Bach ; Matthias Goerne, baritone ; Freiburger Barockorchester, Gottfried von der Goltz, violin and conductor\"}]}},{\"246\":{\"ind1\":\"1\",\"ind2\":\"3\",\"subfields\":[{\"i\":\"Abweichender Titel\"},{\"a\":\"Ich habe genung\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Track 10-14\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"aufgeführte Musik\"},{\"b\":\"prm\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"audio\"},{\"b\":\"s\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Audiodisk\"},{\"b\":\"sd\"},{\"2\":\"rdacarrier\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Arfken, Katharina\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)576364940\"},{\"0\":\"(DE-588)135158265\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Goltz, Gottfried von der\"},{\"e\":\"DirigentIn\"},{\"4\":\"cnd\"},{\"0\":\"(DE-601)081724969\"},{\"0\":\"(DE-588)122080912\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Freiburger Barockorchester\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)12121060X\"},{\"0\":\"(DE-588)5066798-1\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"w\":\"(DE-601)895161729\"},{\"t\":\"Cantatas for bass, Bach, Johann Sebastian. - Arles : Harmonia Mundi\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SBB-PK Berlin <1+1A>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SBB-PK Berlin <1+1A>\"},{\"a\":\"11\"},{\"b\":\"1742288871\"},{\"c\":\"01\"},{\"x\":\"0001\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"3187432f-9434-40a8-8782-35a111a1491e\"},{\"i\":\"ce00bca2-9270-4c6b-b096-b83a2e56e8e9\"}]}}]}"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/4c0ff739-3f4d-4670-a693-84dd48e31c53.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/4c0ff739-3f4d-4670-a693-84dd48e31c53.json
new file mode 100644
index 000000000..71cfc5e0c
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/4c0ff739-3f4d-4670-a693-84dd48e31c53.json
@@ -0,0 +1,4 @@
+{
+ "id": "4c0ff739-3f4d-4670-a693-84dd48e31c53",
+ "content": "{\"leader\":\"02258ngm a2200433 c 4500\",\"fields\":[{\"001\":\"inst000000000018\"},{\"003\":\"DE-601\"},{\"005\":\"20160520072526.0\"},{\"007\":\"vu\\\\uvuuuu\"},{\"008\":\"110113s2010\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\ger\\\\d\"},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9789279164316\"},{\"9\":\"978-92-79-16431-6\"}]}},{\"024\":{\"ind1\":\"7\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"10.2768/21035\"},{\"2\":\"doi\"}]}},{\"028\":{\"ind1\":\"5\",\"ind2\":\"2\",\"subfields\":[{\"a\":\"MI-32-10-386-57-Z\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV643935371\"}]}},{\"035\":\"643935371\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ger\"},{\"a\":\"eng\"},{\"a\":\"spa\"},{\"a\":\"fre\"},{\"a\":\"ita\"},{\"a\":\"dut\"},{\"a\":\"por\"}]}},{\"084\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"55.80\"},{\"9\":\"Verkehrswesen\"},{\"9\":\"Transportwesen: Allgemeines\"},{\"2\":\"bkl\"}]}},{\"245\":{\"ind1\":\"0\",\"ind2\":\"2\",\"subfields\":[{\"a\":\"A journey through Europe\"},{\"h\":\"Bildtontraeger\"},{\"b\":\"high-speed lines\"},{\"c\":\"European Commission, Directorate-General for Mobility and Transport\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"1 DVD-Video (14 Min.)\"},{\"b\":\"farb.\"},{\"c\":\"12 cm\"}]}},{\"610\":{\"ind1\":\"1\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-601)132918269\"},{\"0\":\"(DE-588)5098525-5\"},{\"a\":\"Europäische Union\"},{\"2\":\"gnd\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-601)104616431\"},{\"0\":\"(DE-588)4113934-3\"},{\"a\":\"Hochgeschwindigkeitszug\"},{\"2\":\"gnd\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-601)106129678\"},{\"0\":\"(DE-588)4062953-3\"},{\"a\":\"Verkehrsnetz\"},{\"2\":\"gnd\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.1\\\\x\"},{\"a\":\"Hochgeschwindigkeitsverkehr\"},{\"0\":\"(DE-601)091366011\"},{\"0\":\"(DE-STW)18089-3\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.2\\\\x\"},{\"a\":\"Hochgeschwindigkeitsverkehr\"},{\"0\":\"(DE-601)091366011\"},{\"0\":\"(DE-STW)18089-3\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.3\\\\x\"},{\"a\":\"Schienenverkehr\"},{\"0\":\"(DE-601)091388066\"},{\"0\":\"(DE-STW)13255-2\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.4\\\\x\"},{\"a\":\"EU-Verkehrspolitik\"},{\"0\":\"(DE-601)091358701\"},{\"0\":\"(DE-STW)18627-1\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.5\\\\x\"},{\"a\":\"EU-Staaten\"},{\"0\":\"(DE-601)091358639\"},{\"0\":\"(DE-STW)17983-5\"},{\"2\":\"stw\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Europäische Kommission\"},{\"b\":\"Generaldirektion Mobilität und Verkehr\"},{\"0\":\"(DE-601)667202439\"},{\"0\":\"(DE-588)16174192-7\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"ZBW Kiel <206>\"},{\"d\":\"!K:! DVD 49\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"s\":\"206\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"THULB Jena <27>\"},{\"d\":\"!Mag5! 
EDZ 0720 17\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"f\":\"Bestand Europäisches Dokumentationszentrum\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"TIB/UB Hannover <89>\"},{\"d\":\"!FBW MagNB! CD oek 6715/042\"},{\"x\":\"L\"},{\"z\":\"C\"},{\"s\":\"89/18\"},{\"g\":\"!FBW EU! CD oek 6715/042\"},{\"x\":\"L\"},{\"z\":\"C\"},{\"s\":\"89/18\"}]}},{\"951\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eu\"},{\"2\":\"26\"}]}},{\"951\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"oek 6715\"},{\"2\":\"70\"}]}},{\"951\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"oek 6715 3ah\"},{\"2\":\"70\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"ZBW Kiel <206>\"},{\"a\":\"26\"},{\"b\":\"1231491841\"},{\"c\":\"01\"},{\"f\":\"K:\"},{\"d\":\"DVD 49\"},{\"e\":\"u\"},{\"x\":\"0206\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"THULB Jena <27>\"},{\"a\":\"31\"},{\"b\":\"1219949035\"},{\"c\":\"01\"},{\"f\":\"Mag5\"},{\"d\":\"EDZ 0720 17\"},{\"e\":\"d\"},{\"k\":\"Bestand Europäisches Dokumentationszentrum\"},{\"x\":\"0027\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"TIB/UB Hannover <89>\"},{\"a\":\"70\"},{\"b\":\"1219604739\"},{\"c\":\"70\"},{\"f\":\"FBW MagNB\"},{\"d\":\"CD oek 6715/042\"},{\"e\":\"s\"},{\"f\":\"FBW EU\"},{\"d\":\"CD oek 6715/042\"},{\"e\":\"s\"},{\"x\":\"89/18\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"4c0ff739-3f4d-4670-a693-84dd48e31c53\"},{\"i\":\"1b74ab75-9f41-4837-8662-a1d99118008d\"}]}}]}"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/4ca9d8ac-9de5-432a-83ee-15832f09e868.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/4ca9d8ac-9de5-432a-83ee-15832f09e868.json
new file mode 100644
index 000000000..fb2a473e4
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/4ca9d8ac-9de5-432a-83ee-15832f09e868.json
@@ -0,0 +1,4 @@
+{
+ "id": "4ca9d8ac-9de5-432a-83ee-15832f09e868",
+ "content": "UNA:+.?*'UNB+UNOA:1+EDIASD:31B+EDITRCK:ZZ+030407:1204+2451840'UNH+00001+ORDERS:D:96A:UN'BGM+1::9+03134+9'DTM+2:0:805'DTM+137:200304071204:203'DTM+1:200304080400200304090400:719'RFF+AHI:RWE001'NAD+ZSH+TRCKRWE::9'NAD+ZZZ+TRCK::9'TDT+41G++70'LOC+7+:::TTFH'LIN+1'LOC+11'QTY+2:123456:JM1'DTM+2:200304080400200304080600:719'LOC+11'QTY+2:-123456:JM1'DTM+2:200304080600200304080800:719'LOC+11'QTY+2:0:JM1'DTM+2:200304080800200304081000:719'LOC+11'QTY+2:52301:JM1'DTM+2:200304081000200304081100:719'LOC+11'QTY+2:55324:JM1'DTM+2:200304081100200304081600:719'LOC+11'QTY+2:-12056:JM1'DTM+2:200304081600200304081900:719'LOC+11'QTY+2:0:JM1'DTM+2:200304081900200304082300:719'LOC+11'QTY+2:5587:JM1'DTM+2:200304082300200304090400:719'NAD+AA+TRCK::9'UNS+S'UNT+38+00001'UNZ+1+2451840'"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/7293f287-bb51-41f5-805d-00ff18a1f791.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/7293f287-bb51-41f5-805d-00ff18a1f791.json
new file mode 100644
index 000000000..f9653296f
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/7293f287-bb51-41f5-805d-00ff18a1f791.json
@@ -0,0 +1,4 @@
+{
+ "id": "7293f287-bb51-41f5-805d-00ff18a1f791",
+ "content": "{\"leader\":\"01980nam a2200361 ca4500\",\"fields\":[{\"001\":\"inst000000000009\"},{\"003\":\"DE-601\"},{\"005\":\"20180214103458.0\"},{\"008\":\"180111s2018\\\\\\\\\\\\\\\\xxu\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9781473937703\"},{\"c\":\"set\"},{\"9\":\"978-1-4739-3770-3\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1010770160\"}]}},{\"035\":\"1010770160\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"044\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"xxu\"},{\"a\":\"xxk\"},{\"a\":\"ii\"}]}},{\"050\":{\"ind1\":\"\\\\\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"H\"}]}},{\"082\":{\"ind1\":\"0\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"300\"}]}},{\"084\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"74.72\"},{\"9\":\"Stadtplanung\"},{\"9\":\"kommunale Planung\"},{\"2\":\"bkl\"}]}},{\"084\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"74.12\"},{\"9\":\"Stadtgeographie\"},{\"9\":\"Siedlungsgeographie\"},{\"2\":\"bkl\"}]}},{\"245\":{\"ind1\":\"0\",\"ind2\":\"4\",\"subfields\":[{\"a\":\"The city\"},{\"b\":\"post-modernity\"},{\"c\":\"edited by Alan Latham\"}]}},{\"264\":{\"ind1\":\"3\",\"ind2\":\"1\",\"subfields\":[{\"a\":\"Los Angeles\"},{\"a\":\"London\"},{\"a\":\"New Delhi\"},{\"a\":\"Singapore\"},{\"a\":\"Washington DC\"},{\"a\":\"Melbourne\"},{\"b\":\"SAGE\"},{\"c\":\"2018\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ohne Hilfsmittel zu benutzen\"},{\"b\":\"n\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Band\"},{\"b\":\"nc\"},{\"2\":\"rdacarrier\"}]}},{\"490\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"SAGE benchmarks in culture and society\"}]}},{\"520\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Cities are sites of great wealth and poverty, of hope and despair, of social and economic dynamism, as well as tradition and established power. Social scientists and humanities scholars have over the past three decades generated an impressive range of perspectives for making sense of the vast complexities of cities. These perspectives tell both of the economic, social and political dynamism cities generate, and point to possible lines of future development. 
The four volumes, The City: Post-Modernity, will focus more exclusively on the contemporary city, looking at the subject through the lenses of globalization and post-colonialism, amongst others\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-601)106153919\"},{\"0\":\"(DE-588)4056723-0\"},{\"a\":\"Stadt\"},{\"2\":\"gnd\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-601)104288515\"},{\"0\":\"(DE-588)4115604-3\"},{\"a\":\"Postmoderne\"},{\"2\":\"gnd\"}]}},{\"655\":{\"ind1\":\"0\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-588)4143413-4\"},{\"a\":\"Aufsatzsammlung\"},{\"2\":\"gnd-content\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Latham, Alan\"},{\"e\":\"HerausgeberIn\"},{\"4\":\"edt\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SUB+Uni Hamburg <18>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SUB+Uni Hamburg <18>\"},{\"a\":\"22\"},{\"b\":\"1740834291\"},{\"c\":\"01\"},{\"x\":\"0018\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"7293f287-bb51-41f5-805d-00ff18a1f791\"},{\"i\":\"c1d3be12-ecec-4fab-9237-baf728575185\"}]}}]}"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/8452daf9-c130-4955-99ce-1c397a218900.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/8452daf9-c130-4955-99ce-1c397a218900.json
new file mode 100644
index 000000000..3e07f8386
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/8452daf9-c130-4955-99ce-1c397a218900.json
@@ -0,0 +1,4 @@
+{
+ "id": "8452daf9-c130-4955-99ce-1c397a218900",
+ "content": "{\"leader\":\"01024nmm a2200277 ca4500\",\"fields\":[{\"001\":\"inst000000000008\"},{\"003\":\"DE-601\"},{\"005\":\"20160502164752.0\"},{\"007\":\"cu\\\\uuu---uuuuu\"},{\"008\":\"160502m20169999gw\\\\\\\\\\\\\\\\\\\\\\\\o\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV858092093\"}]}},{\"035\":\"858092093\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Ris, Robert\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"},{\"0\":\"(DE-601)778649407\"},{\"0\":\"(DE-588)1047595397\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"4\",\"subfields\":[{\"a\":\"The chess player’s mating guide\"},{\"h\":\"Computer Datei\"},{\"c\":\"Robert Ris\"}]}},{\"264\":{\"ind1\":\"3\",\"ind2\":\"1\",\"subfields\":[{\"a\":\"Hamburg\"},{\"b\":\"Chessbase GmbH\"},{\"c\":\"[2016]-\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Computerprogramm\"},{\"b\":\"cop\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Computermedien\"},{\"b\":\"c\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Computerdisk\"},{\"b\":\"cd\"},{\"2\":\"rdacarrier\"}]}},{\"490\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Fritztrainer\"},{\"a\":\"Tactics\"}]}},{\"655\":{\"ind1\":\"0\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-588)4585131-1\"},{\"a\":\"DVD-ROM\"},{\"2\":\"gnd-carrier\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ChessBase GmbH\"},{\"g\":\"Hamburg\"},{\"e\":\"Verlag\"},{\"4\":\"pbl\"},{\"0\":\"(DE-601)269520015\"},{\"0\":\"(DE-588)2177108-X\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SUB+Uni Hamburg <18>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SUB+Uni Hamburg <18>\"},{\"a\":\"22\"},{\"b\":\"1613697813\"},{\"c\":\"01\"},{\"x\":\"0018\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"8452daf9-c130-4955-99ce-1c397a218900\"},{\"i\":\"3c4ae3f3-b460-4a89-a2f9-78ce3145e4fc\"}]}}]}"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/8f462542-387c-4f06-a01b-50829c7c7b13.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/8f462542-387c-4f06-a01b-50829c7c7b13.json
new file mode 100644
index 000000000..6c94b9bf5
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/8f462542-387c-4f06-a01b-50829c7c7b13.json
@@ -0,0 +1,4 @@
+{
+ "id": "8f462542-387c-4f06-a01b-50829c7c7b13",
+ "content": "{\"leader\":\"02046nam a2200421 cc4500\",\"fields\":[{\"001\":\"inst000000000004\"},{\"003\":\"DE-601\"},{\"005\":\"20180615233857.0\"},{\"007\":\"he\\\\amu000uuuu\"},{\"008\":\"170217s2016\\\\\\\\\\\\\\\\xxu\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9780866989732\"},{\"9\":\"978-0-86698-973-2\"}]}},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"0866989730\"},{\"9\":\"0-86698-973-0\"}]}},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9780866985529\"},{\"9\":\"978-0-86698-552-9\"}]}},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"0866985522\"},{\"9\":\"0-86698-552-2\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(OCoLC)962073864\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(OCoLC)ocn962073864\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(OCoLC)962073864\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV880391235\"}]}},{\"035\":\"880391235\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"},{\"a\":\"ang\"},{\"a\":\"lat\"}]}},{\"245\":{\"ind1\":\"0\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Anglo-Saxon manuscripts in microfiche facsimile\"},{\"n\":\"Volume 25\"},{\"p\":\"Corpus Christi College, Cambridge II, MSS 12, 144, 162, 178, 188, 198, 265, 285, 322, 326, 449\"},{\"h\":\"microform\"},{\"c\":\"A. N. Doane (editor and director), Matthew T. Hussey (associate editor), Phillip Pulsiano (founding editor)\"}]}},{\"264\":{\"ind1\":\"3\",\"ind2\":\"1\",\"subfields\":[{\"a\":\"Tempe, Arizona\"},{\"b\":\"ACMRS, Arizona Center for Medieval and Renaissance Studies\"},{\"c\":\"2016\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"69 Mikrofiches\"},{\"e\":\"1 Begleitbuch (XII, 167 Seiten)\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Mikroform\"},{\"b\":\"h\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Mikrofiche\"},{\"b\":\"he\"},{\"2\":\"rdacarrier\"}]}},{\"490\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Medieval and Renaissance Texts and Studies\"},{\"v\":\"volume 497\"}]}},{\"500\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Titel und Angaben zu beteiligter Person vom Begleitheft\"}]}},{\"546\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"In English with segments in Anglo-Saxon and Latin\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Lucas, Peter J.\"},{\"e\":\"VerfasserIn von Zusatztexten\"},{\"4\":\"wat\"},{\"0\":\"(DE-601)699400066\"},{\"0\":\"(DE-588)188475893\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Arizona Center for Medieval and Renaissance Studies\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"8\",\"subfields\":[{\"q\":\"25.2016\"},{\"w\":\"(DE-601)281985480\"}]}},{\"830\":{\"ind1\":\"\\\\\",\"ind2\":\"0\",\"subfields\":[{\"w\":\"(DE-601)13055846X\"},{\"v\":\"volume 497\"},{\"9\":\"49700\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SBB-PK Berlin <1+1A>\"},{\"d\":\"!2! 
1 F 5327-25\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"f\":\"Mikrofiches\"},{\"d\":\"!2! 1 F 5327-25,Beil.\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"f\":\"Begleitbuch\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SBB-PK Berlin <1+1A>\"},{\"a\":\"11\"},{\"b\":\"1665961309\"},{\"c\":\"01\"},{\"f\":\"2\"},{\"d\":\"1 F 5327-25\"},{\"e\":\"d\"},{\"k\":\"Mikrofiches\"},{\"x\":\"0001\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SBB-PK Berlin <1+1A>\"},{\"a\":\"11\"},{\"b\":\"1665961317\"},{\"c\":\"02\"},{\"f\":\"2\"},{\"d\":\"1 F 5327-25,Beil.\"},{\"e\":\"u\"},{\"k\":\"Begleitbuch\"},{\"x\":\"0001\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"8f462542-387c-4f06-a01b-50829c7c7b13\"},{\"i\":\"8be05cf5-fb4f-4752-8094-8e179d08fb99\"}]}}]}"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/8fb19e31-0920-49d7-9438-b573c292b1a6.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/8fb19e31-0920-49d7-9438-b573c292b1a6.json
new file mode 100644
index 000000000..5e1e9d10e
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/8fb19e31-0920-49d7-9438-b573c292b1a6.json
@@ -0,0 +1,4 @@
+{
+ "id": "8fb19e31-0920-49d7-9438-b573c292b1a6",
+ "content": "{\"leader\":\"01859nam a2200397 cb4500\",\"fields\":[{\"001\":\"inst000000000005\"},{\"003\":\"DE-601\"},{\"005\":\"20180416162657.0\"},{\"008\":\"180111s2018\\\\\\\\\\\\\\\\sz\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"3319643991\"},{\"9\":\"3-319-64399-1\"}]}},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9783319643991\"},{\"9\":\"978-3-319-64399-1\"}]}},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9783319644004 (electronic)\"},{\"9\":\"978-3-319-64400-4\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(OCoLC)ocn992783736\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(OCoLC)992783736\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV101073931X\"}]}},{\"035\":\"101073931X\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"245\":{\"ind1\":\"0\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Futures, biometrics and neuroscience research\"},{\"c\":\"Luiz Moutinho, Mladen Sokele, editors\"}]}},{\"264\":{\"ind1\":\"3\",\"ind2\":\"1\",\"subfields\":[{\"a\":\"Cham\"},{\"b\":\"Palgrave Macmillan\"},{\"c\":\"[2018]\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"xxix, 224 Seiten\"},{\"b\":\"Illustrationen\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ohne Hilfsmittel zu benutzen\"},{\"b\":\"n\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Band\"},{\"b\":\"nc\"},{\"2\":\"rdacarrier\"}]}},{\"490\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Innovative research methodologies in management\"},{\"v\":\" / Luiz Moutinho, Mladen Sokele ; Volume 2\"}]}},{\"500\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Enthält 9 Beiträge\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.1\\\\x\"},{\"a\":\"Betriebswirtschaftslehre\"},{\"0\":\"(DE-601)091351391\"},{\"0\":\"(DE-STW)12041-5\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.2\\\\x\"},{\"a\":\"Management\"},{\"0\":\"(DE-601)091376173\"},{\"0\":\"(DE-STW)12085-6\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.3\\\\x\"},{\"a\":\"Wissenschaftliche Methode\"},{\"0\":\"(DE-601)091401445\"},{\"0\":\"(DE-STW)16727-0\"},{\"2\":\"stw\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Moutinho, Luiz\"},{\"e\":\"HerausgeberIn\"},{\"4\":\"edt\"},{\"0\":\"(DE-601)509450954\"},{\"0\":\"(DE-588)131450204\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Sokele, Mladen\"},{\"e\":\"HerausgeberIn\"},{\"4\":\"edt\"}]}},{\"830\":{\"ind1\":\"\\\\\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Innovative research methodologies in management\"},{\"b\":\" / Luiz Moutinho, Mladen Sokele\"},{\"v\":\"Volume 
2\"},{\"9\":\"2.2018\"},{\"w\":\"(DE-601)1011380293\"}]}},{\"856\":{\"ind1\":\"4\",\"ind2\":\"2\",\"subfields\":[{\"y\":\"Inhaltsverzeichnis\"},{\"u\":\"http://www.gbv.de/dms/zbw/101073931X.pdf\"},{\"m\":\"V:DE-601;B:DE-206\"},{\"q\":\"application/pdf\"},{\"3\":\"Inhaltsverzeichnis\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"ZBW Kiel <206>\"},{\"d\":\"!H:! A18-1775\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"s\":\"206/1\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"ZBW Kiel <206>\"},{\"a\":\"26\"},{\"b\":\"1740761685\"},{\"c\":\"01\"},{\"f\":\"H:\"},{\"d\":\"A18-1775\"},{\"e\":\"u\"},{\"x\":\"206/1\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"8fb19e31-0920-49d7-9438-b573c292b1a6\"},{\"i\":\"1640f178-f243-4e4a-bf1c-9e1e62b3171d\"}]}}]}"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/be1b25ae-4a9d-4077-93e6-7f8e59efd609.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/be1b25ae-4a9d-4077-93e6-7f8e59efd609.json
new file mode 100644
index 000000000..7dd98cd89
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/be1b25ae-4a9d-4077-93e6-7f8e59efd609.json
@@ -0,0 +1,4 @@
+{
+ "id": "be1b25ae-4a9d-4077-93e6-7f8e59efd609",
+ "content": "{\"leader\":\"01463nja a2200313 c 4500\",\"fields\":[{\"001\":\"inst000000000007\"},{\"003\":\"DE-601\"},{\"005\":\"20180118183625.0\"},{\"007\":\"su\\\\uuuuuuuuuuu\"},{\"008\":\"180118s2017\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\ger\\\\d\"},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1011162431\"}]}},{\"035\":\"1011162431\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ger\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Bach, Johann Sebastian\"},{\"e\":\"KomponistIn\"},{\"4\":\"cmp\"},{\"0\":\"(DE-601)134579348\"},{\"0\":\"(DE-588)11850553X\"}]}},{\"240\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"0\":\"(DE-601)701589477\"},{\"0\":\"(DE-588)300007736\"},{\"a\":\"Ich habe genung\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Cantatas for bass\"},{\"n\":\"4\"},{\"p\":\"Ich habe genug : BWV 82 / Johann Sebastian Bach ; Matthias Goerne, baritone ; Freiburger Barockorchester, Gottfried von der Goltz, violin and conductor\"}]}},{\"246\":{\"ind1\":\"1\",\"ind2\":\"3\",\"subfields\":[{\"i\":\"Abweichender Titel\"},{\"a\":\"Ich habe genung\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Track 10-14\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"aufgeführte Musik\"},{\"b\":\"prm\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"audio\"},{\"b\":\"s\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Audiodisk\"},{\"b\":\"sd\"},{\"2\":\"rdacarrier\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Arfken, Katharina\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)576364940\"},{\"0\":\"(DE-588)135158265\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Goltz, Gottfried von der\"},{\"e\":\"DirigentIn\"},{\"4\":\"cnd\"},{\"0\":\"(DE-601)081724969\"},{\"0\":\"(DE-588)122080912\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Freiburger Barockorchester\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)12121060X\"},{\"0\":\"(DE-588)5066798-1\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"w\":\"(DE-601)895161729\"},{\"t\":\"Cantatas for bass, Bach, Johann Sebastian. - Arles : Harmonia Mundi\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SBB-PK Berlin <1+1A>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SBB-PK Berlin <1+1A>\"},{\"a\":\"11\"},{\"b\":\"1742288871\"},{\"c\":\"01\"},{\"x\":\"0001\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"be1b25ae-4a9d-4077-93e6-7f8e59efd609\"},{\"i\":\"ce00bca2-9270-4c6b-b096-b83a2e56e8e9\"}]}}]}"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/d3cd3e1e-a18c-4f7c-b053-9aa50343394e.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/d3cd3e1e-a18c-4f7c-b053-9aa50343394e.json
new file mode 100644
index 000000000..8aa986864
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/d3cd3e1e-a18c-4f7c-b053-9aa50343394e.json
@@ -0,0 +1,4 @@
+{
+ "id": "d3cd3e1e-a18c-4f7c-b053-9aa50343394e",
+ "content": "01743nai a2200409 i 4500001000800000005001700008006001900025007001500044008004100059010001800100035002400118042000800142043001200150040003200162049000800194074002300202086002100225100004300246245011300289250004200402264006500444300002300509310002400532336002600556337002600582338003600608490005400644500014400698504004800842588009200890610007200982710007301054773012901127830002601256856004001282922001101322\u001E5962418\u001E20200210110404.0\u001Em o d f \u001Ecr mn|||||||||\u001E200124c20189999dcu x w obb f0 2eng c\u001E \u001Fa 2020230732\u001E \u001Fa(OCoLC)on1137385866\u001E \u001Fapcc\u001E \u001Fan-us---\u001E \u001FaGPO\u001Fbeng\u001Ferda\u001Fepn\u001FcGPO\u001FdMvI\u001E \u001FaWWW\u001E \u001Fa0807-A-07 (online)\u001E0 \u001FaLC 14.23:98-888/\u001E1 \u001FaDavis, Christopher M.,\u001Fd1966-\u001Feauthor.\u001E10\u001Fa\"Fast-track\" or expedited procedures :\u001Fbtheir purpose, elements, and implications /\u001FcChristopher M. Davis.\u001E \u001Fa[Library of Congress public edition].\u001E 1\u001Fa[Washington, D.C.] :\u001FbCongressional Research Service,\u001Fc2018-\u001E \u001Fa1 online resource.\u001E \u001FaUpdated irregularly\u001E \u001Fatext\u001Fbtxt\u001F2rdacontent\u001E \u001Facomputer\u001Fbc\u001F2rdamedia\u001E \u001Faonline resource\u001Fbcr\u001F2rdacarrier\u001E1 \u001FaReport / Congressional Research Service ;\u001Fv98-888\u001E \u001FaThe CRS report home page provides access to all versions published since 2018 in accordance with P.L. 115-141; earliest version dated 2003.\u001E \u001FaReport includes bibliographical references.\u001E \u001FaDescription based on contents viewed on Jan. 21, 2020; title from CRS report home page.\u001E10\u001FaUnited States.\u001FbCongress\u001FxRules and practice\u001FxFast-track procedure.\u001E2 \u001FaLibrary of Congress.\u001FbCongressional Research Service,\u001Feissuing body.\u001E08\u001FiContained in (work):\u001FtCRS reports (Library of Congress. Congressional Research Service)\u001Fw(DLC) 2018231131\u001Fw(OCoLC)1052784408\u001E 0\u001FaCRS report ;\u001Fv98-888.\u001E40\u001Fuhttps://purl.fdlp.gov/GPO/gpo130975\u001E \u001FaCRSREP\u001E\u001D"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/db70de02-9205-4e05-8333-5848163b82b5.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/db70de02-9205-4e05-8333-5848163b82b5.json
new file mode 100644
index 000000000..32614d665
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/db70de02-9205-4e05-8333-5848163b82b5.json
@@ -0,0 +1,4 @@
+{
+ "id": "db70de02-9205-4e05-8333-5848163b82b5",
+ "content": "{\"leader\":\"01463nja a2200313 c 4500\",\"fields\":[{\"001\":\"inst000000000007\"},{\"003\":\"DE-601\"},{\"005\":\"20180118183625.0\"},{\"007\":\"su\\\\uuuuuuuuuuu\"},{\"008\":\"180118s2017\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\ger\\\\d\"},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1011162431\"}]}},{\"035\":\"1011162431\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ger\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Bach, Johann Sebastian\"},{\"e\":\"KomponistIn\"},{\"4\":\"cmp\"},{\"0\":\"(DE-601)134579348\"},{\"0\":\"(DE-588)11850553X\"}]}},{\"240\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"0\":\"(DE-601)701589477\"},{\"0\":\"(DE-588)300007736\"},{\"a\":\"Ich habe genung\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Cantatas for bass\"},{\"n\":\"4\"},{\"p\":\"Ich habe genug : BWV 82 / Johann Sebastian Bach ; Matthias Goerne, baritone ; Freiburger Barockorchester, Gottfried von der Goltz, violin and conductor\"}]}},{\"246\":{\"ind1\":\"1\",\"ind2\":\"3\",\"subfields\":[{\"i\":\"Abweichender Titel\"},{\"a\":\"Ich habe genung\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Track 10-14\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"aufgeführte Musik\"},{\"b\":\"prm\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"audio\"},{\"b\":\"s\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Audiodisk\"},{\"b\":\"sd\"},{\"2\":\"rdacarrier\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Arfken, Katharina\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)576364940\"},{\"0\":\"(DE-588)135158265\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Goltz, Gottfried von der\"},{\"e\":\"DirigentIn\"},{\"4\":\"cnd\"},{\"0\":\"(DE-601)081724969\"},{\"0\":\"(DE-588)122080912\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Freiburger Barockorchester\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)12121060X\"},{\"0\":\"(DE-588)5066798-1\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"w\":\"(DE-601)895161729\"},{\"t\":\"Cantatas for bass, Bach, Johann Sebastian. - Arles : Harmonia Mundi\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SBB-PK Berlin <1+1A>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SBB-PK Berlin <1+1A>\"},{\"a\":\"11\"},{\"b\":\"1742288871\"},{\"c\":\"01\"},{\"x\":\"0001\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"db70de02-9205-4e05-8333-5848163b82b5\"},{\"i\":\"ce00bca2-9270-4c6b-b096-b83a2e56e8e9\"}]}}]}"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/e4cfe577-4015-46d8-a54d-7c9b34796955.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/e4cfe577-4015-46d8-a54d-7c9b34796955.json
new file mode 100644
index 000000000..d506f608e
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/e4cfe577-4015-46d8-a54d-7c9b34796955.json
@@ -0,0 +1,4 @@
+{
+ "id": "e4cfe577-4015-46d8-a54d-7c9b34796955",
+ "content": "UNA:+.?*'UNB+UNOA:1+EDIASD:ZZZ+EDITRCK:ZZZ+030407:1204+2451840'UNH+00001+ORDERS:D:96A:UN'BGM+1::9+03134+9'DTM+2:0:805'DTM+137:200304071204:203'DTM+1:200304080400200304090400:719'RFF+AHI:RWE001'NAD+ZSH+TRCKRWE::9'NAD+ZZZ+TRCK::9'TDT+41G++70'LOC+7+:::TTFH'LIN+1'LOC+11'QTY+2:123456:JM1'DTM+2:200304080400200304080600:719'LOC+11'QTY+2:-123456:JM1'DTM+2:200304080600200304080800:719'LOC+11'QTY+2:0:JM1'DTM+2:200304080800200304081000:719'LOC+11'QTY+2:52301:JM1'DTM+2:200304081000200304081100:719'LOC+11'QTY+2:55324:JM1'DTM+2:200304081100200304081600:719'LOC+11'QTY+2:-12056:JM1'DTM+2:200304081600200304081900:719'LOC+11'QTY+2:0:JM1'DTM+2:200304081900200304082300:719'LOC+11'QTY+2:5587:JM1'DTM+2:200304082300200304090400:719'NAD+AA+TRCK::9'UNS+S'UNT+38+00001'UNZ+1+2451840'"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/e567b8e2-a45b-45f1-a85a-6b6312bdf4d8.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/e567b8e2-a45b-45f1-a85a-6b6312bdf4d8.json
new file mode 100644
index 000000000..0f5a10198
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/e567b8e2-a45b-45f1-a85a-6b6312bdf4d8.json
@@ -0,0 +1,4 @@
+{
+ "id": "e567b8e2-a45b-45f1-a85a-6b6312bdf4d8",
+ "content": "{\"leader\":\"02026naa a2200373 c 4500\",\"fields\":[{\"001\":\"inst000000000023\"},{\"003\":\"DE-601\"},{\"005\":\"20180301091013.0\"},{\"008\":\"180119s2018\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"016\":{\"ind1\":\"7\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"243003-4\"},{\"2\":\"DE-600\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1011184508\"}]}},{\"035\":\"1011184508\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Da, Gaofeng\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"},{\"0\":\"(DE-601)1015051618\"},{\"0\":\"(DE-588)1153556588\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"On the signature of complex system\"},{\"b\":\"a decomposed approach\"},{\"c\":\"Gaofeng Da, Ping Shing Chan, Maochao Xu\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ohne Hilfsmittel zu benutzen\"},{\"b\":\"n\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Band\"},{\"b\":\"nc\"},{\"2\":\"rdacarrier\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Chan, Ping Shing\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"},{\"0\":\"(DE-601)718654811\"},{\"0\":\"(DE-588)1023876256\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Xu, Maochao\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"},{\"0\":\"(DE-601)720932068\"},{\"0\":\"(DE-588)1025070240\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"8\",\"subfields\":[{\"i\":\"Enthalten in\"},{\"t\":\"European journal of operational research : EJOR\"},{\"d\":\"Amsterdam : Elsevier\"},{\"g\":\"Vol. 265, No. 3 (2018), p. 1115-1123\"},{\"q\":\"265:3<1115-1123\"},{\"w\":\"(DE-601)129611131\"},{\"x\":\"0377-2217\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"ZBW Kiel <206>\"},{\"d\":\"!H:ls! Z 6556\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"s\":\"206/1\"},{\"c\":\"184.2008 -\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"UB Clausthal <104>\"},{\"d\":\"!104/13! CL 13\"},{\"x\":\"N\"},{\"z\":\"N\"},{\"g\":\"lfdcl13\"},{\"c\":\"2006 -\"},{\"f\":\"Genauen Bestand bitte im Institut erfragen\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"TIB/UB Hannover <89>\"},{\"d\":\"ZN 8146 Haus2\"},{\"x\":\"L\"},{\"z\":\"C\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"DNB <101>\"},{\"d\":\"!101a! ZB 93727\"},{\"x\":\"L\"},{\"z\":\"C\"},{\"s\":\"0101/001\"},{\"g\":\"!MZLS! 
10-n\"},{\"c\":\"54.1992(1991),1u.3; 56.1992 -\"}]}},{\"951\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"bwl\"},{\"2\":\"26\"}]}},{\"951\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"11\"},{\"2\":\"30\"}]}},{\"952\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"d\":\"265\"},{\"j\":\"2018\"},{\"e\":\"3\"},{\"b\":\"16\"},{\"c\":\"3\"},{\"h\":\"1115-1123\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"ZBW Kiel <206>\"},{\"a\":\"26\"},{\"b\":\"892666773\"},{\"c\":\"04\"},{\"f\":\"H:ls\"},{\"d\":\"Z 6556\"},{\"e\":\"b\"},{\"g\":\"Bestand: 184.2008 -\"},{\"x\":\"206/1\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"UB Clausthal <104>\"},{\"a\":\"30\"},{\"b\":\"1174535881\"},{\"c\":\"02\"},{\"f\":\"104/13\"},{\"d\":\" CL 13\"},{\"e\":\"g\"},{\"d\":\"lfdcl13\"},{\"g\":\"Bestand: 2006 -\"},{\"k\":\"Genauen Bestand bitte im Institut erfragen\"},{\"x\":\"0104\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"TIB/UB Hannover <89>\"},{\"a\":\"70\"},{\"b\":\"13765989X\"},{\"c\":\"01\"},{\"d\":\"ZN 8146 Haus2\"},{\"e\":\"f\"},{\"g\":\"Bestand: 1.1977 - \"},{\"x\":\"0089\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"DNB <101>\"},{\"a\":\"267\"},{\"b\":\"9001929018\"},{\"c\":\"01\"},{\"f\":\"101a\"},{\"d\":\"ZB 93727\"},{\"e\":\"f\"},{\"f\":\"MZLS\"},{\"d\":\"10-n\"},{\"g\":\"Bestand: 54.1992(1991),1u.3; 56.1992 -\"},{\"x\":\"0101/001\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"e567b8e2-a45b-45f1-a85a-6b6312bdf4d8\"},{\"i\":\"54cc0262-76df-4cac-acca-b10e9bc5c79a\"}]}}]}"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/rawRecords/ec53a386-9616-428b-92a9-e1f07756ea1f.json b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/ec53a386-9616-428b-92a9-e1f07756ea1f.json
new file mode 100644
index 000000000..922bb997b
--- /dev/null
+++ b/mod-source-record-storage-server/src/test/resources/mock/rawRecords/ec53a386-9616-428b-92a9-e1f07756ea1f.json
@@ -0,0 +1,4 @@
+{
+ "id": "ec53a386-9616-428b-92a9-e1f07756ea1f",
+ "content": "{\"leader\":\"01227nam a2200277 ca4500\",\"fields\":[{\"001\":\"inst000000000010\"},{\"003\":\"DE-601\"},{\"005\":\"20180615212835.0\"},{\"007\":\"hu\\\\uuu\\\\\\\\\\\\uuuu\"},{\"008\":\"171211s1993\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1008673218\"}]}},{\"035\":\"1008673218\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Matwiejczyk-Montgomery, Marian Ann J.\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Concepts of fashion 1921 - 1987\"},{\"h\":\"microform\"},{\"b\":\"a study of garments worn by selected winners of the Miss America Pageant\"},{\"c\":\"Marian Ann J. Matwiejczyk-Montgomery\"}]}},{\"264\":{\"ind1\":\"3\",\"ind2\":\"1\",\"subfields\":[{\"a\":\"Ann Arbor, MI\"},{\"b\":\"University Microfims International\"},{\"c\":\"1993\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Mikroform\"},{\"b\":\"h\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Mikrofiche\"},{\"b\":\"he\"},{\"2\":\"rdacarrier\"}]}},{\"502\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"Dissertation\"},{\"c\":\"New York University\"},{\"d\":\"1993\"}]}},{\"533\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Mikrofiche-Ausgabe\"}]}},{\"655\":{\"ind1\":\"0\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-588)4113937-9\"},{\"a\":\"Hochschulschrift\"},{\"2\":\"gnd-content\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"PKB (Museen) Berlin\"},{\"d\":\"!KB LIPP RARA! R-MF-Lipp Mc 101 f kl\"},{\"x\":\"N\"},{\"z\":\"N\"},{\"s\":\"3181/011\"},{\"g\":\"R-MF-Lipp:Mc:101:f:kl:\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"PKB (Museen) Berlin\"},{\"a\":\"181\"},{\"b\":\"1729698859\"},{\"c\":\"10\"},{\"f\":\"KB LIPP RARA\"},{\"d\":\"R-MF-Lipp Mc 101 f kl\"},{\"e\":\"i\"},{\"d\":\"R-MF-Lipp:Mc:101:f:kl:\"},{\"x\":\"3181/011\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"ec53a386-9616-428b-92a9-e1f07756ea1f\"},{\"i\":\"5b1eb450-ff9f-412d-a9e7-887f6eaeb5b4\"}]}}]}"
+}
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/0f0fe962-d502-4a4f-9e74-7732bec94ee8.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/0f0fe962-d502-4a4f-9e74-7732bec94ee8.json
index ba4d0cd4b..a44e8b86a 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/0f0fe962-d502-4a4f-9e74-7732bec94ee8.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/0f0fe962-d502-4a4f-9e74-7732bec94ee8.json
@@ -2,10 +2,6 @@
"recordId" : "0f0fe962-d502-4a4f-9e74-7732bec94ee8",
"snapshotId": "7f939c0b-618c-4eab-8276-a14e0bfe5728",
"recordType" : "MARC_BIB",
- "rawRecord" : {
- "id" : "0f0fe962-d502-4a4f-9e74-7732bec94ee8",
- "content" : "{\"leader\":\"02042nma a2200349 c 4500\",\"fields\":[{\"001\":\"inst000000000019\"},{\"003\":\"DE-601\"},{\"005\":\"20180122150003.0\"},{\"007\":\"cu\\\\uuu---uuuuu\"},{\"008\":\"180122s2017\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9781450351430\"},{\"9\":\"978-1-4503-5143-0\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1011273942\"}]}},{\"035\":\"1011273942\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"110\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ACM Workshop on Millimeter Wave Networks and Sensing Systems\"},{\"n\":\"1.\"},{\"d\":\"2017\"},{\"c\":\"Snowbird, Utah\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"}]}},{\"245\":{\"ind1\":\"0\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"MobiCom'17\"},{\"n\":\"5\"},{\"p\":\"mmNets'17, October 16, 2017, Snowbird, UT, USA / general chairs: Haitham Hassanieh (University of Illinois at Urbana Champaign, USA), Xinyu Zhang (University of California San Diego, USA)\"}]}},{\"246\":{\"ind1\":\"3\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"1st First ACM Workshop Millimeter Wave Networks Sensing Systems\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Computermedien\"},{\"b\":\"c\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Computerdisk\"},{\"b\":\"cd\"},{\"2\":\"rdacarrier\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Hassanieh, Haitham\"},{\"e\":\"VeranstalterIn\"},{\"4\":\"orm\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Zhang, Xinyu\"},{\"e\":\"VeranstalterIn\"},{\"4\":\"orm\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"MobiCom\"},{\"n\":\"23.\"},{\"d\":\"2017\"},{\"c\":\"Snowbird, Utah\"},{\"e\":\"VeranstalterIn\"},{\"4\":\"orm\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Association for Computing Machinery\"},{\"b\":\"Special Interest Group on Mobility of Systems Users, Data, and Computing\"},{\"e\":\"SponsorIn\"},{\"4\":\"spn\"},{\"0\":\"(DE-601)499677137\"},{\"0\":\"(DE-588)10113390-X\"}]}},{\"711\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ACM Workshop on Millimeter Wave Networks and Sensing Systems\"},{\"n\":\"1\"},{\"d\":\"2017.10.16\"},{\"c\":\"Snowbird, Utah\"}]}},{\"711\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"mmNets\"},{\"n\":\"1\"},{\"d\":\"2017.10.16\"},{\"c\":\"Snowbird, Utah\"}]}},{\"711\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Annual International Conference on Mobile Computing and Networking (ACM MobiCom)\"},{\"n\":\"23\"},{\"d\":\"2017.10.16-20\"},{\"c\":\"Snowbird, Utah\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"w\":\"(DE-601)1011270897\"},{\"t\":\"MobiCom'17, MobiCom'17, proceedings and co-located workshops of the 23rd Annual International Conference on Mobile Computing and Networking : October 16-20, 2017, Snowbird, UT, USA, MobiCom, Snowbird, Utah. 
- New York, NY : ACM, Association for Computing Machinery\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"TIB/UB Hannover <89>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"TIB/UB Hannover <89>\"},{\"a\":\"70\"},{\"b\":\"1743063695\"},{\"c\":\"01\"},{\"x\":\"0089\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"0f0fe962-d502-4a4f-9e74-7732bec94ee8\"},{\"i\":\"6b4ae089-e1ee-431f-af83-e1133f8e3da0\"}]}}]}"
- },
"parsedRecord" : {
"id" : "0f0fe962-d502-4a4f-9e74-7732bec94ee8",
"content" : {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/3187432f-9434-40a8-8782-35a111a1491e.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/3187432f-9434-40a8-8782-35a111a1491e.json
index 353e398e5..038d92fc2 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/3187432f-9434-40a8-8782-35a111a1491e.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/3187432f-9434-40a8-8782-35a111a1491e.json
@@ -2,10 +2,6 @@
"recordId" : "3187432f-9434-40a8-8782-35a111a1491e",
"snapshotId": "ee561342-3098-47a8-ab6e-0f3eba120b04",
"recordType" : "MARC_HOLDING",
- "rawRecord" : {
- "id" : "3187432f-9434-40a8-8782-35a111a1491e",
- "content" : "{\"leader\":\"01463nja a2200313 c 4500\",\"fields\":[{\"001\":\"inst000000000007\"},{\"003\":\"DE-601\"},{\"005\":\"20180118183625.0\"},{\"007\":\"su\\\\uuuuuuuuuuu\"},{\"008\":\"180118s2017\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\ger\\\\d\"},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1011162431\"}]}},{\"035\":\"1011162431\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ger\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Bach, Johann Sebastian\"},{\"e\":\"KomponistIn\"},{\"4\":\"cmp\"},{\"0\":\"(DE-601)134579348\"},{\"0\":\"(DE-588)11850553X\"}]}},{\"240\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"0\":\"(DE-601)701589477\"},{\"0\":\"(DE-588)300007736\"},{\"a\":\"Ich habe genung\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Cantatas for bass\"},{\"n\":\"4\"},{\"p\":\"Ich habe genug : BWV 82 / Johann Sebastian Bach ; Matthias Goerne, baritone ; Freiburger Barockorchester, Gottfried von der Goltz, violin and conductor\"}]}},{\"246\":{\"ind1\":\"1\",\"ind2\":\"3\",\"subfields\":[{\"i\":\"Abweichender Titel\"},{\"a\":\"Ich habe genung\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Track 10-14\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"aufgeführte Musik\"},{\"b\":\"prm\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"audio\"},{\"b\":\"s\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Audiodisk\"},{\"b\":\"sd\"},{\"2\":\"rdacarrier\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Arfken, Katharina\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)576364940\"},{\"0\":\"(DE-588)135158265\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Goltz, Gottfried von der\"},{\"e\":\"DirigentIn\"},{\"4\":\"cnd\"},{\"0\":\"(DE-601)081724969\"},{\"0\":\"(DE-588)122080912\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Freiburger Barockorchester\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)12121060X\"},{\"0\":\"(DE-588)5066798-1\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"w\":\"(DE-601)895161729\"},{\"t\":\"Cantatas for bass, Bach, Johann Sebastian. - Arles : Harmonia Mundi\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SBB-PK Berlin <1+1A>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SBB-PK Berlin <1+1A>\"},{\"a\":\"11\"},{\"b\":\"1742288871\"},{\"c\":\"01\"},{\"x\":\"0001\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"3187432f-9434-40a8-8782-35a111a1491e\"},{\"i\":\"ce00bca2-9270-4c6b-b096-b83a2e56e8e9\"}]}}]}"
- },
"parsedRecord" : {
"id" : "3187432f-9434-40a8-8782-35a111a1491e",
"content" : {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/4c0ff739-3f4d-4670-a693-84dd48e31c53.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/4c0ff739-3f4d-4670-a693-84dd48e31c53.json
index 7e490f71f..379730894 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/4c0ff739-3f4d-4670-a693-84dd48e31c53.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/4c0ff739-3f4d-4670-a693-84dd48e31c53.json
@@ -2,10 +2,6 @@
"recordId" : "4c0ff739-3f4d-4670-a693-84dd48e31c53",
"snapshotId": "7f939c0b-618c-4eab-8276-a14e0bfe5728",
"recordType" : "MARC_BIB",
- "rawRecord" : {
- "id" : "4c0ff739-3f4d-4670-a693-84dd48e31c53",
- "content" : "{\"leader\":\"02258ngm a2200433 c 4500\",\"fields\":[{\"001\":\"inst000000000018\"},{\"003\":\"DE-601\"},{\"005\":\"20160520072526.0\"},{\"007\":\"vu\\\\uvuuuu\"},{\"008\":\"110113s2010\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\ger\\\\d\"},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9789279164316\"},{\"9\":\"978-92-79-16431-6\"}]}},{\"024\":{\"ind1\":\"7\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"10.2768/21035\"},{\"2\":\"doi\"}]}},{\"028\":{\"ind1\":\"5\",\"ind2\":\"2\",\"subfields\":[{\"a\":\"MI-32-10-386-57-Z\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV643935371\"}]}},{\"035\":\"643935371\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ger\"},{\"a\":\"eng\"},{\"a\":\"spa\"},{\"a\":\"fre\"},{\"a\":\"ita\"},{\"a\":\"dut\"},{\"a\":\"por\"}]}},{\"084\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"55.80\"},{\"9\":\"Verkehrswesen\"},{\"9\":\"Transportwesen: Allgemeines\"},{\"2\":\"bkl\"}]}},{\"245\":{\"ind1\":\"0\",\"ind2\":\"2\",\"subfields\":[{\"a\":\"A journey through Europe\"},{\"h\":\"Bildtontraeger\"},{\"b\":\"high-speed lines\"},{\"c\":\"European Commission, Directorate-General for Mobility and Transport\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"1 DVD-Video (14 Min.)\"},{\"b\":\"farb.\"},{\"c\":\"12 cm\"}]}},{\"610\":{\"ind1\":\"1\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-601)132918269\"},{\"0\":\"(DE-588)5098525-5\"},{\"a\":\"Europäische Union\"},{\"2\":\"gnd\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-601)104616431\"},{\"0\":\"(DE-588)4113934-3\"},{\"a\":\"Hochgeschwindigkeitszug\"},{\"2\":\"gnd\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-601)106129678\"},{\"0\":\"(DE-588)4062953-3\"},{\"a\":\"Verkehrsnetz\"},{\"2\":\"gnd\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.1\\\\x\"},{\"a\":\"Hochgeschwindigkeitsverkehr\"},{\"0\":\"(DE-601)091366011\"},{\"0\":\"(DE-STW)18089-3\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.2\\\\x\"},{\"a\":\"Hochgeschwindigkeitsverkehr\"},{\"0\":\"(DE-601)091366011\"},{\"0\":\"(DE-STW)18089-3\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.3\\\\x\"},{\"a\":\"Schienenverkehr\"},{\"0\":\"(DE-601)091388066\"},{\"0\":\"(DE-STW)13255-2\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.4\\\\x\"},{\"a\":\"EU-Verkehrspolitik\"},{\"0\":\"(DE-601)091358701\"},{\"0\":\"(DE-STW)18627-1\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.5\\\\x\"},{\"a\":\"EU-Staaten\"},{\"0\":\"(DE-601)091358639\"},{\"0\":\"(DE-STW)17983-5\"},{\"2\":\"stw\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Europäische Kommission\"},{\"b\":\"Generaldirektion Mobilität und Verkehr\"},{\"0\":\"(DE-601)667202439\"},{\"0\":\"(DE-588)16174192-7\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"ZBW Kiel <206>\"},{\"d\":\"!K:! DVD 49\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"s\":\"206\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"THULB Jena <27>\"},{\"d\":\"!Mag5! 
EDZ 0720 17\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"f\":\"Bestand Europäisches Dokumentationszentrum\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"TIB/UB Hannover <89>\"},{\"d\":\"!FBW MagNB! CD oek 6715/042\"},{\"x\":\"L\"},{\"z\":\"C\"},{\"s\":\"89/18\"},{\"g\":\"!FBW EU! CD oek 6715/042\"},{\"x\":\"L\"},{\"z\":\"C\"},{\"s\":\"89/18\"}]}},{\"951\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eu\"},{\"2\":\"26\"}]}},{\"951\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"oek 6715\"},{\"2\":\"70\"}]}},{\"951\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"oek 6715 3ah\"},{\"2\":\"70\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"ZBW Kiel <206>\"},{\"a\":\"26\"},{\"b\":\"1231491841\"},{\"c\":\"01\"},{\"f\":\"K:\"},{\"d\":\"DVD 49\"},{\"e\":\"u\"},{\"x\":\"0206\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"THULB Jena <27>\"},{\"a\":\"31\"},{\"b\":\"1219949035\"},{\"c\":\"01\"},{\"f\":\"Mag5\"},{\"d\":\"EDZ 0720 17\"},{\"e\":\"d\"},{\"k\":\"Bestand Europäisches Dokumentationszentrum\"},{\"x\":\"0027\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"TIB/UB Hannover <89>\"},{\"a\":\"70\"},{\"b\":\"1219604739\"},{\"c\":\"70\"},{\"f\":\"FBW MagNB\"},{\"d\":\"CD oek 6715/042\"},{\"e\":\"s\"},{\"f\":\"FBW EU\"},{\"d\":\"CD oek 6715/042\"},{\"e\":\"s\"},{\"x\":\"89/18\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"4c0ff739-3f4d-4670-a693-84dd48e31c53\"},{\"i\":\"1b74ab75-9f41-4837-8662-a1d99118008d\"}]}}]}"
- },
"parsedRecord" : {
"id" : "4c0ff739-3f4d-4670-a693-84dd48e31c53",
"content" : {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/4ca9d8ac-9de5-432a-83ee-15832f09e868.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/4ca9d8ac-9de5-432a-83ee-15832f09e868.json
index dcc74e07d..be16cae67 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/4ca9d8ac-9de5-432a-83ee-15832f09e868.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/4ca9d8ac-9de5-432a-83ee-15832f09e868.json
@@ -2,10 +2,6 @@
"recordId": "4ca9d8ac-9de5-432a-83ee-15832f09e868",
"snapshotId": "dcd898af-03bb-4b12-b8a6-f6a02e86459b",
"recordType": "EDIFACT",
- "rawRecord": {
- "id": "4ca9d8ac-9de5-432a-83ee-15832f09e868",
- "content": "UNA:+.?*'UNB+UNOA:1+EDIASD:31B+EDITRCK:ZZ+030407:1204+2451840'UNH+00001+ORDERS:D:96A:UN'BGM+1::9+03134+9'DTM+2:0:805'DTM+137:200304071204:203'DTM+1:200304080400200304090400:719'RFF+AHI:RWE001'NAD+ZSH+TRCKRWE::9'NAD+ZZZ+TRCK::9'TDT+41G++70'LOC+7+:::TTFH'LIN+1'LOC+11'QTY+2:123456:JM1'DTM+2:200304080400200304080600:719'LOC+11'QTY+2:-123456:JM1'DTM+2:200304080600200304080800:719'LOC+11'QTY+2:0:JM1'DTM+2:200304080800200304081000:719'LOC+11'QTY+2:52301:JM1'DTM+2:200304081000200304081100:719'LOC+11'QTY+2:55324:JM1'DTM+2:200304081100200304081600:719'LOC+11'QTY+2:-12056:JM1'DTM+2:200304081600200304081900:719'LOC+11'QTY+2:0:JM1'DTM+2:200304081900200304082300:719'LOC+11'QTY+2:5587:JM1'DTM+2:200304082300200304090400:719'NAD+AA+TRCK::9'UNS+S'UNT+38+00001'UNZ+1+2451840'"
- },
"parsedRecord": {
"id": "4ca9d8ac-9de5-432a-83ee-15832f09e868",
"content": {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/7293f287-bb51-41f5-805d-00ff18a1f791.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/7293f287-bb51-41f5-805d-00ff18a1f791.json
index 956bb1f03..8a3dfbbc8 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/7293f287-bb51-41f5-805d-00ff18a1f791.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/7293f287-bb51-41f5-805d-00ff18a1f791.json
@@ -2,10 +2,6 @@
"recordId" : "7293f287-bb51-41f5-805d-00ff18a1f791",
"snapshotId": "d787a937-cc4b-49b3-85ef-35bcd643c689",
"recordType" : "MARC_BIB",
- "rawRecord" : {
- "id" : "7293f287-bb51-41f5-805d-00ff18a1f791",
- "content" : "{\"leader\":\"01980nam a2200361 ca4500\",\"fields\":[{\"001\":\"inst000000000009\"},{\"003\":\"DE-601\"},{\"005\":\"20180214103458.0\"},{\"008\":\"180111s2018\\\\\\\\\\\\\\\\xxu\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9781473937703\"},{\"c\":\"set\"},{\"9\":\"978-1-4739-3770-3\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1010770160\"}]}},{\"035\":\"1010770160\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"044\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"xxu\"},{\"a\":\"xxk\"},{\"a\":\"ii\"}]}},{\"050\":{\"ind1\":\"\\\\\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"H\"}]}},{\"082\":{\"ind1\":\"0\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"300\"}]}},{\"084\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"74.72\"},{\"9\":\"Stadtplanung\"},{\"9\":\"kommunale Planung\"},{\"2\":\"bkl\"}]}},{\"084\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"74.12\"},{\"9\":\"Stadtgeographie\"},{\"9\":\"Siedlungsgeographie\"},{\"2\":\"bkl\"}]}},{\"245\":{\"ind1\":\"0\",\"ind2\":\"4\",\"subfields\":[{\"a\":\"The city\"},{\"b\":\"post-modernity\"},{\"c\":\"edited by Alan Latham\"}]}},{\"264\":{\"ind1\":\"3\",\"ind2\":\"1\",\"subfields\":[{\"a\":\"Los Angeles\"},{\"a\":\"London\"},{\"a\":\"New Delhi\"},{\"a\":\"Singapore\"},{\"a\":\"Washington DC\"},{\"a\":\"Melbourne\"},{\"b\":\"SAGE\"},{\"c\":\"2018\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ohne Hilfsmittel zu benutzen\"},{\"b\":\"n\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Band\"},{\"b\":\"nc\"},{\"2\":\"rdacarrier\"}]}},{\"490\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"SAGE benchmarks in culture and society\"}]}},{\"520\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Cities are sites of great wealth and poverty, of hope and despair, of social and economic dynamism, as well as tradition and established power. Social scientists and humanities scholars have over the past three decades generated an impressive range of perspectives for making sense of the vast complexities of cities. These perspectives tell both of the economic, social and political dynamism cities generate, and point to possible lines of future development. 
The four volumes, The City: Post-Modernity, will focus more exclusively on the contemporary city, looking at the subject through the lenses of globalization and post-colonialism, amongst others\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-601)106153919\"},{\"0\":\"(DE-588)4056723-0\"},{\"a\":\"Stadt\"},{\"2\":\"gnd\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-601)104288515\"},{\"0\":\"(DE-588)4115604-3\"},{\"a\":\"Postmoderne\"},{\"2\":\"gnd\"}]}},{\"655\":{\"ind1\":\"0\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-588)4143413-4\"},{\"a\":\"Aufsatzsammlung\"},{\"2\":\"gnd-content\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Latham, Alan\"},{\"e\":\"HerausgeberIn\"},{\"4\":\"edt\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SUB+Uni Hamburg <18>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SUB+Uni Hamburg <18>\"},{\"a\":\"22\"},{\"b\":\"1740834291\"},{\"c\":\"01\"},{\"x\":\"0018\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"7293f287-bb51-41f5-805d-00ff18a1f791\"},{\"i\":\"c1d3be12-ecec-4fab-9237-baf728575185\"}]}}]}"
- },
"parsedRecord" : {
"id" : "7293f287-bb51-41f5-805d-00ff18a1f791",
"content" : {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8452daf9-c130-4955-99ce-1c397a218900.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8452daf9-c130-4955-99ce-1c397a218900.json
index 6c0121173..68d571c1b 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8452daf9-c130-4955-99ce-1c397a218900.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8452daf9-c130-4955-99ce-1c397a218900.json
@@ -2,10 +2,6 @@
"recordId" : "8452daf9-c130-4955-99ce-1c397a218900",
"snapshotId": "ee561342-3098-47a8-ab6e-0f3eba120b04",
"recordType" : "MARC_BIB",
- "rawRecord" : {
- "id" : "8452daf9-c130-4955-99ce-1c397a218900",
- "content" : "{\"leader\":\"01024nmm a2200277 ca4500\",\"fields\":[{\"001\":\"inst000000000008\"},{\"003\":\"DE-601\"},{\"005\":\"20160502164752.0\"},{\"007\":\"cu\\\\uuu---uuuuu\"},{\"008\":\"160502m20169999gw\\\\\\\\\\\\\\\\\\\\\\\\o\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV858092093\"}]}},{\"035\":\"858092093\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Ris, Robert\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"},{\"0\":\"(DE-601)778649407\"},{\"0\":\"(DE-588)1047595397\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"4\",\"subfields\":[{\"a\":\"The chess player’s mating guide\"},{\"h\":\"Computer Datei\"},{\"c\":\"Robert Ris\"}]}},{\"264\":{\"ind1\":\"3\",\"ind2\":\"1\",\"subfields\":[{\"a\":\"Hamburg\"},{\"b\":\"Chessbase GmbH\"},{\"c\":\"[2016]-\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Computerprogramm\"},{\"b\":\"cop\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Computermedien\"},{\"b\":\"c\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Computerdisk\"},{\"b\":\"cd\"},{\"2\":\"rdacarrier\"}]}},{\"490\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Fritztrainer\"},{\"a\":\"Tactics\"}]}},{\"655\":{\"ind1\":\"0\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-588)4585131-1\"},{\"a\":\"DVD-ROM\"},{\"2\":\"gnd-carrier\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ChessBase GmbH\"},{\"g\":\"Hamburg\"},{\"e\":\"Verlag\"},{\"4\":\"pbl\"},{\"0\":\"(DE-601)269520015\"},{\"0\":\"(DE-588)2177108-X\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SUB+Uni Hamburg <18>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SUB+Uni Hamburg <18>\"},{\"a\":\"22\"},{\"b\":\"1613697813\"},{\"c\":\"01\"},{\"x\":\"0018\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"8452daf9-c130-4955-99ce-1c397a218900\"},{\"i\":\"3c4ae3f3-b460-4a89-a2f9-78ce3145e4fc\"}]}}]}"
- },
"parsedRecord" : {
"id" : "8452daf9-c130-4955-99ce-1c397a218900",
"content" : {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8f462542-387c-4f06-a01b-50829c7c7b13.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8f462542-387c-4f06-a01b-50829c7c7b13.json
index 2dfe79788..b3494a3c3 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8f462542-387c-4f06-a01b-50829c7c7b13.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8f462542-387c-4f06-a01b-50829c7c7b13.json
@@ -2,10 +2,6 @@
"recordId" : "8f462542-387c-4f06-a01b-50829c7c7b13",
"snapshotId": "6681ef31-03fe-4abc-9596-23de06d575c5",
"recordType" : "MARC_BIB",
- "rawRecord" : {
- "id" : "8f462542-387c-4f06-a01b-50829c7c7b13",
- "content" : "{\"leader\":\"02046nam a2200421 cc4500\",\"fields\":[{\"001\":\"inst000000000004\"},{\"003\":\"DE-601\"},{\"005\":\"20180615233857.0\"},{\"007\":\"he\\\\amu000uuuu\"},{\"008\":\"170217s2016\\\\\\\\\\\\\\\\xxu\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9780866989732\"},{\"9\":\"978-0-86698-973-2\"}]}},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"0866989730\"},{\"9\":\"0-86698-973-0\"}]}},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9780866985529\"},{\"9\":\"978-0-86698-552-9\"}]}},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"0866985522\"},{\"9\":\"0-86698-552-2\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(OCoLC)962073864\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(OCoLC)ocn962073864\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(OCoLC)962073864\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV880391235\"}]}},{\"035\":\"880391235\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"},{\"a\":\"ang\"},{\"a\":\"lat\"}]}},{\"245\":{\"ind1\":\"0\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Anglo-Saxon manuscripts in microfiche facsimile\"},{\"n\":\"Volume 25\"},{\"p\":\"Corpus Christi College, Cambridge II, MSS 12, 144, 162, 178, 188, 198, 265, 285, 322, 326, 449\"},{\"h\":\"microform\"},{\"c\":\"A. N. Doane (editor and director), Matthew T. Hussey (associate editor), Phillip Pulsiano (founding editor)\"}]}},{\"264\":{\"ind1\":\"3\",\"ind2\":\"1\",\"subfields\":[{\"a\":\"Tempe, Arizona\"},{\"b\":\"ACMRS, Arizona Center for Medieval and Renaissance Studies\"},{\"c\":\"2016\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"69 Mikrofiches\"},{\"e\":\"1 Begleitbuch (XII, 167 Seiten)\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Mikroform\"},{\"b\":\"h\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Mikrofiche\"},{\"b\":\"he\"},{\"2\":\"rdacarrier\"}]}},{\"490\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Medieval and Renaissance Texts and Studies\"},{\"v\":\"volume 497\"}]}},{\"500\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Titel und Angaben zu beteiligter Person vom Begleitheft\"}]}},{\"546\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"In English with segments in Anglo-Saxon and Latin\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Lucas, Peter J.\"},{\"e\":\"VerfasserIn von Zusatztexten\"},{\"4\":\"wat\"},{\"0\":\"(DE-601)699400066\"},{\"0\":\"(DE-588)188475893\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Arizona Center for Medieval and Renaissance Studies\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"8\",\"subfields\":[{\"q\":\"25.2016\"},{\"w\":\"(DE-601)281985480\"}]}},{\"830\":{\"ind1\":\"\\\\\",\"ind2\":\"0\",\"subfields\":[{\"w\":\"(DE-601)13055846X\"},{\"v\":\"volume 497\"},{\"9\":\"49700\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SBB-PK Berlin <1+1A>\"},{\"d\":\"!2! 
1 F 5327-25\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"f\":\"Mikrofiches\"},{\"d\":\"!2! 1 F 5327-25,Beil.\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"f\":\"Begleitbuch\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SBB-PK Berlin <1+1A>\"},{\"a\":\"11\"},{\"b\":\"1665961309\"},{\"c\":\"01\"},{\"f\":\"2\"},{\"d\":\"1 F 5327-25\"},{\"e\":\"d\"},{\"k\":\"Mikrofiches\"},{\"x\":\"0001\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SBB-PK Berlin <1+1A>\"},{\"a\":\"11\"},{\"b\":\"1665961317\"},{\"c\":\"02\"},{\"f\":\"2\"},{\"d\":\"1 F 5327-25,Beil.\"},{\"e\":\"u\"},{\"k\":\"Begleitbuch\"},{\"x\":\"0001\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"8f462542-387c-4f06-a01b-50829c7c7b13\"},{\"i\":\"8be05cf5-fb4f-4752-8094-8e179d08fb99\"}]}}]}"
- },
"parsedRecord" : {
"id" : "8f462542-387c-4f06-a01b-50829c7c7b13",
"content" : {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8fb19e31-0920-49d7-9438-b573c292b1a6.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8fb19e31-0920-49d7-9438-b573c292b1a6.json
index bd9c7289e..23c2d2cb1 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8fb19e31-0920-49d7-9438-b573c292b1a6.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/8fb19e31-0920-49d7-9438-b573c292b1a6.json
@@ -2,10 +2,6 @@
"recordId" : "8fb19e31-0920-49d7-9438-b573c292b1a6",
"snapshotId": "6681ef31-03fe-4abc-9596-23de06d575c5",
"recordType" : "MARC_BIB",
- "rawRecord" : {
- "id" : "8fb19e31-0920-49d7-9438-b573c292b1a6",
- "content" : "{\"leader\":\"01859nam a2200397 cb4500\",\"fields\":[{\"001\":\"inst000000000005\"},{\"003\":\"DE-601\"},{\"005\":\"20180416162657.0\"},{\"008\":\"180111s2018\\\\\\\\\\\\\\\\sz\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"3319643991\"},{\"9\":\"3-319-64399-1\"}]}},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9783319643991\"},{\"9\":\"978-3-319-64399-1\"}]}},{\"020\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"9783319644004 (electronic)\"},{\"9\":\"978-3-319-64400-4\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(OCoLC)ocn992783736\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(OCoLC)992783736\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV101073931X\"}]}},{\"035\":\"101073931X\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"245\":{\"ind1\":\"0\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Futures, biometrics and neuroscience research\"},{\"c\":\"Luiz Moutinho, Mladen Sokele, editors\"}]}},{\"264\":{\"ind1\":\"3\",\"ind2\":\"1\",\"subfields\":[{\"a\":\"Cham\"},{\"b\":\"Palgrave Macmillan\"},{\"c\":\"[2018]\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"xxix, 224 Seiten\"},{\"b\":\"Illustrationen\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ohne Hilfsmittel zu benutzen\"},{\"b\":\"n\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Band\"},{\"b\":\"nc\"},{\"2\":\"rdacarrier\"}]}},{\"490\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Innovative research methodologies in management\"},{\"v\":\" / Luiz Moutinho, Mladen Sokele ; Volume 2\"}]}},{\"500\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Enthält 9 Beiträge\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.1\\\\x\"},{\"a\":\"Betriebswirtschaftslehre\"},{\"0\":\"(DE-601)091351391\"},{\"0\":\"(DE-STW)12041-5\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.2\\\\x\"},{\"a\":\"Management\"},{\"0\":\"(DE-601)091376173\"},{\"0\":\"(DE-STW)12085-6\"},{\"2\":\"stw\"}]}},{\"650\":{\"ind1\":\"\\\\\",\"ind2\":\"7\",\"subfields\":[{\"8\":\"1.3\\\\x\"},{\"a\":\"Wissenschaftliche Methode\"},{\"0\":\"(DE-601)091401445\"},{\"0\":\"(DE-STW)16727-0\"},{\"2\":\"stw\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Moutinho, Luiz\"},{\"e\":\"HerausgeberIn\"},{\"4\":\"edt\"},{\"0\":\"(DE-601)509450954\"},{\"0\":\"(DE-588)131450204\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Sokele, Mladen\"},{\"e\":\"HerausgeberIn\"},{\"4\":\"edt\"}]}},{\"830\":{\"ind1\":\"\\\\\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Innovative research methodologies in management\"},{\"b\":\" / Luiz Moutinho, Mladen Sokele\"},{\"v\":\"Volume 
2\"},{\"9\":\"2.2018\"},{\"w\":\"(DE-601)1011380293\"}]}},{\"856\":{\"ind1\":\"4\",\"ind2\":\"2\",\"subfields\":[{\"y\":\"Inhaltsverzeichnis\"},{\"u\":\"http://www.gbv.de/dms/zbw/101073931X.pdf\"},{\"m\":\"V:DE-601;B:DE-206\"},{\"q\":\"application/pdf\"},{\"3\":\"Inhaltsverzeichnis\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"ZBW Kiel <206>\"},{\"d\":\"!H:! A18-1775\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"s\":\"206/1\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"ZBW Kiel <206>\"},{\"a\":\"26\"},{\"b\":\"1740761685\"},{\"c\":\"01\"},{\"f\":\"H:\"},{\"d\":\"A18-1775\"},{\"e\":\"u\"},{\"x\":\"206/1\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"8fb19e31-0920-49d7-9438-b573c292b1a6\"},{\"i\":\"1640f178-f243-4e4a-bf1c-9e1e62b3171d\"}]}}]}"
- },
"parsedRecord" : {
"id" : "8fb19e31-0920-49d7-9438-b573c292b1a6",
"content" : {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/be1b25ae-4a9d-4077-93e6-7f8e59efd609.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/be1b25ae-4a9d-4077-93e6-7f8e59efd609.json
index d7ffce165..7ab5390bb 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/be1b25ae-4a9d-4077-93e6-7f8e59efd609.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/be1b25ae-4a9d-4077-93e6-7f8e59efd609.json
@@ -2,10 +2,6 @@
"recordId" : "be1b25ae-4a9d-4077-93e6-7f8e59efd609",
"snapshotId": "ee561342-3098-47a8-ab6e-0f3eba120b04",
"recordType" : "MARC_BIB",
- "rawRecord" : {
- "id" : "be1b25ae-4a9d-4077-93e6-7f8e59efd609",
- "content" : "{\"leader\":\"01463nja a2200313 c 4500\",\"fields\":[{\"001\":\"inst000000000007\"},{\"003\":\"DE-601\"},{\"005\":\"20180118183625.0\"},{\"007\":\"su\\\\uuuuuuuuuuu\"},{\"008\":\"180118s2017\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\ger\\\\d\"},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1011162431\"}]}},{\"035\":\"1011162431\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ger\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Bach, Johann Sebastian\"},{\"e\":\"KomponistIn\"},{\"4\":\"cmp\"},{\"0\":\"(DE-601)134579348\"},{\"0\":\"(DE-588)11850553X\"}]}},{\"240\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"0\":\"(DE-601)701589477\"},{\"0\":\"(DE-588)300007736\"},{\"a\":\"Ich habe genung\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Cantatas for bass\"},{\"n\":\"4\"},{\"p\":\"Ich habe genug : BWV 82 / Johann Sebastian Bach ; Matthias Goerne, baritone ; Freiburger Barockorchester, Gottfried von der Goltz, violin and conductor\"}]}},{\"246\":{\"ind1\":\"1\",\"ind2\":\"3\",\"subfields\":[{\"i\":\"Abweichender Titel\"},{\"a\":\"Ich habe genung\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Track 10-14\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"aufgeführte Musik\"},{\"b\":\"prm\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"audio\"},{\"b\":\"s\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Audiodisk\"},{\"b\":\"sd\"},{\"2\":\"rdacarrier\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Arfken, Katharina\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)576364940\"},{\"0\":\"(DE-588)135158265\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Goltz, Gottfried von der\"},{\"e\":\"DirigentIn\"},{\"4\":\"cnd\"},{\"0\":\"(DE-601)081724969\"},{\"0\":\"(DE-588)122080912\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Freiburger Barockorchester\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)12121060X\"},{\"0\":\"(DE-588)5066798-1\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"w\":\"(DE-601)895161729\"},{\"t\":\"Cantatas for bass, Bach, Johann Sebastian. - Arles : Harmonia Mundi\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SBB-PK Berlin <1+1A>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SBB-PK Berlin <1+1A>\"},{\"a\":\"11\"},{\"b\":\"1742288871\"},{\"c\":\"01\"},{\"x\":\"0001\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"be1b25ae-4a9d-4077-93e6-7f8e59efd609\"},{\"i\":\"ce00bca2-9270-4c6b-b096-b83a2e56e8e9\"}]}}]}"
- },
"parsedRecord" : {
"id" : "be1b25ae-4a9d-4077-93e6-7f8e59efd609",
"content" : {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/d3cd3e1e-a18c-4f7c-b053-9aa50343394e.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/d3cd3e1e-a18c-4f7c-b053-9aa50343394e.json
index b66f5408c..13977df28 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/d3cd3e1e-a18c-4f7c-b053-9aa50343394e.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/d3cd3e1e-a18c-4f7c-b053-9aa50343394e.json
@@ -2,10 +2,6 @@
"recordId" : "d3cd3e1e-a18c-4f7c-b053-9aa50343394e",
"snapshotId": "ee561342-3098-47a8-ab6e-0f3eba120b04",
"recordType" : "MARC_BIB",
- "rawRecord" : {
- "id" : "d3cd3e1e-a18c-4f7c-b053-9aa50343394e",
- "content" : "01743nai a2200409 i 4500001000800000005001700008006001900025007001500044008004100059010001800100035002400118042000800142043001200150040003200162049000800194074002300202086002100225100004300246245011300289250004200402264006500444300002300509310002400532336002600556337002600582338003600608490005400644500014400698504004800842588009200890610007200982710007301054773012901127830002601256856004001282922001101322\u001E5962418\u001E20200210110404.0\u001Em o d f \u001Ecr mn|||||||||\u001E200124c20189999dcu x w obb f0 2eng c\u001E \u001Fa 2020230732\u001E \u001Fa(OCoLC)on1137385866\u001E \u001Fapcc\u001E \u001Fan-us---\u001E \u001FaGPO\u001Fbeng\u001Ferda\u001Fepn\u001FcGPO\u001FdMvI\u001E \u001FaWWW\u001E \u001Fa0807-A-07 (online)\u001E0 \u001FaLC 14.23:98-888/\u001E1 \u001FaDavis, Christopher M.,\u001Fd1966-\u001Feauthor.\u001E10\u001Fa\"Fast-track\" or expedited procedures :\u001Fbtheir purpose, elements, and implications /\u001FcChristopher M. Davis.\u001E \u001Fa[Library of Congress public edition].\u001E 1\u001Fa[Washington, D.C.] :\u001FbCongressional Research Service,\u001Fc2018-\u001E \u001Fa1 online resource.\u001E \u001FaUpdated irregularly\u001E \u001Fatext\u001Fbtxt\u001F2rdacontent\u001E \u001Facomputer\u001Fbc\u001F2rdamedia\u001E \u001Faonline resource\u001Fbcr\u001F2rdacarrier\u001E1 \u001FaReport / Congressional Research Service ;\u001Fv98-888\u001E \u001FaThe CRS report home page provides access to all versions published since 2018 in accordance with P.L. 115-141; earliest version dated 2003.\u001E \u001FaReport includes bibliographical references.\u001E \u001FaDescription based on contents viewed on Jan. 21, 2020; title from CRS report home page.\u001E10\u001FaUnited States.\u001FbCongress\u001FxRules and practice\u001FxFast-track procedure.\u001E2 \u001FaLibrary of Congress.\u001FbCongressional Research Service,\u001Feissuing body.\u001E08\u001FiContained in (work):\u001FtCRS reports (Library of Congress. Congressional Research Service)\u001Fw(DLC) 2018231131\u001Fw(OCoLC)1052784408\u001E 0\u001FaCRS report ;\u001Fv98-888.\u001E40\u001Fuhttps://purl.fdlp.gov/GPO/gpo130975\u001E \u001FaCRSREP\u001E\u001D"
- },
"parsedRecord" : {
"id" : "d3cd3e1e-a18c-4f7c-b053-9aa50343394e",
"content" : {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/db70de02-9205-4e05-8333-5848163b82b5.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/db70de02-9205-4e05-8333-5848163b82b5.json
index efe6a43f7..562ff1c19 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/db70de02-9205-4e05-8333-5848163b82b5.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/db70de02-9205-4e05-8333-5848163b82b5.json
@@ -2,10 +2,6 @@
"recordId" : "db70de02-9205-4e05-8333-5848163b82b5",
"snapshotId": "ee561342-3098-47a8-ab6e-0f3eba120b04",
"recordType" : "MARC_AUTHORITY",
- "rawRecord" : {
- "id" : "db70de02-9205-4e05-8333-5848163b82b5",
- "content" : "{\"leader\":\"01463nja a2200313 c 4500\",\"fields\":[{\"001\":\"inst000000000007\"},{\"003\":\"DE-601\"},{\"005\":\"20180118183625.0\"},{\"007\":\"su\\\\uuuuuuuuuuu\"},{\"008\":\"180118s2017\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\ger\\\\d\"},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1011162431\"}]}},{\"035\":\"1011162431\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ger\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Bach, Johann Sebastian\"},{\"e\":\"KomponistIn\"},{\"4\":\"cmp\"},{\"0\":\"(DE-601)134579348\"},{\"0\":\"(DE-588)11850553X\"}]}},{\"240\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"0\":\"(DE-601)701589477\"},{\"0\":\"(DE-588)300007736\"},{\"a\":\"Ich habe genung\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Cantatas for bass\"},{\"n\":\"4\"},{\"p\":\"Ich habe genug : BWV 82 / Johann Sebastian Bach ; Matthias Goerne, baritone ; Freiburger Barockorchester, Gottfried von der Goltz, violin and conductor\"}]}},{\"246\":{\"ind1\":\"1\",\"ind2\":\"3\",\"subfields\":[{\"i\":\"Abweichender Titel\"},{\"a\":\"Ich habe genung\"}]}},{\"300\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Track 10-14\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"aufgeführte Musik\"},{\"b\":\"prm\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"audio\"},{\"b\":\"s\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Audiodisk\"},{\"b\":\"sd\"},{\"2\":\"rdacarrier\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Arfken, Katharina\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)576364940\"},{\"0\":\"(DE-588)135158265\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Goltz, Gottfried von der\"},{\"e\":\"DirigentIn\"},{\"4\":\"cnd\"},{\"0\":\"(DE-601)081724969\"},{\"0\":\"(DE-588)122080912\"}]}},{\"710\":{\"ind1\":\"2\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Freiburger Barockorchester\"},{\"e\":\"InstrumentalmusikerIn\"},{\"4\":\"itr\"},{\"0\":\"(DE-601)12121060X\"},{\"0\":\"(DE-588)5066798-1\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"w\":\"(DE-601)895161729\"},{\"t\":\"Cantatas for bass, Bach, Johann Sebastian. - Arles : Harmonia Mundi\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"SBB-PK Berlin <1+1A>\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"SBB-PK Berlin <1+1A>\"},{\"a\":\"11\"},{\"b\":\"1742288871\"},{\"c\":\"01\"},{\"x\":\"0001\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"db70de02-9205-4e05-8333-5848163b82b5\"},{\"i\":\"ce00bca2-9270-4c6b-b096-b83a2e56e8e9\"}]}}]}"
- },
"parsedRecord" : {
"id" : "db70de02-9205-4e05-8333-5848163b82b5",
"content" : {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/e4cfe577-4015-46d8-a54d-7c9b34796955.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/e4cfe577-4015-46d8-a54d-7c9b34796955.json
index f0bf3ac80..2fd6907ac 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/e4cfe577-4015-46d8-a54d-7c9b34796955.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/e4cfe577-4015-46d8-a54d-7c9b34796955.json
@@ -2,10 +2,6 @@
"recordId": "e4cfe577-4015-46d8-a54d-7c9b34796955",
"snapshotId": "dcd898af-03bb-4b12-b8a6-f6a02e86459b",
"recordType": "EDIFACT",
- "rawRecord": {
- "id": "e4cfe577-4015-46d8-a54d-7c9b34796955",
- "content": "UNA:+.?*'UNB+UNOA:1+EDIASD:ZZZ+EDITRCK:ZZZ+030407:1204+2451840'UNH+00001+ORDERS:D:96A:UN'BGM+1::9+03134+9'DTM+2:0:805'DTM+137:200304071204:203'DTM+1:200304080400200304090400:719'RFF+AHI:RWE001'NAD+ZSH+TRCKRWE::9'NAD+ZZZ+TRCK::9'TDT+41G++70'LOC+7+:::TTFH'LIN+1'LOC+11'QTY+2:123456:JM1'DTM+2:200304080400200304080600:719'LOC+11'QTY+2:-123456:JM1'DTM+2:200304080600200304080800:719'LOC+11'QTY+2:0:JM1'DTM+2:200304080800200304081000:719'LOC+11'QTY+2:52301:JM1'DTM+2:200304081000200304081100:719'LOC+11'QTY+2:55324:JM1'DTM+2:200304081100200304081600:719'LOC+11'QTY+2:-12056:JM1'DTM+2:200304081600200304081900:719'LOC+11'QTY+2:0:JM1'DTM+2:200304081900200304082300:719'LOC+11'QTY+2:5587:JM1'DTM+2:200304082300200304090400:719'NAD+AA+TRCK::9'UNS+S'UNT+38+00001'UNZ+1+2451840'"
- },
"parsedRecord": {
"id": "e4cfe577-4015-46d8-a54d-7c9b34796955",
"content": {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/e567b8e2-a45b-45f1-a85a-6b6312bdf4d8.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/e567b8e2-a45b-45f1-a85a-6b6312bdf4d8.json
index 369c3745f..2036d950c 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/e567b8e2-a45b-45f1-a85a-6b6312bdf4d8.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/e567b8e2-a45b-45f1-a85a-6b6312bdf4d8.json
@@ -2,10 +2,6 @@
"recordId" : "e567b8e2-a45b-45f1-a85a-6b6312bdf4d8",
"snapshotId": "f2b11593-d6e0-4f78-b4c5-ca3c7eb4c727",
"recordType" : "MARC_BIB",
- "rawRecord" : {
- "id" : "e567b8e2-a45b-45f1-a85a-6b6312bdf4d8",
- "content" : "{\"leader\":\"02026naa a2200373 c 4500\",\"fields\":[{\"001\":\"inst000000000023\"},{\"003\":\"DE-601\"},{\"005\":\"20180301091013.0\"},{\"008\":\"180119s2018\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"016\":{\"ind1\":\"7\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"243003-4\"},{\"2\":\"DE-600\"}]}},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1011184508\"}]}},{\"035\":\"1011184508\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Da, Gaofeng\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"},{\"0\":\"(DE-601)1015051618\"},{\"0\":\"(DE-588)1153556588\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"On the signature of complex system\"},{\"b\":\"a decomposed approach\"},{\"c\":\"Gaofeng Da, Ping Shing Chan, Maochao Xu\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"ohne Hilfsmittel zu benutzen\"},{\"b\":\"n\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Band\"},{\"b\":\"nc\"},{\"2\":\"rdacarrier\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Chan, Ping Shing\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"},{\"0\":\"(DE-601)718654811\"},{\"0\":\"(DE-588)1023876256\"}]}},{\"700\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Xu, Maochao\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"},{\"0\":\"(DE-601)720932068\"},{\"0\":\"(DE-588)1025070240\"}]}},{\"773\":{\"ind1\":\"0\",\"ind2\":\"8\",\"subfields\":[{\"i\":\"Enthalten in\"},{\"t\":\"European journal of operational research : EJOR\"},{\"d\":\"Amsterdam : Elsevier\"},{\"g\":\"Vol. 265, No. 3 (2018), p. 1115-1123\"},{\"q\":\"265:3<1115-1123\"},{\"w\":\"(DE-601)129611131\"},{\"x\":\"0377-2217\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"ZBW Kiel <206>\"},{\"d\":\"!H:ls! Z 6556\"},{\"x\":\"L\"},{\"z\":\"LC\"},{\"s\":\"206/1\"},{\"c\":\"184.2008 -\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"UB Clausthal <104>\"},{\"d\":\"!104/13! CL 13\"},{\"x\":\"N\"},{\"z\":\"N\"},{\"g\":\"lfdcl13\"},{\"c\":\"2006 -\"},{\"f\":\"Genauen Bestand bitte im Institut erfragen\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"TIB/UB Hannover <89>\"},{\"d\":\"ZN 8146 Haus2\"},{\"x\":\"L\"},{\"z\":\"C\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"DNB <101>\"},{\"d\":\"!101a! ZB 93727\"},{\"x\":\"L\"},{\"z\":\"C\"},{\"s\":\"0101/001\"},{\"g\":\"!MZLS! 
10-n\"},{\"c\":\"54.1992(1991),1u.3; 56.1992 -\"}]}},{\"951\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"bwl\"},{\"2\":\"26\"}]}},{\"951\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"11\"},{\"2\":\"30\"}]}},{\"952\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"d\":\"265\"},{\"j\":\"2018\"},{\"e\":\"3\"},{\"b\":\"16\"},{\"c\":\"3\"},{\"h\":\"1115-1123\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"ZBW Kiel <206>\"},{\"a\":\"26\"},{\"b\":\"892666773\"},{\"c\":\"04\"},{\"f\":\"H:ls\"},{\"d\":\"Z 6556\"},{\"e\":\"b\"},{\"g\":\"Bestand: 184.2008 -\"},{\"x\":\"206/1\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"UB Clausthal <104>\"},{\"a\":\"30\"},{\"b\":\"1174535881\"},{\"c\":\"02\"},{\"f\":\"104/13\"},{\"d\":\" CL 13\"},{\"e\":\"g\"},{\"d\":\"lfdcl13\"},{\"g\":\"Bestand: 2006 -\"},{\"k\":\"Genauen Bestand bitte im Institut erfragen\"},{\"x\":\"0104\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"TIB/UB Hannover <89>\"},{\"a\":\"70\"},{\"b\":\"13765989X\"},{\"c\":\"01\"},{\"d\":\"ZN 8146 Haus2\"},{\"e\":\"f\"},{\"g\":\"Bestand: 1.1977 - \"},{\"x\":\"0089\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"DNB <101>\"},{\"a\":\"267\"},{\"b\":\"9001929018\"},{\"c\":\"01\"},{\"f\":\"101a\"},{\"d\":\"ZB 93727\"},{\"e\":\"f\"},{\"f\":\"MZLS\"},{\"d\":\"10-n\"},{\"g\":\"Bestand: 54.1992(1991),1u.3; 56.1992 -\"},{\"x\":\"0101/001\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"e567b8e2-a45b-45f1-a85a-6b6312bdf4d8\"},{\"i\":\"54cc0262-76df-4cac-acca-b10e9bc5c79a\"}]}}]}"
- },
"parsedRecord" : {
"id" : "e567b8e2-a45b-45f1-a85a-6b6312bdf4d8",
"content" : {
diff --git a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/ec53a386-9616-428b-92a9-e1f07756ea1f.json b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/ec53a386-9616-428b-92a9-e1f07756ea1f.json
index 055bae7f5..ae618c4a7 100644
--- a/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/ec53a386-9616-428b-92a9-e1f07756ea1f.json
+++ b/mod-source-record-storage-server/src/test/resources/mock/sourceRecords/ec53a386-9616-428b-92a9-e1f07756ea1f.json
@@ -2,10 +2,6 @@
"recordId" : "ec53a386-9616-428b-92a9-e1f07756ea1f",
"snapshotId": "f2b11593-d6e0-4f78-b4c5-ca3c7eb4c727",
"recordType" : "MARC_BIB",
- "rawRecord" : {
- "id" : "ec53a386-9616-428b-92a9-e1f07756ea1f",
- "content" : "{\"leader\":\"01227nam a2200277 ca4500\",\"fields\":[{\"001\":\"inst000000000010\"},{\"003\":\"DE-601\"},{\"005\":\"20180615212835.0\"},{\"007\":\"hu\\\\uuu\\\\\\\\\\\\uuuu\"},{\"008\":\"171211s1993\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\000\\\\0\\\\eng\\\\d\"},{\"035\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"(DE-599)GBV1008673218\"}]}},{\"035\":\"1008673218\"},{\"040\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"ger\"},{\"c\":\"GBVCP\"},{\"e\":\"rda\"}]}},{\"041\":{\"ind1\":\"0\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"eng\"}]}},{\"100\":{\"ind1\":\"1\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Matwiejczyk-Montgomery, Marian Ann J.\"},{\"e\":\"VerfasserIn\"},{\"4\":\"aut\"}]}},{\"245\":{\"ind1\":\"1\",\"ind2\":\"0\",\"subfields\":[{\"a\":\"Concepts of fashion 1921 - 1987\"},{\"h\":\"microform\"},{\"b\":\"a study of garments worn by selected winners of the Miss America Pageant\"},{\"c\":\"Marian Ann J. Matwiejczyk-Montgomery\"}]}},{\"264\":{\"ind1\":\"3\",\"ind2\":\"1\",\"subfields\":[{\"a\":\"Ann Arbor, MI\"},{\"b\":\"University Microfims International\"},{\"c\":\"1993\"}]}},{\"336\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Text\"},{\"b\":\"txt\"},{\"2\":\"rdacontent\"}]}},{\"337\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Mikroform\"},{\"b\":\"h\"},{\"2\":\"rdamedia\"}]}},{\"338\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Mikrofiche\"},{\"b\":\"he\"},{\"2\":\"rdacarrier\"}]}},{\"502\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"b\":\"Dissertation\"},{\"c\":\"New York University\"},{\"d\":\"1993\"}]}},{\"533\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"Mikrofiche-Ausgabe\"}]}},{\"655\":{\"ind1\":\"0\",\"ind2\":\"7\",\"subfields\":[{\"0\":\"(DE-588)4113937-9\"},{\"a\":\"Hochschulschrift\"},{\"2\":\"gnd-content\"}]}},{\"900\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"a\":\"GBV\"},{\"b\":\"PKB (Museen) Berlin\"},{\"d\":\"!KB LIPP RARA! R-MF-Lipp Mc 101 f kl\"},{\"x\":\"N\"},{\"z\":\"N\"},{\"s\":\"3181/011\"},{\"g\":\"R-MF-Lipp:Mc:101:f:kl:\"}]}},{\"954\":{\"ind1\":\"\\\\\",\"ind2\":\"\\\\\",\"subfields\":[{\"0\":\"PKB (Museen) Berlin\"},{\"a\":\"181\"},{\"b\":\"1729698859\"},{\"c\":\"10\"},{\"f\":\"KB LIPP RARA\"},{\"d\":\"R-MF-Lipp Mc 101 f kl\"},{\"e\":\"i\"},{\"d\":\"R-MF-Lipp:Mc:101:f:kl:\"},{\"x\":\"3181/011\"}]}},{\"999\":{\"ind1\":\"f\",\"ind2\":\"f\",\"subfields\":[{\"s\":\"ec53a386-9616-428b-92a9-e1f07756ea1f\"},{\"i\":\"5b1eb450-ff9f-412d-a9e7-887f6eaeb5b4\"}]}}]}"
- },
"parsedRecord" : {
"id" : "ec53a386-9616-428b-92a9-e1f07756ea1f",
"content" : {
diff --git a/pom.xml b/pom.xml
index 9f38b60f9..bb64d9ebf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3,7 +3,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.folio</groupId>
   <artifactId>mod-source-record-storage</artifactId>
-  <version>5.7.0-SNAPSHOT</version>
+  <version>5.8.0-SNAPSHOT</version>
   <packaging>pom</packaging>
@@ -20,19 +20,27 @@
UTF-8
+    <log4j.version>2.19.0</log4j.version>
35.0.6
${project.basedir}
4.3.7
4.13.2
4.5.1
- 3.3.2
- 3.3.0
- 5.3.23
+ 3.6.0
+ 3.5.1
+ 5.3.30
/source-storage/stream/records,/source-storage/stream/source-records,/source-storage/stream/marc-record-identifiers
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-bom</artifactId>
+        <version>${log4j.version}</version>
+        <type>pom</type>
+        <scope>import</scope>
+      </dependency>
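The block added at the end of this hunk is a standard Maven BOM import: declaring `log4j-bom` with type `pom` and scope `import` under `<dependencyManagement>` pins every `org.apache.logging.log4j` artifact to `${log4j.version}`, so individual modules can omit explicit log4j versions. A sketch of the pattern in context (the wrapper elements and the `log4j-core` consumer below are illustrative; only the inner `<dependency>` entry appears in the diff):

```xml
<dependencyManagement>
  <dependencies>
    <!-- BOM import: pins all log4j artifacts to ${log4j.version}. -->
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-bom</artifactId>
      <version>${log4j.version}</version>
      <type>pom</type>
      <scope>import</scope>
    </dependency>
  </dependencies>
</dependencyManagement>

<dependencies>
  <!-- Consumers inherit the managed version and omit <version>. -->
  <dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-core</artifactId>
  </dependency>
</dependencies>
```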