From 7cbd57727ad4fd44a26e521e500f94f290f72894 Mon Sep 17 00:00:00 2001 From: pbobylev Date: Wed, 24 Jul 2024 18:11:38 +0500 Subject: [PATCH 1/8] MODSOURCE-752: draft design --- .../AuthorityLinkChunkKafkaHandler.java | 4 +- .../ParsedRecordChunksKafkaHandler.java | 3 +- .../main/java/org/folio/dao/RecordDao.java | 15 +- .../java/org/folio/dao/RecordDaoImpl.java | 141 ++++++++++-------- .../rest/impl/SourceStorageBatchImpl.java | 2 +- ...rceStoragePopulateTestMarcRecordsImpl.java | 2 +- .../rest/impl/SourceStorageRecordsImpl.java | 8 +- .../org/folio/services/RecordService.java | 19 +-- .../org/folio/services/RecordServiceImpl.java | 31 ++-- .../RecordDomainEventPublisher.java | 48 ++++++ .../AbstractPostProcessingEventHandler.java | 26 ++-- .../AbstractUpdateModifyEventHandler.java | 8 +- .../services/util/EventHandlingUtil.java | 36 ++++- 13 files changed, 227 insertions(+), 116 deletions(-) create mode 100644 mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java diff --git a/mod-source-record-storage-server/src/main/java/org/folio/consumers/AuthorityLinkChunkKafkaHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/consumers/AuthorityLinkChunkKafkaHandler.java index 1238d0ccb..345f9680a 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/consumers/AuthorityLinkChunkKafkaHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/consumers/AuthorityLinkChunkKafkaHandler.java @@ -10,6 +10,7 @@ import static org.folio.consumers.RecordMappingUtils.readParsedContentToObjectRepresentation; import static org.folio.rest.jaxrs.model.LinkUpdateReport.Status.FAIL; import static org.folio.services.util.EventHandlingUtil.createProducer; +import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; import static org.folio.services.util.KafkaUtil.extractHeaderValue; import io.vertx.core.Future; @@ -92,7 +93,8 @@ public Future handle(KafkaConsumerRecord consumerRecord) .compose(this::createSnapshot) .compose(event -> retrieveRecords(event, event.getTenant()) .compose(recordCollection -> mapRecordFieldsChanges(event, recordCollection, userId)) - .compose(recordCollection -> recordService.saveRecords(recordCollection, event.getTenant())) + .compose(recordCollection -> recordService.saveRecords(recordCollection, + toOkapiHeaders(consumerRecord.headers(), event.getTenant()))) .map(recordsBatchResponse -> sendReports(recordsBatchResponse, event, consumerRecord.headers())) .map(recordsBatchResponse -> mapRecordsToBibUpdateEvents(recordsBatchResponse, event)) .compose(marcBibUpdates -> sendEvents(marcBibUpdates, event, consumerRecord)) diff --git a/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java index e9cdbe4e7..4dec329df 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java @@ -35,6 +35,7 @@ import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_PARSED_RECORDS_CHUNK_SAVED; import static org.folio.services.util.EventHandlingUtil.constructModuleName; +import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; import static org.folio.services.util.KafkaUtil.extractHeaderValue; @Component @@ -85,7 +86,7 @@ public Future handle(KafkaConsumerRecord 
targetRecord) { LOGGER.debug("handle:: RecordCollection has been received with event: '{}', jobExecutionId '{}', chunkId: '{}', starting processing... chunkNumber '{}'-'{}'", event.getEventType(), jobExecutionId, chunkId, chunkNumber, key); setUserMetadata(recordCollection, userId); - return recordService.saveRecords(recordCollection, tenantId) + return recordService.saveRecords(recordCollection, toOkapiHeaders(kafkaHeaders, null)) .compose(recordsBatchResponse -> sendBackRecordsBatchResponse(recordsBatchResponse, kafkaHeaders, tenantId, chunkNumber, event.getEventType(), targetRecord)); } catch (Exception e) { LOGGER.warn("handle:: RecordCollection processing has failed with errors jobExecutionId '{}', chunkId: '{}', chunkNumber '{}'-'{}'", diff --git a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java index 3aaefd990..d6679aff0 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java @@ -2,6 +2,7 @@ import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.function.Function; @@ -189,10 +190,10 @@ Future getMatchedRecordsIdentifiers(MatchField mat * Saves {@link Record} to the db * * @param record Record to save - * @param tenantId tenant id + * @param okapiHeaders okapi headers * @return future with saved Record */ - Future saveRecord(Record record, String tenantId); + Future saveRecord(Record record, Map okapiHeaders); /** * Saves {@link Record} to the db using {@link ReactiveClassicGenericQueryExecutor} @@ -201,25 +202,25 @@ Future getMatchedRecordsIdentifiers(MatchField mat * @param record Record to save * @return future with saved Record */ - Future saveRecord(ReactiveClassicGenericQueryExecutor txQE, Record record); + Future saveRecord(ReactiveClassicGenericQueryExecutor txQE, Record record, Map okapiHeaders); /** * Saves {@link RecordCollection} to the db * * @param recordCollection Record collection to save - * @param tenantId tenant id + * @param okapiHeaders okapi headers * @return future with saved {@link RecordsBatchResponse} */ - Future saveRecords(RecordCollection recordCollection, String tenantId); + Future saveRecords(RecordCollection recordCollection, Map okapiHeaders); /** * Updates {{@link Record} in the db * * @param record Record to update - * @param tenantId tenant id + * @param okapiHeaders okapi headers * @return future with updated Record */ - Future updateRecord(Record record, String tenantId); + Future updateRecord(Record record, Map okapiHeaders); /** * Increments generation in case a record with the same matchedId exists diff --git a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java index 676def094..b7a25d8fd 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java @@ -1,5 +1,42 @@ package org.folio.dao; +import static java.lang.String.format; +import static java.util.Collections.emptyList; +import static org.folio.dao.util.AdvisoryLockUtil.acquireLock; +import static org.folio.dao.util.ErrorRecordDaoUtil.ERROR_RECORD_CONTENT; +import static org.folio.dao.util.ParsedRecordDaoUtil.PARSED_RECORD_CONTENT; +import static 
org.folio.dao.util.RawRecordDaoUtil.RAW_RECORD_CONTENT; +import static org.folio.dao.util.RecordDaoUtil.RECORD_NOT_FOUND_TEMPLATE; +import static org.folio.dao.util.RecordDaoUtil.ensureRecordForeignKeys; +import static org.folio.dao.util.RecordDaoUtil.filterRecordByExternalIdNonNull; +import static org.folio.dao.util.RecordDaoUtil.filterRecordByState; +import static org.folio.dao.util.RecordDaoUtil.filterRecordByType; +import static org.folio.dao.util.RecordDaoUtil.getExternalHrid; +import static org.folio.dao.util.RecordDaoUtil.getExternalId; +import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_FOUND_TEMPLATE; +import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_STARTED_MESSAGE_TEMPLATE; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.rest.jooq.Tables.ERROR_RECORDS_LB; +import static org.folio.rest.jooq.Tables.MARC_RECORDS_LB; +import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; +import static org.folio.rest.jooq.Tables.RAW_RECORDS_LB; +import static org.folio.rest.jooq.Tables.RECORDS_LB; +import static org.folio.rest.jooq.Tables.SNAPSHOTS_LB; +import static org.folio.rest.jooq.enums.RecordType.MARC_BIB; +import static org.folio.rest.util.QueryParamUtil.toRecordType; +import static org.jooq.impl.DSL.condition; +import static org.jooq.impl.DSL.countDistinct; +import static org.jooq.impl.DSL.exists; +import static org.jooq.impl.DSL.field; +import static org.jooq.impl.DSL.inline; +import static org.jooq.impl.DSL.max; +import static org.jooq.impl.DSL.name; +import static org.jooq.impl.DSL.primaryKey; +import static org.jooq.impl.DSL.select; +import static org.jooq.impl.DSL.selectDistinct; +import static org.jooq.impl.DSL.table; +import static org.jooq.impl.DSL.trueCondition; + import com.google.common.collect.Lists; import io.github.jklingsporn.vertx.jooq.classic.reactivepg.ReactiveClassicGenericQueryExecutor; import io.github.jklingsporn.vertx.jooq.shared.internal.QueryResult; @@ -11,6 +48,24 @@ import io.vertx.reactivex.pgclient.PgPool; import io.vertx.reactivex.sqlclient.SqlConnection; import io.vertx.sqlclient.Row; +import java.sql.Connection; +import java.sql.SQLException; +import java.time.OffsetDateTime; +import java.time.ZoneOffset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.ws.rs.BadRequestException; +import javax.ws.rs.NotFoundException; import net.sf.jsqlparser.JSQLParserException; import net.sf.jsqlparser.expression.BinaryExpression; import net.sf.jsqlparser.expression.Expression; @@ -60,6 +115,7 @@ import org.folio.rest.jooq.tables.records.RecordsLbRecord; import org.folio.rest.jooq.tables.records.SnapshotsLbRecord; import org.folio.services.RecordSearchParameters; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.util.TypeConnection; import org.folio.services.util.parser.ParseFieldsResult; import org.folio.services.util.parser.ParseLeaderResult; @@ -90,61 +146,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import javax.ws.rs.BadRequestException; -import javax.ws.rs.NotFoundException; -import java.sql.Connection; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import 
java.time.ZoneOffset; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.function.Function; -import java.util.stream.Collectors; - -import static java.lang.String.format; -import static java.util.Collections.emptyList; -import static org.folio.dao.util.AdvisoryLockUtil.acquireLock; -import static org.folio.dao.util.ErrorRecordDaoUtil.ERROR_RECORD_CONTENT; -import static org.folio.dao.util.ParsedRecordDaoUtil.PARSED_RECORD_CONTENT; -import static org.folio.dao.util.RawRecordDaoUtil.RAW_RECORD_CONTENT; -import static org.folio.dao.util.RecordDaoUtil.RECORD_NOT_FOUND_TEMPLATE; -import static org.folio.dao.util.RecordDaoUtil.ensureRecordForeignKeys; -import static org.folio.dao.util.RecordDaoUtil.filterRecordByExternalIdNonNull; -import static org.folio.dao.util.RecordDaoUtil.filterRecordByState; -import static org.folio.dao.util.RecordDaoUtil.filterRecordByType; -import static org.folio.dao.util.RecordDaoUtil.getExternalHrid; -import static org.folio.dao.util.RecordDaoUtil.getExternalId; -import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_FOUND_TEMPLATE; -import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_STARTED_MESSAGE_TEMPLATE; -import static org.folio.rest.jooq.Tables.ERROR_RECORDS_LB; -import static org.folio.rest.jooq.Tables.MARC_RECORDS_LB; -import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; -import static org.folio.rest.jooq.Tables.RAW_RECORDS_LB; -import static org.folio.rest.jooq.Tables.RECORDS_LB; -import static org.folio.rest.jooq.Tables.SNAPSHOTS_LB; -import static org.folio.rest.jooq.enums.RecordType.MARC_BIB; -import static org.folio.rest.util.QueryParamUtil.toRecordType; -import static org.jooq.impl.DSL.condition; -import static org.jooq.impl.DSL.countDistinct; -import static org.jooq.impl.DSL.field; -import static org.jooq.impl.DSL.inline; -import static org.jooq.impl.DSL.max; -import static org.jooq.impl.DSL.name; -import static org.jooq.impl.DSL.primaryKey; -import static org.jooq.impl.DSL.select; -import static org.jooq.impl.DSL.table; -import static org.jooq.impl.DSL.trueCondition; -import static org.jooq.impl.DSL.selectDistinct; -import static org.jooq.impl.DSL.exists; - @Component public class RecordDaoImpl implements RecordDao { @@ -229,13 +230,16 @@ public class RecordDaoImpl implements RecordDao { public static final Field MARC_INDEXERS_MARC_ID = field(TABLE_FIELD_TEMPLATE, UUID.class, field(MARC_INDEXERS), field(MARC_ID)); private final PostgresClientFactory postgresClientFactory; + private final RecordDomainEventPublisher recordDomainEventPublisher; @org.springframework.beans.factory.annotation.Value("${srs.record.matching.fallback-query.enable:false}") private boolean enableFallbackQuery; @Autowired - public RecordDaoImpl(final PostgresClientFactory postgresClientFactory) { + public RecordDaoImpl(final PostgresClientFactory postgresClientFactory, + final RecordDomainEventPublisher recordDomainEventPublisher) { this.postgresClientFactory = postgresClientFactory; + this.recordDomainEventPublisher = recordDomainEventPublisher; } @Override @@ -728,19 +732,24 @@ public Future> getRecordByCondition(ReactiveClassicGenericQuery } @Override - public Future saveRecord(Record record, String tenantId) { + public Future saveRecord(Record record, Map okapiHeaders) { + var tenantId = okapiHeaders.get(TENANT); 
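+    // tenant id is now resolved from the X-Okapi-Tenant header; the full header map is kept
+    // so that publishRecordCreated below can propagate tenant, token and url with the domain event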
LOG.trace("saveRecord:: Saving {} record {} for tenant {}", record.getRecordType(), record.getId(), tenantId); - return getQueryExecutor(tenantId).transaction(txQE -> saveRecord(txQE, record)); + return getQueryExecutor(tenantId).transaction(txQE -> saveRecord(txQE, record, okapiHeaders)) + .onSuccess(created -> recordDomainEventPublisher.publishRecordCreated(created, okapiHeaders)); } @Override - public Future saveRecord(ReactiveClassicGenericQueryExecutor txQE, Record record) { + public Future saveRecord(ReactiveClassicGenericQueryExecutor txQE, Record record, + Map okapiHeaders) { LOG.trace("saveRecord:: Saving {} record {}", record.getRecordType(), record.getId()); - return insertOrUpdateRecord(txQE, record); + return insertOrUpdateRecord(txQE, record) + .onSuccess(created -> recordDomainEventPublisher.publishRecordCreated(created, okapiHeaders)); } @Override - public Future saveRecords(RecordCollection recordCollection, String tenantId) { + public Future saveRecords(RecordCollection recordCollection, Map okapiHeaders) { + var tenantId = okapiHeaders.get(TENANT); logRecordCollection("saveRecords:: Saving", recordCollection, tenantId); Promise finalPromise = Promise.promise(); Context context = Vertx.currentContext(); @@ -950,15 +959,19 @@ public Future saveRecords(RecordCollection recordCollectio } }); - return finalPromise.future(); + return finalPromise.future() + .onSuccess(response -> response.getRecords() + .forEach(r -> recordDomainEventPublisher.publishRecordCreated(r, okapiHeaders)) + ); } @Override - public Future updateRecord(Record record, String tenantId) { + public Future updateRecord(Record record, Map okapiHeaders) { + var tenantId = okapiHeaders.get(TENANT); LOG.trace("updateRecord:: Updating {} record {} for tenant {}", record.getRecordType(), record.getId(), tenantId); return getQueryExecutor(tenantId).transaction(txQE -> getRecordById(txQE, record.getId()) .compose(optionalRecord -> optionalRecord - .map(r -> saveRecord(txQE, record)) + .map(r -> saveRecord(txQE, record, okapiHeaders)) .orElse(Future.failedFuture(new NotFoundException(format(RECORD_NOT_FOUND_TEMPLATE, record.getId())))))); } diff --git a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java index c0bcf6166..b4317a84e 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java @@ -59,7 +59,7 @@ public void postSourceStorageBatchRecords(RecordCollection entity, Map { try { MetadataUtil.populateMetadata(entity.getRecords(), okapiHeaders); - recordService.saveRecords(entity, tenantId) + recordService.saveRecords(entity, okapiHeaders) .map(recordsBatchResponse -> { if (!recordsBatchResponse.getRecords().isEmpty()) { return PostSourceStorageBatchRecordsResponse.respond201WithApplicationJson(recordsBatchResponse); diff --git a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStoragePopulateTestMarcRecordsImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStoragePopulateTestMarcRecordsImpl.java index fd8ae01ef..82bf9aa61 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStoragePopulateTestMarcRecordsImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStoragePopulateTestMarcRecordsImpl.java @@ -67,7 
+67,7 @@ public void postSourceStoragePopulateTestMarcRecords(TestMarcRecordsCollection e } return record; }) - .forEach(marcRecord -> futures.add(recordService.saveRecord(marcRecord, tenantId))); + .forEach(marcRecord -> futures.add(recordService.saveRecord(marcRecord, okapiHeaders))); GenericCompositeFuture.all(futures).onComplete(result -> { if (result.succeeded()) { diff --git a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageRecordsImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageRecordsImpl.java index 7b68b1c02..15991393f 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageRecordsImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageRecordsImpl.java @@ -53,7 +53,7 @@ public void postSourceStorageRecords(Record entity, Map okapiHea Handler> asyncResultHandler, Context vertxContext) { vertxContext.runOnContext(v -> { try { - recordService.saveRecord(entity, tenantId) + recordService.saveRecord(entity, okapiHeaders) .map((Response) PostSourceStorageRecordsResponse.respond201WithApplicationJson(entity, PostSourceStorageRecordsResponse.headersFor201())) .otherwise(ExceptionHelper::mapExceptionToResponse).onComplete(asyncResultHandler); } catch (Exception e) { @@ -88,7 +88,7 @@ public void putSourceStorageRecordsById(String id, Record entity, Map { try { entity.setId(id); - recordService.updateRecord(entity, tenantId) + recordService.updateRecord(entity, okapiHeaders) .map(updated -> PutSourceStorageRecordsByIdResponse.respond200WithApplicationJson(entity)) .map(Response.class::cast).otherwise(ExceptionHelper::mapExceptionToResponse) .onComplete(asyncResultHandler); @@ -103,7 +103,7 @@ public void putSourceStorageRecordsGenerationById(String matchedId, Record entit Handler> asyncResultHandler, Context vertxContext) { vertxContext.runOnContext(v -> { try { - recordService.updateRecordGeneration(matchedId, entity, tenantId) + recordService.updateRecordGeneration(matchedId, entity, okapiHeaders) .map(updated -> PutSourceStorageRecordsGenerationByIdResponse.respond200WithApplicationJson(entity)) .map(Response.class::cast).otherwise(ExceptionHelper::mapExceptionToResponse) .onComplete(asyncResultHandler); @@ -119,7 +119,7 @@ public void deleteSourceStorageRecordsById(String id, String idType, Map> asyncResultHandler, Context vertxContext) { vertxContext.runOnContext(v -> { try { - recordService.deleteRecordById(id, toExternalIdType(idType), tenantId).map(r -> true) + recordService.deleteRecordById(id, toExternalIdType(idType), okapiHeaders).map(r -> true) .map(updated -> DeleteSourceStorageRecordsByIdResponse.respond204()).map(Response.class::cast) .otherwise(ExceptionHelper::mapExceptionToResponse).onComplete(asyncResultHandler); } catch (Exception e) { diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java index f97b0bef8..b8e666b55 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java @@ -2,6 +2,7 @@ import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.Optional; import io.vertx.sqlclient.Row; @@ -68,38 +69,38 @@ public interface RecordService { * Saves record * * @param record record to save - * @param tenantId tenant id + * @param 
okapiHeaders okapi headers * @return future with saved Record */ - Future saveRecord(Record record, String tenantId); + Future saveRecord(Record record, Map okapiHeaders); /** * Saves collection of records * * @param recordsCollection records to save - * @param tenantId tenant id + * @param okapiHeaders okapi headers * @return future with response containing list of successfully saved records and error messages for records that were not saved */ - Future saveRecords(RecordCollection recordsCollection, String tenantId); + Future saveRecords(RecordCollection recordsCollection, Map okapiHeaders); /** * Updates record with given id * * @param record record to update - * @param tenantId tenant id + * @param okapiHeaders okapi headers * @return future with updated Record */ - Future updateRecord(Record record, String tenantId); + Future updateRecord(Record record, Map okapiHeaders); /** * Updates record generation with given matched id * * @param matchedId matched id * @param record record to update - * @param tenantId tenant id + * @param okapiHeaders okapi headers * @return future with updated Record generation */ - Future updateRecordGeneration(String matchedId, Record record, String tenantId); + Future updateRecordGeneration(String matchedId, Record record, Map okapiHeaders); /** * Searches for {@link SourceRecord} by {@link Condition} and ordered by order fields with offset and limit @@ -267,5 +268,5 @@ public interface RecordService { */ Future updateRecordsState(String matchedId, RecordState state, RecordType recordType, String tenantId); - Future deleteRecordById(String id, IdType idType, String tenantId); + Future deleteRecordById(String id, IdType idType, Map okapiHeaders); } diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java index a145ef32e..06c455cd6 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java @@ -14,6 +14,7 @@ import static org.folio.dao.util.RecordDaoUtil.getExternalIdsConditionWithQualifier; import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_FOUND_TEMPLATE; import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_STARTED_MESSAGE_TEMPLATE; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.util.QueryParamUtil.toRecordType; import static org.folio.services.util.AdditionalFieldsUtil.TAG_999; import static org.folio.services.util.AdditionalFieldsUtil.addFieldToMarcRecord; @@ -24,6 +25,7 @@ import java.util.Collections; import java.util.Date; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.UUID; @@ -122,7 +124,8 @@ public Future> getRecordById(String id, String tenantId) { } @Override - public Future saveRecord(Record record, String tenantId) { + public Future saveRecord(Record record, Map okapiHeaders) { + var tenantId = okapiHeaders.get(TENANT); LOG.debug("saveRecord:: Saving record with id: {} for tenant: {}", record.getId(), tenantId); ensureRecordHasId(record); ensureRecordHasSuppressDiscovery(record); @@ -147,46 +150,47 @@ public Future saveRecord(Record record, String tenantId) { return recordDao.getRecordByMatchedId(txQE, record.getMatchedId()) .compose(optionalMatchedRecord -> optionalMatchedRecord .map(matchedRecord -> recordDao.saveUpdatedRecord(txQE, 
ensureRecordForeignKeys(record.withGeneration(generation)), matchedRecord.withState(Record.State.OLD))) - .orElseGet(() -> recordDao.saveRecord(txQE, ensureRecordForeignKeys(record.withGeneration(generation))))); + .orElseGet(() -> recordDao.saveRecord(txQE, ensureRecordForeignKeys(record.withGeneration(generation)), okapiHeaders))); } else { - return recordDao.saveRecord(txQE, ensureRecordForeignKeys(record.withGeneration(generation))); + return recordDao.saveRecord(txQE, ensureRecordForeignKeys(record.withGeneration(generation)), okapiHeaders); } }), tenantId) .recover(RecordServiceImpl::mapToDuplicateExceptionIfNeeded); } @Override - public Future saveRecords(RecordCollection recordCollection, String tenantId) { + public Future saveRecords(RecordCollection recordCollection, Map okapiHeaders) { if (recordCollection.getRecords().isEmpty()) { Promise promise = Promise.promise(); promise.complete(new RecordsBatchResponse().withTotalRecords(0)); return promise.future(); } List setMatchedIdsFutures = new ArrayList<>(); - recordCollection.getRecords().forEach(record -> setMatchedIdsFutures.add(setMatchedIdForRecord(record, tenantId))); + recordCollection.getRecords().forEach(record -> setMatchedIdsFutures.add(setMatchedIdForRecord(record, + okapiHeaders.get(TENANT)))); return GenericCompositeFuture.all(setMatchedIdsFutures) .compose(ar -> ar.succeeded() ? - recordDao.saveRecords(recordCollection, tenantId) + recordDao.saveRecords(recordCollection, okapiHeaders) : Future.failedFuture(ar.cause())) .recover(RecordServiceImpl::mapToDuplicateExceptionIfNeeded); } @Override - public Future updateRecord(Record record, String tenantId) { - return recordDao.updateRecord(ensureRecordForeignKeys(record), tenantId); + public Future updateRecord(Record record, Map okapiHeaders) { + return recordDao.updateRecord(ensureRecordForeignKeys(record), okapiHeaders); } @Override - public Future updateRecordGeneration(String matchedId, Record record, String tenantId) { + public Future updateRecordGeneration(String matchedId, Record record, Map okapiHeaders) { String marcField999s = getFieldFromMarcRecord(record, TAG_999, INDICATOR, INDICATOR, SUBFIELD_S); if (!matchedId.equals(marcField999s)) { return Future.failedFuture(new BadRequestException(format(MATCHED_ID_NOT_EQUAL_TO_999_FIELD, matchedId, marcField999s))); } record.setId(UUID.randomUUID().toString()); - return recordDao.getRecordByMatchedId(matchedId, tenantId) + return recordDao.getRecordByMatchedId(matchedId, okapiHeaders.get(TENANT)) .map(r -> r.orElseThrow(() -> new NotFoundException(format(RECORD_WITH_GIVEN_MATCHED_ID_NOT_FOUND, matchedId)))) - .compose(v -> saveRecord(record, tenantId)) + .compose(v -> saveRecord(record, okapiHeaders)) .recover(throwable -> { if (throwable instanceof DuplicateRecordException) { return Future.failedFuture(new BadRequestException(UPDATE_RECORD_DUPLICATE_EXCEPTION)); @@ -340,7 +344,8 @@ public Future getMatchedRecordsIdentifiers(RecordM } @Override - public Future deleteRecordById(String id, IdType idType, String tenantId) { + public Future deleteRecordById(String id, IdType idType, Map okapiHeaders) { + var tenantId = okapiHeaders.get(TENANT); return recordDao.getRecordByExternalId(id, idType, tenantId) .map(recordOptional -> recordOptional.orElseThrow(() -> new NotFoundException(format(NOT_FOUND_MESSAGE, Record.class.getSimpleName(), id)))) .map(record -> { @@ -350,7 +355,7 @@ public Future deleteRecordById(String id, IdType idType, String tenantId) ParsedRecordDaoUtil.updateLeaderStatus(record.getParsedRecord(), 
DELETED_LEADER_RECORD_STATUS); return record; }) - .compose(record -> updateRecord(record, tenantId)).map(r -> null); + .compose(record -> updateRecord(record, okapiHeaders)).map(r -> null); } private Future setMatchedIdForRecord(Record record, String tenantId) { diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java new file mode 100644 index 000000000..cc1cf072b --- /dev/null +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java @@ -0,0 +1,48 @@ +package org.folio.services.domainevent; + +import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.okapi.common.XOkapiHeaders.TOKEN; +import static org.folio.okapi.common.XOkapiHeaders.URL; +import static org.folio.services.util.EventHandlingUtil.sendEventToKafka; + +import io.vertx.core.Vertx; +import io.vertx.core.json.Json; +import io.vertx.kafka.client.producer.KafkaHeader; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import org.folio.kafka.KafkaConfig; +import org.folio.rest.jaxrs.model.Record; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class RecordDomainEventPublisher { + + public static final String RECORD_DOMAIN_TOPIC = "srs.source_records"; + public static final String SOURCE_RECORD_CREATED = "SOURCE_RECORD_CREATED"; + public static final String SOURCE_RECORD_UPDATED = "SOURCE_RECORD_UPDATED"; + private static final String RECORD_TYPE = "folio.srs.recordType"; + + @Autowired + private KafkaConfig kafkaConfig; + + public void publishRecordCreated(Record created, Map okapiHeaders) { + Vertx.vertx().executeBlocking(() -> { + var kafkaHeaders = getKafkaHeaders(okapiHeaders, created.getRecordType()); + var key = created.getId(); + return sendEventToKafka(okapiHeaders.get(TENANT), Json.encode(created), SOURCE_RECORD_CREATED, kafkaHeaders, + kafkaConfig, key); + }); + } + + private List getKafkaHeaders(Map okapiHeaders, Record.RecordType recordType) { + return new ArrayList<>(List.of( + KafkaHeader.header(URL, okapiHeaders.get(URL)), + KafkaHeader.header(TENANT, okapiHeaders.get(TENANT)), + KafkaHeader.header(TOKEN, okapiHeaders.get(TOKEN)), + KafkaHeader.header(RECORD_TYPE, recordType.value())) + ); + } + +} diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java index 6c550f296..adfdbf87e 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java @@ -6,6 +6,7 @@ import io.vertx.core.json.Json; import io.vertx.core.json.JsonObject; import io.vertx.kafka.client.producer.KafkaHeader; +import java.util.Map; import org.apache.commons.lang3.tuple.Pair; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -43,6 +44,7 @@ import static org.folio.dao.util.MarcUtil.reorderMarcRecordFields; import static org.folio.dao.util.RecordDaoUtil.filterRecordByExternalId; import static org.folio.dao.util.RecordDaoUtil.filterRecordByNotSnapshotId; +import static 
org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; @@ -57,6 +59,7 @@ import static org.folio.services.util.AdditionalFieldsUtil.remove035WithActualHrId; import static org.folio.services.util.AdditionalFieldsUtil.updateLatestTransactionDate; import static org.folio.services.util.EventHandlingUtil.sendEventToKafka; +import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; import static org.folio.services.util.RestUtil.retrieveOkapiConnectionParams; public abstract class AbstractPostProcessingEventHandler implements EventHandler { @@ -108,7 +111,7 @@ public CompletableFuture handle(DataImportEventPayload d if (centralTenantOperationExists(dataImportEventPayload)) { return saveRecordForCentralTenant(dataImportEventPayload, record, jobExecutionId); } - return saveRecord(record, dataImportEventPayload.getTenant()); + return saveRecord(record, toOkapiHeaders(dataImportEventPayload)); }) .onSuccess(record -> { sendReplyEvent(dataImportEventPayload, record); @@ -247,17 +250,17 @@ private void setSuppressFormDiscovery(Record record, boolean suppressFromDiscove } } - private Future updatePreviousRecordsState(String externalId, String snapshotId, String tenantId) { + private Future updatePreviousRecordsState(String externalId, String snapshotId, Map okapiHeaders) { Condition condition = filterRecordByNotSnapshotId(snapshotId) .and(filterRecordByExternalId(externalId)); - return recordService.getRecords(condition, getDbType(), new ArrayList<>(), 0, 999, tenantId) + return recordService.getRecords(condition, getDbType(), new ArrayList<>(), 0, 999, okapiHeaders.get(TENANT)) .compose(recordCollection -> { Promise result = Promise.promise(); @SuppressWarnings("squid:S3740") List> futures = new ArrayList<>(); recordCollection.getRecords() - .forEach(record -> futures.add(recordService.updateRecord(record.withState(Record.State.OLD), tenantId))); + .forEach(record -> futures.add(recordService.updateRecord(record.withState(Record.State.OLD), okapiHeaders))); GenericCompositeFuture.all(futures).onComplete(ar -> { if (ar.succeeded()) { result.complete(); @@ -308,21 +311,22 @@ private void executeHridManipulation(Record record, JsonObject externalEntity) { * Updates specific record. If it doesn't exist - then just save it. 
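+   * The full Okapi header map is passed through (rather than a bare tenant id) so that the
+   * storage layer can publish a record domain event for the saved record.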
* * @param record - target record - * @param tenantId - tenantId + * @param okapiHeaders - okapi headers * @return - Future with Record result */ - private Future saveRecord(Record record, String tenantId) { + private Future saveRecord(Record record, Map okapiHeaders) { + var tenantId = okapiHeaders.get(TENANT); return recordService.getRecordById(record.getId(), tenantId) .compose(r -> { if (r.isPresent()) { return recordService.updateParsedRecord(record, tenantId).map(record.withGeneration(r.get().getGeneration())); } else { record.getRawRecord().setId(record.getId()); - return recordService.saveRecord(record, tenantId).map(record); + return recordService.saveRecord(record, okapiHeaders).map(record); } }) .compose(updatedRecord -> - updatePreviousRecordsState(getExternalId(updatedRecord), updatedRecord.getSnapshotId(), tenantId) + updatePreviousRecordsState(getExternalId(updatedRecord), updatedRecord.getSnapshotId(), okapiHeaders) .map(updatedRecord) ); } @@ -348,12 +352,14 @@ private Future saveRecordForCentralTenant(DataImportEventPayload dataImp String centralTenantId = dataImportEventPayload.getContext().get(CENTRAL_TENANT_ID); dataImportEventPayload.getContext().remove(CENTRAL_TENANT_INSTANCE_UPDATED_FLAG); LOG.info("handle:: Processing AbstractPostProcessingEventHandler - saving record by jobExecutionId: {} for the central tenantId: {}", jobExecutionId, centralTenantId); + var okapiHeaders = toOkapiHeaders(dataImportEventPayload); if (centralTenantId != null) { + okapiHeaders.put(TENANT, centralTenantId); return snapshotService.copySnapshotToOtherTenant(record.getSnapshotId(), dataImportEventPayload.getTenant(), centralTenantId) - .compose(f -> saveRecord(record, centralTenantId)); + .compose(f -> saveRecord(record, okapiHeaders)); } else { - return saveRecord(record, dataImportEventPayload.getTenant()); + return saveRecord(record, okapiHeaders); } } diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java index fa37481c7..5b6f38a36 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java @@ -38,6 +38,7 @@ import static java.util.Objects.nonNull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.folio.ActionProfile.Action.UPDATE; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; import static org.folio.services.handlers.match.AbstractMarcMatchEventHandler.CENTRAL_TENANT_ID; import static org.folio.services.util.AdditionalFieldsUtil.HR_ID_FROM_FIELD; @@ -48,6 +49,7 @@ import static org.folio.services.util.AdditionalFieldsUtil.remove003FieldIfNeeded; import static org.folio.services.util.AdditionalFieldsUtil.remove035WithActualHrId; import static org.folio.services.util.AdditionalFieldsUtil.updateLatestTransactionDate; +import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; public abstract class AbstractUpdateModifyEventHandler implements EventHandler { @@ -118,11 +120,13 @@ public CompletableFuture handle(DataImportEventPayload p ) .compose(changedRecord -> { String centralTenantId = payload.getContext().get(CENTRAL_TENANT_ID); + var okapiHeaders = 
toOkapiHeaders(payload); if (centralTenantId != null) { + okapiHeaders.put(TENANT, centralTenantId); return snapshotService.copySnapshotToOtherTenant(changedRecord.getSnapshotId(), payload.getTenant(), centralTenantId) - .compose(snapshot -> recordService.saveRecord(changedRecord, centralTenantId)); + .compose(snapshot -> recordService.saveRecord(changedRecord, okapiHeaders)); } - return recordService.saveRecord(changedRecord, payload.getTenant()); + return recordService.saveRecord(changedRecord, okapiHeaders); }) .onSuccess(savedRecord -> submitSuccessfulEventType(payload, future, marcMappingOption)) .onFailure(throwable -> { diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java index e2a4bde97..69e0bd0e4 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java @@ -1,13 +1,27 @@ package org.folio.services.util; +import static java.util.Objects.nonNull; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.okapi.common.XOkapiHeaders.TOKEN; +import static org.folio.okapi.common.XOkapiHeaders.URL; +import static org.folio.services.domainevent.RecordDomainEventPublisher.RECORD_DOMAIN_TOPIC; +import static org.folio.services.domainevent.RecordDomainEventPublisher.SOURCE_RECORD_CREATED; +import static org.folio.services.domainevent.RecordDomainEventPublisher.SOURCE_RECORD_UPDATED; +import static org.folio.services.util.KafkaUtil.extractHeaderValue; + import io.vertx.core.Future; import io.vertx.core.Promise; import io.vertx.core.Vertx; import io.vertx.kafka.client.producer.KafkaHeader; import io.vertx.kafka.client.producer.KafkaProducer; import io.vertx.kafka.client.producer.KafkaProducerRecord; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.folio.DataImportEventPayload; import org.folio.kafka.KafkaConfig; import org.folio.kafka.KafkaTopicNameHelper; import org.folio.kafka.SimpleKafkaProducerManager; @@ -17,9 +31,6 @@ import org.folio.rest.jaxrs.model.EventMetadata; import org.folio.rest.tools.utils.ModuleName; -import java.util.List; -import java.util.UUID; - public final class EventHandlingUtil { private static final Logger LOGGER = LogManager.getLogger(); @@ -96,6 +107,9 @@ public static String constructModuleName() { } public static String createTopicName(String eventType, String tenantId, KafkaConfig kafkaConfig) { + if (SOURCE_RECORD_CREATED.equals(eventType) || SOURCE_RECORD_UPDATED.equals(eventType)) { + return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), tenantId, RECORD_DOMAIN_TOPIC); + } return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), KafkaTopicNameHelper.getDefaultNameSpace(), tenantId, eventType); } @@ -108,6 +122,22 @@ public static KafkaProducer createProducer(String eventType, Kaf return new SimpleKafkaProducerManager(Vertx.currentContext().owner(), kafkaConfig).createShared(eventType); } + public static Map toOkapiHeaders(DataImportEventPayload eventPayload) { + var okapiHeaders = new HashMap(); + okapiHeaders.put(URL, eventPayload.getOkapiUrl()); + okapiHeaders.put(TENANT, eventPayload.getTenant()); + okapiHeaders.put(TOKEN, eventPayload.getToken()); + return okapiHeaders; 
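+    // url, tenant and token are exactly the okapi headers that RecordDomainEventPublisher
+    // later forwards to Kafka along with the record type header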
+ } + + public static Map toOkapiHeaders(List kafkaHeaders, String eventTenantId) { + var okapiHeaders = new HashMap(); + okapiHeaders.put(URL, extractHeaderValue(URL, kafkaHeaders)); + okapiHeaders.put(TENANT, nonNull(eventTenantId) ? eventTenantId : extractHeaderValue(TENANT, kafkaHeaders)); + okapiHeaders.put(TOKEN, extractHeaderValue(TOKEN, kafkaHeaders)); + return okapiHeaders; + } + private static String extractRecordId(List kafkaHeaders) { return kafkaHeaders.stream() .filter(header -> header.key().equals(RECORD_ID_HEADER)) From 83f7d7b88f31408f048702457cf96975574cce15 Mon Sep 17 00:00:00 2001 From: pbobylev Date: Fri, 26 Jul 2024 14:37:00 +0500 Subject: [PATCH 2/8] MODSOURCE-752: test fixes --- .../AuthorityDomainKafkaHandlerTest.java | 15 +- .../java/org/folio/dao/RecordDaoImplTest.java | 16 +- .../AuthorityLinkChunkKafkaHandlerTest.java | 20 ++- .../MarcAuthorityDeleteEventHandlerTest.java | 17 +- .../MarcAuthorityMatchEventHandlerTest.java | 23 ++- ...AuthorityUpdateModifyEventHandlerTest.java | 15 +- .../MarcBibUpdateModifyEventHandlerTest.java | 20 ++- .../MarcHoldingsMatchEventHandlerTest.java | 23 ++- ...cHoldingsUpdateModifyEventHandlerTest.java | 15 +- .../services/QuickMarcKafkaHandlerTest.java | 15 +- .../org/folio/services/RecordServiceTest.java | 170 +++++++++++------- ...bstractPostProcessingEventHandlerTest.java | 6 +- ...thorityPostProcessingEventHandlerTest.java | 13 +- ...oldingsPostProcessingEventHandlerTest.java | 17 +- ...nstancePostProcessingEventHandlerTest.java | 27 ++- ...rcIndexersVersionDeletionVerticleTest.java | 22 ++- .../DataImportConsumersVerticleTest.java | 13 +- 17 files changed, 308 insertions(+), 139 deletions(-) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java index 455894eaf..099849baa 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java @@ -1,10 +1,10 @@ package org.folio.consumers; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; import com.fasterxml.jackson.databind.ObjectMapper; import io.vertx.core.json.Json; -import io.vertx.core.json.JsonObject; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; @@ -13,6 +13,7 @@ import java.nio.charset.StandardCharsets; import java.util.Date; import java.util.HashMap; +import java.util.Map; import java.util.UUID; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.common.header.internals.RecordHeader; @@ -31,12 +32,15 @@ import org.folio.services.AbstractLBServiceTest; import org.folio.services.RecordService; import org.folio.services.RecordServiceImpl; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.jetbrains.annotations.NotNull; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; @RunWith(VertxUnitRunner.class) public class AuthorityDomainKafkaHandlerTest extends AbstractLBServiceTest { @@ -44,7 +48,8 @@ public class AuthorityDomainKafkaHandlerTest extends AbstractLBServiceTest { private static final 
String recordId = UUID.randomUUID().toString(); private static RawRecord rawRecord; private static ParsedRecord parsedRecord; - + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; private RecordDao recordDao; private RecordService recordService; private Record record; @@ -61,7 +66,8 @@ public static void setUpClass() throws IOException { @Before public void setUp(TestContext context) { - recordDao = new RecordDaoImpl(postgresClientFactory); + MockitoAnnotations.openMocks(this); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); recordService = new RecordServiceImpl(recordDao); handler = new AuthorityDomainKafkaHandler(recordService); Async async = context.async(); @@ -78,8 +84,9 @@ record = new Record() .withRecordType(MARC_AUTHORITY) .withRawRecord(rawRecord) .withParsedRecord(parsedRecord); + var okapiHeaders = Map.of(TENANT, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) - .compose(savedSnapshot -> recordService.saveRecord(record, TENANT_ID)) + .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders)) .onSuccess(ar -> async.complete()) .onFailure(context::fail); } diff --git a/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java b/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java index 47ebec140..22fdc8e4f 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java @@ -5,6 +5,7 @@ import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.util.Map; import org.folio.TestMocks; import org.folio.TestUtil; import org.folio.dao.util.AdvisoryLockUtil; @@ -19,12 +20,15 @@ import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.Snapshot; import org.folio.services.AbstractLBServiceTest; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.util.TypeConnection; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import org.springframework.test.util.ReflectionTestUtils; import java.io.IOException; @@ -33,6 +37,7 @@ import java.util.UUID; import static org.folio.dao.RecordDaoImpl.INDEXERS_DELETION_LOCK_NAMESPACE_ID; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.Record.State.ACTUAL; import static org.folio.rest.jaxrs.model.Record.State.DELETED; import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; @@ -41,7 +46,8 @@ public class RecordDaoImplTest extends AbstractLBServiceTest { private static final String ENABLE_FALLBACK_QUERY_FIELD = "enableFallbackQuery"; - + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; private RecordDao recordDao; private Record record; private Record deletedRecord; @@ -49,8 +55,9 @@ public class RecordDaoImplTest extends AbstractLBServiceTest { @Before public void setUp(TestContext context) throws IOException { + MockitoAnnotations.openMocks(this); Async async = context.async(); - recordDao = new RecordDaoImpl(postgresClientFactory); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); RawRecord rawRecord = new RawRecord() .withContent(new 
ObjectMapper().readValue(TestUtil.readFileFromPath(RAW_MARC_RECORD_CONTENT_SAMPLE_PATH), String.class)); ParsedRecord marcRecord = new ParsedRecord() @@ -85,9 +92,10 @@ public void setUp(TestContext context) throws IOException { .withExternalIdsHolder(new ExternalIdsHolder() .withInstanceId(UUID.randomUUID().toString())); + var okapiHeaders = Map.of(TENANT, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) - .compose(savedSnapshot -> recordDao.saveRecord(record, TENANT_ID)) - .compose(savedSnapshot -> recordDao.saveRecord(deletedRecord, TENANT_ID)) + .compose(savedSnapshot -> recordDao.saveRecord(record, okapiHeaders)) + .compose(savedSnapshot -> recordDao.saveRecord(deletedRecord, okapiHeaders)) .onComplete(save -> { if (save.failed()) { context.fail(save.cause()); diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java index 2f592a51e..eeac85ac7 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java @@ -4,12 +4,12 @@ import static org.folio.EntityLinksKafkaTopic.INSTANCE_AUTHORITY; import static org.folio.EntityLinksKafkaTopic.LINKS_STATS; import static org.folio.RecordStorageKafkaTopic.MARC_BIB; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.LinkUpdateReport.Status.FAIL; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; import io.vertx.core.json.Json; -import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; @@ -34,7 +34,6 @@ import org.folio.TestUtil; import org.folio.dao.RecordDao; import org.folio.dao.RecordDaoImpl; -import org.folio.dao.util.ParsedRecordDaoUtil; import org.folio.dao.util.SnapshotDaoUtil; import org.folio.kafka.services.KafkaTopic; import org.folio.okapi.common.XOkapiHeaders; @@ -52,11 +51,13 @@ import org.folio.rest.jaxrs.model.Subfield; import org.folio.rest.jaxrs.model.SubfieldsChange; import org.folio.rest.jaxrs.model.UpdateTarget; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.junit.After; import org.junit.Before; -import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper; @RunWith(VertxUnitRunner.class) @@ -90,7 +91,8 @@ public class AuthorityLinkChunkKafkaHandlerTest extends AbstractLBServiceTest { ); private final RawRecord rawRecord = new RawRecord().withId(RECORD_ID) .withContent("test content"); - + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; private RecordDao recordDao; private RecordService recordService; private Record record; @@ -99,7 +101,8 @@ public class AuthorityLinkChunkKafkaHandlerTest extends AbstractLBServiceTest { @Before public void setUp(TestContext context) throws IOException { - recordDao = new RecordDaoImpl(postgresClientFactory); + MockitoAnnotations.openMocks(this); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); recordService = new RecordServiceImpl(recordDao); var async = context.async(); @@ -142,10 +145,11 @@ record = new Record() 
.withSnapshotId(snapshot.getJobExecutionId()) .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(ERROR_INSTANCE_ID)); + var okapiHeaders = Map.of(TENANT, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) - .compose(savedSnapshot -> recordService.saveRecord(record, TENANT_ID)) - .compose(savedRecord -> recordService.saveRecord(secondRecord, TENANT_ID)) - .compose(savedRecord -> recordService.saveRecord(errorRecord, TENANT_ID)) + .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders)) + .compose(savedRecord -> recordService.saveRecord(secondRecord, okapiHeaders)) + .compose(savedRecord -> recordService.saveRecord(errorRecord, okapiHeaders)) .onSuccess(ar -> async.complete()) .onFailure(context::fail); } diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java index 03fcd78fe..bffdc9ada 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java @@ -5,6 +5,7 @@ import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.util.Map; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.dao.RecordDaoImpl; @@ -16,6 +17,7 @@ import org.folio.rest.jaxrs.model.RawRecord; import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.Snapshot; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.handlers.actions.MarcAuthorityDeleteEventHandler; import org.junit.Assert; import org.junit.Before; @@ -27,9 +29,12 @@ import java.util.HashMap; import java.util.UUID; import java.util.concurrent.CompletableFuture; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import static org.folio.ActionProfile.Action.DELETE; import static org.folio.ActionProfile.Action.UPDATE; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_DELETED; import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; @@ -39,12 +44,17 @@ public class MarcAuthorityDeleteEventHandlerTest extends AbstractLBServiceTest { private static final String PARSED_CONTENT = "{\"leader\":\"01314nam 22003851a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}"; - private final RecordService recordService = new RecordServiceImpl(new RecordDaoImpl(postgresClientFactory)); - private final EventHandler eventHandler = new MarcAuthorityDeleteEventHandler(recordService); + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; + private RecordService recordService; + private EventHandler eventHandler; private Record record; @Before public void before(TestContext testContext) throws IOException { + MockitoAnnotations.openMocks(this); + recordService = new RecordServiceImpl(new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher)); + eventHandler = new MarcAuthorityDeleteEventHandler(recordService); Snapshot snapshot = new Snapshot() .withJobExecutionId(UUID.randomUUID().toString()) 
.withProcessingStartedDate(new Date()) @@ -86,7 +96,8 @@ public void shouldDeleteRecord(TestContext context) { .withFolioRecord(ActionProfile.FolioRecord.MARC_AUTHORITY) ) ); - recordService.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordService.saveRecord(record, okapiHeaders) // when .onSuccess(ar -> eventHandler.handle(dataImportEventPayload) // then diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java index 811d3d210..3198628a6 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java @@ -7,6 +7,7 @@ import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.util.Map; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; import org.folio.MatchDetail; @@ -25,6 +26,7 @@ import org.folio.rest.jaxrs.model.RawRecord; import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.Snapshot; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.handlers.match.MarcAuthorityMatchEventHandler; import org.junit.After; import org.junit.Assert; @@ -32,6 +34,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mock; import org.mockito.MockitoAnnotations; import java.io.IOException; @@ -43,6 +46,7 @@ import static java.util.Collections.singletonList; import static org.folio.MatchDetail.MatchCriterion.EXACTLY_MATCHES; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_CREATED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_MATCHED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_NOT_MATCHED; @@ -58,6 +62,8 @@ public class MarcAuthorityMatchEventHandlerTest extends AbstractLBServiceTest { private static final String MATCHED_MARC_KEY = "MATCHED_MARC_AUTHORITY"; private static final String existingRecordId = "b90cb1bc-601f-45d7-b99e-b11efd281dcd"; private static String rawRecordContent; + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; private RecordDao recordDao; private Record existingRecord; private Record incomingRecord; @@ -72,7 +78,7 @@ public static void setUpClass() throws IOException { public void setUp(TestContext context) { MockitoAnnotations.initMocks(this); - recordDao = new RecordDaoImpl(postgresClientFactory); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); handler = new MarcAuthorityMatchEventHandler(recordDao, null, vertx); Async async = context.async(); @@ -169,7 +175,8 @@ public void shouldMatchBy999ffsField(TestContext context) { ))) )))); - recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) .whenComplete((updatedEventPayload, throwable) -> { @@ -218,7 +225,8 @@ public void shouldMatchBy001Field(TestContext context) { new 
Field().withLabel("recordSubfield").withValue("") ))))))); - recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) .whenComplete((updatedEventPayload, throwable) -> { @@ -268,7 +276,8 @@ public void shouldMatchBy010aField(TestContext context) { ))) )))); - recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) .whenComplete((updatedEventPayload, throwable) -> { @@ -315,7 +324,8 @@ public void shouldNotMatchBy999ffsField(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) .whenComplete((updatedEventPayload, throwable) -> { @@ -360,7 +370,8 @@ public void shouldNotMatchBy001Field(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(record -> handler.handle(dataImportEventPayload) .whenComplete((updatedEventPayload, throwable) -> { diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java index 2452c0ec6..d9ec3939f 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java @@ -3,6 +3,7 @@ import static com.github.tomakehurst.wiremock.client.WireMock.get; import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_UPDATED; import static org.folio.rest.jaxrs.model.EntityType.MARC_AUTHORITY; import static org.folio.rest.jaxrs.model.MappingDetail.MarcMappingOption.UPDATE; @@ -16,6 +17,7 @@ import java.util.Collections; import java.util.Date; import java.util.HashMap; +import java.util.Map; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; @@ -36,6 +38,7 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -66,6 +69,8 @@ import org.folio.rest.jaxrs.model.Snapshot; import org.folio.services.caches.MappingParametersSnapshotCache; import org.folio.services.handlers.actions.MarcAuthorityUpdateModifyEventHandler; +import org.mockito.Mock; +import 
org.mockito.MockitoAnnotations; @RunWith(VertxUnitRunner.class) public class MarcAuthorityUpdateModifyEventHandlerTest extends AbstractLBServiceTest { @@ -73,13 +78,13 @@ public class MarcAuthorityUpdateModifyEventHandlerTest extends AbstractLBService private static final String PARSED_CONTENT = "{\"leader\":\"01314nam 22003851a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}"; private static final String MAPPING_METADATA__URL = "/mapping-metadata"; private static final String MATCHED_MARC_BIB_KEY = "MATCHED_MARC_AUTHORITY"; - private static final String USER_ID_HEADER = "userId"; private static String recordId = "eae222e8-70fd-4422-852c-60d22bae36b8"; - private static String userId = UUID.randomUUID().toString(); private static RawRecord rawRecord; private static ParsedRecord parsedRecord; + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; private RecordDao recordDao; private RecordService recordService; private MarcAuthorityUpdateModifyEventHandler modifyRecordEventHandler; @@ -153,11 +158,12 @@ public static void setUpClass() throws IOException { @Before public void setUp(TestContext context) { + MockitoAnnotations.openMocks(this); WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(MAPPING_METADATA__URL + "/.*"), true)) .willReturn(WireMock.ok().withBody(Json.encode(new MappingMetadataDto() .withMappingParams(Json.encode(new MappingParameters())))))); - recordDao = new RecordDaoImpl(postgresClientFactory); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); recordService = new RecordServiceImpl(recordDao); modifyRecordEventHandler = new MarcAuthorityUpdateModifyEventHandler(recordService, null, new MappingParametersSnapshotCache(vertx), vertx); @@ -180,8 +186,9 @@ record = new Record() .withParsedRecord(parsedRecord); ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID); + var okapiHeaders = Map.of(TENANT, TENANT_ID); SnapshotDaoUtil.save(queryExecutor, snapshot) - .compose(v -> recordService.saveRecord(record, TENANT_ID)) + .compose(v -> recordService.saveRecord(record, okapiHeaders)) .compose(v -> SnapshotDaoUtil.save(queryExecutor, snapshotForRecordUpdate)) .onComplete(context.asyncAssertSuccess()); } diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java index a0c57ce07..5c3a2c126 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java @@ -11,6 +11,7 @@ import static org.apache.commons.lang3.RandomUtils.nextInt; import static org.folio.ActionProfile.Action.MODIFY; import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_UPDATED; import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC; import static org.folio.rest.jaxrs.model.MappingDetail.MarcMappingOption.UPDATE; @@ -41,6 +42,7 @@ import java.util.Date; import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.UUID; import java.util.concurrent.CompletableFuture; import 
java.util.concurrent.ExecutionException; @@ -80,6 +82,7 @@ import org.folio.rest.jaxrs.model.TenantJob; import org.folio.services.caches.LinkingRulesCache; import org.folio.services.caches.MappingParametersSnapshotCache; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.exceptions.DuplicateRecordException; import org.folio.services.handlers.actions.MarcBibUpdateModifyEventHandler; import org.junit.After; @@ -89,6 +92,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; @RunWith(VertxUnitRunner.class) public class MarcBibUpdateModifyEventHandlerTest extends AbstractLBServiceTest { @@ -115,6 +120,8 @@ public class MarcBibUpdateModifyEventHandlerTest extends AbstractLBServiceTest { @Rule public RunTestOnContext rule = new RunTestOnContext(); + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; private RecordDao recordDao; private SnapshotDao snapshotDao; private RecordService recordService; @@ -231,11 +238,12 @@ public static void setUpBeforeClass(TestContext context) throws IOException { @Before public void setUp(TestContext context) { + MockitoAnnotations.openMocks(this); wireMockServer.stubFor(get(new UrlPathPattern(new RegexPattern(MAPPING_METADATA__URL + "/.*"), true)) .willReturn(WireMock.ok().withBody(Json.encode(new MappingMetadataDto() .withMappingParams(Json.encode(new MappingParameters())))))); - recordDao = new RecordDaoImpl(postgresClientFactory); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); snapshotDao = new SnapshotDaoImpl(postgresClientFactory); recordService = new RecordServiceImpl(recordDao); snapshotService = new SnapshotServiceImpl(snapshotDao); @@ -285,10 +293,10 @@ record = new Record() ReactiveClassicGenericQueryExecutor queryExecutorCentralTenant = postgresClientFactory.getQueryExecutor(CENTRAL_TENANT_ID); SnapshotDaoUtil.save(queryExecutorLocalTenant, snapshot) - .compose(v -> recordService.saveRecord(record, TENANT_ID)) + .compose(v -> recordService.saveRecord(record, Map.of(TENANT, TENANT_ID))) .compose(v -> SnapshotDaoUtil.save(queryExecutorLocalTenant, snapshotForRecordUpdate)) .compose(v -> SnapshotDaoUtil.save(queryExecutorCentralTenant, snapshot_2)) - .compose(v -> recordService.saveRecord(record_2, CENTRAL_TENANT_ID)) + .compose(v -> recordService.saveRecord(record_2, Map.of(TENANT, CENTRAL_TENANT_ID))) .onComplete(context.asyncAssertSuccess()); } @@ -705,8 +713,9 @@ public void shouldNotUpdateBibFieldWhen500ErrorGetEntityLinkRequest(TestContext .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(instanceId)) .withMetadata(new Metadata()); + var okapiHeaders = Map.of(TENANT, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), secondSnapshot) - .compose(v -> recordService.saveRecord(secondRecord, TENANT_ID)) + .compose(v -> recordService.saveRecord(secondRecord, okapiHeaders)) .compose(v -> SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshotForRecordUpdate)) .onComplete(context.asyncAssertSuccess()) .onSuccess(result -> { @@ -882,8 +891,9 @@ private void verifyBibRecordUpdate(String incomingParsedContent, String expected .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(instanceId)) .withMetadata(new Metadata()); + var okapiHeaders = Map.of(TENANT, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), secondSnapshot) - .compose(v -> 
recordService.saveRecord(secondRecord, TENANT_ID)) + .compose(v -> recordService.saveRecord(secondRecord, okapiHeaders)) .compose(v -> SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshotForRecordUpdate)) .onComplete(context.asyncAssertSuccess()) .onSuccess(result -> { diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java index 596ec23b1..b319404bf 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java @@ -3,6 +3,7 @@ import static java.util.Collections.singletonList; import static org.folio.MatchDetail.MatchCriterion.EXACTLY_MATCHES; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_MATCHED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_NOT_MATCHED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDING_RECORD_CREATED; @@ -16,6 +17,7 @@ import java.util.Date; import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.UUID; import com.fasterxml.jackson.databind.ObjectMapper; @@ -25,12 +27,14 @@ import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.folio.DataImportEventPayload; @@ -60,6 +64,8 @@ public class MarcHoldingsMatchEventHandlerTest extends AbstractLBServiceTest { private static final String MATCHED_MARC_KEY = "MATCHED_MARC_HOLDINGS"; private static final String existingRecordId = "b90cb1bc-601f-45d7-b99e-b11efd281dcd"; private static String rawRecordContent; + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; private RecordDao recordDao; private Record existingRecord; private Record incomingRecord; @@ -74,7 +80,7 @@ public static void setUpClass() throws IOException { public void setUp(TestContext context) { MockitoAnnotations.initMocks(this); - recordDao = new RecordDaoImpl(postgresClientFactory); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); handler = new MarcHoldingsMatchEventHandler(recordDao, null, vertx); Async async = context.async(); @@ -171,7 +177,8 @@ public void shouldMatchBy999ffsField(TestContext context) { ))) )))); - recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) .whenComplete((updatedEventPayload, throwable) -> { @@ -220,7 +227,8 @@ public void shouldMatchBy001Field(TestContext context) { new Field().withLabel("recordSubfield").withValue("") ))))))); - recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord 
-> handler.handle(dataImportEventPayload) .whenComplete((updatedEventPayload, throwable) -> { @@ -270,7 +278,8 @@ public void shouldMatchBy010aField(TestContext context) { ))) )))); - recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) .whenComplete((updatedEventPayload, throwable) -> { @@ -317,7 +326,8 @@ public void shouldNotMatchBy999ffsField(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) .whenComplete((updatedEventPayload, throwable) -> { @@ -362,7 +372,8 @@ public void shouldNotMatchBy001Field(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(record -> handler.handle(dataImportEventPayload) .whenComplete((updatedEventPayload, throwable) -> { diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java index 9e0bee052..defffe2bc 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java @@ -3,6 +3,7 @@ import static com.github.tomakehurst.wiremock.client.WireMock.get; import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_MODIFIED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_UPDATED; import static org.folio.rest.jaxrs.model.EntityType.MARC_HOLDINGS; @@ -17,6 +18,7 @@ import java.util.Collections; import java.util.Date; import java.util.HashMap; +import java.util.Map; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; @@ -37,6 +39,7 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -67,6 +70,8 @@ import org.folio.rest.jaxrs.model.Snapshot; import org.folio.services.caches.MappingParametersSnapshotCache; import org.folio.services.handlers.actions.MarcHoldingsUpdateModifyEventHandler; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; @RunWith(VertxUnitRunner.class) public class MarcHoldingsUpdateModifyEventHandlerTest extends AbstractLBServiceTest { @@ -74,13 +79,13 @@ public class MarcHoldingsUpdateModifyEventHandlerTest extends 
AbstractLBServiceT private static final String PARSED_CONTENT = "{\"leader\":\"01314nam 22003851a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}"; private static final String MAPPING_METADATA__URL = "/mapping-metadata"; private static final String MATCHED_MARC_BIB_KEY = "MATCHED_MARC_HOLDINGS"; - private static final String USER_ID_HEADER = "userId"; private static String recordId = "eae222e8-70fd-4422-852c-60d22bae36b8"; - private static String userId = UUID.randomUUID().toString(); private static RawRecord rawRecord; private static ParsedRecord parsedRecord; + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; private RecordDao recordDao; private RecordService recordService; private MarcHoldingsUpdateModifyEventHandler modifyRecordEventHandler; @@ -154,11 +159,12 @@ public static void setUpClass() throws IOException { @Before public void setUp(TestContext context) { + MockitoAnnotations.openMocks(this); WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(MAPPING_METADATA__URL + "/.*"), true)) .willReturn(WireMock.ok().withBody(Json.encode(new MappingMetadataDto() .withMappingParams(Json.encode(new MappingParameters())))))); - recordDao = new RecordDaoImpl(postgresClientFactory); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); recordService = new RecordServiceImpl(recordDao); modifyRecordEventHandler = new MarcHoldingsUpdateModifyEventHandler(recordService, null, new MappingParametersSnapshotCache(vertx), vertx); @@ -181,8 +187,9 @@ record = new Record() .withParsedRecord(parsedRecord); ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID); + var okapiHeaders = Map.of(TENANT, TENANT_ID); SnapshotDaoUtil.save(queryExecutor, snapshot) - .compose(v -> recordService.saveRecord(record, TENANT_ID)) + .compose(v -> recordService.saveRecord(record, okapiHeaders)) .compose(v -> SnapshotDaoUtil.save(queryExecutor, snapshotForRecordUpdate)) .onComplete(context.asyncAssertSuccess()); } diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java index 7ba9d917d..8726b2bf8 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java @@ -3,10 +3,10 @@ import com.fasterxml.jackson.databind.ObjectMapper; import io.vertx.core.Future; import io.vertx.core.json.Json; -import io.vertx.core.json.JsonObject; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.util.Map; import net.mguenther.kafka.junit.KeyValue; import net.mguenther.kafka.junit.ObserveKeyValues; import net.mguenther.kafka.junit.SendKeyValues; @@ -26,11 +26,13 @@ import org.folio.rest.jooq.Tables; import org.folio.rest.jooq.enums.RecordState; import org.folio.rest.util.OkapiConnectionParams; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mock; import org.mockito.MockitoAnnotations; import java.io.IOException; @@ -46,6 +48,7 @@ import static org.folio.dao.util.QMEventTypes.QM_SRS_MARC_RECORD_UPDATED; import static 
org.folio.kafka.KafkaTopicNameHelper.formatTopicName; import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; @RunWith(VertxUnitRunner.class) @@ -60,6 +63,8 @@ public class QuickMarcKafkaHandlerTest extends AbstractLBServiceTest { private static RawRecord rawRecord; private static ParsedRecord parsedRecord; + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; private RecordDao recordDao; private RecordService recordService; private Record record; @@ -76,7 +81,7 @@ public static void setUpClass() throws IOException { @Before public void setUp(TestContext context) { MockitoAnnotations.initMocks(this); - recordDao = new RecordDaoImpl(postgresClientFactory); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); recordService = new RecordServiceImpl(recordDao); Async async = context.async(); Snapshot snapshot = new Snapshot() @@ -91,8 +96,9 @@ record = new Record() .withRecordType(MARC_BIB) .withRawRecord(rawRecord) .withParsedRecord(parsedRecord); + var okapiHeaders = Map.of(TENANT, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) - .compose(savedSnapshot -> recordService.saveRecord(record, TENANT_ID)) + .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders)) .onSuccess(ar -> async.complete()) .onFailure(context::fail); } @@ -114,7 +120,8 @@ public void shouldUpdateParsedRecordAndSendRecordUpdatedEvent(TestContext contex ParsedRecord parsedRecord = record.getParsedRecord(); - Future future = recordService.saveRecord(record, TENANT_ID); + var okapiHeaders = Map.of(TENANT, TENANT_ID); + Future future = recordService.saveRecord(record, okapiHeaders); ParsedRecordDto parsedRecordDto = new ParsedRecordDto() .withId(record.getMatchedId()) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java index 6de404c4b..c5c116c8f 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java @@ -1,5 +1,14 @@ package org.folio.services; +import static java.util.Comparator.comparing; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.rest.jooq.Tables.RECORDS_LB; +import static org.folio.services.RecordServiceImpl.INDICATOR; +import static org.folio.services.RecordServiceImpl.SUBFIELD_S; +import static org.folio.services.util.AdditionalFieldsUtil.TAG_999; +import static org.folio.services.util.AdditionalFieldsUtil.getFieldFromMarcRecord; +import static org.junit.Assert.assertThrows; + import com.fasterxml.jackson.databind.ObjectMapper; import io.reactivex.Flowable; import io.vertx.core.AsyncResult; @@ -11,6 +20,19 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.time.OffsetDateTime; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.UUID; +import java.util.stream.Collectors; +import javax.ws.rs.BadRequestException; +import javax.ws.rs.NotFoundException; import 
org.folio.TestMocks; import org.folio.TestUtil; import org.folio.dao.RecordDao; @@ -40,6 +62,7 @@ import org.folio.rest.jaxrs.model.SourceRecordCollection; import org.folio.rest.jaxrs.model.StrippedParsedRecord; import org.folio.rest.jooq.enums.RecordState; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.jooq.Condition; import org.jooq.OrderField; import org.jooq.SortOrder; @@ -49,27 +72,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; - -import javax.ws.rs.BadRequestException; -import javax.ws.rs.NotFoundException; -import java.io.IOException; -import java.time.OffsetDateTime; -import java.time.temporal.ChronoUnit; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; -import java.util.UUID; -import java.util.stream.Collectors; - -import static java.util.Comparator.comparing; -import static org.folio.rest.jooq.Tables.RECORDS_LB; -import static org.folio.services.RecordServiceImpl.INDICATOR; -import static org.folio.services.RecordServiceImpl.SUBFIELD_S; -import static org.folio.services.util.AdditionalFieldsUtil.TAG_999; -import static org.folio.services.util.AdditionalFieldsUtil.getFieldFromMarcRecord; -import static org.junit.Assert.assertThrows; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; @RunWith(VertxUnitRunner.class) public class RecordServiceTest extends AbstractLBServiceTest { @@ -78,6 +82,8 @@ public class RecordServiceTest extends AbstractLBServiceTest { private static final String MARC_AUTHORITY_RECORD_SNAPSHOT_ID = "ee561342-3098-47a8-ab6e-0f3eba120b04"; @Rule public RunTestOnContext rule = new RunTestOnContext(); + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; private RecordDao recordDao; private RecordService recordService; @@ -87,12 +93,12 @@ public class RecordServiceTest extends AbstractLBServiceTest { @Before public void setUp(TestContext context) throws IOException { + MockitoAnnotations.openMocks(this); rawRecord = new RawRecord() .withContent(new ObjectMapper().readValue(TestUtil.readFileFromPath(RAW_MARC_RECORD_CONTENT_SAMPLE_PATH), String.class)); marcRecord = new ParsedRecord() .withContent(TestUtil.readFileFromPath(PARSED_MARC_RECORD_CONTENT_SAMPLE_PATH)); - - recordDao = new RecordDaoImpl(postgresClientFactory); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); recordService = new RecordServiceImpl(recordDao); Async async = context.async(); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), TestMocks.getSnapshots()).onComplete(save -> { @@ -364,8 +370,9 @@ public void shouldSaveMarcBibRecordWithMatchedIdFrom999field(TestContext context .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordService.saveRecord(record, TENANT_ID).onComplete(save -> { + recordService.saveRecord(record, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -409,8 +416,9 @@ public void shouldFailDuringUpdateRecordGenerationIfIncomingMatchedIdNotEqualToM .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordService.updateRecordGeneration(matchedId, record, TENANT_ID).onComplete(save -> 
{ + recordService.updateRecordGeneration(matchedId, record, okapiHeaders).onComplete(save -> { context.assertTrue(save.failed()); context.assertTrue(save.cause() instanceof BadRequestException); recordDao.getRecordByMatchedId(matchedId, TENANT_ID).onComplete(get -> { @@ -446,8 +454,9 @@ public void shouldFailDuringUpdateRecordGenerationIfRecordWithIdAsIncomingMatche .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordService.updateRecordGeneration(matchedId, record, TENANT_ID).onComplete(save -> { + recordService.updateRecordGeneration(matchedId, record, okapiHeaders).onComplete(save -> { context.assertTrue(save.failed()); context.assertTrue(save.cause() instanceof NotFoundException); recordDao.getRecordByMatchedId(matchedId, TENANT_ID).onComplete(get -> { @@ -501,8 +510,9 @@ public void shouldFailUpdateRecordGenerationIfDuplicateError(TestContext context .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordService.saveRecord(record1, TENANT_ID).onComplete(record1Saved -> { + recordService.saveRecord(record1, okapiHeaders).onComplete(record1Saved -> { if (record1Saved.failed()) { context.fail(record1Saved.cause()); } @@ -515,7 +525,7 @@ public void shouldFailUpdateRecordGenerationIfDuplicateError(TestContext context if (snapshotSaved.failed()) { context.fail(snapshotSaved.cause()); } - recordService.updateRecordGeneration(matchedId, recordToUpdateGeneration, TENANT_ID).onComplete(recordToUpdateGenerationSaved -> { + recordService.updateRecordGeneration(matchedId, recordToUpdateGeneration, okapiHeaders).onComplete(recordToUpdateGenerationSaved -> { context.assertTrue(recordToUpdateGenerationSaved.failed()); context.assertTrue(recordToUpdateGenerationSaved.cause() instanceof BadRequestException); async.complete(); @@ -564,8 +574,9 @@ public void shouldUpdateRecordGeneration(TestContext context) { .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordService.saveRecord(record1, TENANT_ID).onComplete(record1Saved -> { + recordService.saveRecord(record1, okapiHeaders).onComplete(record1Saved -> { if (record1Saved.failed()) { context.fail(record1Saved.cause()); } @@ -578,7 +589,7 @@ public void shouldUpdateRecordGeneration(TestContext context) { if (snapshotSaved.failed()) { context.fail(snapshotSaved.cause()); } - recordService.updateRecordGeneration(matchedId, recordToUpdateGeneration, TENANT_ID).onComplete(recordToUpdateGenerationSaved -> { + recordService.updateRecordGeneration(matchedId, recordToUpdateGeneration, okapiHeaders).onComplete(recordToUpdateGenerationSaved -> { context.assertTrue(recordToUpdateGenerationSaved.succeeded()); context.assertEquals(recordToUpdateGenerationSaved.result().getMatchedId(), matchedId); context.assertEquals(recordToUpdateGenerationSaved.result().getGeneration(), 1); @@ -621,8 +632,9 @@ public void shouldUpdateRecordGenerationByMatchId(TestContext context) { .withMetadata(mock.getMetadata()); var async = context.async(); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordService.saveRecord(recordToSave, TENANT_ID).onComplete(savedRecord -> { + 
recordService.saveRecord(recordToSave, okapiHeaders).onComplete(savedRecord -> { if (savedRecord.failed()) { context.fail(savedRecord.cause()); } @@ -661,7 +673,7 @@ public void shouldUpdateRecordGenerationByMatchId(TestContext context) { context.fail(snapshotSaved.cause()); } - recordService.updateRecordGeneration(matchedId, recordToUpdateGeneration, TENANT_ID).onComplete(recordToUpdateGenerationSaved -> { + recordService.updateRecordGeneration(matchedId, recordToUpdateGeneration, okapiHeaders).onComplete(recordToUpdateGenerationSaved -> { context.assertTrue(recordToUpdateGenerationSaved.succeeded()); context.assertEquals(recordToUpdateGenerationSaved.result().getMatchedId(), matchedId); context.assertEquals(recordToUpdateGenerationSaved.result().getGeneration(), 1); @@ -704,8 +716,9 @@ public void shouldSaveMarcBibRecordWithMatchedIdFromRecordId(TestContext context .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordService.saveRecord(record, TENANT_ID).onComplete(save -> { + recordService.saveRecord(record, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -730,8 +743,9 @@ public void shouldSaveMarcBibRecordWithMatchedIdFromRecordId(TestContext context public void shouldSaveEdifactRecordAndNotSet999Field(TestContext context) { Async async = context.async(); Record record = TestMocks.getRecords(Record.RecordType.EDIFACT); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordService.saveRecord(record, TENANT_ID).onComplete(save -> { + recordService.saveRecord(record, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -783,8 +797,9 @@ public void shouldSaveMarcBibRecordWithMatchedIdFromExistingSourceRecord(TestCon .withAdditionalInfo(original.getAdditionalInfo()) .withExternalIdsHolder(externalIdsHolder) .withMetadata(original.getMetadata()); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordService.saveRecord(record1, TENANT_ID).onComplete(save -> { + recordService.saveRecord(record1, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -801,7 +816,7 @@ public void shouldSaveMarcBibRecordWithMatchedIdFromExistingSourceRecord(TestCon context.assertEquals(recordId1, get.result().get().getMatchedId()); context.assertEquals(getFieldFromMarcRecord(get.result().get(), TAG_999, INDICATOR, INDICATOR, SUBFIELD_S), recordId1); - recordService.saveRecord(record2, TENANT_ID).onComplete(save2 -> { + recordService.saveRecord(record2, okapiHeaders).onComplete(save2 -> { if (save2.failed()) { context.fail(save2.cause()); } @@ -861,7 +876,8 @@ public void shouldFailToSaveRecord(TestContext context) { .withAdditionalInfo(valid.getAdditionalInfo()) .withExternalIdsHolder(valid.getExternalIdsHolder()) .withMetadata(valid.getMetadata()); - recordService.saveRecord(invalid, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordService.saveRecord(invalid, okapiHeaders).onComplete(save -> { context.assertTrue(save.failed()); String expected = "Invalid UUID string: " + fakeSnapshotId; context.assertTrue(save.cause().getMessage().contains(expected)); @@ -893,7 +909,8 @@ public void shouldSaveMarcBibRecordsWithExpectedErrors(TestContext context) { public void shouldUpdateMarcRecord(TestContext context) { Async async = context.async(); Record original = TestMocks.getRecord(0); - 
recordDao.saveRecord(original, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(original, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -910,7 +927,7 @@ public void shouldUpdateMarcRecord(TestContext context) { .withAdditionalInfo(original.getAdditionalInfo()) .withExternalIdsHolder(original.getExternalIdsHolder()) .withMetadata(original.getMetadata()); - recordService.updateRecord(expected, TENANT_ID).onComplete(update -> { + recordService.updateRecord(expected, okapiHeaders).onComplete(update -> { if (update.failed()) { context.fail(update.cause()); } @@ -945,8 +962,9 @@ public void shouldUpdateRecordState(TestContext context) { .withAdditionalInfo(original.getAdditionalInfo()) .withExternalIdsHolder(original.getExternalIdsHolder()) .withMetadata(original.getMetadata()); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordDao.saveRecord(original, TENANT_ID) + recordDao.saveRecord(original, okapiHeaders) .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, TENANT_ID)) .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, TENANT_ID)) .compose(ar -> recordService.updateRecordsState(original.getMatchedId(), RecordState.DRAFT, RecordType.MARC_BIB, TENANT_ID)) @@ -981,8 +999,9 @@ public void shouldUpdateMarcAuthorityRecordStateToDeleted(TestContext context) { .withAdditionalInfo(original.getAdditionalInfo()) .withExternalIdsHolder(original.getExternalIdsHolder()) .withMetadata(original.getMetadata()); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordDao.saveRecord(original, TENANT_ID) + recordDao.saveRecord(original, okapiHeaders) .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, TENANT_ID)) .compose(ar -> recordService.updateRecordsState(original.getMatchedId(), RecordState.DELETED, RecordType.MARC_AUTHORITY, TENANT_ID)) .onComplete(update -> { @@ -1009,7 +1028,9 @@ public void shouldUpdateMarcAuthorityRecordStateToDeleted(TestContext context) { public void shouldUpdateEdifactRecord(TestContext context) { Async async = context.async(); Record original = TestMocks.getEdifactRecord(); - recordDao.saveRecord(original, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + + recordDao.saveRecord(original, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -1026,7 +1047,7 @@ public void shouldUpdateEdifactRecord(TestContext context) { .withAdditionalInfo(original.getAdditionalInfo()) .withExternalIdsHolder(original.getExternalIdsHolder()) .withMetadata(original.getMetadata()); - recordService.updateRecord(expected, TENANT_ID).onComplete(update -> { + recordService.updateRecord(expected, okapiHeaders).onComplete(update -> { if (update.failed()) { context.fail(update.cause()); } @@ -1053,12 +1074,14 @@ public void shouldUpdateEdifactRecord(TestContext context) { public void shouldFailToUpdateRecord(TestContext context) { Async async = context.async(); Record record = TestMocks.getRecord(0); + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.getRecordById(record.getMatchedId(), TENANT_ID).onComplete(get -> { if (get.failed()) { context.fail(get.cause()); } context.assertFalse(get.result().isPresent()); - recordService.updateRecord(record, TENANT_ID).onComplete(update -> { + recordService.updateRecord(record, okapiHeaders).onComplete(update -> { context.assertTrue(update.failed()); String expected = String.format("Record with id '%s' was not found", 
record.getId()); context.assertEquals(expected, update.cause().getMessage()); @@ -1165,8 +1188,9 @@ public void shouldGetMarcBibSourceRecordByMatchedIdNotEqualToId(TestContext cont .withAdditionalInfo(expected.getAdditionalInfo()) .withExternalIdsHolder(expected.getExternalIdsHolder()) .withMetadata(expected.getMetadata()); + var okapiHeaders = Map.of(TENANT, TENANT_ID); - recordDao.saveRecord(expected, TENANT_ID) + recordDao.saveRecord(expected, okapiHeaders) .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, TENANT_ID)) .onComplete(update -> { if (update.failed()) { @@ -1253,7 +1277,9 @@ public void shouldGetFormattedMarcHoldingsRecord(TestContext context) { public void shouldGetFormattedEdifactRecord(TestContext context) { Async async = context.async(); Record expected = TestMocks.getEdifactRecord(); - recordDao.saveRecord(expected, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + + recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -1274,7 +1300,9 @@ public void shouldGetFormattedDeletedRecord(TestContext context) { Async async = context.async(); Record expected = TestMocks.getMarcBibRecord(); expected.setState(State.DELETED); - recordDao.saveRecord(expected, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + + recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -1319,7 +1347,9 @@ public void shouldDeleteMarcAuthorityRecordsBySnapshotId(TestContext context) { public void shouldUpdateSourceRecord(TestContext context) { Async async = context.async(); Record expected = TestMocks.getRecord(0); - recordDao.saveRecord(expected, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + + recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -1384,8 +1414,9 @@ public void shouldThrowExceptionWhenSavedDuplicateRecord(TestContext context) { RecordCollection recordCollection = new RecordCollection() .withRecords(expected) .withTotalRecords(expected.size()); - List<Future<RecordsBatchResponse>> futures = List.of(recordService.saveRecords(recordCollection, TENANT_ID), - recordService.saveRecords(recordCollection, TENANT_ID)); + var okapiHeaders = Map.of(TENANT, TENANT_ID); + List<Future<RecordsBatchResponse>> futures = List.of(recordService.saveRecords(recordCollection, okapiHeaders), + recordService.saveRecords(recordCollection, okapiHeaders)); GenericCompositeFuture.all(futures).onComplete(ar -> { context.assertTrue(ar.failed()); @@ -1517,7 +1548,9 @@ private void streamRecordsBySnapshotId(TestContext context, String snapshotId, R private void getMarcRecordById(TestContext context, Record expected) { Async async = context.async(); - recordDao.saveRecord(expected, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + + recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -1536,7 +1569,9 @@ private void getMarcRecordById(TestContext context, Record expected) { private void saveMarcRecord(TestContext context, Record expected, Record.RecordType marcBib) { Async async = context.async(); - recordService.saveRecord(expected, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + + recordService.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -1560,7 +1595,9 @@ private 
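// On shouldThrowExceptionWhenSavedDuplicateRecord above: saving the same
// RecordCollection twice makes one of the two saveRecords futures fail with a
// duplicate error, and GenericCompositeFuture.all(...) fails as soon as any of
// its constituent futures fails, which is what context.assertTrue(ar.failed())
// relies on (assuming the FOLIO GenericCompositeFuture mirrors Vert.x
// CompositeFuture.all semantics).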
void saveMarcRecord(TestContext context, Record expected, Record.RecordT private void saveMarcRecordWithGenerationGreaterThanZero(TestContext context, Record expected, Record.RecordType marcBib) { Async async = context.async(); expected.setGeneration(1); - recordService.saveRecord(expected, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + + recordService.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -1591,7 +1628,8 @@ private void saveMarcRecords(TestContext context, Record.RecordType marcBib) { RecordCollection recordCollection = new RecordCollection() .withRecords(expected) .withTotalRecords(expected.size()); - recordService.saveRecords(recordCollection, TENANT_ID).onComplete(batch -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordService.saveRecords(recordCollection, okapiHeaders).onComplete(batch -> { if (batch.failed()) { context.fail(batch.cause()); } @@ -1621,7 +1659,8 @@ private void saveMarcRecordsWithExpectedErrors(TestContext context, Record.Recor RecordCollection recordCollection = new RecordCollection() .withRecords(expected) .withTotalRecords(expected.size()); - recordService.saveRecords(recordCollection, TENANT_ID).onComplete(batch -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordService.saveRecords(recordCollection, okapiHeaders).onComplete(batch -> { if (batch.failed()) { context.fail(batch.cause()); } @@ -1844,7 +1883,9 @@ private void getMarcSourceRecordsByListOfIdsThatAreDeleted(TestContext context, private void getMarcSourceRecordById(TestContext context, Record expected) { Async async = context.async(); - recordDao.saveRecord(expected, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + + recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -1925,10 +1966,12 @@ private void updateParsedMarcRecords(TestContext context, Record.RecordType reco private void updateParsedMarcRecordsAndGetOnlyActualRecord(TestContext context, Record expected) { Async async = context.async(); - recordDao.saveRecord(expected, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + + recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { context.assertTrue(save.succeeded()); expected.setLeaderRecordStatus("a"); - recordService.updateRecord(expected, TENANT_ID) + recordService.updateRecord(expected, okapiHeaders) .compose(v -> recordService.getFormattedRecord(expected.getMatchedId(), IdType.RECORD, TENANT_ID)) .onComplete(get -> { context.assertTrue(get.succeeded()); @@ -1943,7 +1986,9 @@ private void updateParsedMarcRecordsAndGetOnlyActualRecord(TestContext context, private void getFormattedMarcRecord(TestContext context, Record expected) { Async async = context.async(); - recordDao.saveRecord(expected, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + + recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); } @@ -1963,7 +2008,9 @@ private void getFormattedMarcRecord(TestContext context, Record expected) { private void updateSuppressFromDiscoveryForMarcRecord(TestContext context, Record expected) { Async async = context.async(); - recordDao.saveRecord(expected, TENANT_ID).onComplete(save -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + + recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { 
context.fail(save.cause()); } @@ -2037,8 +2084,9 @@ private void deleteMarcRecordsBySnapshotId(TestContext context, String snapshotI } private CompositeFuture saveRecords(List records) { + var okapiHeaders = Map.of(TENANT, TENANT_ID); return GenericCompositeFuture.all(records.stream() - .map(record -> recordService.saveRecord(record, AbstractLBServiceTest.TENANT_ID)) + .map(record -> recordService.saveRecord(record, okapiHeaders)) .collect(Collectors.toList()) ); } diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AbstractPostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AbstractPostProcessingEventHandlerTest.java index e4099ab67..bb394223e 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AbstractPostProcessingEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AbstractPostProcessingEventHandlerTest.java @@ -33,10 +33,12 @@ import org.folio.services.SnapshotService; import org.folio.services.SnapshotServiceImpl; import org.folio.services.caches.MappingParametersSnapshotCache; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; +import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.folio.DataImportEventPayload; @@ -65,6 +67,8 @@ public abstract class AbstractPostProcessingEventHandlerTest extends AbstractLBS private static ParsedRecord parsedRecord; protected final String snapshotId1 = UUID.randomUUID().toString(); protected final String snapshotId2 = UUID.randomUUID().toString(); + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; protected Record record; protected RecordDao recordDao; protected RecordService recordService; @@ -100,7 +104,7 @@ public void setUp(TestContext context) { .withMappingParams(Json.encode(new MappingParameters())))))); mappingParametersCache = new MappingParametersSnapshotCache(vertx); - recordDao = new RecordDaoImpl(postgresClientFactory); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); recordService = new RecordServiceImpl(recordDao); snapshotService = new SnapshotServiceImpl(snapshotDao); handler = createHandler(recordService, snapshotService, kafkaConfig); diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java index 1f1b0510e..f2fe2dd79 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java @@ -2,6 +2,7 @@ import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_AUTHORITY_CREATED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_AUTHORITY_UPDATED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_CREATED; @@ -15,6 +16,7 @@ import java.util.Date; import java.util.HashMap; import java.util.List; +import java.util.Map; import 
java.util.UUID; import java.util.concurrent.CompletableFuture; @@ -86,7 +88,8 @@ public void shouldSetAuthorityIdToRecord(TestContext context) { createDataImportEventPayload(payloadContext, DI_INVENTORY_AUTHORITY_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) .thenApply(future::complete) @@ -135,7 +138,7 @@ public void shouldSetAuthorityIdToRecord(TestContext context) { createDataImportEventPayload(payloadContextForUpdate, DI_INVENTORY_AUTHORITY_UPDATED_READY_FOR_POST_PROCESSING); CompletableFuture future2 = new CompletableFuture<>(); - recordDao.saveRecord(recordForUpdate, TENANT_ID) + recordDao.saveRecord(recordForUpdate, okapiHeaders) .onFailure(future2::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayloadForUpdate) .thenApply(future2::complete) @@ -243,7 +246,8 @@ public void shouldSetAuthorityIdToParsedRecordWhenContentHasField999(TestContext createDataImportEventPayload(payloadContext, DI_INVENTORY_AUTHORITY_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) .thenApply(future::complete) @@ -429,7 +433,8 @@ public void shouldReturnFailedFutureWhenParsedRecordHasNoFields(TestContext cont .withToken(TOKEN); CompletableFuture future = new CompletableFuture<>(); - recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) .thenApply(future::complete) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java index bfa9a5be8..135149f41 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java @@ -1,6 +1,7 @@ package org.folio.services.handlers; import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDING_RECORD_CREATED; import static org.folio.rest.jaxrs.model.EntityType.HOLDINGS; @@ -11,11 +12,9 @@ import static org.folio.services.util.AdditionalFieldsUtil.TAG_005; import java.io.IOException; -import java.time.Instant; -import java.time.ZoneId; -import java.time.ZonedDateTime; import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.UUID; import java.util.concurrent.CompletableFuture; @@ -84,7 +83,8 @@ public void shouldSetHoldingsIdToRecord(TestContext context) { createDataImportEventPayload(payloadContext, DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING); 
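// The okapiHeaders idiom used throughout these tests builds a minimal, immutable
// header map via Map.of; TENANT is the statically imported
// org.folio.okapi.common.XOkapiHeaders.TENANT constant. A test that also needed a
// user id in its headers could extend the map along these lines (a hypothetical
// variant, not part of this patch):
//   var okapiHeaders = Map.of(TENANT, TENANT_ID, XOkapiHeaders.USER_ID, userId);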
CompletableFuture future = new CompletableFuture<>(); - recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) .thenApply(future::complete) @@ -197,7 +197,8 @@ public void shouldSetHoldingsIdToParsedRecordWhenContentHasField999(TestContext createDataImportEventPayload(payloadContext, DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) .thenApply(future::complete) @@ -367,7 +368,8 @@ public void shouldSetHoldingsHridToParsedRecordWhenContentHasNotField001(TestCon createDataImportEventPayload(payloadContext, DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) .thenApply(future::complete) @@ -439,7 +441,8 @@ public void shouldReturnFailedFutureWhenParsedRecordHasNoFields(TestContext cont .withToken(TOKEN); CompletableFuture future = new CompletableFuture<>(); - recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) .thenApply(future::complete) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java index 6625fe1a4..f202445fc 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java @@ -12,6 +12,7 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.util.Map; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; @@ -50,6 +51,7 @@ import static com.github.tomakehurst.wiremock.client.WireMock.get; import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_ORDER_CREATED_READY_FOR_POST_PROCESSING; @@ -112,7 +114,8 @@ public void shouldSetInstanceIdToRecord(TestContext context) { createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, 
okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) .thenApply(future::complete) @@ -159,7 +162,7 @@ public void shouldSetInstanceIdToRecord(TestContext context) { createDataImportEventPayload(payloadContextForUpdate, DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING); CompletableFuture future2 = new CompletableFuture<>(); - recordDao.saveRecord(recordForUpdate, TENANT_ID) + recordDao.saveRecord(recordForUpdate, okapiHeaders) .onFailure(future2::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayloadForUpdate) .thenApply(future2::complete) @@ -204,7 +207,7 @@ public void shouldProceedIfConsortiumTrackExists(TestContext context) { doAnswer(invocationOnMock -> List.of(record)).when(recordCollection).getRecords(); - doAnswer(invocationOnMock -> Future.succeededFuture(record)).when(mockedRecordService).updateRecord(any(), anyString()); + doAnswer(invocationOnMock -> Future.succeededFuture(record)).when(mockedRecordService).updateRecord(any(), any()); InstancePostProcessingEventHandler handler = new InstancePostProcessingEventHandler(mockedRecordService, mockedSnapshotService, kafkaConfig, mappingParametersCache, vertx); @@ -415,7 +418,8 @@ public void shouldSaveIncomingRecordAndMarkExistingAsOldWhenIncomingRecordHasSam DataImportEventPayload dataImportEventPayload = createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING); - Future future = recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + Future future = recordDao.saveRecord(existingRecord, okapiHeaders) .compose(v -> Future.fromCompletionStage(handler.handle(dataImportEventPayload))); future.onComplete(ar -> { @@ -468,7 +472,8 @@ public void checkGeneration035FiledAfterUpdateMarcBib(TestContext context) throw DataImportEventPayload dataImportEventPayload = createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING); - Future future = recordDao.saveRecord(existingRecord, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + Future future = recordDao.saveRecord(existingRecord, okapiHeaders) .compose(v -> Future.fromCompletionStage(handler.handle(dataImportEventPayload))); future.onComplete(ar -> { @@ -511,7 +516,8 @@ public void shouldSetInstanceIdToParsedRecordWhenContentHasField999(TestContext createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) .thenApply(future::complete) @@ -682,7 +688,8 @@ public void shouldSetInstanceHridToParsedRecordWhenContentHasNotField001(TestCon createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) .thenApply(future::complete) @@ -754,7 +761,8 @@ public void shouldReturnFailedFutureWhenParsedRecordHasNoFields(TestContext cont .withToken(TOKEN); CompletableFuture future = new CompletableFuture<>(); - 
recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) .thenApply(future::complete) @@ -886,7 +894,8 @@ public void shouldFillEventPayloadWithPostProcessingFlagIfOrderEventExists(TestC dataImportEventPayload.getContext().put(POST_PROCESSING_RESULT_EVENT, DI_ORDER_CREATED_READY_FOR_POST_PROCESSING.value()); CompletableFuture future = new CompletableFuture<>(); - recordDao.saveRecord(record, TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) .thenApply(future::complete) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java b/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java index 1df1c1edb..34c902477 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java @@ -4,6 +4,7 @@ import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.util.Map; import org.folio.TestMocks; import org.folio.dao.RecordDao; import org.folio.dao.RecordDaoImpl; @@ -15,6 +16,7 @@ import org.folio.services.RecordServiceImpl; import org.folio.services.TenantDataProvider; import org.folio.services.TenantDataProviderImpl; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.jooq.Field; import org.jooq.Table; import org.junit.After; @@ -23,7 +25,10 @@ import org.junit.runner.RunWith; import java.util.UUID; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.Record.State.ACTUAL; import static org.folio.rest.jaxrs.model.Record.State.OLD; import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; @@ -38,6 +43,8 @@ public class MarcIndexersVersionDeletionVerticleTest extends AbstractLBServiceTe private static final String MARC_ID_FIELD = "marc_id"; private static final String VERSION_FIELD = "version"; + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; private RecordDao recordDao; private TenantDataProvider tenantDataProvider; private RecordService recordService; @@ -46,8 +53,9 @@ public class MarcIndexersVersionDeletionVerticleTest extends AbstractLBServiceTe @Before public void setUp(TestContext context) { + MockitoAnnotations.openMocks(this); Async async = context.async(); - recordDao = new RecordDaoImpl(postgresClientFactory); + recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); tenantDataProvider = new TenantDataProviderImpl(vertx); recordService = new RecordServiceImpl(recordDao); marcIndexersVersionDeletionVerticle = new MarcIndexersVersionDeletionVerticle(recordDao, tenantDataProvider); @@ -65,8 +73,9 @@ public void setUp(TestContext context) { .withRawRecord(TestMocks.getRecord(0).getRawRecord().withId(recordId)) .withParsedRecord(TestMocks.getRecord(0).getParsedRecord().withId(recordId)); + var okapiHeaders = Map.of(TENANT, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) 
- .compose(savedSnapshot -> recordService.saveRecord(record, TENANT_ID)) + .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders)) .onComplete(save -> { if (save.failed()) { context.fail(save.cause()); @@ -90,9 +99,10 @@ public void cleanUp(TestContext context) { public void shouldDeleteOldVersionsOfMarcIndexers(TestContext context) { Async async = context.async(); + var okapiHeaders = Map.of(TENANT, TENANT_ID); // performs record update in the DB that leads to new indexers creation with incremented version // so that previous existing indexers become old and should be deleted - Future future = recordService.updateRecord(record, TENANT_ID) + Future future = recordService.updateRecord(record, okapiHeaders) .compose(v -> existOldMarcIndexersVersions()) .onSuccess(context::assertTrue) .compose(v -> marcIndexersVersionDeletionVerticle.deleteOldMarcIndexerVersions()) @@ -109,7 +119,8 @@ public void shouldDeleteOldVersionsOfMarcIndexers(TestContext context) { public void shouldDeleteMarcIndexersRelatedToRecordInOldState(TestContext context) { Async async = context.async(); - Future future = recordService.updateRecord(record.withState(OLD), TENANT_ID) + var okapiHeaders = Map.of(TENANT, TENANT_ID); + Future future = recordService.updateRecord(record.withState(OLD), okapiHeaders) .compose(v -> existMarcIndexersByRecordId(record.getId())) .onSuccess(context::assertTrue) .compose(v -> marcIndexersVersionDeletionVerticle.deleteOldMarcIndexerVersions()) @@ -149,6 +160,3 @@ private Future existMarcIndexersByRecordId(String recordId) { } } - - - diff --git a/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java b/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java index 53ffd9e32..c9f7556b0 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java @@ -4,6 +4,7 @@ import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Collections.singletonList; import static org.folio.ActionProfile.Action.UPDATE; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC; @@ -27,6 +28,7 @@ import java.util.Date; import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.UUID; import java.util.concurrent.TimeUnit; @@ -45,6 +47,7 @@ import net.mguenther.kafka.junit.KeyValue; import net.mguenther.kafka.junit.ObserveKeyValues; import net.mguenther.kafka.junit.SendKeyValues; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -74,6 +77,8 @@ import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.Snapshot; import org.folio.services.AbstractLBServiceTest; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; @RunWith(VertxUnitRunner.class) public class DataImportConsumersVerticleTest extends AbstractLBServiceTest { @@ -99,6 +104,8 @@ public class DataImportConsumersVerticleTest extends AbstractLBServiceTest { .withSubaction(MarcSubfield.Subaction.INSERT) 
.withPosition(MarcSubfield.Position.BEFORE_STRING) .withData(new Data().withText("http://libproxy.smith.edu?url="))))); + @Mock + private RecordDomainEventPublisher recordDomainEventPublisher; @Rule public WireMockRule mockServer = new WireMockRule( @@ -110,6 +117,7 @@ public class DataImportConsumersVerticleTest extends AbstractLBServiceTest { @Before public void setUp(TestContext context) throws IOException { + MockitoAnnotations.openMocks(this); WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(MAPPING_METADATA_URL + "/.*"), true)) .willReturn(WireMock.ok().withBody(Json.encode(new MappingMetadataDto() .withMappingParams(Json.encode(new MappingParameters())))))); @@ -140,10 +148,11 @@ record = new Record() .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())); ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID); - RecordDaoImpl recordDao = new RecordDaoImpl(postgresClientFactory); + RecordDaoImpl recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher); + var okapiHeaders = Map.of(TENANT, TENANT_ID); SnapshotDaoUtil.save(queryExecutor, snapshot) - .compose(v -> recordDao.saveRecord(record, TENANT_ID)) + .compose(v -> recordDao.saveRecord(record, okapiHeaders)) .compose(v -> SnapshotDaoUtil.save(queryExecutor, snapshotForRecordUpdate)) .onComplete(context.asyncAssertSuccess()); } From 39e8e3738832c194b188d04683a721efde1d85a9 Mon Sep 17 00:00:00 2001 From: pbobylev Date: Mon, 29 Jul 2024 16:50:37 +0500 Subject: [PATCH 3/8] MODSOURCE-752: Use RAML definition for event DTO --- .../RecordDomainEventPublisher.java | 39 ++++++-- .../services/util/EventHandlingUtil.java | 6 +- ramls/source-record-domain-event.json | 97 +++++++++++++++++++ ramls/source-record-storage-records.raml | 1 + 4 files changed, 134 insertions(+), 9 deletions(-) create mode 100644 ramls/source-record-domain-event.json diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java index cc1cf072b..adc483814 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java @@ -3,16 +3,23 @@ import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.okapi.common.XOkapiHeaders.TOKEN; import static org.folio.okapi.common.XOkapiHeaders.URL; +import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_CREATED; +import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_UPDATED; import static org.folio.services.util.EventHandlingUtil.sendEventToKafka; +import io.vertx.core.Future; import io.vertx.core.Vertx; import io.vertx.core.json.Json; import io.vertx.kafka.client.producer.KafkaHeader; import java.util.ArrayList; import java.util.List; import java.util.Map; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.folio.kafka.KafkaConfig; import org.folio.rest.jaxrs.model.Record; +import org.folio.rest.jaxrs.model.SourceRecordDomainEvent; +import org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -20,19 +27,31 @@ public class 
RecordDomainEventPublisher { public static final String RECORD_DOMAIN_TOPIC = "srs.source_records"; - public static final String SOURCE_RECORD_CREATED = "SOURCE_RECORD_CREATED"; - public static final String SOURCE_RECORD_UPDATED = "SOURCE_RECORD_UPDATED"; private static final String RECORD_TYPE = "folio.srs.recordType"; + private static final Logger LOG = LogManager.getLogger(); @Autowired private KafkaConfig kafkaConfig; public void publishRecordCreated(Record created, Map okapiHeaders) { + publishRecord(created, okapiHeaders, SOURCE_RECORD_CREATED); + } + + public void publishRecordUpdated(Record updated, Map okapiHeaders) { + publishRecord(updated, okapiHeaders, SOURCE_RECORD_UPDATED); + } + + private void publishRecord(Record aRecord, Map okapiHeaders, EventType eventType) { Vertx.vertx().executeBlocking(() -> { - var kafkaHeaders = getKafkaHeaders(okapiHeaders, created.getRecordType()); - var key = created.getId(); - return sendEventToKafka(okapiHeaders.get(TENANT), Json.encode(created), SOURCE_RECORD_CREATED, kafkaHeaders, - kafkaConfig, key); + try { + var kafkaHeaders = getKafkaHeaders(okapiHeaders, aRecord.getRecordType()); + var key = aRecord.getId(); + return sendEventToKafka(okapiHeaders.get(TENANT), getEvent(aRecord, eventType), + eventType.value(), kafkaHeaders, kafkaConfig, key); + } catch (Exception e) { + LOG.error("Exception during Record domain event sending", e); + return Future.failedFuture(e); + } }); } @@ -45,4 +64,12 @@ private List getKafkaHeaders(Map okapiHeaders, Reco ); } + private String getEvent(Record eventRecord, EventType type) { + var event = new SourceRecordDomainEvent() + .withId(eventRecord.getId()) + .withEventType(type) + .withEventPayload((String) eventRecord.getParsedRecord().getContent()); + return Json.encode(event); + } + } diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java index 69e0bd0e4..3008bf1eb 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java @@ -1,12 +1,12 @@ package org.folio.services.util; +import static java.util.Arrays.stream; import static java.util.Objects.nonNull; import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.okapi.common.XOkapiHeaders.TOKEN; import static org.folio.okapi.common.XOkapiHeaders.URL; +import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType; import static org.folio.services.domainevent.RecordDomainEventPublisher.RECORD_DOMAIN_TOPIC; -import static org.folio.services.domainevent.RecordDomainEventPublisher.SOURCE_RECORD_CREATED; -import static org.folio.services.domainevent.RecordDomainEventPublisher.SOURCE_RECORD_UPDATED; import static org.folio.services.util.KafkaUtil.extractHeaderValue; import io.vertx.core.Future; @@ -107,7 +107,7 @@ public static String constructModuleName() { } public static String createTopicName(String eventType, String tenantId, KafkaConfig kafkaConfig) { - if (SOURCE_RECORD_CREATED.equals(eventType) || SOURCE_RECORD_UPDATED.equals(eventType)) { + if (stream(EventType.values()).anyMatch(et -> et.value().equals(eventType))) { return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), tenantId, RECORD_DOMAIN_TOPIC); } return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), KafkaTopicNameHelper.getDefaultNameSpace(), 
tenantId, eventType);
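Note: once these events reach the srs.source_records topic, a subscriber can decode them with the same Vert.x JSON helpers used above. A minimal consumer-side sketch; the subscriber wiring and the consumerRecord variable are illustrative assumptions, not part of this change:

    // Hypothetical subscriber, given a KafkaConsumerRecord<String, String> consumerRecord
    // read from the srs.source_records topic:
    SourceRecordDomainEvent event = Json.decodeValue(consumerRecord.value(), SourceRecordDomainEvent.class);
    if (event.getEventType() == SourceRecordDomainEvent.EventType.SOURCE_RECORD_CREATED) {
      String content = event.getEventPayload(); // the source record content as a string
    }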
diff --git a/ramls/source-record-domain-event.json b/ramls/source-record-domain-event.json new file mode 100644 index 000000000..24fa5f1e4 --- /dev/null +++ b/ramls/source-record-domain-event.json @@ -0,0 +1,97 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Source record domain event data model", + "javaType": "org.folio.rest.jaxrs.model.SourceRecordDomainEvent", + "type": "object", + "additionalProperties": false, + "properties": { + "id": { + "description": "UUID", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "eventType": { + "type": "string", + "enum": ["SOURCE_RECORD_CREATED", "SOURCE_RECORD_UPDATED"], + "description": "Source record domain event type" + }, + "sourceRecordDomainEventMetadata": { + "description": "Event metadata", + "type": "object", + "additionalProperties": false, + "properties": { + "eventTTL": { + "description": "Time-to-live (TTL) for event in minutes", + "type": "integer" + }, + "correlationId": { + "description": "Id to track related events, can be a meaningful string or a UUID", + "type": "string" + }, + "originalEventId": { + "description": "Id of the event that started the sequence of related events", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "publisherCallback": { + "description": "Allows a publisher to provide a callback endpoint or an error Event Type to be notified that despite the fact that there are subscribers for such an event type no one has received the event within the specified period of time", + "type": "object", + "properties": { + "endpoint": { + "description": "Callback endpoint", + "type": "string" + }, + "eventType": { + "description": "Error Event Type", + "type": "string" + } + } + }, + "createdDate": { + "description": "Timestamp when event was created", + "type": "string", + "format": "date-time" + }, + "publishedDate": { + "description": "Timestamp when event was initially published to the underlying topic", + "type": "string", + "format": "date-time" + }, + "createdBy": { + "description": "Username of the user whose action caused an event", + "type": "string" + }, + "publishedBy": { + "description": "Name and version of the module that published an event", + "type": "string" + } + }, + "required": [ + "eventTTL", + "publishedBy" + ] + }, + "eventPayload": { + "type": "string", + "description": "The source record JSON string" + }, + "tenant": { + "description": "Tenant id", + "type": "string" + }, + "ts": { + "description": "Message timestamp", + "type": "string", + "format": "date-time" + } + }, + "excludedFromEqualsAndHashCode": [ + "sourceRecordDomainEventMetadata", + "tenant", + "ts" + ], + "required": [ + "id", + "eventType" + ] +} diff --git a/ramls/source-record-storage-records.raml b/ramls/source-record-storage-records.raml index 1d7a99083..964125f86 100644 --- a/ramls/source-record-storage-records.raml +++ b/ramls/source-record-storage-records.raml @@ -29,6 +29,7 @@ types: linkUpdateReport: !include raml-storage/schemas/mod-source-record-storage/linkUpdateReport.json recordMatchingDto: !include raml-storage/schemas/dto/recordMatchingRqDto.json recordsIdentifiersCollection: !include raml-storage/schemas/dto/recordsIdentifiersCollection.json + sourceRecordDomainEvent: !include source-record-domain-event.json traits: validate: !include raml-storage/raml-util/traits/validation.raml
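Note: to make the wire format of the new DTO concrete, a short sketch of producing the event JSON; the id and MARC content below are invented sample values, and the optional metadata, tenant and ts properties are omitted:

    // Illustrative only, using the generated SourceRecordDomainEvent model:
    var event = new SourceRecordDomainEvent()
      .withId("6f8a43e4-1e63-4a1a-9bbd-1e1bfcbe7a4c")
      .withEventType(SourceRecordDomainEvent.EventType.SOURCE_RECORD_CREATED)
      .withEventPayload("{\"leader\":\"00000nam a2200000 a 4500\",\"fields\":[]}");
    String json = Json.encode(event);
    // json: {"id":"6f8a43e4-...","eventType":"SOURCE_RECORD_CREATED","eventPayload":"{\"leader\":...}"}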
From 15a07eec78c79407aea5caa6d648878449519627 Mon Sep 17 00:00:00 2001 From: pbobylev Date: Mon, 29 Jul 2024 16:58:04 +0500 Subject: [PATCH 4/8] MODSOURCE-752: add config param to enable/disable domain events --- .../folio/services/domainevent/RecordDomainEventPublisher.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java index adc483814..3303f9bff 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java @@ -21,6 +21,7 @@ import org.folio.rest.jaxrs.model.SourceRecordDomainEvent; import org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; @Component @@ -29,6 +30,8 @@ public class RecordDomainEventPublisher { public static final String RECORD_DOMAIN_TOPIC = "srs.source_records"; private static final String RECORD_TYPE = "folio.srs.recordType"; private static final Logger LOG = LogManager.getLogger(); + @Value("${ENABLE_DOMAIN_EVENTS:true}") + private boolean enableDomainEvents; @Autowired private KafkaConfig kafkaConfig; From ab9cacecf91355cfee8fcba3b2fd145d83c130ac Mon Sep 17 00:00:00 2001 From: pbobylev Date: Tue, 30 Jul 2024 15:00:53 +0500 Subject: [PATCH 5/8] MODSOURCE-752: add domain event sending for Update record methods --- .../consumers/QuickMarcKafkaHandler.java | 31 +++++++++---------- .../main/java/org/folio/dao/RecordDao.java | 15 +++++---- .../java/org/folio/dao/RecordDaoImpl.java | 13 ++++---- .../org/folio/services/RecordService.java | 4 +-- .../org/folio/services/RecordServiceImpl.java | 9 +++--- .../org/folio/services/RecordServiceTest.java | 10 +++--- 6 files changed, 41 insertions(+), 41 deletions(-) diff --git a/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java index 0e423b6f4..a54182612 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java @@ -2,15 +2,10 @@ import static org.folio.dao.util.QMEventTypes.QM_ERROR; import static org.folio.dao.util.QMEventTypes.QM_SRS_MARC_RECORD_UPDATED; -import static org.folio.kafka.KafkaHeaderUtils.kafkaHeadersToMap; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.services.util.EventHandlingUtil.createProducer; import static org.folio.services.util.EventHandlingUtil.createProducerRecord; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.atomic.AtomicInteger; +import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; import io.vertx.core.Future; import io.vertx.core.Promise; @@ -19,19 +14,22 @@ import io.vertx.kafka.client.consumer.KafkaConsumerRecord; import io.vertx.kafka.client.producer.KafkaHeader; import io.vertx.kafka.client.producer.KafkaProducer; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.folio.services.RecordService; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.stereotype.Component; - import org.folio.dao.util.QMEventTypes; import org.folio.kafka.AsyncRecordHandler; import org.folio.kafka.KafkaConfig; import org.folio.rest.jaxrs.model.Event; import org.folio.rest.jaxrs.model.ParsedRecordDto; -import org.folio.rest.util.OkapiConnectionParams; +import org.folio.services.RecordService; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; @Component public class QuickMarcKafkaHandler implements AsyncRecordHandler { @@ -67,22 +65,23 @@ public Future handle(KafkaConsumerRecord consumerRecord) log.trace("handle:: Handling kafka consumerRecord {}", consumerRecord); var kafkaHeaders = consumerRecord.headers(); - var params = new OkapiConnectionParams(kafkaHeadersToMap(kafkaHeaders), vertx); + var okapiHeaders = toOkapiHeaders(kafkaHeaders, null); return getEventPayload(consumerRecord) .compose(eventPayload -> { String snapshotId = eventPayload.getOrDefault(SNAPSHOT_ID_KEY, UUID.randomUUID().toString()); + var tenantId = okapiHeaders.get(TENANT); return getRecordDto(eventPayload) - .compose(recordDto -> recordService.updateSourceRecord(recordDto, snapshotId, params.getTenantId())) + .compose(recordDto -> recordService.updateSourceRecord(recordDto, snapshotId, okapiHeaders)) .compose(updatedRecord -> { eventPayload.put(updatedRecord.getRecordType().value(), Json.encode(updatedRecord)); - return sendEvent(eventPayload, QM_SRS_MARC_RECORD_UPDATED, params.getTenantId(), kafkaHeaders) + return sendEvent(eventPayload, QM_SRS_MARC_RECORD_UPDATED, tenantId, kafkaHeaders) .map(aBoolean -> consumerRecord.key()); }) .recover(th -> { log.warn("handle:: Failed to handle QM_RECORD_UPDATED event", th); eventPayload.put(ERROR_KEY, th.getMessage()); - return sendEvent(eventPayload, QM_ERROR, params.getTenantId(), kafkaHeaders) + return sendEvent(eventPayload, QM_ERROR, tenantId, kafkaHeaders) .map(aBoolean -> th.getMessage()); }); }) diff --git a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java index d6679aff0..d784997fc 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java @@ -1,14 +1,17 @@ package org.folio.dao; +import io.github.jklingsporn.vertx.jooq.classic.reactivepg.ReactiveClassicGenericQueryExecutor; +import io.reactivex.Flowable; +import io.vertx.core.Future; +import io.vertx.sqlclient.Row; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.Function; - -import io.vertx.sqlclient.Row; import net.sf.jsqlparser.JSQLParserException; import org.folio.dao.util.IdType; +import org.folio.dao.util.MatchField; import org.folio.dao.util.RecordType; import org.folio.rest.jaxrs.model.MarcBibCollection; import org.folio.rest.jaxrs.model.ParsedRecord; @@ -22,17 +25,12 @@ import org.folio.rest.jaxrs.model.StrippedParsedRecordCollection; import org.folio.rest.jooq.enums.RecordState; import org.folio.services.RecordSearchParameters; -import org.folio.dao.util.MatchField; import org.folio.services.util.TypeConnection; import org.folio.services.util.parser.ParseFieldsResult; import 
org.folio.services.util.parser.ParseLeaderResult; import org.jooq.Condition; import org.jooq.OrderField; -import io.github.jklingsporn.vertx.jooq.classic.reactivepg.ReactiveClassicGenericQueryExecutor; -import io.reactivex.Flowable; -import io.vertx.core.Future; - /** * Data access object for {@link Record} */ @@ -372,9 +370,10 @@ Future getMatchedRecordsIdentifiers(MatchField mat * @param txQE query execution * @param newRecord new Record to create * @param oldRecord old Record that has to be marked as "old" + * @param okapiHeaders okapi headers * @return future with new "updated" Record */ - Future saveUpdatedRecord(ReactiveClassicGenericQueryExecutor txQE, Record newRecord, Record oldRecord); + Future saveUpdatedRecord(ReactiveClassicGenericQueryExecutor txQE, Record newRecord, Record oldRecord, Map okapiHeaders); /** * Change suppress from discovery flag for record by external relation id diff --git a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java index b7a25d8fd..a7a7fb3e5 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java @@ -735,8 +735,7 @@ public Future> getRecordByCondition(ReactiveClassicGenericQuery public Future saveRecord(Record record, Map okapiHeaders) { var tenantId = okapiHeaders.get(TENANT); LOG.trace("saveRecord:: Saving {} record {} for tenant {}", record.getRecordType(), record.getId(), tenantId); - return getQueryExecutor(tenantId).transaction(txQE -> saveRecord(txQE, record, okapiHeaders)) - .onSuccess(created -> recordDomainEventPublisher.publishRecordCreated(created, okapiHeaders)); + return getQueryExecutor(tenantId).transaction(txQE -> saveRecord(txQE, record, okapiHeaders)); } @Override @@ -971,8 +970,9 @@ public Future updateRecord(Record record, Map okapiHeade LOG.trace("updateRecord:: Updating {} record {} for tenant {}", record.getRecordType(), record.getId(), tenantId); return getQueryExecutor(tenantId).transaction(txQE -> getRecordById(txQE, record.getId()) .compose(optionalRecord -> optionalRecord - .map(r -> saveRecord(txQE, record, okapiHeaders)) - .orElse(Future.failedFuture(new NotFoundException(format(RECORD_NOT_FOUND_TEMPLATE, record.getId())))))); + .map(r -> insertOrUpdateRecord(txQE, record)) + .orElse(Future.failedFuture(new NotFoundException(format(RECORD_NOT_FOUND_TEMPLATE, record.getId())))))) + .onSuccess(updated -> recordDomainEventPublisher.publishRecordUpdated(updated, okapiHeaders)); } @Override @@ -1299,9 +1299,10 @@ private MarcBibCollection toMarcBibCollection(QueryResult result) { } @Override - public Future saveUpdatedRecord(ReactiveClassicGenericQueryExecutor txQE, Record newRecord, Record oldRecord) { + public Future saveUpdatedRecord(ReactiveClassicGenericQueryExecutor txQE, Record newRecord, Record oldRecord, Map okapiHeaders) { LOG.trace("saveUpdatedRecord:: Saving updated record {}", newRecord.getId()); - return insertOrUpdateRecord(txQE, oldRecord).compose(r -> insertOrUpdateRecord(txQE, newRecord)); + return insertOrUpdateRecord(txQE, oldRecord).compose(r -> insertOrUpdateRecord(txQE, newRecord)) + .onSuccess(r -> recordDomainEventPublisher.publishRecordUpdated(r, okapiHeaders)); } @Override diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java index 
b8e666b55..c1cf7294c 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java @@ -241,10 +241,10 @@ public interface RecordService { * * @param parsedRecordDto parsed record DTO containing updates to parsed record * @param snapshotId snapshot id to which new Record should be linked - * @param tenantId tenant id + * @param okapiHeaders okapi headers * @return future with updated Record */ - Future updateSourceRecord(ParsedRecordDto parsedRecordDto, String snapshotId, String tenantId); + Future updateSourceRecord(ParsedRecordDto parsedRecordDto, String snapshotId, Map okapiHeaders); /** * Find marc bib ids by incoming arrays from SRM and exclude all valid marc bib and return only marc bib ids, diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java index 06c455cd6..f8b8f39d2 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java @@ -149,7 +149,8 @@ public Future saveRecord(Record record, Map okapiHeaders if (generation > 0) { return recordDao.getRecordByMatchedId(txQE, record.getMatchedId()) .compose(optionalMatchedRecord -> optionalMatchedRecord - .map(matchedRecord -> recordDao.saveUpdatedRecord(txQE, ensureRecordForeignKeys(record.withGeneration(generation)), matchedRecord.withState(Record.State.OLD))) + .map(matchedRecord -> recordDao.saveUpdatedRecord(txQE, ensureRecordForeignKeys(record.withGeneration(generation)), + matchedRecord.withState(Record.State.OLD), okapiHeaders)) .orElseGet(() -> recordDao.saveRecord(txQE, ensureRecordForeignKeys(record.withGeneration(generation)), okapiHeaders))); } else { return recordDao.saveRecord(txQE, ensureRecordForeignKeys(record.withGeneration(generation)), okapiHeaders); @@ -293,7 +294,7 @@ public Future deleteRecordsByExternalId(String externalId, String tenantId } @Override - public Future updateSourceRecord(ParsedRecordDto parsedRecordDto, String snapshotId, String tenantId) { + public Future updateSourceRecord(ParsedRecordDto parsedRecordDto, String snapshotId, Map okapiHeaders) { String newRecordId = UUID.randomUUID().toString(); return recordDao.executeInTransaction(txQE -> recordDao.getRecordByMatchedId(txQE, parsedRecordDto.getId()) .compose(optionalRecord -> optionalRecord @@ -313,9 +314,9 @@ public Future updateSourceRecord(ParsedRecordDto parsedRecordDto, String .withParsedRecord(new ParsedRecord().withId(newRecordId).withContent(parsedRecordDto.getParsedRecord().getContent())) .withExternalIdsHolder(parsedRecordDto.getExternalIdsHolder()) .withAdditionalInfo(parsedRecordDto.getAdditionalInfo()) - .withMetadata(parsedRecordDto.getMetadata()), existingRecord.withState(Record.State.OLD)))) + .withMetadata(parsedRecordDto.getMetadata()), existingRecord.withState(Record.State.OLD), okapiHeaders))) .orElse(Future.failedFuture(new NotFoundException( - format(RECORD_NOT_FOUND_TEMPLATE, parsedRecordDto.getId()))))), tenantId); + format(RECORD_NOT_FOUND_TEMPLATE, parsedRecordDto.getId()))))), okapiHeaders.get(TENANT)); } @Override diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java index 
c5c116c8f..f56caef20 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java @@ -965,8 +965,8 @@ public void shouldUpdateRecordState(TestContext context) { var okapiHeaders = Map.of(TENANT, TENANT_ID); recordDao.saveRecord(original, okapiHeaders) - .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, TENANT_ID)) - .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, TENANT_ID)) + .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, okapiHeaders)) + .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, okapiHeaders)) .compose(ar -> recordService.updateRecordsState(original.getMatchedId(), RecordState.DRAFT, RecordType.MARC_BIB, TENANT_ID)) .onComplete(update -> { if (update.failed()) { @@ -1002,7 +1002,7 @@ public void shouldUpdateMarcAuthorityRecordStateToDeleted(TestContext context) { var okapiHeaders = Map.of(TENANT, TENANT_ID); recordDao.saveRecord(original, okapiHeaders) - .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, TENANT_ID)) + .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, okapiHeaders)) .compose(ar -> recordService.updateRecordsState(original.getMatchedId(), RecordState.DELETED, RecordType.MARC_AUTHORITY, TENANT_ID)) .onComplete(update -> { if (update.failed()) { @@ -1191,7 +1191,7 @@ public void shouldGetMarcBibSourceRecordByMatchedIdNotEqualToId(TestContext cont var okapiHeaders = Map.of(TENANT, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders) - .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, TENANT_ID)) + .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, okapiHeaders)) .onComplete(update -> { if (update.failed()) { context.fail(update.cause()); @@ -1361,7 +1361,7 @@ public void shouldUpdateSourceRecord(TestContext context) { .withAdditionalInfo(expected.getAdditionalInfo()) .withExternalIdsHolder(expected.getExternalIdsHolder()) .withMetadata(expected.getMetadata()); - recordService.updateSourceRecord(parsedRecordDto, snapshotId, TENANT_ID).onComplete(update -> { + recordService.updateSourceRecord(parsedRecordDto, snapshotId, okapiHeaders).onComplete(update -> { if (update.failed()) { context.fail(update.cause()); } From 57d354f015d810b634073f2eb5a353ea28f0e7bc Mon Sep 17 00:00:00 2001 From: pbobylev Date: Thu, 1 Aug 2024 13:53:35 +0500 Subject: [PATCH 6/8] MODSOURCE-752: topic creation + fixes + UT --- NEWS.md | 1 + README.md | 2 + .../folio/services/SRSKafkaTopicService.java | 21 +- .../RecordDomainEventPublisher.java | 61 +++--- .../org/folio/services/kafka/KafkaSender.java | 21 ++ .../services/util/EventHandlingUtil.java | 4 +- .../src/main/resources/kafka.properties | 1 + .../RecordDomainEventPublisherUnitTest.java | 183 ++++++++++++++++++ 8 files changed, 266 insertions(+), 28 deletions(-) create mode 100644 mod-source-record-storage-server/src/main/java/org/folio/services/kafka/KafkaSender.java create mode 100644 mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java diff --git a/NEWS.md b/NEWS.md index 288e630a7..660c08033 100644 --- a/NEWS.md +++ b/NEWS.md @@ -8,6 +8,7 @@ * [MODINV-1049](https://folio-org.atlassian.net/browse/MODINV-1049) Existing "035" field is not retained the original position in imported record * 
[MODSOURCE-785](https://folio-org.atlassian.net/browse/MODSOURCE-785) Update 005 field when set MARC for deletion * [MODSOURMAN-783](https://folio-org.atlassian.net/browse/MODSOURCE-783) Extend MARC-MARC search query to account for qualifiers +* [MODSOURCE-752](https://folio-org.atlassian.net/browse/MODSOURCE-752) Emit Domain Events For Source Records ## 2024-03-20 5.8.0 * [MODSOURCE-733](https://issues.folio.org/browse/MODSOURCE-733) Reduce Memory Allocation of Strings diff --git a/README.md b/README.md index fbe73b6ad..7592d131d 100644 --- a/README.md +++ b/README.md @@ -141,7 +141,9 @@ After setup, it is good to check logs in all related modules for errors. Data im * DI_SRS_MARC_HOLDINGS_RECORD_MATCHED * DI_SRS_MARC_HOLDINGS_RECORD_NOT_MATCHED * DI_SRS_MARC_AUTHORITY_RECORD_UPDATED + * SRS_SOURCE_RECORDS_PARTITIONS Default value for all partitions is 1 +* DOMAIN_EVENTS_ENABLED env variable defines whether Source Record Domain Event publishing is enabled. Default value is true. ## Database schemas The mod-source-record-storage module uses relational approach and Liquibase to define database schemas. diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/SRSKafkaTopicService.java b/mod-source-record-storage-server/src/main/java/org/folio/services/SRSKafkaTopicService.java index a1354062a..ff862a031 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/SRSKafkaTopicService.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/SRSKafkaTopicService.java @@ -19,6 +19,7 @@ import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_NOT_MATCHED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_UPDATED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING; +import static org.folio.services.domainevent.RecordDomainEventPublisher.RECORD_DOMAIN_EVENT_TOPIC; import org.folio.kafka.services.KafkaTopic; import org.springframework.beans.factory.annotation.Value; @@ -74,6 +75,9 @@ public class SRSKafkaTopicService { @Value("${di_marc_authority_record_updated.partitions}") private Integer diMarcAuthorityRecordUpdatedPartitions; + @Value("${source_records.partitions}") + private Integer sourceRecordsPartitions; + public KafkaTopic[] createTopicObjects() { return new KafkaTopic[] { MARC_BIB, @@ -91,7 +95,8 @@ public KafkaTopic[] createTopicObjects() { new SRSKafkaTopic(DI_LOG_SRS_MARC_AUTHORITY_RECORD_UPDATED.value(), diLogSrsMarcAuthorityRecordUpdatedPartitions), new SRSKafkaTopic(DI_SRS_MARC_HOLDINGS_RECORD_MATCHED.value(), diMarcHoldingsMatchedPartitions), new SRSKafkaTopic(DI_SRS_MARC_HOLDINGS_RECORD_NOT_MATCHED.value(), diMarcHoldingsNotMatchedPartitions), - new SRSKafkaTopic(DI_SRS_MARC_AUTHORITY_RECORD_UPDATED.value(), diMarcAuthorityRecordUpdatedPartitions) + new SRSKafkaTopic(DI_SRS_MARC_AUTHORITY_RECORD_UPDATED.value(), diMarcAuthorityRecordUpdatedPartitions), + new SRSKafkaTopic(RECORD_DOMAIN_EVENT_TOPIC, sourceRecordsPartitions, false) }; } @@ -99,10 +104,18 @@ public static class SRSKafkaTopic implements KafkaTopic { private final String topic; private final int numPartitions; + private final boolean includeNamespace; public SRSKafkaTopic(String topic, int numPartitions) { this.topic = topic; this.numPartitions = numPartitions; + this.includeNamespace = true; + } + + public SRSKafkaTopic(String topic, int numPartitions, boolean includeNamespace) { + this.topic = topic; + this.numPartitions = numPartitions; + this.includeNamespace = includeNamespace; } @Override @@ -122,7 +135,11 @@ public int numPartitions() { @Override public String fullTopicName(String tenant) { - return formatTopicName(environment(), getDefaultNameSpace(), tenant, topicName()); + if (includeNamespace) { + return formatTopicName(environment(), getDefaultNameSpace(), tenant, topicName()); + } else { + return formatTopicName(environment(), tenant, topicName()); + } } } }
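Note: the two fullTopicName branches produce differently shaped names. A sketch with assumed values, an environment of "folio" and a tenant of "diku", so the exact strings are illustrative rather than captured output:

    // Namespaced data-import topic (includeNamespace = true):
    //   formatTopicName("folio", "Default", "diku", "DI_SRS_MARC_BIB_RECORD_CREATED")
    //   would yield something like "folio.Default.diku.DI_SRS_MARC_BIB_RECORD_CREATED"
    // Domain event topic (includeNamespace = false):
    String name = new SRSKafkaTopic(RECORD_DOMAIN_EVENT_TOPIC, 1, false).fullTopicName("diku");
    //   delegates to formatTopicName("folio", "diku", "srs.source_records"),
    //   i.e. a name like "folio.diku.srs.source_records" with no namespace segment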
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java index 3303f9bff..15d5289c7 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java @@ -1,22 +1,19 @@ package org.folio.services.domainevent; +import static java.util.Objects.isNull; import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.okapi.common.XOkapiHeaders.TOKEN; import static org.folio.okapi.common.XOkapiHeaders.URL; import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_CREATED; import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_UPDATED; -import static org.folio.services.util.EventHandlingUtil.sendEventToKafka; -import io.vertx.core.Future; -import io.vertx.core.Vertx; import io.vertx.core.json.Json; import io.vertx.kafka.client.producer.KafkaHeader; -import java.util.ArrayList; import java.util.List; import java.util.Map; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.folio.kafka.KafkaConfig; +import org.folio.services.kafka.KafkaSender; import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.SourceRecordDomainEvent; import org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType; @@ -27,14 +24,13 @@ @Component public class RecordDomainEventPublisher { - public static final String RECORD_DOMAIN_TOPIC = "srs.source_records"; + public static final String RECORD_DOMAIN_EVENT_TOPIC = "srs.source_records"; private static final String RECORD_TYPE = "folio.srs.recordType"; private static final Logger LOG = LogManager.getLogger(); - @Value("${ENABLE_DOMAIN_EVENTS:true}") - private boolean enableDomainEvents; - + @Value("${DOMAIN_EVENTS_ENABLED:true}") + private boolean domainEventsEnabled; @Autowired - private KafkaConfig kafkaConfig; + private KafkaSender kafkaSender; public void publishRecordCreated(Record created, Map okapiHeaders) { publishRecord(created, okapiHeaders, SOURCE_RECORD_CREATED); @@ -45,25 +41,42 @@ } private void publishRecord(Record aRecord, Map okapiHeaders, EventType eventType) { - Vertx.vertx().executeBlocking(() -> { - try { - var kafkaHeaders = getKafkaHeaders(okapiHeaders, aRecord.getRecordType()); - var key = aRecord.getId(); - return sendEventToKafka(okapiHeaders.get(TENANT), getEvent(aRecord, eventType), - eventType.value(), kafkaHeaders, kafkaConfig, key); - } catch (Exception e) { - LOG.error("Exception during Record domain event sending", e); - return Future.failedFuture(e); - } - }); + if (!domainEventsEnabled || notValidForPublishing(aRecord)) { + return; + } + try { + var kafkaHeaders = getKafkaHeaders(okapiHeaders, aRecord.getRecordType()); + var key = 
aRecord.getId(); + kafkaSender.sendEventToKafka(okapiHeaders.get(TENANT), getEvent(aRecord, eventType), eventType.value(), + kafkaHeaders, key); + } catch (Exception e) { + LOG.error("Exception during Record domain event sending", e); + } + } + + private boolean notValidForPublishing(Record aRecord) { + if (isNull(aRecord.getRecordType())) { + LOG.error("Record [with id {}] contains no type information and won't be sent as domain event", aRecord.getId()); + return true; + } + if (isNull(aRecord.getRawRecord())) { + LOG.error("Record [with id {}] contains no raw record and won't be sent as domain event", aRecord.getId()); + return true; + } + if (isNull(aRecord.getRawRecord().getContent())) { + LOG.error("Record [with id {}] contains no raw record content and won't be sent as domain event", + aRecord.getId()); + return true; + } + return false; } private List getKafkaHeaders(Map okapiHeaders, Record.RecordType recordType) { - return new ArrayList<>(List.of( + return List.of( KafkaHeader.header(URL, okapiHeaders.get(URL)), KafkaHeader.header(TENANT, okapiHeaders.get(TENANT)), KafkaHeader.header(TOKEN, okapiHeaders.get(TOKEN)), - KafkaHeader.header(RECORD_TYPE, recordType.value())) + KafkaHeader.header(RECORD_TYPE, recordType.value()) ); } @@ -71,7 +84,7 @@ private String getEvent(Record eventRecord, EventType type) { var event = new SourceRecordDomainEvent() .withId(eventRecord.getId()) .withEventType(type) - .withEventPayload((String) eventRecord.getParsedRecord().getContent()); + .withEventPayload(eventRecord.getRawRecord().getContent()); return Json.encode(event); } diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/kafka/KafkaSender.java b/mod-source-record-storage-server/src/main/java/org/folio/services/kafka/KafkaSender.java new file mode 100644 index 000000000..a8700f59c --- /dev/null +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/kafka/KafkaSender.java @@ -0,0 +1,21 @@ +package org.folio.services.kafka; + +import io.vertx.core.Future; +import io.vertx.kafka.client.producer.KafkaHeader; +import java.util.List; +import org.folio.kafka.KafkaConfig; +import org.folio.services.util.EventHandlingUtil; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class KafkaSender { + + @Autowired + private KafkaConfig kafkaConfig; + + public Future sendEventToKafka(String tenantId, String eventPayload, String eventType, + List kafkaHeaders, String key) { + return EventHandlingUtil.sendEventToKafka(tenantId, eventPayload, eventType, kafkaHeaders, kafkaConfig, key); + } +} diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java index 3008bf1eb..5f74b924b 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java @@ -6,7 +6,7 @@ import static org.folio.okapi.common.XOkapiHeaders.TOKEN; import static org.folio.okapi.common.XOkapiHeaders.URL; import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType; -import static org.folio.services.domainevent.RecordDomainEventPublisher.RECORD_DOMAIN_TOPIC; +import static org.folio.services.domainevent.RecordDomainEventPublisher.RECORD_DOMAIN_EVENT_TOPIC; import static org.folio.services.util.KafkaUtil.extractHeaderValue; import 
io.vertx.core.Future; @@ -108,7 +108,7 @@ public static String constructModuleName() { public static String createTopicName(String eventType, String tenantId, KafkaConfig kafkaConfig) { if (stream(EventType.values()).anyMatch(et -> et.value().equals(eventType))) { - return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), tenantId, RECORD_DOMAIN_TOPIC); + return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), tenantId, RECORD_DOMAIN_EVENT_TOPIC); } return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), KafkaTopicNameHelper.getDefaultNameSpace(), tenantId, eventType); diff --git a/mod-source-record-storage-server/src/main/resources/kafka.properties b/mod-source-record-storage-server/src/main/resources/kafka.properties index 8bfed4f60..f63d82f14 100644 --- a/mod-source-record-storage-server/src/main/resources/kafka.properties +++ b/mod-source-record-storage-server/src/main/resources/kafka.properties @@ -14,3 +14,4 @@ di_logs_srs_marc_authority_record_updated.partitions = ${DI_LOG_SRS_MARC_AUTHORI di_marc_holdings_matched.partitions = ${DI_SRS_MARC_HOLDINGS_RECORD_MATCHED:1} di_marc_holdings_not_matched.partitions = ${DI_SRS_MARC_HOLDINGS_RECORD_NOT_MATCHED:1} di_marc_authority_record_updated.partitions = ${DI_SRS_MARC_AUTHORITY_RECORD_UPDATED:1} +source_records.partitions = ${SRS_SOURCE_RECORDS_PARTITIONS:1} diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java new file mode 100644 index 000000000..3a2a918fd --- /dev/null +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java @@ -0,0 +1,183 @@ +package org.folio.services.domainevent; + +import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.okapi.common.XOkapiHeaders.TOKEN; +import static org.folio.okapi.common.XOkapiHeaders.URL; +import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_CREATED; +import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_UPDATED; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; + +import io.vertx.kafka.client.producer.KafkaHeader; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import org.folio.rest.jaxrs.model.RawRecord; +import org.folio.rest.jaxrs.model.Record; +import org.folio.services.kafka.KafkaSender; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.springframework.test.util.ReflectionTestUtils; + +@RunWith(MockitoJUnitRunner.class) +public class RecordDomainEventPublisherUnitTest { + + @InjectMocks + private RecordDomainEventPublisher publisher; + @Mock + private KafkaSender kafkaSender; + + @Test + public void publishRecordCreated_shouldSendNoEvents_ifDomainEventsAreNotEnabled() { + // given + ReflectionTestUtils.setField(publisher, "domainEventsEnabled", false); + var aRecord = new Record(); + var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + + // when + publisher.publishRecordCreated(aRecord, headers); + + // then + verifyNoInteractions(kafkaSender); + } + + @Test + public void publishRecordUpdated_shouldSendNoEvents_ifDomainEventsAreNotEnabled() { + // given + 
ReflectionTestUtils.setField(publisher, "domainEventsEnabled", false); + var aRecord = new Record(); + var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + + // when + publisher.publishRecordUpdated(aRecord, headers); + + // then + verifyNoInteractions(kafkaSender); + } + + @Test + public void publishRecordCreated_shouldSendNoEvents_ifRecordHasNoType() { + // given + ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); + var aRecord = new Record(); + var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + + // when + publisher.publishRecordCreated(aRecord, headers); + + // then + verifyNoInteractions(kafkaSender); + } + + @Test + public void publishRecordUpdated_shouldSendNoEvents_ifRecordHasNoType() { + // given + ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); + var aRecord = new Record(); + var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + + // when + publisher.publishRecordUpdated(aRecord, headers); + + // then + verifyNoInteractions(kafkaSender); + } + + @Test + public void publishRecordCreated_shouldSendNoEvents_ifRecordContainsNoParsedContent() { + // given + ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); + var aRecord = new Record().withRecordType(Record.RecordType.MARC_BIB); + var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + + // when + publisher.publishRecordCreated(aRecord, headers); + + // then + verifyNoInteractions(kafkaSender); + } + + @Test + public void publishRecordUpdated_shouldSendNoEvents_ifRecordContainsNoParsedContent() { + // given + ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); + var aRecord = new Record().withRecordType(Record.RecordType.MARC_BIB); + var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + + // when + publisher.publishRecordUpdated(aRecord, headers); + + // then + verifyNoInteractions(kafkaSender); + } + + @Test + public void publishRecordCreated_shouldSendEvent_ifRecordIsValid() { + // given + ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); + var rawContent = "rawContent"; + var aRecord = new Record() + .withId(UUID.randomUUID().toString()) + .withRecordType(Record.RecordType.MARC_BIB) + .withRawRecord(new RawRecord().withContent(rawContent)); + var tenantId = "TENANT"; + var okapiUrl = "OKAPI_URL"; + var token = "TOKEN"; + var givenHeaders = Map.of(TENANT, tenantId, URL, okapiUrl, TOKEN, token); + var expectedHeaders = getKafkaHeaders(okapiUrl, tenantId, token, aRecord); + var eventType = SOURCE_RECORD_CREATED.value(); + var expectedPayload = "{" + + "\"id\":\"" + aRecord.getId() + "\"" + + ",\"eventType\":\"" + eventType + "\"" + + ",\"eventPayload\":\"" + rawContent + "\"" + + "}"; + + // when + publisher.publishRecordCreated(aRecord, givenHeaders); + + // then + verify(kafkaSender).sendEventToKafka(tenantId, expectedPayload, eventType, expectedHeaders, + aRecord.getId()); + } + + @Test + public void publishRecordUpdated_shouldSendEvent_ifRecordIsValid() { + // given + ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); + var rawContent = "rawContent"; + var aRecord = new Record() + .withId(UUID.randomUUID().toString()) + .withRecordType(Record.RecordType.MARC_BIB) + .withRawRecord(new RawRecord().withContent(rawContent)); + var tenantId = "TENANT"; + var okapiUrl = "OKAPI_URL"; + var token = "TOKEN"; + var givenHeaders = Map.of(TENANT, tenantId, URL, okapiUrl, TOKEN, token); + var expectedHeaders = 
getKafkaHeaders(okapiUrl, tenantId, token, aRecord); + var eventType = SOURCE_RECORD_UPDATED.value(); + var expectedPayload = "{" + + "\"id\":\"" + aRecord.getId() + "\"" + + ",\"eventType\":\"" + eventType + "\"" + + ",\"eventPayload\":\"" + rawContent + "\"" + + "}"; + + // when + publisher.publishRecordUpdated(aRecord, givenHeaders); + + // then + verify(kafkaSender).sendEventToKafka(tenantId, expectedPayload, eventType, expectedHeaders, + aRecord.getId()); + } + + private List getKafkaHeaders(String okapiUrl, String tenantId, String token, Record aRecord) { + return List.of( + KafkaHeader.header(URL, okapiUrl), + KafkaHeader.header(TENANT, tenantId), + KafkaHeader.header(TOKEN, token), + KafkaHeader.header("folio.srs.recordType", aRecord.getRecordType().value()) + ); + } +} From 8208cde71bb12e931a6dda592de45a1edf8a6eb2 Mon Sep 17 00:00:00 2001 From: pbobylev Date: Thu, 1 Aug 2024 17:04:36 +0500 Subject: [PATCH 7/8] MODSOURCE-752: updateParsedRecord & updateParsedRecords methods handled --- .../main/java/org/folio/dao/RecordDao.java | 8 ++-- .../java/org/folio/dao/RecordDaoImpl.java | 38 +++++++++++-------- .../rest/impl/SourceStorageBatchImpl.java | 6 ++- .../org/folio/services/RecordService.java | 8 ++-- .../org/folio/services/RecordServiceImpl.java | 8 ++-- .../AbstractPostProcessingEventHandler.java | 2 +- .../org/folio/services/RecordServiceTest.java | 3 +- ...nstancePostProcessingEventHandlerTest.java | 4 +- 8 files changed, 45 insertions(+), 32 deletions(-) diff --git a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java index d784997fc..2517ba4c2 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDao.java @@ -234,19 +234,19 @@ Future getMatchedRecordsIdentifiers(MatchField mat * Updates {@link ParsedRecord} in the db * * @param record record dto from which {@link ParsedRecord} will be updated - * @param tenantId tenant id + * @param okapiHeaders okapi headers * @return future with updated ParsedRecord */ - Future updateParsedRecord(Record record, String tenantId); + Future updateParsedRecord(Record record, Map okapiHeaders); /** * Update parsed records from collection of records and external relations ids in one transaction * * @param recordCollection collection of records from which parsed records will be updated - * @param tenantId tenant id + * @param okapiHeaders okapi headers * @return future with response containing list of successfully updated records and error messages for records that were not updated */ - Future updateParsedRecords(RecordCollection recordCollection, String tenantId); + Future updateParsedRecords(RecordCollection recordCollection, Map okapiHeaders); /** * Searches for {@link Record} by id of external entity which was created from desired record diff --git a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java index a7a7fb3e5..6a9f89a63 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java @@ -1077,21 +1077,26 @@ } @Override - public Future updateParsedRecord(Record record, String tenantId) { - LOG.trace("updateParsedRecord:: Updating {} record 
{} for tenant {}", record.getRecordType(), record.getId(), tenantId); + public Future updateParsedRecord(Record record, Map okapiHeaders) { + var tenantId = okapiHeaders.get(TENANT); + LOG.trace("updateParsedRecord:: Updating {} record {} for tenant {}", record.getRecordType(), + record.getId(), tenantId); return getQueryExecutor(tenantId).transaction(txQE -> GenericCompositeFuture.all(Lists.newArrayList( updateExternalIdsForRecord(txQE, record), ParsedRecordDaoUtil.update(txQE, record.getParsedRecord(), ParsedRecordDaoUtil.toRecordType(record)) - )).map(res -> record.getParsedRecord())); + )).onSuccess(updated -> recordDomainEventPublisher.publishRecordUpdated(record, okapiHeaders)) + .map(res -> record.getParsedRecord())); } @Override - public Future updateParsedRecords(RecordCollection recordCollection, String tenantId) { + public Future updateParsedRecords(RecordCollection recordCollection, Map okapiHeaders) { + var tenantId = okapiHeaders.get(TENANT); logRecordCollection("updateParsedRecords:: Updating", recordCollection, tenantId); Promise promise = Promise.promise(); Context context = Vertx.currentContext(); if(context == null) return Future.failedFuture("updateParsedRecords must be called by a vertx thread"); + var recordsUpdated = new ArrayList(); context.owner().executeBlocking(blockingPromise -> { Set recordTypes = new HashSet<>(); @@ -1105,7 +1110,7 @@ public Future updateParsedRecords(RecordCollection r Field prtId = field(name(ID), UUID.class); Field prtContent = field(name(CONTENT), JSONB.class); - List parsedRecords = recordCollection.getRecords() + List processedRecords = recordCollection.getRecords() .stream() .map(this::validateParsedRecordId) .peek(record -> { @@ -1187,9 +1192,9 @@ public Future updateParsedRecords(RecordCollection r .setId(null); } - }).map(Record::getParsedRecord) - .filter(parsedRecord -> Objects.nonNull(parsedRecord.getId())) - .collect(Collectors.toList()); + }) + .filter(processedRecord -> Objects.nonNull(processedRecord.getParsedRecord().getId())) + .toList(); try (Connection connection = getConnection(tenantId)) { DSL.using(connection).transaction(ctx -> { @@ -1210,21 +1215,21 @@ public Future updateParsedRecords(RecordCollection r int[] parsedRecordUpdateResults = dsl.batch(parsedRecordUpdates).execute(); // check parsed record update results - List parsedRecordsUpdated = new ArrayList<>(); for (int i = 0; i < parsedRecordUpdateResults.length; i++) { int result = parsedRecordUpdateResults[i]; - ParsedRecord parsedRecord = parsedRecords.get(i); + var processedRecord = processedRecords.get(i); if (result == 0) { - errorMessages.add(format("Parsed Record with id '%s' was not updated", parsedRecord.getId())); + errorMessages.add(format("Parsed Record with id '%s' was not updated", + processedRecord.getParsedRecord().getId())); } else { - parsedRecordsUpdated.add(parsedRecord); + recordsUpdated.add(processedRecord); } } blockingPromise.complete(new ParsedRecordsBatchResponse() .withErrorMessages(errorMessages) - .withParsedRecords(parsedRecordsUpdated) - .withTotalRecords(parsedRecordsUpdated.size())); + .withParsedRecords(recordsUpdated.stream().map(Record::getParsedRecord).collect(Collectors.toList())) + .withTotalRecords(recordsUpdated.size())); }); } catch (SQLException e) { LOG.warn("updateParsedRecords:: Failed to update records", e); @@ -1242,7 +1247,10 @@ public Future updateParsedRecords(RecordCollection r } }); - return promise.future(); + return promise.future() + .onSuccess(response -> + recordsUpdated.forEach(updated -> 
recordDomainEventPublisher.publishRecordUpdated(updated, okapiHeaders)) + ); } @Override diff --git a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java index b4317a84e..557258640 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java @@ -1,5 +1,7 @@ package org.folio.rest.impl; +import static org.folio.okapi.common.XOkapiHeaders.TENANT; + import java.util.List; import java.util.Map; @@ -57,6 +59,7 @@ public void postSourceStorageBatchVerifiedRecords(List marcBibIds, Map okapiHeaders, Handler> asyncResultHandler, Context vertxContext) { vertxContext.runOnContext(v -> { + okapiHeaders.put(TENANT, tenantId); try { MetadataUtil.populateMetadata(entity.getRecords(), okapiHeaders); recordService.saveRecords(entity, okapiHeaders) @@ -82,9 +85,10 @@ public void postSourceStorageBatchRecords(RecordCollection entity, Map okapiHeaders, Handler> asyncResultHandler, Context vertxContext) { vertxContext.runOnContext(v -> { + okapiHeaders.put(TENANT, tenantId); try { MetadataUtil.populateMetadata(entity.getRecords(), okapiHeaders); - recordService.updateParsedRecords(entity, tenantId) + recordService.updateParsedRecords(entity, okapiHeaders) .map(parsedRecordsBatchResponse -> { if (!parsedRecordsBatchResponse.getParsedRecords().isEmpty()) { return PutSourceStorageBatchParsedRecordsResponse.respond200WithApplicationJson(parsedRecordsBatchResponse); diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java index c1cf7294c..14ded8a17 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordService.java @@ -173,19 +173,19 @@ public interface RecordService { * Updates {@link ParsedRecord} in the db * * @param record record dto from which {@link ParsedRecord} will be updated - * @param tenantId tenant id + * @param okapiHeaders okapi headers * @return future with updated ParsedRecord */ - Future updateParsedRecord(Record record, String tenantId); + Future updateParsedRecord(Record record, Map okapiHeaders); /** * Update parsed records from collection of records and external relations ids in one transaction * * @param recordCollection collection of records from which parsed records will be updated - * @param tenantId tenant id + * @param okapiHeaders okapi headers * @return future with response containing list of successfully updated records and error messages for records that were not updated */ - Future updateParsedRecords(RecordCollection recordCollection, String tenantId); + Future updateParsedRecords(RecordCollection recordCollection, Map okapiHeaders); /** * Fetch stripped parsed records by ids and filter marc fields by provided range of fields diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java index f8b8f39d2..1eabfc84a 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java @@ -238,18 +238,18 
@@ public Future> getSourceRecordById(String id, IdType idTy } @Override - public Future updateParsedRecord(Record record, String tenantId) { - return recordDao.updateParsedRecord(record, tenantId); + public Future updateParsedRecord(Record record, Map okapiHeaders) { + return recordDao.updateParsedRecord(record, okapiHeaders); } @Override - public Future updateParsedRecords(RecordCollection recordCollection, String tenantId) { + public Future updateParsedRecords(RecordCollection recordCollection, Map okapiHeaders) { if (recordCollection.getRecords().isEmpty()) { Promise promise = Promise.promise(); promise.complete(new ParsedRecordsBatchResponse().withTotalRecords(0)); return promise.future(); } - return recordDao.updateParsedRecords(recordCollection, tenantId); + return recordDao.updateParsedRecords(recordCollection, okapiHeaders); } @Override diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java index adfdbf87e..58a8a630b 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java @@ -319,7 +319,7 @@ private Future saveRecord(Record record, Map okapiHeader return recordService.getRecordById(record.getId(), tenantId) .compose(r -> { if (r.isPresent()) { - return recordService.updateParsedRecord(record, tenantId).map(record.withGeneration(r.get().getGeneration())); + return recordService.updateParsedRecord(record, okapiHeaders).map(record.withGeneration(r.get().getGeneration())); } else { record.getRawRecord().setId(record.getId()); return recordService.saveRecord(record, okapiHeaders).map(record); diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java index f56caef20..131ec2c5f 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java @@ -1938,7 +1938,8 @@ private void updateParsedMarcRecords(TestContext context, Record.RecordType reco List expected = updated.stream() .map(Record::getParsedRecord) .collect(Collectors.toList()); - recordService.updateParsedRecords(recordCollection, TENANT_ID).onComplete(update -> { + var okapiHeaders = Map.of(TENANT, TENANT_ID); + recordService.updateParsedRecords(recordCollection, okapiHeaders).onComplete(update -> { if (update.failed()) { context.fail(update.cause()); } diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java index f202445fc..7ce917c14 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java @@ -201,7 +201,7 @@ public void shouldProceedIfConsortiumTrackExists(TestContext context) { doAnswer(invocationOnMock -> 
Future.succeededFuture(Optional.of(record))).when(mockedRecordService).getRecordById(anyString(), anyString()); - doAnswer(invocationOnMock -> Future.succeededFuture(record.getParsedRecord())).when(mockedRecordService).updateParsedRecord(any(), anyString()); + doAnswer(invocationOnMock -> Future.succeededFuture(record.getParsedRecord())).when(mockedRecordService).updateParsedRecord(any(), any()); doAnswer(invocationOnMock -> Future.succeededFuture(recordCollection)).when(mockedRecordService).getRecords(any(), any(), any(), anyInt(), anyInt(), anyString()); @@ -237,7 +237,7 @@ public void shouldProceedIfConsortiumTrackExists(TestContext context) { if (e != null) { context.fail(e); } - verify(mockedRecordService, times(1)).updateParsedRecord(any(), anyString()); + verify(mockedRecordService, times(1)).updateParsedRecord(any(), any()); context.assertNull(payload.getContext().get(CENTRAL_TENANT_INSTANCE_UPDATED_FLAG)); context.assertEquals(expectedCentralTenantId, payload.getContext().get(CENTRAL_TENANT_ID)); async.complete(); From d11f6ac6d0b5447f22de1c741d867e7c54c4beae Mon Sep 17 00:00:00 2001 From: PBobylev Date: Tue, 6 Aug 2024 19:40:39 +0500 Subject: [PATCH 8/8] MODSOURCE-752: fix kafka headers and event model --- .../ParsedRecordChunksKafkaHandler.java | 2 +- .../consumers/QuickMarcKafkaHandler.java | 6 +- .../java/org/folio/dao/RecordDaoImpl.java | 12 +-- .../rest/impl/SourceStorageBatchImpl.java | 23 ++--- .../org/folio/services/RecordServiceImpl.java | 55 ++++++----- .../RecordDomainEventPublisher.java | 35 +++---- .../SourceRecordDomainEventType.java | 5 + .../AbstractPostProcessingEventHandler.java | 66 ++++++------- .../AbstractUpdateModifyEventHandler.java | 49 +++++----- .../services/util/EventHandlingUtil.java | 26 ++--- .../AuthorityDomainKafkaHandlerTest.java | 4 +- .../java/org/folio/dao/RecordDaoImplTest.java | 24 +++-- .../AuthorityLinkChunkKafkaHandlerTest.java | 12 ++- .../MarcAuthorityDeleteEventHandlerTest.java | 27 +++--- .../MarcAuthorityMatchEventHandlerTest.java | 45 +++++---- ...AuthorityUpdateModifyEventHandlerTest.java | 45 ++++----- .../MarcBibUpdateModifyEventHandlerTest.java | 13 +-- .../MarcHoldingsMatchEventHandlerTest.java | 47 +++++---- ...cHoldingsUpdateModifyEventHandlerTest.java | 43 ++++---- .../services/QuickMarcKafkaHandlerTest.java | 35 ++++--- .../org/folio/services/RecordServiceTest.java | 64 ++++++------ .../RecordDomainEventPublisherUnitTest.java | 54 ++++------- ...thorityPostProcessingEventHandlerTest.java | 41 ++++---- ...oldingsPostProcessingEventHandlerTest.java | 38 ++++---- ...nstancePostProcessingEventHandlerTest.java | 67 +++++++------ ...rcIndexersVersionDeletionVerticleTest.java | 25 +++-- .../DataImportConsumersVerticleTest.java | 47 +++++---- ramls/source-record-domain-event.json | 97 ------------------- ramls/source-record-storage-records.raml | 1 - 29 files changed, 432 insertions(+), 576 deletions(-) create mode 100644 mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/SourceRecordDomainEventType.java delete mode 100644 ramls/source-record-domain-event.json diff --git a/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java index 4dec329df..2e4c6a163 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java +++ 
b/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java @@ -86,7 +86,7 @@ public Future handle(KafkaConsumerRecord targetRecord) { LOGGER.debug("handle:: RecordCollection has been received with event: '{}', jobExecutionId '{}', chunkId: '{}', starting processing... chunkNumber '{}'-'{}'", event.getEventType(), jobExecutionId, chunkId, chunkNumber, key); setUserMetadata(recordCollection, userId); - return recordService.saveRecords(recordCollection, toOkapiHeaders(kafkaHeaders, null)) + return recordService.saveRecords(recordCollection, toOkapiHeaders(kafkaHeaders)) .compose(recordsBatchResponse -> sendBackRecordsBatchResponse(recordsBatchResponse, kafkaHeaders, tenantId, chunkNumber, event.getEventType(), targetRecord)); } catch (Exception e) { LOGGER.warn("handle:: RecordCollection processing has failed with errors jobExecutionId '{}', chunkId: '{}', chunkNumber '{}'-'{}'", diff --git a/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java index a54182612..136e2ea57 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java @@ -2,7 +2,7 @@ import static org.folio.dao.util.QMEventTypes.QM_ERROR; import static org.folio.dao.util.QMEventTypes.QM_SRS_MARC_RECORD_UPDATED; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.services.util.EventHandlingUtil.createProducer; import static org.folio.services.util.EventHandlingUtil.createProducerRecord; import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; @@ -65,12 +65,12 @@ public Future handle(KafkaConsumerRecord consumerRecord) log.trace("handle:: Handling kafka consumerRecord {}", consumerRecord); var kafkaHeaders = consumerRecord.headers(); - var okapiHeaders = toOkapiHeaders(kafkaHeaders, null); + var okapiHeaders = toOkapiHeaders(kafkaHeaders); return getEventPayload(consumerRecord) .compose(eventPayload -> { String snapshotId = eventPayload.getOrDefault(SNAPSHOT_ID_KEY, UUID.randomUUID().toString()); - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); return getRecordDto(eventPayload) .compose(recordDto -> recordService.updateSourceRecord(recordDto, snapshotId, okapiHeaders)) .compose(updatedRecord -> { diff --git a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java index 6a9f89a63..c8a940bb5 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java @@ -15,7 +15,6 @@ import static org.folio.dao.util.RecordDaoUtil.getExternalId; import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_FOUND_TEMPLATE; import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_STARTED_MESSAGE_TEMPLATE; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jooq.Tables.ERROR_RECORDS_LB; import static org.folio.rest.jooq.Tables.MARC_RECORDS_LB; import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; @@ -23,6 +22,7 @@ import static org.folio.rest.jooq.Tables.RECORDS_LB; import static 
org.folio.rest.jooq.Tables.SNAPSHOTS_LB; import static org.folio.rest.jooq.enums.RecordType.MARC_BIB; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.rest.util.QueryParamUtil.toRecordType; import static org.jooq.impl.DSL.condition; import static org.jooq.impl.DSL.countDistinct; @@ -733,7 +733,7 @@ public Future> getRecordByCondition(ReactiveClassicGenericQuery @Override public Future saveRecord(Record record, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); LOG.trace("saveRecord:: Saving {} record {} for tenant {}", record.getRecordType(), record.getId(), tenantId); return getQueryExecutor(tenantId).transaction(txQE -> saveRecord(txQE, record, okapiHeaders)); } @@ -748,7 +748,7 @@ public Future saveRecord(ReactiveClassicGenericQueryExecutor txQE, Recor @Override public Future saveRecords(RecordCollection recordCollection, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); logRecordCollection("saveRecords:: Saving", recordCollection, tenantId); Promise finalPromise = Promise.promise(); Context context = Vertx.currentContext(); @@ -966,7 +966,7 @@ public Future saveRecords(RecordCollection recordCollectio @Override public Future updateRecord(Record record, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); LOG.trace("updateRecord:: Updating {} record {} for tenant {}", record.getRecordType(), record.getId(), tenantId); return getQueryExecutor(tenantId).transaction(txQE -> getRecordById(txQE, record.getId()) .compose(optionalRecord -> optionalRecord @@ -1078,7 +1078,7 @@ public Future calculateGeneration(ReactiveClassicGenericQueryExecutor t @Override public Future updateParsedRecord(Record record, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); LOG.trace("updateParsedRecord:: Updating {} record {} for tenant {}", record.getRecordType(), record.getId(), tenantId); return getQueryExecutor(tenantId).transaction(txQE -> GenericCompositeFuture.all(Lists.newArrayList( @@ -1090,7 +1090,7 @@ public Future updateParsedRecord(Record record, Map updateParsedRecords(RecordCollection recordCollection, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); logRecordCollection("updateParsedRecords:: Updating", recordCollection, tenantId); Promise promise = Promise.promise(); Context context = Vertx.currentContext(); diff --git a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java index 557258640..7a128b607 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java @@ -1,12 +1,17 @@ package org.folio.rest.impl; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import io.vertx.core.AsyncResult; +import io.vertx.core.Context; +import io.vertx.core.Future; +import io.vertx.core.Handler; +import io.vertx.core.Vertx; import java.util.List; import java.util.Map; - import javax.ws.rs.core.Response; - +import org.apache.logging.log4j.LogManager; +import 
org.apache.logging.log4j.Logger; import org.folio.dataimport.util.ExceptionHelper; import org.folio.rest.jaxrs.model.FetchParsedRecordsBatchRequest; import org.folio.rest.jaxrs.model.RecordCollection; @@ -17,14 +22,6 @@ import org.folio.spring.SpringContextUtil; import org.springframework.beans.factory.annotation.Autowired; -import io.vertx.core.AsyncResult; -import io.vertx.core.Context; -import io.vertx.core.Future; -import io.vertx.core.Handler; -import io.vertx.core.Vertx; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - public class SourceStorageBatchImpl implements SourceStorageBatch { private static final Logger LOG = LogManager.getLogger(); @@ -59,7 +56,7 @@ public void postSourceStorageBatchVerifiedRecords(List marcBibIds, Map okapiHeaders, Handler> asyncResultHandler, Context vertxContext) { vertxContext.runOnContext(v -> { - okapiHeaders.put(TENANT, tenantId); + okapiHeaders.put(OKAPI_TENANT_HEADER, tenantId); try { MetadataUtil.populateMetadata(entity.getRecords(), okapiHeaders); recordService.saveRecords(entity, okapiHeaders) @@ -85,7 +82,7 @@ public void postSourceStorageBatchRecords(RecordCollection entity, Map okapiHeaders, Handler> asyncResultHandler, Context vertxContext) { vertxContext.runOnContext(v -> { - okapiHeaders.put(TENANT, tenantId); + okapiHeaders.put(OKAPI_TENANT_HEADER, tenantId); try { MetadataUtil.populateMetadata(entity.getRecords(), okapiHeaders); recordService.updateParsedRecords(entity, okapiHeaders) diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java index 1eabfc84a..d8d3ce82f 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java @@ -14,12 +14,20 @@ import static org.folio.dao.util.RecordDaoUtil.getExternalIdsConditionWithQualifier; import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_FOUND_TEMPLATE; import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_STARTED_MESSAGE_TEMPLATE; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.rest.util.QueryParamUtil.toRecordType; import static org.folio.services.util.AdditionalFieldsUtil.TAG_999; import static org.folio.services.util.AdditionalFieldsUtil.addFieldToMarcRecord; import static org.folio.services.util.AdditionalFieldsUtil.getFieldFromMarcRecord; +import io.reactivex.Flowable; +import io.vertx.core.AsyncResult; +import io.vertx.core.Future; +import io.vertx.core.Promise; +import io.vertx.core.json.JsonArray; +import io.vertx.core.json.JsonObject; +import io.vertx.pgclient.PgException; +import io.vertx.sqlclient.Row; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -32,41 +40,22 @@ import java.util.stream.Collectors; import javax.ws.rs.BadRequestException; import javax.ws.rs.NotFoundException; - -import io.reactivex.Flowable; -import io.vertx.core.AsyncResult; -import io.vertx.core.Future; -import io.vertx.core.Promise; -import io.vertx.core.json.JsonArray; -import io.vertx.core.json.JsonObject; -import io.vertx.pgclient.PgException; -import io.vertx.sqlclient.Row; import net.sf.jsqlparser.JSQLParserException; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import 
org.apache.logging.log4j.Logger; +import org.folio.dao.RecordDao; import org.folio.dao.util.IdType; -import org.folio.dao.util.ParsedRecordDaoUtil; import org.folio.dao.util.MatchField; +import org.folio.dao.util.ParsedRecordDaoUtil; import org.folio.dao.util.RecordDaoUtil; import org.folio.dao.util.RecordType; import org.folio.dao.util.SnapshotDaoUtil; import org.folio.okapi.common.GenericCompositeFuture; import org.folio.processing.value.ListValue; -import org.folio.rest.jaxrs.model.Filter; -import org.folio.rest.jaxrs.model.RecordIdentifiersDto; -import org.folio.rest.jaxrs.model.RecordMatchingDto; -import org.folio.rest.jaxrs.model.RecordsIdentifiersCollection; -import org.folio.services.exceptions.DuplicateRecordException; -import org.folio.services.util.AdditionalFieldsUtil; -import org.folio.services.util.TypeConnection; -import org.jooq.Condition; -import org.jooq.OrderField; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.folio.dao.RecordDao; import org.folio.rest.jaxrs.model.FetchParsedRecordsBatchRequest; import org.folio.rest.jaxrs.model.FieldRange; +import org.folio.rest.jaxrs.model.Filter; import org.folio.rest.jaxrs.model.MarcBibCollection; import org.folio.rest.jaxrs.model.ParsedRecord; import org.folio.rest.jaxrs.model.ParsedRecordDto; @@ -74,15 +63,25 @@ import org.folio.rest.jaxrs.model.RawRecord; import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.RecordCollection; +import org.folio.rest.jaxrs.model.RecordIdentifiersDto; +import org.folio.rest.jaxrs.model.RecordMatchingDto; import org.folio.rest.jaxrs.model.RecordsBatchResponse; +import org.folio.rest.jaxrs.model.RecordsIdentifiersCollection; import org.folio.rest.jaxrs.model.Snapshot; import org.folio.rest.jaxrs.model.SourceRecord; import org.folio.rest.jaxrs.model.SourceRecordCollection; import org.folio.rest.jaxrs.model.StrippedParsedRecordCollection; import org.folio.rest.jooq.enums.RecordState; +import org.folio.services.exceptions.DuplicateRecordException; +import org.folio.services.util.AdditionalFieldsUtil; +import org.folio.services.util.TypeConnection; import org.folio.services.util.parser.ParseFieldsResult; import org.folio.services.util.parser.ParseLeaderResult; import org.folio.services.util.parser.SearchExpressionParser; +import org.jooq.Condition; +import org.jooq.OrderField; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; @Service public class RecordServiceImpl implements RecordService { @@ -125,7 +124,7 @@ public Future> getRecordById(String id, String tenantId) { @Override public Future saveRecord(Record record, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); LOG.debug("saveRecord:: Saving record with id: {} for tenant: {}", record.getId(), tenantId); ensureRecordHasId(record); ensureRecordHasSuppressDiscovery(record); @@ -168,7 +167,7 @@ public Future saveRecords(RecordCollection recordCollectio } List setMatchedIdsFutures = new ArrayList<>(); recordCollection.getRecords().forEach(record -> setMatchedIdsFutures.add(setMatchedIdForRecord(record, - okapiHeaders.get(TENANT)))); + okapiHeaders.get(OKAPI_TENANT_HEADER)))); return GenericCompositeFuture.all(setMatchedIdsFutures) .compose(ar -> ar.succeeded() ? 
recordDao.saveRecords(recordCollection, okapiHeaders)
@@ -189,7 +188,7 @@ public Future updateRecordGeneration(String matchedId, Record record, Ma
     }
     record.setId(UUID.randomUUID().toString());
 
-    return recordDao.getRecordByMatchedId(matchedId, okapiHeaders.get(TENANT))
+    return recordDao.getRecordByMatchedId(matchedId, okapiHeaders.get(OKAPI_TENANT_HEADER))
      .map(r -> r.orElseThrow(() -> new NotFoundException(format(RECORD_WITH_GIVEN_MATCHED_ID_NOT_FOUND, matchedId))))
      .compose(v -> saveRecord(record, okapiHeaders))
      .recover(throwable -> {
@@ -316,7 +315,7 @@ public Future updateSourceRecord(ParsedRecordDto parsedRecordDto, String
         .withAdditionalInfo(parsedRecordDto.getAdditionalInfo())
         .withMetadata(parsedRecordDto.getMetadata()), existingRecord.withState(Record.State.OLD), okapiHeaders)))
         .orElse(Future.failedFuture(new NotFoundException(
-          format(RECORD_NOT_FOUND_TEMPLATE, parsedRecordDto.getId()))))), okapiHeaders.get(TENANT));
+          format(RECORD_NOT_FOUND_TEMPLATE, parsedRecordDto.getId()))))), okapiHeaders.get(OKAPI_TENANT_HEADER));
   }
 
   @Override
@@ -346,7 +345,7 @@ public Future getMatchedRecordsIdentifiers(RecordM
   @Override
   public Future<Void> deleteRecordById(String id, IdType idType, Map<String, String> okapiHeaders) {
-    var tenantId = okapiHeaders.get(TENANT);
+    var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER);
     return recordDao.getRecordByExternalId(id, idType, tenantId)
       .map(recordOptional -> recordOptional.orElseThrow(() -> new NotFoundException(format(NOT_FOUND_MESSAGE, Record.class.getSimpleName(), id))))
       .map(record -> {
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java
index 15d5289c7..ed4758847 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java
@@ -1,22 +1,19 @@
 package org.folio.services.domainevent;
 
 import static java.util.Objects.isNull;
-import static org.folio.okapi.common.XOkapiHeaders.TENANT;
-import static org.folio.okapi.common.XOkapiHeaders.TOKEN;
-import static org.folio.okapi.common.XOkapiHeaders.URL;
-import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_CREATED;
-import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_UPDATED;
+import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER;
+import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER;
+import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER;
+import static org.folio.services.domainevent.SourceRecordDomainEventType.SOURCE_RECORD_CREATED;
+import static org.folio.services.domainevent.SourceRecordDomainEventType.SOURCE_RECORD_UPDATED;
 
-import io.vertx.core.json.Json;
 import io.vertx.kafka.client.producer.KafkaHeader;
 import java.util.List;
 import java.util.Map;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.folio.services.kafka.KafkaSender;
 import org.folio.rest.jaxrs.model.Record;
-import org.folio.rest.jaxrs.model.SourceRecordDomainEvent;
-import org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType;
+import org.folio.services.kafka.KafkaSender;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Component;
@@ -40,15 +37,15 @@ public void publishRecordUpdated(Record updated, Map okapiHeader
     publishRecord(updated, okapiHeaders, SOURCE_RECORD_UPDATED);
   }
 
-  private void publishRecord(Record aRecord, Map<String, String> okapiHeaders, EventType eventType) {
+  private void publishRecord(Record aRecord, Map<String, String> okapiHeaders, SourceRecordDomainEventType eventType) {
     if (!domainEventsEnabled || notValidForPublishing(aRecord)) {
       return;
     }
     try {
       var kafkaHeaders = getKafkaHeaders(okapiHeaders, aRecord.getRecordType());
       var key = aRecord.getId();
-      kafkaSender.sendEventToKafka(okapiHeaders.get(TENANT), getEvent(aRecord, eventType), eventType.value(),
-        kafkaHeaders, key);
+      kafkaSender.sendEventToKafka(okapiHeaders.get(OKAPI_TENANT_HEADER), aRecord.getRawRecord().getContent(),
+        eventType.name(), kafkaHeaders, key);
     } catch (Exception e) {
       LOG.error("Exception during Record domain event sending", e);
     }
@@ -73,19 +70,11 @@ private boolean notValidForPublishing(Record aRecord) {
 
   private List<KafkaHeader> getKafkaHeaders(Map<String, String> okapiHeaders, Record.RecordType recordType) {
     return List.of(
-      KafkaHeader.header(URL, okapiHeaders.get(URL)),
-      KafkaHeader.header(TENANT, okapiHeaders.get(TENANT)),
-      KafkaHeader.header(TOKEN, okapiHeaders.get(TOKEN)),
+      KafkaHeader.header(OKAPI_URL_HEADER, okapiHeaders.get(OKAPI_URL_HEADER)),
+      KafkaHeader.header(OKAPI_TENANT_HEADER, okapiHeaders.get(OKAPI_TENANT_HEADER)),
+      KafkaHeader.header(OKAPI_TOKEN_HEADER, okapiHeaders.get(OKAPI_TOKEN_HEADER)),
       KafkaHeader.header(RECORD_TYPE, recordType.value())
     );
   }
 
-  private String getEvent(Record eventRecord, EventType type) {
-    var event = new SourceRecordDomainEvent()
-      .withId(eventRecord.getId())
-      .withEventType(type)
-      .withEventPayload(eventRecord.getRawRecord().getContent());
-    return Json.encode(event);
-  }
-
 }
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/SourceRecordDomainEventType.java b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/SourceRecordDomainEventType.java
new file mode 100644
index 000000000..983d3fe25
--- /dev/null
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/SourceRecordDomainEventType.java
@@ -0,0 +1,5 @@
+package org.folio.services.domainevent;
+
+public enum SourceRecordDomainEventType {
+  SOURCE_RECORD_CREATED, SOURCE_RECORD_UPDATED
+}
diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java
index 58a8a630b..85a263b77 100644
--- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java
+++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java
@@ -1,12 +1,40 @@
 package org.folio.services.handlers;
 
+import static java.lang.String.format;
+import static org.apache.commons.lang.StringUtils.isEmpty;
+import static org.apache.commons.lang.StringUtils.isNotEmpty;
+import static org.folio.dao.util.MarcUtil.reorderMarcRecordFields;
+import static org.folio.dao.util.RecordDaoUtil.filterRecordByExternalId;
+import static org.folio.dao.util.RecordDaoUtil.filterRecordByNotSnapshotId;
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING;
+import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE;
+import static
org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER; +import static org.folio.services.util.AdditionalFieldsUtil.HR_ID_FROM_FIELD; +import static org.folio.services.util.AdditionalFieldsUtil.TAG_999; +import static org.folio.services.util.AdditionalFieldsUtil.addFieldToMarcRecord; +import static org.folio.services.util.AdditionalFieldsUtil.fillHrIdFieldInMarcRecord; +import static org.folio.services.util.AdditionalFieldsUtil.getValueFromControlledField; +import static org.folio.services.util.AdditionalFieldsUtil.isFieldsFillingNeeded; +import static org.folio.services.util.AdditionalFieldsUtil.remove035WithActualHrId; +import static org.folio.services.util.AdditionalFieldsUtil.updateLatestTransactionDate; +import static org.folio.services.util.EventHandlingUtil.sendEventToKafka; +import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; +import static org.folio.services.util.RestUtil.retrieveOkapiConnectionParams; + import io.vertx.core.Future; import io.vertx.core.Promise; import io.vertx.core.Vertx; import io.vertx.core.json.Json; import io.vertx.core.json.JsonObject; import io.vertx.kafka.client.producer.KafkaHeader; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.lang3.tuple.Pair; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -32,36 +60,6 @@ import org.folio.services.util.TypeConnection; import org.jooq.Condition; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.atomic.AtomicInteger; - -import static java.lang.String.format; -import static org.apache.commons.lang.StringUtils.isEmpty; -import static org.apache.commons.lang.StringUtils.isNotEmpty; -import static org.folio.dao.util.MarcUtil.reorderMarcRecordFields; -import static org.folio.dao.util.RecordDaoUtil.filterRecordByExternalId; -import static org.folio.dao.util.RecordDaoUtil.filterRecordByNotSnapshotId; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING; -import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; -import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; -import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER; -import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER; -import static org.folio.services.util.AdditionalFieldsUtil.HR_ID_FROM_FIELD; -import static org.folio.services.util.AdditionalFieldsUtil.TAG_999; -import static org.folio.services.util.AdditionalFieldsUtil.addFieldToMarcRecord; -import static org.folio.services.util.AdditionalFieldsUtil.fillHrIdFieldInMarcRecord; -import static org.folio.services.util.AdditionalFieldsUtil.getValueFromControlledField; -import static org.folio.services.util.AdditionalFieldsUtil.isFieldsFillingNeeded; -import static org.folio.services.util.AdditionalFieldsUtil.remove035WithActualHrId; -import static org.folio.services.util.AdditionalFieldsUtil.updateLatestTransactionDate; -import static org.folio.services.util.EventHandlingUtil.sendEventToKafka; -import static 
org.folio.services.util.EventHandlingUtil.toOkapiHeaders; -import static org.folio.services.util.RestUtil.retrieveOkapiConnectionParams; - public abstract class AbstractPostProcessingEventHandler implements EventHandler { private static final String USER_ID_HEADER = "userId"; @@ -254,8 +252,8 @@ private Future updatePreviousRecordsState(String externalId, String snapsh Condition condition = filterRecordByNotSnapshotId(snapshotId) .and(filterRecordByExternalId(externalId)); - return recordService.getRecords(condition, getDbType(), new ArrayList<>(), 0, 999, okapiHeaders.get(TENANT)) - .compose(recordCollection -> { + return recordService.getRecords(condition, getDbType(), new ArrayList<>(), 0, 999, + okapiHeaders.get(OKAPI_TENANT_HEADER)).compose(recordCollection -> { Promise result = Promise.promise(); @SuppressWarnings("squid:S3740") List> futures = new ArrayList<>(); @@ -315,7 +313,7 @@ private void executeHridManipulation(Record record, JsonObject externalEntity) { * @return - Future with Record result */ private Future saveRecord(Record record, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); return recordService.getRecordById(record.getId(), tenantId) .compose(r -> { if (r.isPresent()) { @@ -354,7 +352,7 @@ private Future saveRecordForCentralTenant(DataImportEventPayload dataImp LOG.info("handle:: Processing AbstractPostProcessingEventHandler - saving record by jobExecutionId: {} for the central tenantId: {}", jobExecutionId, centralTenantId); var okapiHeaders = toOkapiHeaders(dataImportEventPayload); if (centralTenantId != null) { - okapiHeaders.put(TENANT, centralTenantId); + okapiHeaders.put(OKAPI_TENANT_HEADER, centralTenantId); return snapshotService.copySnapshotToOtherTenant(record.getSnapshotId(), dataImportEventPayload.getTenant(), centralTenantId) .compose(f -> saveRecord(record, okapiHeaders)); } diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java index 5b6f38a36..09c1b9d49 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java @@ -1,10 +1,33 @@ package org.folio.services.handlers.actions; +import static java.lang.String.format; +import static java.util.Objects.isNull; +import static java.util.Objects.nonNull; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.folio.ActionProfile.Action.UPDATE; +import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.services.handlers.match.AbstractMarcMatchEventHandler.CENTRAL_TENANT_ID; +import static org.folio.services.util.AdditionalFieldsUtil.HR_ID_FROM_FIELD; +import static org.folio.services.util.AdditionalFieldsUtil.addControlledFieldToMarcRecord; +import static org.folio.services.util.AdditionalFieldsUtil.fill035FieldInMarcRecordIfNotExists; +import static org.folio.services.util.AdditionalFieldsUtil.getValueFromControlledField; +import static org.folio.services.util.AdditionalFieldsUtil.normalize035; +import static org.folio.services.util.AdditionalFieldsUtil.remove003FieldIfNeeded; +import static 
org.folio.services.util.AdditionalFieldsUtil.remove035WithActualHrId; +import static org.folio.services.util.AdditionalFieldsUtil.updateLatestTransactionDate; +import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; + import io.vertx.core.Future; import io.vertx.core.Vertx; import io.vertx.core.json.Json; import io.vertx.core.json.JsonObject; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; import java.util.Optional; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; import java.util.function.Function; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; @@ -27,30 +50,6 @@ import org.folio.services.caches.MappingParametersSnapshotCache; import org.folio.services.util.RestUtil; -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; - -import static java.lang.String.format; -import static java.util.Objects.isNull; -import static java.util.Objects.nonNull; -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.folio.ActionProfile.Action.UPDATE; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; -import static org.folio.services.handlers.match.AbstractMarcMatchEventHandler.CENTRAL_TENANT_ID; -import static org.folio.services.util.AdditionalFieldsUtil.HR_ID_FROM_FIELD; -import static org.folio.services.util.AdditionalFieldsUtil.addControlledFieldToMarcRecord; -import static org.folio.services.util.AdditionalFieldsUtil.fill035FieldInMarcRecordIfNotExists; -import static org.folio.services.util.AdditionalFieldsUtil.getValueFromControlledField; -import static org.folio.services.util.AdditionalFieldsUtil.normalize035; -import static org.folio.services.util.AdditionalFieldsUtil.remove003FieldIfNeeded; -import static org.folio.services.util.AdditionalFieldsUtil.remove035WithActualHrId; -import static org.folio.services.util.AdditionalFieldsUtil.updateLatestTransactionDate; -import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; - public abstract class AbstractUpdateModifyEventHandler implements EventHandler { private static final Logger LOG = LogManager.getLogger(); @@ -122,7 +121,7 @@ public CompletableFuture handle(DataImportEventPayload p String centralTenantId = payload.getContext().get(CENTRAL_TENANT_ID); var okapiHeaders = toOkapiHeaders(payload); if (centralTenantId != null) { - okapiHeaders.put(TENANT, centralTenantId); + okapiHeaders.put(OKAPI_TENANT_HEADER, centralTenantId); return snapshotService.copySnapshotToOtherTenant(changedRecord.getSnapshotId(), payload.getTenant(), centralTenantId) .compose(snapshot -> recordService.saveRecord(changedRecord, okapiHeaders)); } diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java index 5f74b924b..8b836df5d 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java @@ -2,10 +2,9 @@ import static java.util.Arrays.stream; import static java.util.Objects.nonNull; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.okapi.common.XOkapiHeaders.TOKEN; -import static org.folio.okapi.common.XOkapiHeaders.URL; -import 
static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType;
+import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER;
+import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER;
+import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER;
 import static org.folio.services.domainevent.RecordDomainEventPublisher.RECORD_DOMAIN_EVENT_TOPIC;
 import static org.folio.services.util.KafkaUtil.extractHeaderValue;
 
@@ -30,6 +29,7 @@ import org.folio.rest.jaxrs.model.Event;
 import org.folio.rest.jaxrs.model.EventMetadata;
 import org.folio.rest.tools.utils.ModuleName;
+import org.folio.services.domainevent.SourceRecordDomainEventType;
 
 public final class EventHandlingUtil {
 
@@ -107,7 +107,7 @@ public static String constructModuleName() {
   }
 
   public static String createTopicName(String eventType, String tenantId, KafkaConfig kafkaConfig) {
-    if (stream(EventType.values()).anyMatch(et -> et.value().equals(eventType))) {
+    if (stream(SourceRecordDomainEventType.values()).anyMatch(et -> et.name().equals(eventType))) {
       return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), tenantId, RECORD_DOMAIN_EVENT_TOPIC);
     }
     return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), KafkaTopicNameHelper.getDefaultNameSpace(),
@@ -124,17 +124,21 @@ public static KafkaProducer createProducer(String eventType, Kaf
   public static Map<String, String> toOkapiHeaders(DataImportEventPayload eventPayload) {
     var okapiHeaders = new HashMap<String, String>();
-    okapiHeaders.put(URL, eventPayload.getOkapiUrl());
-    okapiHeaders.put(TENANT, eventPayload.getTenant());
-    okapiHeaders.put(TOKEN, eventPayload.getToken());
+    okapiHeaders.put(OKAPI_URL_HEADER, eventPayload.getOkapiUrl());
+    okapiHeaders.put(OKAPI_TENANT_HEADER, eventPayload.getTenant());
+    okapiHeaders.put(OKAPI_TOKEN_HEADER, eventPayload.getToken());
     return okapiHeaders;
   }
 
+  public static Map<String, String> toOkapiHeaders(List<KafkaHeader> kafkaHeaders) {
+    return toOkapiHeaders(kafkaHeaders, null);
+  }
+
   public static Map<String, String> toOkapiHeaders(List<KafkaHeader> kafkaHeaders, String eventTenantId) {
     var okapiHeaders = new HashMap<String, String>();
-    okapiHeaders.put(URL, extractHeaderValue(URL, kafkaHeaders));
-    okapiHeaders.put(TENANT, nonNull(eventTenantId) ? eventTenantId : extractHeaderValue(TENANT, kafkaHeaders));
-    okapiHeaders.put(TOKEN, extractHeaderValue(TOKEN, kafkaHeaders));
+    okapiHeaders.put(OKAPI_URL_HEADER, extractHeaderValue(OKAPI_URL_HEADER, kafkaHeaders));
+    okapiHeaders.put(OKAPI_TENANT_HEADER, nonNull(eventTenantId) ?
eventTenantId : extractHeaderValue(OKAPI_TENANT_HEADER, kafkaHeaders)); + okapiHeaders.put(OKAPI_TOKEN_HEADER, extractHeaderValue(OKAPI_TOKEN_HEADER, kafkaHeaders)); return okapiHeaders; } diff --git a/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java index 099849baa..4de699fa8 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java @@ -1,7 +1,7 @@ package org.folio.consumers; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import com.fasterxml.jackson.databind.ObjectMapper; import io.vertx.core.json.Json; @@ -84,7 +84,7 @@ record = new Record() .withRecordType(MARC_AUTHORITY) .withRawRecord(rawRecord) .withParsedRecord(parsedRecord); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders)) .onSuccess(ar -> async.complete()) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java b/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java index 22fdc8e4f..8fac17c66 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java @@ -1,11 +1,21 @@ package org.folio.dao; +import static org.folio.dao.RecordDaoImpl.INDEXERS_DELETION_LOCK_NAMESPACE_ID; +import static org.folio.rest.jaxrs.model.Record.State.ACTUAL; +import static org.folio.rest.jaxrs.model.Record.State.DELETED; +import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; + import com.fasterxml.jackson.databind.ObjectMapper; import io.vertx.core.Future; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.UUID; import org.folio.TestMocks; import org.folio.TestUtil; import org.folio.dao.util.AdvisoryLockUtil; @@ -22,7 +32,6 @@ import org.folio.services.AbstractLBServiceTest; import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.util.TypeConnection; - import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -31,17 +40,6 @@ import org.mockito.MockitoAnnotations; import org.springframework.test.util.ReflectionTestUtils; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import java.util.UUID; - -import static org.folio.dao.RecordDaoImpl.INDEXERS_DELETION_LOCK_NAMESPACE_ID; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.Record.State.ACTUAL; -import static org.folio.rest.jaxrs.model.Record.State.DELETED; -import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; - @RunWith(VertxUnitRunner.class) public class RecordDaoImplTest extends 
AbstractLBServiceTest { @@ -92,7 +90,7 @@ public void setUp(TestContext context) throws IOException { .withExternalIdsHolder(new ExternalIdsHolder() .withInstanceId(UUID.randomUUID().toString())); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) .compose(savedSnapshot -> recordDao.saveRecord(record, okapiHeaders)) .compose(savedSnapshot -> recordDao.saveRecord(deletedRecord, okapiHeaders)) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java index eeac85ac7..870b83ac4 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java @@ -4,8 +4,10 @@ import static org.folio.EntityLinksKafkaTopic.INSTANCE_AUTHORITY; import static org.folio.EntityLinksKafkaTopic.LINKS_STATS; import static org.folio.RecordStorageKafkaTopic.MARC_BIB; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.LinkUpdateReport.Status.FAIL; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; @@ -84,9 +86,9 @@ public class AuthorityLinkChunkKafkaHandlerTest extends AbstractLBServiceTest { private static final String USER_ID = UUID.randomUUID().toString(); private static final ObjectMapper objectMapper = new ObjectMapper(); private static final Map OKAPI_HEADERS = Map.of( - XOkapiHeaders.URL, OKAPI_URL, - XOkapiHeaders.TENANT, TENANT_ID, - XOkapiHeaders.TOKEN, TOKEN, + OKAPI_URL_HEADER, OKAPI_URL, + OKAPI_TENANT_HEADER, TENANT_ID, + OKAPI_TOKEN_HEADER, TOKEN, XOkapiHeaders.USER_ID, USER_ID ); private final RawRecord rawRecord = new RawRecord().withId(RECORD_ID) @@ -145,7 +147,7 @@ record = new Record() .withSnapshotId(snapshot.getJobExecutionId()) .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(ERROR_INSTANCE_ID)); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders)) .compose(savedRecord -> recordService.saveRecord(secondRecord, okapiHeaders)) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java index bffdc9ada..789309018 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java @@ -1,11 +1,23 @@ package org.folio.services; +import static org.folio.ActionProfile.Action.DELETE; +import static org.folio.ActionProfile.Action.UPDATE; +import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_DELETED; 
+import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; +import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; + import io.vertx.core.json.Json; import io.vertx.core.json.JsonObject; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.util.Date; +import java.util.HashMap; import java.util.Map; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.dao.RecordDaoImpl; @@ -23,22 +35,9 @@ import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; - -import java.io.IOException; -import java.util.Date; -import java.util.HashMap; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import static org.folio.ActionProfile.Action.DELETE; -import static org.folio.ActionProfile.Action.UPDATE; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_DELETED; -import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; -import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; - @RunWith(VertxUnitRunner.class) public class MarcAuthorityDeleteEventHandlerTest extends AbstractLBServiceTest { @@ -96,7 +95,7 @@ public void shouldDeleteRecord(TestContext context) { .withFolioRecord(ActionProfile.FolioRecord.MARC_AUTHORITY) ) ); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record, okapiHeaders) // when .onSuccess(ar -> eventHandler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java index 3198628a6..bd659ad58 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java @@ -1,5 +1,16 @@ package org.folio.services; +import static java.util.Collections.singletonList; +import static org.folio.MatchDetail.MatchCriterion.EXACTLY_MATCHES; +import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_CREATED; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_MATCHED; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_NOT_MATCHED; +import static org.folio.rest.jaxrs.model.MatchExpression.DataValueType.VALUE_FROM_RECORD; +import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; +import static org.folio.rest.jaxrs.model.ProfileType.MATCH_PROFILE; +import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; + import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; import io.vertx.core.json.Json; @@ -7,7 +18,13 @@ import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import 
java.util.List; import java.util.Map; +import java.util.UUID; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; import org.folio.MatchDetail; @@ -37,24 +54,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.UUID; - -import static java.util.Collections.singletonList; -import static org.folio.MatchDetail.MatchCriterion.EXACTLY_MATCHES; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_CREATED; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_MATCHED; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_NOT_MATCHED; -import static org.folio.rest.jaxrs.model.MatchExpression.DataValueType.VALUE_FROM_RECORD; -import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; -import static org.folio.rest.jaxrs.model.ProfileType.MATCH_PROFILE; -import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; - @RunWith(VertxUnitRunner.class) public class MarcAuthorityMatchEventHandlerTest extends AbstractLBServiceTest { @@ -175,7 +174,7 @@ public void shouldMatchBy999ffsField(TestContext context) { ))) )))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -225,7 +224,7 @@ public void shouldMatchBy001Field(TestContext context) { new Field().withLabel("recordSubfield").withValue("") ))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -276,7 +275,7 @@ public void shouldMatchBy010aField(TestContext context) { ))) )))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -324,7 +323,7 @@ public void shouldNotMatchBy999ffsField(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -370,7 +369,7 @@ public void shouldNotMatchBy001Field(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(record -> handler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java 
b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java index d9ec3939f..05b2c97de 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java @@ -1,29 +1,17 @@ package org.folio.services; import static com.github.tomakehurst.wiremock.client.WireMock.get; - import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_UPDATED; import static org.folio.rest.jaxrs.model.EntityType.MARC_AUTHORITY; import static org.folio.rest.jaxrs.model.MappingDetail.MarcMappingOption.UPDATE; -import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.JOB_PROFILE; +import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeaderAndDate; -import java.io.IOException; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.client.WireMock; import com.github.tomakehurst.wiremock.common.Slf4jNotifier; @@ -38,15 +26,16 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; -import org.folio.services.domainevent.RecordDomainEventPublisher; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; - +import java.io.IOException; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.JobProfile; @@ -68,7 +57,15 @@ import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.Snapshot; import org.folio.services.caches.MappingParametersSnapshotCache; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.handlers.actions.MarcAuthorityUpdateModifyEventHandler; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -186,7 +183,7 @@ record = new Record() .withParsedRecord(parsedRecord); ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID); - var 
okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(queryExecutor, snapshot) .compose(v -> recordService.saveRecord(record, okapiHeaders)) .compose(v -> SnapshotDaoUtil.save(queryExecutor, snapshotForRecordUpdate)) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java index 5c3a2c126..1b8f5e6dd 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java @@ -11,15 +11,14 @@ import static org.apache.commons.lang3.RandomUtils.nextInt; import static org.folio.ActionProfile.Action.MODIFY; import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_UPDATED; import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC; import static org.folio.rest.jaxrs.model.MappingDetail.MarcMappingOption.UPDATE; - import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.JOB_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.services.util.AdditionalFieldsUtil.TAG_005; import com.fasterxml.jackson.core.JsonProcessingException; @@ -36,7 +35,6 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; - import java.io.IOException; import java.util.Collections; import java.util.Date; @@ -48,7 +46,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; - import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.InstanceLinkDtoCollection; @@ -293,10 +290,10 @@ record = new Record() ReactiveClassicGenericQueryExecutor queryExecutorCentralTenant = postgresClientFactory.getQueryExecutor(CENTRAL_TENANT_ID); SnapshotDaoUtil.save(queryExecutorLocalTenant, snapshot) - .compose(v -> recordService.saveRecord(record, Map.of(TENANT, TENANT_ID))) + .compose(v -> recordService.saveRecord(record, Map.of(OKAPI_TENANT_HEADER, TENANT_ID))) .compose(v -> SnapshotDaoUtil.save(queryExecutorLocalTenant, snapshotForRecordUpdate)) .compose(v -> SnapshotDaoUtil.save(queryExecutorCentralTenant, snapshot_2)) - .compose(v -> recordService.saveRecord(record_2, Map.of(TENANT, CENTRAL_TENANT_ID))) + .compose(v -> recordService.saveRecord(record_2, Map.of(OKAPI_TENANT_HEADER, CENTRAL_TENANT_ID))) .onComplete(context.asyncAssertSuccess()); } @@ -713,7 +710,7 @@ public void shouldNotUpdateBibFieldWhen500ErrorGetEntityLinkRequest(TestContext .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(instanceId)) .withMetadata(new Metadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), secondSnapshot) .compose(v -> recordService.saveRecord(secondRecord, okapiHeaders)) .compose(v -> 
SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshotForRecordUpdate)) @@ -891,7 +888,7 @@ private void verifyBibRecordUpdate(String incomingParsedContent, String expected .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(instanceId)) .withMetadata(new Metadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), secondSnapshot) .compose(v -> recordService.saveRecord(secondRecord, okapiHeaders)) .compose(v -> SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshotForRecordUpdate)) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java index b319404bf..d0e430146 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java @@ -1,9 +1,8 @@ package org.folio.services; import static java.util.Collections.singletonList; - import static org.folio.MatchDetail.MatchCriterion.EXACTLY_MATCHES; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_MATCHED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_NOT_MATCHED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDING_RECORD_CREATED; @@ -12,14 +11,6 @@ import static org.folio.rest.jaxrs.model.ProfileType.MATCH_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_HOLDING; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; - import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; import io.vertx.core.json.Json; @@ -27,16 +18,13 @@ import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; -import org.folio.services.domainevent.RecordDomainEventPublisher; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - +import java.io.IOException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; import org.folio.MatchDetail; @@ -55,7 +43,16 @@ import org.folio.rest.jaxrs.model.RawRecord; import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.Snapshot; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.handlers.match.MarcHoldingsMatchEventHandler; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; @RunWith(VertxUnitRunner.class) public class MarcHoldingsMatchEventHandlerTest 
extends AbstractLBServiceTest { @@ -177,7 +174,7 @@ public void shouldMatchBy999ffsField(TestContext context) { ))) )))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -227,7 +224,7 @@ public void shouldMatchBy001Field(TestContext context) { new Field().withLabel("recordSubfield").withValue("") ))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -278,7 +275,7 @@ public void shouldMatchBy010aField(TestContext context) { ))) )))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -326,7 +323,7 @@ public void shouldNotMatchBy999ffsField(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -372,7 +369,7 @@ public void shouldNotMatchBy001Field(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(record -> handler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java index defffe2bc..3dbb9b299 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java @@ -1,9 +1,7 @@ package org.folio.services; import static com.github.tomakehurst.wiremock.client.WireMock.get; - import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_MODIFIED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_UPDATED; import static org.folio.rest.jaxrs.model.EntityType.MARC_HOLDINGS; @@ -12,19 +10,9 @@ import static org.folio.rest.jaxrs.model.ProfileType.JOB_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static 
org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeaderAndDate; -import java.io.IOException; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.client.WireMock; import com.github.tomakehurst.wiremock.common.Slf4jNotifier; @@ -39,15 +27,16 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; -import org.folio.services.domainevent.RecordDomainEventPublisher; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; - +import java.io.IOException; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.JobProfile; @@ -69,7 +58,15 @@ import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.Snapshot; import org.folio.services.caches.MappingParametersSnapshotCache; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.handlers.actions.MarcHoldingsUpdateModifyEventHandler; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -187,7 +184,7 @@ record = new Record() .withParsedRecord(parsedRecord); ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(queryExecutor, snapshot) .compose(v -> recordService.saveRecord(record, okapiHeaders)) .compose(v -> SnapshotDaoUtil.save(queryExecutor, snapshotForRecordUpdate)) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java index 8726b2bf8..9a79e9587 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java @@ -1,12 +1,27 @@ package org.folio.services; +import static org.folio.dao.util.QMEventTypes.QM_ERROR; +import static org.folio.dao.util.QMEventTypes.QM_RECORD_UPDATED; +import static org.folio.dao.util.QMEventTypes.QM_SRS_MARC_RECORD_UPDATED; +import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER; +import static org.folio.kafka.KafkaTopicNameHelper.formatTopicName; +import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace; +import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; + import com.fasterxml.jackson.databind.ObjectMapper; import 
io.vertx.core.Future; import io.vertx.core.json.Json; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; import java.util.Map; +import java.util.UUID; +import java.util.concurrent.TimeUnit; import net.mguenther.kafka.junit.KeyValue; import net.mguenther.kafka.junit.ObserveKeyValues; import net.mguenther.kafka.junit.SendKeyValues; @@ -35,22 +50,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.UUID; -import java.util.concurrent.TimeUnit; - -import static org.folio.dao.util.QMEventTypes.QM_ERROR; -import static org.folio.dao.util.QMEventTypes.QM_RECORD_UPDATED; -import static org.folio.dao.util.QMEventTypes.QM_SRS_MARC_RECORD_UPDATED; -import static org.folio.kafka.KafkaTopicNameHelper.formatTopicName; -import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; - @RunWith(VertxUnitRunner.class) public class QuickMarcKafkaHandlerTest extends AbstractLBServiceTest { @@ -96,7 +95,7 @@ record = new Record() .withRecordType(MARC_BIB) .withRawRecord(rawRecord) .withParsedRecord(parsedRecord); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders)) .onSuccess(ar -> async.complete()) @@ -120,7 +119,7 @@ public void shouldUpdateParsedRecordAndSendRecordUpdatedEvent(TestContext contex ParsedRecord parsedRecord = record.getParsedRecord(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); Future future = recordService.saveRecord(record, okapiHeaders); ParsedRecordDto parsedRecordDto = new ParsedRecordDto() diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java index 131ec2c5f..24f1e02ca 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java @@ -1,7 +1,7 @@ package org.folio.services; import static java.util.Comparator.comparing; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER; import static org.folio.rest.jooq.Tables.RECORDS_LB; import static org.folio.services.RecordServiceImpl.INDICATOR; import static org.folio.services.RecordServiceImpl.SUBFIELD_S; @@ -370,7 +370,7 @@ public void shouldSaveMarcBibRecordWithMatchedIdFrom999field(TestContext context .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -416,7 +416,7 @@ public void 
shouldFailDuringUpdateRecordGenerationIfIncomingMatchedIdNotEqualToM .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.updateRecordGeneration(matchedId, record, okapiHeaders).onComplete(save -> { context.assertTrue(save.failed()); @@ -454,7 +454,7 @@ public void shouldFailDuringUpdateRecordGenerationIfRecordWithIdAsIncomingMatche .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.updateRecordGeneration(matchedId, record, okapiHeaders).onComplete(save -> { context.assertTrue(save.failed()); @@ -510,7 +510,7 @@ public void shouldFailUpdateRecordGenerationIfDuplicateError(TestContext context .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record1, okapiHeaders).onComplete(record1Saved -> { if (record1Saved.failed()) { @@ -574,7 +574,7 @@ public void shouldUpdateRecordGeneration(TestContext context) { .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record1, okapiHeaders).onComplete(record1Saved -> { if (record1Saved.failed()) { @@ -632,7 +632,7 @@ public void shouldUpdateRecordGenerationByMatchId(TestContext context) { .withMetadata(mock.getMetadata()); var async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(recordToSave, okapiHeaders).onComplete(savedRecord -> { if (savedRecord.failed()) { @@ -716,7 +716,7 @@ public void shouldSaveMarcBibRecordWithMatchedIdFromRecordId(TestContext context .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -743,7 +743,7 @@ public void shouldSaveMarcBibRecordWithMatchedIdFromRecordId(TestContext context public void shouldSaveEdifactRecordAndNotSet999Field(TestContext context) { Async async = context.async(); Record record = TestMocks.getRecords(Record.RecordType.EDIFACT); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -797,7 +797,7 @@ public void shouldSaveMarcBibRecordWithMatchedIdFromExistingSourceRecord(TestCon .withAdditionalInfo(original.getAdditionalInfo()) .withExternalIdsHolder(externalIdsHolder) .withMetadata(original.getMetadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, 
TENANT_ID); recordService.saveRecord(record1, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -876,7 +876,7 @@ public void shouldFailToSaveRecord(TestContext context) { .withAdditionalInfo(valid.getAdditionalInfo()) .withExternalIdsHolder(valid.getExternalIdsHolder()) .withMetadata(valid.getMetadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(invalid, okapiHeaders).onComplete(save -> { context.assertTrue(save.failed()); String expected = "Invalid UUID string: " + fakeSnapshotId; @@ -909,7 +909,7 @@ public void shouldSaveMarcBibRecordsWithExpectedErrors(TestContext context) { public void shouldUpdateMarcRecord(TestContext context) { Async async = context.async(); Record original = TestMocks.getRecord(0); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(original, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); @@ -962,7 +962,7 @@ public void shouldUpdateRecordState(TestContext context) { .withAdditionalInfo(original.getAdditionalInfo()) .withExternalIdsHolder(original.getExternalIdsHolder()) .withMetadata(original.getMetadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(original, okapiHeaders) .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, okapiHeaders)) @@ -999,7 +999,7 @@ public void shouldUpdateMarcAuthorityRecordStateToDeleted(TestContext context) { .withAdditionalInfo(original.getAdditionalInfo()) .withExternalIdsHolder(original.getExternalIdsHolder()) .withMetadata(original.getMetadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(original, okapiHeaders) .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, okapiHeaders)) @@ -1028,7 +1028,7 @@ public void shouldUpdateMarcAuthorityRecordStateToDeleted(TestContext context) { public void shouldUpdateEdifactRecord(TestContext context) { Async async = context.async(); Record original = TestMocks.getEdifactRecord(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(original, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -1074,7 +1074,7 @@ public void shouldUpdateEdifactRecord(TestContext context) { public void shouldFailToUpdateRecord(TestContext context) { Async async = context.async(); Record record = TestMocks.getRecord(0); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.getRecordById(record.getMatchedId(), TENANT_ID).onComplete(get -> { if (get.failed()) { @@ -1188,7 +1188,7 @@ public void shouldGetMarcBibSourceRecordByMatchedIdNotEqualToId(TestContext cont .withAdditionalInfo(expected.getAdditionalInfo()) .withExternalIdsHolder(expected.getExternalIdsHolder()) .withMetadata(expected.getMetadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders) .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, okapiHeaders)) @@ -1277,7 +1277,7 @@ public void shouldGetFormattedMarcHoldingsRecord(TestContext context) { public void shouldGetFormattedEdifactRecord(TestContext context) { Async async = context.async(); 
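// Throughout these tests, the record service/DAO calls now take the Okapi
// headers as a Map<String, String> keyed by OKAPI_TENANT_HEADER instead of a
// bare tenant id. On the consumer side this map is presumably produced by
// EventHandlingUtil.toOkapiHeaders; a minimal sketch of its assumed shape,
// inferred only from its call sites in this patch
// (toOkapiHeaders(consumerRecord.headers(), event.getTenant()) and
// toOkapiHeaders(kafkaHeaders, null)), not the actual implementation:
//
//   public static Map<String, String> toOkapiHeaders(List<KafkaHeader> kafkaHeaders, String tenantId) {
//     var okapiHeaders = new HashMap<String, String>();
//     // copy each Kafka header into a plain String-to-String map
//     kafkaHeaders.forEach(header -> okapiHeaders.put(header.key(), header.value().toString()));
//     // an explicitly supplied tenant fills in (or overrides) the x-okapi-tenant entry
//     if (tenantId != null) {
//       okapiHeaders.put(OKAPI_TENANT_HEADER, tenantId);
//     }
//     return okapiHeaders;
//   }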
 Record expected = TestMocks.getEdifactRecord();
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> {
       if (save.failed()) {
@@ -1300,7 +1300,7 @@ public void shouldGetFormattedDeletedRecord(TestContext context) {
     Async async = context.async();
     Record expected = TestMocks.getMarcBibRecord();
     expected.setState(State.DELETED);
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> {
       if (save.failed()) {
@@ -1347,7 +1347,7 @@ public void shouldDeleteMarcAuthorityRecordsBySnapshotId(TestContext context) {
   public void shouldUpdateSourceRecord(TestContext context) {
     Async async = context.async();
     Record expected = TestMocks.getRecord(0);
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> {
       if (save.failed()) {
@@ -1414,7 +1414,7 @@ public void shouldThrowExceptionWhenSavedDuplicateRecord(TestContext context) {
     RecordCollection recordCollection = new RecordCollection()
       .withRecords(expected)
       .withTotalRecords(expected.size());
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     List<Future<RecordsBatchResponse>> futures = List.of(recordService.saveRecords(recordCollection, okapiHeaders),
       recordService.saveRecords(recordCollection, okapiHeaders));
@@ -1548,7 +1548,7 @@ private void streamRecordsBySnapshotId(TestContext context, String snapshotId, R
   private void getMarcRecordById(TestContext context, Record expected) {
     Async async = context.async();
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> {
       if (save.failed()) {
@@ -1569,7 +1569,7 @@ private void getMarcRecordById(TestContext context, Record expected) {
   private void saveMarcRecord(TestContext context, Record expected, Record.RecordType marcBib) {
     Async async = context.async();
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     recordService.saveRecord(expected, okapiHeaders).onComplete(save -> {
       if (save.failed()) {
@@ -1595,7 +1595,7 @@ private void saveMarcRecord(TestContext context, Record expected, Record.RecordT
   private void saveMarcRecordWithGenerationGreaterThanZero(TestContext context, Record expected, Record.RecordType marcBib) {
     Async async = context.async();
     expected.setGeneration(1);
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     recordService.saveRecord(expected, okapiHeaders).onComplete(save -> {
       if (save.failed()) {
@@ -1628,7 +1628,7 @@ private void saveMarcRecords(TestContext context, Record.RecordType marcBib) {
     RecordCollection recordCollection = new RecordCollection()
       .withRecords(expected)
       .withTotalRecords(expected.size());
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     recordService.saveRecords(recordCollection, okapiHeaders).onComplete(batch -> {
       if (batch.failed()) {
         context.fail(batch.cause());
@@ -1659,7 +1659,7 @@ private void saveMarcRecordsWithExpectedErrors(TestContext context, Record.Recor
     RecordCollection recordCollection = new RecordCollection()
       .withRecords(expected)
       .withTotalRecords(expected.size());
-    var okapiHeaders = 
Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecords(recordCollection, okapiHeaders).onComplete(batch -> { if (batch.failed()) { context.fail(batch.cause()); @@ -1883,7 +1883,7 @@ private void getMarcSourceRecordsByListOfIdsThatAreDeleted(TestContext context, private void getMarcSourceRecordById(TestContext context, Record expected) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -1938,7 +1938,7 @@ private void updateParsedMarcRecords(TestContext context, Record.RecordType reco List expected = updated.stream() .map(Record::getParsedRecord) .collect(Collectors.toList()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.updateParsedRecords(recordCollection, okapiHeaders).onComplete(update -> { if (update.failed()) { context.fail(update.cause()); @@ -1967,7 +1967,7 @@ private void updateParsedMarcRecords(TestContext context, Record.RecordType reco private void updateParsedMarcRecordsAndGetOnlyActualRecord(TestContext context, Record expected) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { context.assertTrue(save.succeeded()); @@ -1987,7 +1987,7 @@ private void updateParsedMarcRecordsAndGetOnlyActualRecord(TestContext context, private void getFormattedMarcRecord(TestContext context, Record expected) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -2009,7 +2009,7 @@ private void getFormattedMarcRecord(TestContext context, Record expected) { private void updateSuppressFromDiscoveryForMarcRecord(TestContext context, Record expected) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -2085,7 +2085,7 @@ private void deleteMarcRecordsBySnapshotId(TestContext context, String snapshotI } private CompositeFuture saveRecords(List records) { - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); return GenericCompositeFuture.all(records.stream() .map(record -> recordService.saveRecord(record, okapiHeaders)) .collect(Collectors.toList()) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java index 3a2a918fd..56160f14c 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java @@ -1,10 +1,10 @@ package org.folio.services.domainevent; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.okapi.common.XOkapiHeaders.TOKEN; -import static org.folio.okapi.common.XOkapiHeaders.URL; -import static 
org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_CREATED; -import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_UPDATED; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER; +import static org.folio.services.domainevent.SourceRecordDomainEventType.SOURCE_RECORD_CREATED; +import static org.folio.services.domainevent.SourceRecordDomainEventType.SOURCE_RECORD_UPDATED; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; @@ -35,7 +35,7 @@ public void publishRecordCreated_shouldSendNoEvents_ifDomainEventsAreNotEnabled( // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", false); var aRecord = new Record(); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordCreated(aRecord, headers); @@ -49,7 +49,7 @@ public void publishRecordUpdated_shouldSendNoEvents_ifDomainEventsAreNotEnabled( // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", false); var aRecord = new Record(); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordUpdated(aRecord, headers); @@ -63,7 +63,7 @@ public void publishRecordCreated_shouldSendNoEvents_ifRecordHasNoType() { // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); var aRecord = new Record(); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordCreated(aRecord, headers); @@ -77,7 +77,7 @@ public void publishRecordUpdated_shouldSendNoEvents_ifRecordHasNoType() { // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); var aRecord = new Record(); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordUpdated(aRecord, headers); @@ -91,7 +91,7 @@ public void publishRecordCreated_shouldSendNoEvents_ifRecordContainsNoParsedCont // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); var aRecord = new Record().withRecordType(Record.RecordType.MARC_BIB); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordCreated(aRecord, headers); @@ -105,7 +105,7 @@ public void publishRecordUpdated_shouldSendNoEvents_ifRecordContainsNoParsedCont // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); var aRecord = new Record().withRecordType(Record.RecordType.MARC_BIB); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordUpdated(aRecord, headers); @@ -123,24 +123,18 @@ public void 
publishRecordCreated_shouldSendEvent_ifRecordIsValid() {
       .withId(UUID.randomUUID().toString())
       .withRecordType(Record.RecordType.MARC_BIB)
       .withRawRecord(new RawRecord().withContent(rawContent));
     var tenantId = "TENANT";
     var okapiUrl = "OKAPI_URL";
     var token = "TOKEN";
-    var givenHeaders = Map.of(TENANT, tenantId, URL, okapiUrl, TOKEN, token);
+    var givenHeaders = Map.of(OKAPI_TENANT_HEADER, tenantId, OKAPI_URL_HEADER, okapiUrl, OKAPI_TOKEN_HEADER, token);
     var expectedHeaders = getKafkaHeaders(okapiUrl, tenantId, token, aRecord);
-    var eventType = SOURCE_RECORD_CREATED.value();
-    var expectedPayload = "{"
-      + "\"id\":\"" + aRecord.getId() + "\""
-      + ",\"eventType\":\"" + eventType + "\""
-      + ",\"eventPayload\":\"" + rawContent + "\""
-      + "}";
+    var eventType = SOURCE_RECORD_CREATED.name();
 
     // when
     publisher.publishRecordCreated(aRecord, givenHeaders);
 
     // then
-    verify(kafkaSender).sendEventToKafka(tenantId, expectedPayload, eventType, expectedHeaders,
-      aRecord.getId());
+    verify(kafkaSender).sendEventToKafka(tenantId, rawContent, eventType, expectedHeaders, aRecord.getId());
   }
 
   @Test
@@ -155,28 +149,22 @@ public void publishRecordUpdated_shouldSendEvent_ifRecordIsValid() {
     var tenantId = "TENANT";
     var okapiUrl = "OKAPI_URL";
     var token = "TOKEN";
-    var givenHeaders = Map.of(TENANT, tenantId, URL, okapiUrl, TOKEN, token);
+    var givenHeaders = Map.of(OKAPI_TENANT_HEADER, tenantId, OKAPI_URL_HEADER, okapiUrl, OKAPI_TOKEN_HEADER, token);
     var expectedHeaders = getKafkaHeaders(okapiUrl, tenantId, token, aRecord);
-    var eventType = SOURCE_RECORD_UPDATED.value();
-    var expectedPayload = "{"
-      + "\"id\":\"" + aRecord.getId() + "\""
-      + ",\"eventType\":\"" + eventType + "\""
-      + ",\"eventPayload\":\"" + rawContent + "\""
-      + "}";
+    var eventType = SOURCE_RECORD_UPDATED.name();
 
     // when
     publisher.publishRecordUpdated(aRecord, givenHeaders);
 
     // then
-    verify(kafkaSender).sendEventToKafka(tenantId, expectedPayload, eventType, expectedHeaders,
-      aRecord.getId());
+    verify(kafkaSender).sendEventToKafka(tenantId, rawContent, eventType, expectedHeaders, aRecord.getId());
   }
 
   private List<KafkaHeader> getKafkaHeaders(String okapiUrl, String tenantId, String token, Record aRecord) {
     return List.of(
-      KafkaHeader.header(URL, okapiUrl),
-      KafkaHeader.header(TENANT, tenantId),
-      KafkaHeader.header(TOKEN, token),
+      KafkaHeader.header(OKAPI_URL_HEADER, okapiUrl),
+      KafkaHeader.header(OKAPI_TENANT_HEADER, tenantId),
+      KafkaHeader.header(OKAPI_TOKEN_HEADER, token),
       KafkaHeader.header("folio.srs.recordType", aRecord.getRecordType().value())
     );
   }
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java
index f2fe2dd79..03330d0ba 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java
@@ -1,25 +1,16 @@
 package org.folio.services.handlers;
 
 import static com.github.tomakehurst.wiremock.client.WireMock.get;
-
-import static org.folio.okapi.common.XOkapiHeaders.TENANT;
 import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_AUTHORITY_CREATED_READY_FOR_POST_PROCESSING;
 import static 
org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_AUTHORITY_UPDATED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_CREATED; import static org.folio.rest.jaxrs.model.EntityType.AUTHORITY; -import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; +import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.services.util.AdditionalFieldsUtil.TAG_005; -import java.io.IOException; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; - import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.client.WireMock; import com.github.tomakehurst.wiremock.matching.RegexPattern; @@ -31,14 +22,13 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; -import org.folio.rest.jaxrs.model.Metadata; -import org.folio.services.RecordService; -import org.folio.services.SnapshotService; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; - +import java.io.IOException; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; @@ -49,11 +39,18 @@ import org.folio.rest.jaxrs.model.ExternalIdsHolder; import org.folio.rest.jaxrs.model.MappingMetadataDto; import org.folio.rest.jaxrs.model.MarcFieldProtectionSetting; +import org.folio.rest.jaxrs.model.Metadata; import org.folio.rest.jaxrs.model.ParsedRecord; import org.folio.rest.jaxrs.model.ProfileSnapshotWrapper; import org.folio.rest.jaxrs.model.RawRecord; import org.folio.rest.jaxrs.model.Record; +import org.folio.services.RecordService; +import org.folio.services.SnapshotService; import org.folio.services.util.AdditionalFieldsUtil; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; @RunWith(VertxUnitRunner.class) public class AuthorityPostProcessingEventHandlerTest extends AbstractPostProcessingEventHandlerTest { @@ -88,7 +85,7 @@ public void shouldSetAuthorityIdToRecord(TestContext context) { createDataImportEventPayload(payloadContext, DI_INVENTORY_AUTHORITY_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) @@ -246,7 +243,7 @@ public void shouldSetAuthorityIdToParsedRecordWhenContentHasField999(TestContext createDataImportEventPayload(payloadContext, DI_INVENTORY_AUTHORITY_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> 
handler.handle(dataImportEventPayload) @@ -433,7 +430,7 @@ public void shouldReturnFailedFutureWhenParsedRecordHasNoFields(TestContext cont .withToken(TOKEN); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java index 135149f41..a2a2b1dc1 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java @@ -1,7 +1,6 @@ package org.folio.services.handlers; import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDING_RECORD_CREATED; import static org.folio.rest.jaxrs.model.EntityType.HOLDINGS; @@ -9,15 +8,9 @@ import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_HOLDING; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.services.util.AdditionalFieldsUtil.TAG_005; -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; - import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.client.WireMock; import com.github.tomakehurst.wiremock.matching.RegexPattern; @@ -29,26 +22,31 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; -import org.folio.rest.jaxrs.model.MappingMetadataDto; -import org.folio.services.RecordService; -import org.folio.services.SnapshotService; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; - +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; import org.folio.TestUtil; import org.folio.kafka.KafkaConfig; import org.folio.processing.mapping.defaultmapper.processor.parameters.MappingParameters; +import org.folio.rest.jaxrs.model.MappingMetadataDto; import org.folio.rest.jaxrs.model.MarcFieldProtectionSetting; import org.folio.rest.jaxrs.model.ParsedRecord; import org.folio.rest.jaxrs.model.ProfileSnapshotWrapper; import org.folio.rest.jaxrs.model.RawRecord; import org.folio.rest.jaxrs.model.Record; +import org.folio.services.RecordService; +import org.folio.services.SnapshotService; import org.folio.services.util.AdditionalFieldsUtil; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; 
+import org.junit.runner.RunWith; @RunWith(VertxUnitRunner.class) public class HoldingsPostProcessingEventHandlerTest extends AbstractPostProcessingEventHandlerTest { @@ -83,7 +81,7 @@ public void shouldSetHoldingsIdToRecord(TestContext context) { createDataImportEventPayload(payloadContext, DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) @@ -197,7 +195,7 @@ public void shouldSetHoldingsIdToParsedRecordWhenContentHasField999(TestContext createDataImportEventPayload(payloadContext, DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) @@ -368,7 +366,7 @@ public void shouldSetHoldingsHridToParsedRecordWhenContentHasNotField001(TestCon createDataImportEventPayload(payloadContext, DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) @@ -441,7 +439,7 @@ public void shouldReturnFailedFutureWhenParsedRecordHasNoFields(TestContext cont .withToken(TOKEN); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java index 7ce917c14..6d8860fbc 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java @@ -1,5 +1,25 @@ package org.folio.services.handlers; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_ORDER_CREATED_READY_FOR_POST_PROCESSING; +import static org.folio.rest.jaxrs.model.EntityType.INSTANCE; +import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC; +import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; +import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; +import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; +import static 
org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.services.handlers.InstancePostProcessingEventHandler.POST_PROCESSING_RESULT_EVENT; +import static org.folio.services.util.AdditionalFieldsUtil.TAG_005; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.client.WireMock; import com.github.tomakehurst.wiremock.matching.RegexPattern; @@ -12,7 +32,13 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; @@ -42,33 +68,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; - -import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_ORDER_CREATED_READY_FOR_POST_PROCESSING; -import static org.folio.rest.jaxrs.model.EntityType.INSTANCE; -import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC; -import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; -import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; -import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; -import static org.folio.services.handlers.InstancePostProcessingEventHandler.POST_PROCESSING_RESULT_EVENT; -import static org.folio.services.util.AdditionalFieldsUtil.TAG_005; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - @RunWith(VertxUnitRunner.class) public class InstancePostProcessingEventHandlerTest extends AbstractPostProcessingEventHandlerTest { @@ -114,7 +113,7 @@ public void shouldSetInstanceIdToRecord(TestContext context) { createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) @@ -418,7 +417,7 @@ public void shouldSaveIncomingRecordAndMarkExistingAsOldWhenIncomingRecordHasSam DataImportEventPayload 
dataImportEventPayload = createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); Future future = recordDao.saveRecord(existingRecord, okapiHeaders) .compose(v -> Future.fromCompletionStage(handler.handle(dataImportEventPayload))); @@ -472,7 +471,7 @@ public void checkGeneration035FiledAfterUpdateMarcBib(TestContext context) throw DataImportEventPayload dataImportEventPayload = createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); Future future = recordDao.saveRecord(existingRecord, okapiHeaders) .compose(v -> Future.fromCompletionStage(handler.handle(dataImportEventPayload))); @@ -516,7 +515,7 @@ public void shouldSetInstanceIdToParsedRecordWhenContentHasField999(TestContext createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) @@ -688,7 +687,7 @@ public void shouldSetInstanceHridToParsedRecordWhenContentHasNotField001(TestCon createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) @@ -761,7 +760,7 @@ public void shouldReturnFailedFutureWhenParsedRecordHasNoFields(TestContext cont .withToken(TOKEN); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) @@ -894,7 +893,7 @@ public void shouldFillEventPayloadWithPostProcessingFlagIfOrderEventExists(TestC dataImportEventPayload.getContext().put(POST_PROCESSING_RESULT_EVENT, DI_ORDER_CREATED_READY_FOR_POST_PROCESSING.value()); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java b/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java index 34c902477..30c7b3293 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java @@ -1,10 +1,19 @@ package org.folio.verticle; +import static org.folio.rest.jaxrs.model.Record.State.ACTUAL; +import static org.folio.rest.jaxrs.model.Record.State.OLD; 
+import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING;
+import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER;
+import static org.jooq.impl.DSL.field;
+import static org.jooq.impl.DSL.name;
+import static org.jooq.impl.DSL.table;
+
 import io.vertx.core.Future;
 import io.vertx.ext.unit.Async;
 import io.vertx.ext.unit.TestContext;
 import io.vertx.ext.unit.junit.VertxUnitRunner;
 import java.util.Map;
+import java.util.UUID;
 import org.folio.TestMocks;
 import org.folio.dao.RecordDao;
 import org.folio.dao.RecordDaoImpl;
@@ -23,19 +32,9 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-
-import java.util.UUID;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
-import static org.folio.okapi.common.XOkapiHeaders.TENANT;
-import static org.folio.rest.jaxrs.model.Record.State.ACTUAL;
-import static org.folio.rest.jaxrs.model.Record.State.OLD;
-import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING;
-import static org.jooq.impl.DSL.field;
-import static org.jooq.impl.DSL.name;
-import static org.jooq.impl.DSL.table;
-
 @RunWith(VertxUnitRunner.class)
 public class MarcIndexersVersionDeletionVerticleTest extends AbstractLBServiceTest {
@@ -73,7 +72,7 @@ public void setUp(TestContext context) {
       .withRawRecord(TestMocks.getRecord(0).getRawRecord().withId(recordId))
       .withParsedRecord(TestMocks.getRecord(0).getParsedRecord().withId(recordId));
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot)
       .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders))
       .onComplete(save -> {
@@ -99,7 +98,7 @@ public void cleanUp(TestContext context) {
   public void shouldDeleteOldVersionsOfMarcIndexers(TestContext context) {
     Async async = context.async();
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     // performs record update in the DB that leads to new indexers creation with incremented version
     // so that previous existing indexers become old and should be deleted
     Future future = recordService.updateRecord(record, okapiHeaders)
@@ -119,7 +118,7 @@ public void shouldDeleteMarcIndexersRelatedToRecordInOldState(TestContext context) {
     Async async = context.async();
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     Future future = recordService.updateRecord(record.withState(OLD), okapiHeaders)
       .compose(v -> existMarcIndexersByRecordId(record.getId()))
       .onSuccess(context::assertTrue)
diff --git a/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java b/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java
index c9f7556b0..9f266b3d0 100644
--- a/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java
+++ b/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java
@@ -3,35 +3,25 @@
 import static com.github.tomakehurst.wiremock.client.WireMock.get;
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static java.util.Collections.singletonList;
-import static org.folio.ActionProfile.Action.UPDATE;
-import static org.folio.okapi.common.XOkapiHeaders.TENANT;
-import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
-import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING;
-import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC;
-import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE;
-import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeaderAndDate;
-import static org.junit.Assert.assertEquals;
-
 import static org.folio.ActionProfile.Action.DELETE;
+import static org.folio.ActionProfile.Action.UPDATE;
 import static org.folio.consumers.DataImportKafkaHandler.PROFILE_SNAPSHOT_ID_KEY;
 import static org.folio.consumers.ParsedRecordChunksKafkaHandler.JOB_EXECUTION_ID_HEADER;
 import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace;
 import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_MARC_FOR_DELETE_RECEIVED;
 import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_DELETED;
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
+import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING;
+import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC;
 import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE;
 import static org.folio.rest.jaxrs.model.ProfileType.JOB_PROFILE;
+import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE;
 import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB;
+import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER;
+import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeaderAndDate;
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.github.tomakehurst.wiremock.client.WireMock;
 import com.github.tomakehurst.wiremock.common.Slf4jNotifier;
@@ -44,15 +34,17 @@ import io.vertx.core.json.JsonObject;
 import io.vertx.ext.unit.TestContext;
 import io.vertx.ext.unit.junit.VertxUnitRunner;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
 import net.mguenther.kafka.junit.KeyValue;
 import net.mguenther.kafka.junit.ObserveKeyValues;
 import net.mguenther.kafka.junit.SendKeyValues;
-import org.folio.services.domainevent.RecordDomainEventPublisher;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-
 import org.folio.ActionProfile;
 import org.folio.JobProfile;
 import org.folio.MappingProfile;
@@ -77,6 +69,11 @@ import org.folio.rest.jaxrs.model.Record;
 import org.folio.rest.jaxrs.model.Snapshot;
 import org.folio.services.AbstractLBServiceTest;
+import org.folio.services.domainevent.RecordDomainEventPublisher;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
@@ -150,7 +147,7 @@ record = new Record()
     ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID);
     RecordDaoImpl recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher);
-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     SnapshotDaoUtil.save(queryExecutor, snapshot)
       .compose(v -> recordDao.saveRecord(record, okapiHeaders))
       .compose(v -> SnapshotDaoUtil.save(queryExecutor, snapshotForRecordUpdate))
diff --git a/ramls/source-record-domain-event.json b/ramls/source-record-domain-event.json
deleted file mode 100644
index 24fa5f1e4..000000000
--- a/ramls/source-record-domain-event.json
+++ /dev/null
@@ -1,97 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-04/schema#",
-  "description": "Source record domain event data model",
-  "javaType": "org.folio.rest.jaxrs.model.SourceRecordDomainEvent",
-  "type": "object",
-  "additionalProperties": false,
-  "properties": {
-    "id": {
-      "description": "UUID",
-      "type": "string",
-      "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$"
-    },
-    "eventType": {
-      "type": "string",
-      "enum": ["SOURCE_RECORD_CREATED", "SOURCE_RECORD_UPDATED"],
-      "description": "Source record domain event type"
-    },
-    "sourceRecordDomainEventMetadata": {
-      "description": "Event metadata",
-      "type": "object",
-      "additionalProperties": false,
-      "properties": {
-        "eventTTL": {
-          "description": "Time-to-live (TTL) for event in minutes",
-          "type": "integer"
-        },
-        "correlationId": {
-          "description": "Id to track related events, can be a meaningful string or a UUID",
-          "type": "string"
-        },
-        "originalEventId": {
-          "description": "Id of the event that started the sequence of related events",
-          "type": "string",
-          "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$"
-        },
-        "publisherCallback": {
-          "description": "Allows a publisher to provide a callback endpoint or an error Event Type to be notified that despite the fact that there are subscribers for such an event type no one has received the event within the specified period of time",
-          "type": "object",
-          "properties": {
-            "endpoint": {
-              "description": "Callback endpoint",
-              "type": "string"
-            },
-            "eventType": {
-              "description": "Error Event Type",
-              "type": "string"
-            }
-          }
-        },
-        "createdDate": {
-          "description": "Timestamp when event was created",
-          "type": "string",
-          "format": "date-time"
-        },
-        "publishedDate": {
-          "description": "Timestamp when event was initially published to the underlying topic",
-          "type": "string",
-          "format": "date-time"
-        },
-        "createdBy": {
-          "description": "Username of the user whose action caused an event",
-          "type": "string"
-        },
-        "publishedBy": {
-          "description": "Name and version of the module that published an event",
-          "type": "string"
-        }
-      },
-      "required": [
-        "eventTTL",
-        "publishedBy"
-      ]
-    },
-    "eventPayload": {
-      "type": "string",
-      "description": "The source record JSON string"
-    },
-    "tenant": {
-      "description": "Tenant id",
-      "type": "string"
-    },
-    "ts": {
-      "description": "Message timestamp",
-      "type": "string",
-      "format": "date-time"
-    }
-  },
-  "excludedFromEqualsAndHashCode": [
-    "eventMetadata",
-    "tenant",
-    "ts"
-  ],
-  "required": [
-    "id",
-    "eventType"
-  ]
-}
diff --git a/ramls/source-record-storage-records.raml b/ramls/source-record-storage-records.raml
index 964125f86..1d7a99083 100644
--- a/ramls/source-record-storage-records.raml
+++ b/ramls/source-record-storage-records.raml
@@ -29,7 +29,6 @@ types:
   linkUpdateReport: !include raml-storage/schemas/mod-source-record-storage/linkUpdateReport.json
   recordMatchingDto: !include raml-storage/schemas/dto/recordMatchingRqDto.json
   recordsIdentifiersCollection: !include raml-storage/schemas/dto/recordsIdentifiersCollection.json
-  sourceRecordDomainEvent: !include source-record-domain-event.json
 traits:
   validate: !include raml-storage/raml-util/traits/validation.raml