diff --git a/NEWS.md b/NEWS.md index e4202b743..03552f05b 100644 --- a/NEWS.md +++ b/NEWS.md @@ -20,6 +20,8 @@ * [MODSOURMAN-1122](https://issues.folio.org/browse/MODSOURMAN-1122) Add additional check for the childSnapshotWrappers * [MODSOURMAN-1140](https://folio-org.atlassian.net/browse/MODSOURMAN-1140) Invalidate cache before saving new parsed content in cache * [MODSOURMAN-1133](https://folio-org.atlassian.net/browse/MODSOURMAN-1133) Adjust SQL condition to include DISCARDED holding and items +* [MODDATAIMP-1001](https://folio-org.atlassian.net/browse/MODDATAIMP-1001) Remove 999 validation for instance creation +* [MODSOURMAN-956](https://folio-org.atlassian.net/browse/MODSOURMAN-956) Stop processing the job with incorrect profile ## 2023-10-13 v3.7.0 * [MODSOURMAN-1045](https://issues.folio.org/browse/MODSOURMAN-1045) Allow create action with non-matches for instance without match profile diff --git a/mod-source-record-manager-client/pom.xml b/mod-source-record-manager-client/pom.xml index 2327c17c6..9290b106c 100644 --- a/mod-source-record-manager-client/pom.xml +++ b/mod-source-record-manager-client/pom.xml @@ -44,7 +44,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.2.0 + 3.5.0 generate-sources diff --git a/mod-source-record-manager-server/pom.xml b/mod-source-record-manager-server/pom.xml index 8abdba7c1..911364cbb 100644 --- a/mod-source-record-manager-server/pom.xml +++ b/mod-source-record-manager-server/pom.xml @@ -233,7 +233,7 @@ org.folio data-import-processing-core - 4.2.0-SNAPSHOT + 4.2.1-SNAPSHOT org.folio @@ -285,7 +285,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.2.0 + 3.5.0 add_generated_sources_folder @@ -305,7 +305,7 @@ org.codehaus.mojo versions-maven-plugin - 2.8.1 + 2.16.2 false @@ -396,7 +396,7 @@ org.apache.maven.plugins maven-resources-plugin - 3.2.0 + 3.3.1 copy-resources @@ -437,7 +437,7 @@ org.apache.maven.plugins maven-shade-plugin - 3.2.4 + 3.5.2 package @@ -472,7 +472,7 @@ org.apache.maven.plugins maven-release-plugin 
- 2.5.3 + 3.0.1 clean verify v@{project.version} diff --git a/mod-source-record-manager-server/src/main/java/org/folio/dao/JournalRecordDaoImpl.java b/mod-source-record-manager-server/src/main/java/org/folio/dao/JournalRecordDaoImpl.java index cac083df7..06c80af07 100644 --- a/mod-source-record-manager-server/src/main/java/org/folio/dao/JournalRecordDaoImpl.java +++ b/mod-source-record-manager-server/src/main/java/org/folio/dao/JournalRecordDaoImpl.java @@ -346,7 +346,7 @@ private RecordProcessingLogDto mapJobLogEntryRow(Row row) { .withSourceRecordType(entityType) .withJobExecutionId(row.getValue(JOB_EXECUTION_ID).toString()) .withIncomingRecordId(row.getValue(INCOMING_RECORD_ID).toString()) - .withSourceRecordId(row.getValue(SOURCE_ID).toString()) + .withSourceRecordId(row.getValue(SOURCE_ID) != null ? row.getValue(SOURCE_ID).toString() : null) .withSourceRecordOrder(isEmpty(row.getString(INVOICE_ACTION_STATUS)) ? row.getInteger(SOURCE_RECORD_ORDER).toString() : row.getString(INVOICE_LINE_NUMBER)) @@ -553,32 +553,32 @@ private static RecordProcessingLogDtoCollection processMultipleHoldingsAndItemsI if (!ifNeedToMerge(entries)) { return recordProcessingLogDto; } - Map> relatedHoldingsInfoBySourceRecordId = + Map> relatedHoldingsInfoByIncomingRecordId = entries.stream() .collect(Collectors.groupingBy( - RecordProcessingLogDto::getSourceRecordId, + RecordProcessingLogDto::getIncomingRecordId, Collectors.mapping(RecordProcessingLogDto::getRelatedHoldingsInfo, Collectors.flatMapping(List::stream, toList()) ))); - Map> relatedItemInfoBySourceId = + Map> relatedItemInfoByIncomingRecordId = entries.stream() .collect(Collectors.groupingBy( - RecordProcessingLogDto::getSourceRecordId, + RecordProcessingLogDto::getIncomingRecordId, Collectors.mapping(RecordProcessingLogDto::getRelatedItemInfo, Collectors.flatMapping(List::stream, toList()) ))); - List mergedEntries = relatedHoldingsInfoBySourceRecordId.entrySet() + List mergedEntries = 
relatedHoldingsInfoByIncomingRecordId.entrySet() .stream().map(e -> { - String sourceRecordId = e.getKey(); - List relatedItemInfos = relatedItemInfoBySourceId.get(sourceRecordId); + String incomingRecordId = e.getKey(); + List relatedItemInfos = relatedItemInfoByIncomingRecordId.get(incomingRecordId); - RecordProcessingLogDto firstRecordWithCurrentSourceId = entries.stream() - .filter(record -> record.getSourceRecordId().equals(sourceRecordId)) + RecordProcessingLogDto firstRecordWithCurrentIncomingRecordId = entries.stream() + .filter(record -> record.getIncomingRecordId().equals(incomingRecordId)) .findFirst().orElseGet(RecordProcessingLogDto::new); - return firstRecordWithCurrentSourceId + return firstRecordWithCurrentIncomingRecordId .withRelatedHoldingsInfo(e.getValue().stream().distinct().toList()) .withRelatedItemInfo(relatedItemInfos.stream().distinct().toList()); }).collect(toList()); @@ -586,15 +586,15 @@ private static RecordProcessingLogDtoCollection processMultipleHoldingsAndItemsI } private static boolean ifNeedToMerge(List entries) { - Map sourceRecordIdCounts = entries.stream() + Map holdingsIncomingRecordIdCounts = entries.stream() .filter(e -> e.getRelatedHoldingsInfo() != null && !e.getRelatedHoldingsInfo().isEmpty()) - .collect(Collectors.groupingBy(RecordProcessingLogDto::getSourceRecordId, Collectors.counting())); + .collect(Collectors.groupingBy(RecordProcessingLogDto::getIncomingRecordId, Collectors.counting())); - Map sourceItemRecordIdCounts = entries.stream() + Map itemIncomingRecordIdCounts = entries.stream() .filter(e -> e.getRelatedItemInfo() != null && !e.getRelatedItemInfo().isEmpty()) - .collect(Collectors.groupingBy(RecordProcessingLogDto::getSourceRecordId, Collectors.counting())); + .collect(Collectors.groupingBy(RecordProcessingLogDto::getIncomingRecordId, Collectors.counting())); - return sourceRecordIdCounts.values().stream().anyMatch(count -> count > 1) || - sourceItemRecordIdCounts.values().stream().anyMatch(count -> count 
> 1); + return holdingsIncomingRecordIdCounts.values().stream().anyMatch(count -> count > 1) || + itemIncomingRecordIdCounts.values().stream().anyMatch(count -> count > 1); } } diff --git a/mod-source-record-manager-server/src/main/java/org/folio/services/AbstractChunkProcessingService.java b/mod-source-record-manager-server/src/main/java/org/folio/services/AbstractChunkProcessingService.java index dc683cf1e..62e6bcee7 100644 --- a/mod-source-record-manager-server/src/main/java/org/folio/services/AbstractChunkProcessingService.java +++ b/mod-source-record-manager-server/src/main/java/org/folio/services/AbstractChunkProcessingService.java @@ -61,6 +61,7 @@ public Future processChunk(RawRecordsDto incomingChunk, JobExecution jo prepareChunk(incomingChunk); return mapJobExecution(incomingChunk, jobExecution, false, params); } + private Future mapJobExecution(RawRecordsDto incomingChunk, JobExecution jobExecution, boolean acceptInstanceId, OkapiConnectionParams params) { if (isNotSupportedJobProfileExists(jobExecution)) { throw new UnsupportedProfileException("Unsupported type of Job Profile."); @@ -111,10 +112,10 @@ private boolean isExistsMatchProfileToInstanceWithActionUpdateMarcBib(Collection } private ProfileSnapshotWrapper getChildSnapshotWrapperByType(ProfileSnapshotWrapper profileSnapshotWrapper, - ProfileSnapshotWrapper.ContentType contentType) { + ProfileSnapshotWrapper.ContentType contentType) { if (!CollectionUtils.isEmpty(profileSnapshotWrapper.getChildSnapshotWrappers())) { List childSnapshotWrappers = profileSnapshotWrapper.getChildSnapshotWrappers(); - for(ProfileSnapshotWrapper snapshotWrapper : childSnapshotWrappers) { + for (ProfileSnapshotWrapper snapshotWrapper : childSnapshotWrappers) { if (snapshotWrapper.getContentType() == contentType) { return snapshotWrapper; } diff --git a/mod-source-record-manager-server/src/main/java/org/folio/services/ChangeEngineServiceImpl.java 
b/mod-source-record-manager-server/src/main/java/org/folio/services/ChangeEngineServiceImpl.java index c2e7a5df3..a2296c4f5 100644 --- a/mod-source-record-manager-server/src/main/java/org/folio/services/ChangeEngineServiceImpl.java +++ b/mod-source-record-manager-server/src/main/java/org/folio/services/ChangeEngineServiceImpl.java @@ -39,18 +39,21 @@ import io.vertx.core.json.jackson.DatabindCodec; import io.vertx.kafka.client.producer.KafkaHeader; import io.vertx.kafka.client.producer.impl.KafkaHeaderImpl; + import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Objects; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.stream.Collectors; import javax.ws.rs.NotFoundException; + import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.IterableUtils; import org.apache.commons.lang.StringUtils; @@ -58,6 +61,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.folio.MappingProfile; +import org.folio.services.exceptions.InvalidJobProfileForFileException; import org.folio.services.journal.JournalUtil; import org.folio.dao.JobExecutionSourceChunkDao; import org.folio.dataimport.util.OkapiConnectionParams; @@ -95,6 +99,7 @@ import org.folio.services.parsers.RecordParserBuilder; import org.folio.services.util.RecordConversionUtil; import org.folio.services.validation.JobProfileSnapshotValidationService; +import org.folio.verticle.consumers.util.JobExecutionUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; @@ -177,7 +182,9 @@ public Future> parseRawRecordsChunkForJobExecution(RawRecordsDto ch return 
isJobProfileCompatibleWithRecordsType(jobExecution.getJobProfileSnapshotWrapper(), parsedRecords) ? Future.succeededFuture(parsedRecords) - : Future.failedFuture(prepareWrongJobProfileErrorMessage(jobExecution, parsedRecords)); + : Future.failedFuture(new InvalidJobProfileForFileException( + prepareWrongJobProfileErrorMessage(jobExecution, parsedRecords)) + ); }) .compose(parsedRecords -> ensureMappingMetaDataSnapshot(jobExecution.getId(), parsedRecords, params) .map(parsedRecords)) @@ -391,21 +398,30 @@ private boolean deleteMarcActionExists(JobExecution jobExecution) { } private boolean isCreateInstanceActionExists(JobExecution jobExecution) { - return containsCreateInstanceActionWithMatch(jobExecution.getJobProfileSnapshotWrapper()); + return containsCreateInstanceActionWithoutMarcBib(jobExecution.getJobProfileSnapshotWrapper()); } - private boolean containsCreateInstanceActionWithMatch(ProfileSnapshotWrapper profileSnapshot) { + private boolean containsCreateInstanceActionWithoutMarcBib(ProfileSnapshotWrapper profileSnapshot) { for (ProfileSnapshotWrapper childWrapper : profileSnapshot.getChildSnapshotWrappers()) { if (childWrapper.getContentType() == ProfileSnapshotWrapper.ContentType.ACTION_PROFILE && actionProfileMatches(childWrapper, List.of(FolioRecord.INSTANCE), Action.CREATE)) { - return childWrapper.getReactTo() != NON_MATCH; - } else if (containsCreateInstanceActionWithMatch(childWrapper)) { + return childWrapper.getReactTo() != NON_MATCH && !containsMarcBibToInstanceMappingProfile(childWrapper); + } else if (containsCreateInstanceActionWithoutMarcBib(childWrapper)) { return true; } } return false; } + private boolean containsMarcBibToInstanceMappingProfile(ProfileSnapshotWrapper actionWrapper) { + return actionWrapper.getChildSnapshotWrappers() + .stream() + .map(mappingWrapper -> Optional.ofNullable(mappingWrapper.getContent())) + .filter(Optional::isPresent) + .map(content -> DatabindCodec.mapper().convertValue(content.get(), MappingProfile.class)) 
+ .anyMatch(mappingProfile -> mappingProfile.getIncomingRecordType() == EntityType.MARC_BIBLIOGRAPHIC); + } + private boolean isCreateAuthorityActionExists(JobExecution jobExecution) { return containsMarcActionProfile( jobExecution.getJobProfileSnapshotWrapper(), @@ -858,6 +874,7 @@ private Future> saveRecords(OkapiConnectionParams params, JobExecut } private String prepareWrongJobProfileErrorMessage(JobExecution jobExecution, List records) { + JobExecutionUtils.cache.put(jobExecution.getId(), JobExecution.Status.ERROR); return String.format(WRONG_JOB_PROFILE_ERROR_MESSAGE, jobExecution.getJobProfileInfo().getName(), records.get(0).getRecordType()); } } diff --git a/mod-source-record-manager-server/src/main/java/org/folio/services/exceptions/InvalidJobProfileForFileException.java b/mod-source-record-manager-server/src/main/java/org/folio/services/exceptions/InvalidJobProfileForFileException.java new file mode 100644 index 000000000..d63cf749f --- /dev/null +++ b/mod-source-record-manager-server/src/main/java/org/folio/services/exceptions/InvalidJobProfileForFileException.java @@ -0,0 +1,15 @@ +package org.folio.services.exceptions; + +/** + * This exception is thrown when an invalid job profile is selected for an uploaded file. 
+ */ +public class InvalidJobProfileForFileException extends Exception { + + public InvalidJobProfileForFileException(String message) { + super(message); + } + + public InvalidJobProfileForFileException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/mod-source-record-manager-server/src/main/java/org/folio/services/journal/JournalUtil.java b/mod-source-record-manager-server/src/main/java/org/folio/services/journal/JournalUtil.java index 9dd953446..0535d99d3 100644 --- a/mod-source-record-manager-server/src/main/java/org/folio/services/journal/JournalUtil.java +++ b/mod-source-record-manager-server/src/main/java/org/folio/services/journal/JournalUtil.java @@ -27,6 +27,7 @@ import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_ERROR; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED; +import static org.folio.rest.jaxrs.model.JournalRecord.ActionType.UPDATE; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.AUTHORITY; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.HOLDINGS; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.INSTANCE; @@ -106,7 +107,6 @@ public static List buildJournalRecordsByEvent(DataImportEventPayl JournalRecord.ActionStatus actionStatus) throws JournalRecordMapperException { try { HashMap eventPayloadContext = eventPayload.getContext(); - String incomingRecordId = eventPayloadContext.get(INCOMING_RECORD_ID); String recordAsString = extractRecord(eventPayloadContext); Record record; @@ -119,6 +119,7 @@ record = new Record() } else { record = Json.decodeValue(recordAsString, Record.class); } + String incomingRecordId = eventPayloadContext.get(INCOMING_RECORD_ID) != null ? 
eventPayloadContext.get(INCOMING_RECORD_ID) : record.getId(); String entityAsString = eventPayloadContext.get(entityType.value()); JournalRecord journalRecord = buildCommonJournalRecord(actionStatus, actionType, record, eventPayload, eventPayloadContext, incomingRecordId) @@ -142,7 +143,8 @@ record = Json.decodeValue(recordAsString, Record.class); } if (!isEmpty(entityAsString)) { - if (entityType == INSTANCE || entityType == PO_LINE || entityType == AUTHORITY) { + if (entityType == INSTANCE || entityType == PO_LINE || entityType == AUTHORITY || + (entityType == MARC_BIBLIOGRAPHIC && actionType == UPDATE)) { JsonObject entityJson = new JsonObject(entityAsString); journalRecord.setEntityId(entityJson.getString(ID_KEY)); if (entityType == INSTANCE || entityType == PO_LINE) { @@ -174,6 +176,11 @@ record = Json.decodeValue(recordAsString, Record.class); } else { return Lists.newArrayList(journalRecord); } + } else { + if (eventPayload.getEventType().equals(DI_ERROR.value()) && eventPayloadContext.containsKey(MARC_BIBLIOGRAPHIC.value())) { + var journalRecordWithMarcBib = buildJournalRecordWithMarcBibType(actionStatus, actionType, record, eventPayload, eventPayloadContext, incomingRecordId); + return Lists.newArrayList(journalRecord, journalRecordWithMarcBib); + } } return Lists.newArrayList(journalRecord); } catch (Exception e) { @@ -200,7 +207,7 @@ private static JournalRecord buildJournalRecordWithMarcBibType(JournalRecord.Act String actionTypeFromContext = eventPayloadContext.get(MARC_BIB_RECORD_CREATED); if (actionTypeFromContext.equals(Boolean.TRUE.toString())) actionTypeForMarcBib = JournalRecord.ActionType.CREATE; - else actionTypeForMarcBib = JournalRecord.ActionType.UPDATE; + else actionTypeForMarcBib = UPDATE; } return buildCommonJournalRecord(actionStatus, actionTypeForMarcBib, currentRecord, eventPayload, eventPayloadContext, incomingRecordId) diff --git 
a/mod-source-record-manager-server/src/main/java/org/folio/verticle/consumers/RawMarcChunksKafkaHandler.java b/mod-source-record-manager-server/src/main/java/org/folio/verticle/consumers/RawMarcChunksKafkaHandler.java index 01aadc564..108ef1c61 100644 --- a/mod-source-record-manager-server/src/main/java/org/folio/verticle/consumers/RawMarcChunksKafkaHandler.java +++ b/mod-source-record-manager-server/src/main/java/org/folio/verticle/consumers/RawMarcChunksKafkaHandler.java @@ -4,7 +4,6 @@ import io.vertx.core.Vertx; import io.vertx.core.impl.future.FailedFuture; import io.vertx.core.json.Json; -import io.vertx.core.json.JsonObject; import io.vertx.core.json.jackson.DatabindCodec; import io.vertx.kafka.client.consumer.KafkaConsumerRecord; import io.vertx.kafka.client.producer.KafkaHeader; @@ -16,7 +15,9 @@ import org.folio.kafka.exception.DuplicateEventException; import org.folio.rest.jaxrs.model.Event; import org.folio.rest.jaxrs.model.RawRecordsDto; +import org.folio.rest.jaxrs.model.StatusDto; import org.folio.services.ChunkProcessingService; +import org.folio.services.exceptions.InvalidJobProfileForFileException; import org.folio.services.JobExecutionService; import org.folio.services.exceptions.RawChunkRecordsParsingException; import org.folio.services.exceptions.RecordsPublishingException; @@ -43,7 +44,7 @@ public class RawMarcChunksKafkaHandler implements AsyncRecordHandler handle(KafkaConsumerRecord record) { } else if (th instanceof RecordsPublishingException) { LOGGER.warn("handle:: RawRecordsDto entries publishing to Kafka has failed for chunkId: {} chunkNumber: {} - {} for jobExecutionId: {}", chunkId, chunkNumber, rawRecordsDto.getRecordsMetadata(), jobExecutionId, th); return Future.failedFuture(th); + } else if (th instanceof InvalidJobProfileForFileException) { + jobExecutionService.updateJobExecutionStatus(jobExecutionId, new StatusDto() + .withStatus(StatusDto.Status.ERROR) + .withErrorStatus(StatusDto.ErrorStatus.FILE_PROCESSING_ERROR), + 
okapiParams); + LOGGER.warn("handle:: Invalid job profile selected for uploaded file for chunkId: {} chunkNumber: {} - {} for jobExecutionId: {} chunkNUmber - {}", chunkId, chunkNumber, rawRecordsDto.getRecordsMetadata(), jobExecutionId, chunkNumber); + return Future.failedFuture(th); } else { LOGGER.warn("handle:: RawRecordsDto processing has failed with errors chunkId: {} chunkNumber: {} - {} for jobExecutionId: {}", chunkId, chunkNumber, rawRecordsDto.getRecordsMetadata(), jobExecutionId, th); return Future.failedFuture(new RawChunkRecordsParsingException(th, rawRecordsDto)); diff --git a/mod-source-record-manager-server/src/main/java/org/folio/verticle/consumers/util/JobExecutionUtils.java b/mod-source-record-manager-server/src/main/java/org/folio/verticle/consumers/util/JobExecutionUtils.java index 0dcef12da..ffe560433 100644 --- a/mod-source-record-manager-server/src/main/java/org/folio/verticle/consumers/util/JobExecutionUtils.java +++ b/mod-source-record-manager-server/src/main/java/org/folio/verticle/consumers/util/JobExecutionUtils.java @@ -1,5 +1,9 @@ package org.folio.verticle.consumers.util; +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; +import io.vertx.core.Context; +import io.vertx.core.Vertx; import lombok.AccessLevel; import lombok.NoArgsConstructor; import org.folio.rest.jaxrs.model.JobExecution; @@ -7,11 +11,38 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; +import java.util.concurrent.Executor; +import java.util.concurrent.ForkJoinPool; + @NoArgsConstructor(access = AccessLevel.PRIVATE) public class JobExecutionUtils { - public static final List SKIP_STATUSES = Arrays.asList(JobExecution.Status.CANCELLED); + + public static final List SKIP_STATUSES = Arrays.asList(JobExecution.Status.CANCELLED, JobExecution.Status.ERROR); + + private static final Executor cacheExecutor = JobExecutionUtils::execute; + public static final Cache cache = 
Caffeine.newBuilder().maximumSize(20).executor(cacheExecutor).build(); + public static boolean isNeedToSkip(JobExecution jobExecution) { - return Objects.nonNull(jobExecution.getStatus()) && SKIP_STATUSES.contains(jobExecution.getStatus()); + JobExecution.Status prevJobExecStat = null; + if (jobExecution.getId() != null) { + prevJobExecStat = cache.getIfPresent(jobExecution.getId()); + } + return Objects.nonNull(jobExecution.getStatus()) && SKIP_STATUSES.contains( + prevJobExecStat != null ? prevJobExecStat : + jobExecution.getStatus()); } + public static void clearCache() { + cache.invalidateAll(); + } + + private static void execute(Runnable serviceExecutor) { + Context context = Vertx.currentContext(); + if (context != null) { + context.runOnContext(ar -> serviceExecutor.run()); + } else { + // The common pool below is used because it is the default executor for caffeine + ForkJoinPool.commonPool().execute(serviceExecutor); + } + } } diff --git a/mod-source-record-manager-server/src/main/resources/templates/db_scripts/create_get_job_log_entries_function.sql b/mod-source-record-manager-server/src/main/resources/templates/db_scripts/create_get_job_log_entries_function.sql index 713bb59ec..dc77765ec 100644 --- a/mod-source-record-manager-server/src/main/resources/templates/db_scripts/create_get_job_log_entries_function.sql +++ b/mod-source-record-manager-server/src/main/resources/templates/db_scripts/create_get_job_log_entries_function.sql @@ -79,12 +79,12 @@ WITH (SELECT CASE WHEN EXISTS (SELECT condition_result.entity_id FROM temp_result condition_result - WHERE (condition_result.action_type IN (''CREATED'', ''DISCARDED'') AND + WHERE (condition_result.action_type IN (''CREATED'', ''UPDATED'', ''DISCARDED'') AND condition_result.entity_type = ''HOLDINGS'')) THEN (SELECT deep_nested.id FROM temp_result deep_nested - WHERE (deep_nested.id = nested_result.id AND (deep_nested.action_type = ''CREATED'' OR + WHERE (deep_nested.id = nested_result.id AND 
(deep_nested.action_type IN (''CREATED'', ''UPDATED'') OR (deep_nested.action_type = ''DISCARDED'' AND deep_nested.error != ''''))) OR (deep_nested.action_type = ''DISCARDED'' AND deep_nested.id = nested_result.id AND @@ -145,11 +145,15 @@ WHERE tmp.entity_type = ''ITEM'' marc_holdings AS ( SELECT temp_result.job_execution_id, entity_id, title, source_record_order, action_type, error, source_id, tenant_id FROM temp_result WHERE entity_type = ''MARC_HOLDINGS'' + ), + marc_bibliographic AS ( + SELECT temp_result.job_execution_id, entity_id, title, source_record_order, action_type, error, source_id, tenant_id + FROM temp_result WHERE entity_type = ''MARC_BIBLIOGRAPHIC'' ) SELECT records_actions.job_execution_id AS job_execution_id, - records_actions.source_id AS source_id, records_actions.source_id AS incoming_record_id, + coalesce(marc_bibliographic_entity_id::uuid, marc_authority_entity_id::uuid, marc_holdings_entity_id::uuid) AS source_id, records_actions.source_record_order AS source_record_order, '''' as invoiceline_number, coalesce(rec_titles.title, marc_holdings_info.title) AS title, @@ -292,12 +296,21 @@ FROM ( FROM marc_authority ) AS marc_authority_info ON marc_authority_info.source_id = records_actions.source_id + LEFT JOIN ( + SELECT marc_bibliographic.action_type AS action_type, + marc_bibliographic.source_id AS source_id, + marc_bibliographic.title AS title, + marc_bibliographic.entity_id AS marc_bibliographic_entity_id, + marc_bibliographic.error AS marc_bibliographic_entity_error + FROM marc_bibliographic WHERE entity_id IS NOT NULL +) AS marc_bibliographic_info ON marc_bibliographic_info.source_id = records_actions.source_id + LEFT JOIN ( SELECT marc_holdings.action_type AS action_type, marc_holdings.source_id AS source_id, marc_holdings.title AS title, - marc_holdings.entity_id AS marc_authority_entity_id, - marc_holdings.error AS marc_authority_entity_error + marc_holdings.entity_id AS marc_holdings_entity_id, + marc_holdings.error AS 
marc_holdings_entity_error FROM marc_holdings ) AS marc_holdings_info ON marc_holdings_info.source_id = records_actions.source_id @@ -315,8 +328,8 @@ FROM ( UNION SELECT records_actions.job_execution_id AS job_execution_id, - records_actions.source_id AS source_id, records_actions.source_id AS incoming_record_id, + records_actions.source_id AS source_id, source_record_order AS source_record_order, entity_hrid as invoiceline_number, invoice_line_info.title AS title, diff --git a/mod-source-record-manager-server/src/main/resources/templates/db_scripts/create_get_record_processing_log_function.sql b/mod-source-record-manager-server/src/main/resources/templates/db_scripts/create_get_record_processing_log_function.sql index 2658dbf17..99e3041b2 100644 --- a/mod-source-record-manager-server/src/main/resources/templates/db_scripts/create_get_record_processing_log_function.sql +++ b/mod-source-record-manager-server/src/main/resources/templates/db_scripts/create_get_record_processing_log_function.sql @@ -12,7 +12,9 @@ BEGIN THEN 'CREATED' WHEN action_type = 'UPDATE' THEN 'UPDATED' - END AS action_type, journal_records.action_status, journal_records.action_date, journal_records.source_record_order, journal_records.error, journal_records.title, journal_records.tenant_id, journal_records.instance_id, journal_records.holdings_id, journal_records.order_id, journal_records.permanent_location_id + WHEN action_type = 'PARSE' + then 'PARSED' + END AS action_type, journal_records.action_status, journal_records.action_date, journal_records.source_record_order, journal_records.error, journal_records.title, journal_records.tenant_id, journal_records.instance_id, journal_records.holdings_id, journal_records.order_id, journal_records.permanent_location_id FROM journal_records INNER JOIN (SELECT entity_type as entity_type_max, entity_id as entity_id_max,action_status as action_status_max, max(error) AS error_max,(array_agg(id ORDER BY array_position(array['CREATE', 'UPDATE', 'MODIFY', 
'NON_MATCH'], action_type)))[1] AS id_max @@ -64,9 +66,11 @@ BEGIN null AS invoice_line_entity_id, null AS invoice_line_entity_hrid, null AS invoice_line_entity_error - FROM - (SELECT temp_result.job_execution_id, entity_id, temp_result.title, temp_result.source_record_order, action_type, error, temp_result.source_id, temp_result.tenant_id - FROM temp_result WHERE entity_type IN ('MARC_BIBLIOGRAPHIC', 'MARC_HOLDINGS', 'MARC_AUTHORITY', 'PO_LINE')) AS marc + FROM (SELECT temp_result.source_id FROM temp_result WHERE action_type = 'PARSED') as parsed + LEFT JOIN + (SELECT temp_result.job_execution_id, entity_id, temp_result.title, temp_result.source_record_order, action_type, error, temp_result.source_id, temp_result.tenant_id + FROM temp_result WHERE entity_type IN ('MARC_BIBLIOGRAPHIC', 'MARC_HOLDINGS', 'MARC_AUTHORITY', 'PO_LINE')) AS marc + ON marc.source_id = parsed.source_id LEFT JOIN (SELECT action_type, entity_id, temp_result.source_id, entity_hrid, error, temp_result.job_execution_id, temp_result.title, temp_result.source_record_order, temp_result.tenant_id FROM temp_result WHERE entity_type = 'INSTANCE') AS instances diff --git a/mod-source-record-manager-server/src/test/java/org/folio/rest/impl/metadataProvider/MetaDataProviderJobLogEntriesAPITest.java b/mod-source-record-manager-server/src/test/java/org/folio/rest/impl/metadataProvider/MetaDataProviderJobLogEntriesAPITest.java index 3e52ae718..9824b6e37 100644 --- a/mod-source-record-manager-server/src/test/java/org/folio/rest/impl/metadataProvider/MetaDataProviderJobLogEntriesAPITest.java +++ b/mod-source-record-manager-server/src/test/java/org/folio/rest/impl/metadataProvider/MetaDataProviderJobLogEntriesAPITest.java @@ -3,48 +3,35 @@ import static org.folio.rest.jaxrs.model.JournalRecord.ActionStatus.COMPLETED; import static org.folio.rest.jaxrs.model.JournalRecord.ActionStatus.ERROR; import static org.folio.rest.jaxrs.model.JournalRecord.ActionType.CREATE; -import static 
org.folio.rest.jaxrs.model.JournalRecord.ActionType.MATCH; import static org.folio.rest.jaxrs.model.JournalRecord.ActionType.MODIFY; import static org.folio.rest.jaxrs.model.JournalRecord.ActionType.NON_MATCH; import static org.folio.rest.jaxrs.model.JournalRecord.ActionType.UPDATE; -import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.AUTHORITY; +import static org.folio.rest.jaxrs.model.JournalRecord.ActionType.PARSE; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.EDIFACT; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.HOLDINGS; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.INSTANCE; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.INVOICE; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.ITEM; -import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.MARC_AUTHORITY; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.MARC_BIBLIOGRAPHIC; -import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.MARC_HOLDINGS; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.PO_LINE; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyOrNullString; -import static org.hamcrest.Matchers.everyItem; -import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.oneOf; import java.util.Date; -import java.util.List; import java.util.UUID; import java.util.stream.IntStream; -import com.google.common.collect.Lists; import org.apache.http.HttpStatus; import org.folio.dao.JournalRecordDaoImpl; import org.folio.dao.util.PostgresClientFactory; -import org.folio.okapi.common.GenericCompositeFuture; import org.folio.rest.impl.AbstractRestTest; import 
org.folio.rest.jaxrs.model.ActionStatus; import org.folio.rest.jaxrs.model.JobExecution; import org.folio.rest.jaxrs.model.JournalRecord; -import org.folio.rest.jaxrs.model.RecordProcessingLogDto; -import org.folio.rest.jaxrs.model.RecordProcessingLogDtoCollection; -import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -53,7 +40,6 @@ import org.mockito.Spy; import io.restassured.RestAssured; -import io.vertx.core.CompositeFuture; import io.vertx.core.Future; import io.vertx.core.Promise; import io.vertx.core.Vertx; @@ -140,6 +126,7 @@ public void shouldReturnPoLineWithOrderIdWhenMarcCreate(TestContext context) { String orderId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, "marcEntityID", null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, "poLineEntityID", null, null, 0, CREATE, PO_LINE, COMPLETED, "Test error", orderId)) .onFailure(context::fail); @@ -173,6 +160,7 @@ public void shouldReturnOneInstanceIdWhenMarcBibUpdatedAndInstanceUpdated(TestCo String recordTitle = "test title"; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(marcBibAndInstanceUpdateJobExecution.getId(), marcBibAndInstanceUpdateSourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(marcBibAndInstanceUpdateJobExecution.getId(), marcBibAndInstanceUpdateSourceRecordId, "instanceEntityID", "in00000000001", null, 0, UPDATE, INSTANCE, COMPLETED, null, null)) .compose(v -> createJournalRecord(marcBibAndInstanceUpdateJobExecution.getId(), marcBibAndInstanceUpdateSourceRecordId, "instanceEntityID", "in00000000001", null, 0, 
UPDATE, INSTANCE, COMPLETED, null, null)) .compose(v -> createJournalRecord(marcBibAndInstanceUpdateJobExecution.getId(), marcBibAndInstanceUpdateSourceRecordId, "marcBibEntityID", null, recordTitle, 0, MODIFY, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) @@ -214,6 +202,7 @@ public void shouldReturnMarcBibUpdatedByJobAndRecordIds(TestContext context) { String recordTitle = "test title"; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, UPDATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .onFailure(context::fail); @@ -245,6 +234,7 @@ public void shouldReturnEmptyMarcBibErrorAndInstanceDiscardedWhenInstanceCreatio String recordTitle = "test title"; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, entityId, entityHrid, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, entityId, entityHrid, null, 0, CREATE, INSTANCE, ERROR, "error msg", null)) .onFailure(context::fail); @@ -277,6 +267,7 @@ public void shouldReturnNotEmptyMarcBibErrorWhenMarcBibFailed(TestContext contex String recordTitle = "test title"; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, 
CREATE, MARC_BIBLIOGRAPHIC, ERROR, "MarcBib error msg", null)) .onFailure(context::fail); @@ -317,6 +308,7 @@ public void shouldReturnMarcBibAndAllEntitiesWithoutErrors(TestContext context) String poLineHrid = "po001"; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, instanceId, instanceHrid, null, 0, CREATE, INSTANCE, COMPLETED, null, null)) .compose(v -> createJournalRecordAllFields(createdJobExecution.getId(), sourceRecordId, holdingsId, holdingsHrid, null, 0, CREATE, HOLDINGS, COMPLETED, null, null,instanceId,null,null)) @@ -369,6 +361,7 @@ public void shouldReturnDiscardedForHoldingsIfNoHoldingsCreated(TestContext cont String testError = "testError"; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, instanceId, instanceHrid, null, 0, CREATE, INSTANCE, COMPLETED, null, null)) .compose(v -> createJournalRecordAllFields(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, HOLDINGS, ERROR, testError, null, null,null,null)); @@ -412,6 +405,7 @@ public void shouldReturnDiscardedForItemsIfNoItemsCreated(TestContext context) { String testError = "testError"; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, 
COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, instanceId, instanceHrid, null, 0, CREATE, INSTANCE, COMPLETED, null, null)) .compose(v -> createJournalRecordAllFields(createdJobExecution.getId(), sourceRecordId, holdingsId, holdingsHrid, recordTitle, 0, CREATE, HOLDINGS, COMPLETED, null, null,instanceId,null,null)) @@ -557,6 +551,7 @@ public void shouldReturnMarcBibAndAllEntitiesWithMultipleItemsHoldings(TestConte String errorMsg = "test error"; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, instanceId, instanceHrid, null, 0, CREATE, INSTANCE, COMPLETED, null, null)) @@ -637,6 +632,7 @@ public void shouldReturnMarcBibAndAllEntitiesWithItemsHoldingsWithoutDiscarded(T String permanentLocation = UUID.randomUUID().toString(); Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, instanceId, instanceHrid, recordTitle, 0, CREATE, INSTANCE, COMPLETED, null, null)) @@ -683,6 +679,64 @@ public void shouldReturnMarcBibAndAllEntitiesWithItemsHoldingsWithoutDiscarded(T })); } + @Test + public void 
shouldReturnItemsHoldingsWithUpdatedAction(TestContext context) { + var async = context.async(); + var createdJobExecution = constructAndPostInitJobExecutionRqDto(1).getJobExecutions().get(0); + var sourceRecordId = UUID.randomUUID().toString(); + var recordTitle = "test title"; + + var instanceId = UUID.randomUUID().toString(); + var holdingsId = UUID.randomUUID().toString(); + var itemId = UUID.randomUUID().toString(); + + var instanceHrid = "i001"; + var holdingsHrid = "h001"; + var itemHrid = "it001"; + + var actionStatus = "UPDATED"; + + Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, instanceId, instanceHrid, recordTitle, 0, CREATE, INSTANCE, COMPLETED, null, null)) + + .compose(v -> createJournalRecordAllFields(createdJobExecution.getId(), sourceRecordId, itemId, itemHrid, recordTitle, 0, UPDATE, ITEM, COMPLETED, null, null, instanceId, holdingsId, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, holdingsId, holdingsHrid, recordTitle, 0, UPDATE, HOLDINGS, COMPLETED, null, null)) + + .onFailure(context::fail); + + future.onComplete(ar -> context.verify(v -> { + RestAssured.given() + .spec(spec) + .when() + .get(GET_JOB_EXECUTION_JOURNAL_RECORDS_PATH + "/" + createdJobExecution.getId() + "/records/" + sourceRecordId) + .then() + .statusCode(HttpStatus.SC_OK) + .log().all() + .body("jobExecutionId", is(createdJobExecution.getId())) + .body("sourceRecordId", is(sourceRecordId)) + .body("sourceRecordTitle", is(recordTitle)) + .body("sourceRecordOrder", is("0")) + .body("error", emptyOrNullString()) + .body("relatedInstanceInfo.idList[0]", is(instanceId)) + 
.body("relatedInstanceInfo.hridList[0]", is(instanceHrid)) + .body("relatedInstanceInfo.error", emptyOrNullString()) + .body("relatedHoldingsInfo.size()", is(1)) + .body("relatedHoldingsInfo[0].id", is(holdingsId)) + .body("relatedHoldingsInfo[0].hrid", is(holdingsHrid)) + .body("relatedHoldingsInfo[0].actionStatus", is(actionStatus)) + .body("relatedHoldingsInfo[0].error", emptyOrNullString()) + .body("relatedItemInfo.size()", is(1)) + .body("relatedItemInfo[0].id", is(itemId)) + .body("relatedItemInfo[0].hrid", is(itemHrid)) + .body("relatedItemInfo[0].holdingsId", is(holdingsId)) + .body("relatedItemInfo[0].actionStatus", is(actionStatus)) + .body("relatedItemInfo[0].error", emptyOrNullString()); + async.complete(); + })); + } + @Test public void shouldReturnMarcBibAndAllEntitiesWithDiscardedItemsHoldings(TestContext context) { Async async = context.async(); @@ -694,6 +748,7 @@ public void shouldReturnMarcBibAndAllEntitiesWithDiscardedItemsHoldings(TestCont String instanceHrid = "i001"; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, instanceId, instanceHrid, recordTitle, 0, CREATE, INSTANCE, COMPLETED, null, null)) @@ -750,6 +805,7 @@ public void shouldReturnMarcBibAndAllEntitiesWithMultipleItemsUpdate(TestContext String[] itemHrid = {"it001", "it002"}; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) 
.compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, instanceId, instanceHrid, null, 0, CREATE, INSTANCE, COMPLETED, null, null)) .compose(v -> createJournalRecordAllFields(createdJobExecution.getId(), sourceRecordId, itemId[0], itemHrid[0], null, 0, UPDATE, ITEM, COMPLETED, null, null, instanceId, holdingsId[0], null)) @@ -798,6 +854,7 @@ public void shouldReturnCentralTenantIdForMarcRecordAndInstanceIfItIsSavedInJour String expectedCentralTenantId = "mobius"; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, UPDATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null, expectedCentralTenantId)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, instanceId, "in00000000001", null, 0, UPDATE, INSTANCE, COMPLETED, null, null, expectedCentralTenantId)); diff --git a/mod-source-record-manager-server/src/test/java/org/folio/rest/impl/metadataProvider/MetaDataProviderRecordProcessingLogCollectionAPITest.java b/mod-source-record-manager-server/src/test/java/org/folio/rest/impl/metadataProvider/MetaDataProviderRecordProcessingLogCollectionAPITest.java index 4f875dadc..98c7054bd 100644 --- a/mod-source-record-manager-server/src/test/java/org/folio/rest/impl/metadataProvider/MetaDataProviderRecordProcessingLogCollectionAPITest.java +++ b/mod-source-record-manager-server/src/test/java/org/folio/rest/impl/metadataProvider/MetaDataProviderRecordProcessingLogCollectionAPITest.java @@ -1,6 +1,7 @@ package org.folio.rest.impl.metadataProvider; import io.restassured.RestAssured; +import io.restassured.response.ValidatableResponse; import io.vertx.core.CompositeFuture; import io.vertx.core.Future; import io.vertx.core.Vertx; @@ -37,6 +38,7 @@ import static 
org.folio.rest.jaxrs.model.JournalRecord.ActionType.MODIFY; import static org.folio.rest.jaxrs.model.JournalRecord.ActionType.NON_MATCH; import static org.folio.rest.jaxrs.model.JournalRecord.ActionType.UPDATE; +import static org.folio.rest.jaxrs.model.JournalRecord.ActionType.PARSE; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.AUTHORITY; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.EDIFACT; import static org.folio.rest.jaxrs.model.JournalRecord.EntityType.HOLDINGS; @@ -94,9 +96,10 @@ public void shouldReturnMarcBibUpdatedWhenMarcBibWasUpdated(TestContext context) JobExecution createdJobExecution = constructAndPostInitJobExecutionRqDto(1).getJobExecutions().get(0); String sourceRecordId = UUID.randomUUID().toString(); String recordTitle = "test title"; + String marcBibEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, marcBibEntityId, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, "in00000000001", null, 0, UPDATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .onFailure(context::fail); @@ -110,7 +113,8 @@ public void shouldReturnMarcBibUpdatedWhenMarcBibWasUpdated(TestContext context) .body("entries", hasSize(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordActionStatus", is(ActionStatus.CREATED.value())); @@ -155,7 +159,7 @@ public void 
shouldReturnOneEntryIfWithAllMultipleHoldingsTwoErrorsDuringMultiple .body("entries", hasSize(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordActionStatus", is(ActionStatus.CREATED.value())) .body("entries[0].relatedHoldingsInfo[0].actionStatus", is(ActionStatus.CREATED.value())) @@ -174,9 +178,10 @@ public void shouldReturnMarcBibUpdatedWhenMarcBibWasModified(TestContext context JobExecution createdJobExecution = constructAndPostInitJobExecutionRqDto(1).getJobExecutions().get(0); String sourceRecordId = UUID.randomUUID().toString(); String recordTitle = "test title"; + String marcBibEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, marcBibEntityId, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, MODIFY, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .onFailure(context::fail); @@ -190,7 +195,8 @@ public void shouldReturnMarcBibUpdatedWhenMarcBibWasModified(TestContext context .body("entries", hasSize(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordActionStatus", is(ActionStatus.CREATED.value())); @@ -204,10 +210,11 @@ 
public void shouldReturnMarcBibCreatedWhenMarcBibWasCreatedInNonMatchSection(Tes JobExecution createdJobExecution = constructAndPostInitJobExecutionRqDto(1).getJobExecutions().get(0); String sourceRecordId = UUID.randomUUID().toString(); String recordTitle = "test title"; + String marcBibEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, NON_MATCH, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, marcBibEntityId, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .onFailure(context::fail); future.onComplete(ar -> context.verify(v -> { @@ -220,7 +227,8 @@ public void shouldReturnMarcBibCreatedWhenMarcBibWasCreatedInNonMatchSection(Tes .body("entries", hasSize(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordActionStatus", is(ActionStatus.CREATED.value())); @@ -269,9 +277,10 @@ public void shouldReturnInstanceDiscardedWhenInstanceCreationFailed(TestContext JobExecution createdJobExecution = constructAndPostInitJobExecutionRqDto(1).getJobExecutions().get(0); String sourceRecordId = UUID.randomUUID().toString(); String recordTitle = "test title"; + String marcBibEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, 
MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, marcBibEntityId, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, CREATE, INSTANCE, ERROR, "error msg", null)) .onFailure(context::fail); @@ -285,7 +294,8 @@ public void shouldReturnInstanceDiscardedWhenInstanceCreationFailed(TestContext .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].relatedInstanceInfo.actionStatus", is(ActionStatus.DISCARDED.value())) .body("entries[0].relatedInstanceInfo.error", not(emptyOrNullString())); @@ -300,10 +310,11 @@ public void shouldReturnInstanceCreatedWhenMarcModify(TestContext context) { JobExecution createdJobExecution = constructAndPostInitJobExecutionRqDto(1).getJobExecutions().get(0); String sourceRecordId = UUID.randomUUID().toString(); String recordTitle = "test title"; + String marcBibEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, "marcEntityID", null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, "marcEntityID", null, recordTitle, 0, MODIFY, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, marcBibEntityId, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, 
null, recordTitle, 0, MODIFY, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, UPDATE, INSTANCE, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, "instanceEntityID", "in00000000001", null, 0, CREATE, INSTANCE, COMPLETED, null, null)) .onFailure(context::fail); @@ -318,7 +329,8 @@ public void shouldReturnInstanceCreatedWhenMarcModify(TestContext context) { .body("entries.size()", is(2)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].relatedInstanceInfo.actionStatus", is(ActionStatus.CREATED.value())); @@ -332,9 +344,10 @@ public void shouldReturnPoLineCreatedWhenMarcCreate(TestContext context) { JobExecution createdJobExecution = constructAndPostInitJobExecutionRqDto(1).getJobExecutions().get(0); String sourceRecordId = UUID.randomUUID().toString(); String recordTitle = "test title"; + String marcBibEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, "marcEntityID", null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, marcBibEntityId, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, "poLineEntityID", null, null, 0, CREATE, PO_LINE, COMPLETED, null, null)) .onFailure(context::fail); @@ -348,7 +361,8 @@ public void shouldReturnPoLineCreatedWhenMarcCreate(TestContext context) { .body("entries.size()", is(1)) 
.body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].relatedPoLineInfo.actionStatus", is(ActionStatus.CREATED.value())); @@ -362,9 +376,10 @@ public void shouldReturnAuthorityDiscardedWhenErrorOnMatch(TestContext context) JobExecution createdJobExecution = constructAndPostInitJobExecutionRqDto(1).getJobExecutions().get(0); String sourceRecordId = UUID.randomUUID().toString(); String recordTitle = "test title"; + String authorityEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, "authorityEntityID", null, recordTitle, 0, MATCH, MARC_AUTHORITY, ERROR, "errorMsg", null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, authorityEntityId, null, recordTitle, 0, MATCH, MARC_AUTHORITY, ERROR, "errorMsg", null)) .onFailure(context::fail); future.onComplete(ar -> context.verify(v -> { @@ -377,7 +392,8 @@ public void shouldReturnAuthorityDiscardedWhenErrorOnMatch(TestContext context) .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(authorityEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordType", is(MARC_AUTHORITY.value())) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].relatedAuthorityInfo.error", is(notNullValue())) @@ -425,11 +441,10 @@ public void shouldReturnInstanceIdWhenHoldingsCreatedRecordProcessingLogDTOColle .get(GET_JOB_EXECUTION_JOURNAL_RECORDS_PATH + "/" + 
holdingsCreationJobExecution.getId()) .then() .statusCode(HttpStatus.SC_OK) - .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(holdingsCreationJobExecution.getId())) - .body("entries[0].sourceRecordId", is(holdingsCreationSourceRecordId)) + .body("entries[0].incomingRecordId", is(holdingsCreationSourceRecordId)) .body("entries[0].sourceRecordOrder", is("0")) .body("entries[0].error", emptyOrNullString()) .body("entries[0].relatedInstanceInfo.idList[0]", is("instanceEntityID")) @@ -448,9 +463,10 @@ public void shouldReturnPoLineWithOrderIdWhenMarcCreateRecordProcessingLogDTOCol String sourceRecordId = UUID.randomUUID().toString(); String recordTitle = "test title"; String orderId = UUID.randomUUID().toString(); + String marcBibEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, "marcEntityID", null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, marcBibEntityId, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, "poLineEntityID", null, null, 0, CREATE, PO_LINE, COMPLETED, "Test error", orderId)) .onFailure(context::fail); @@ -464,7 +480,8 @@ public void shouldReturnPoLineWithOrderIdWhenMarcCreateRecordProcessingLogDTOCol .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordActionStatus", is(ActionStatus.CREATED.value())) .body("entries[0].relatedPoLineInfo", notNullValue()) @@ 
-483,11 +500,12 @@ public void shouldReturnOneInstanceIdWhenMarcBibUpdatedAndInstanceUpdatedRecordP String marcBibAndInstanceUpdateSourceRecordId = UUID.randomUUID().toString(); String recordTitle = "test title"; + String marcBibEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() .compose(v -> createJournalRecord(marcBibAndInstanceUpdateJobExecution.getId(), marcBibAndInstanceUpdateSourceRecordId, "instanceEntityID", "in00000000001", null, 0, UPDATE, INSTANCE, COMPLETED, null, null)) .compose(v -> createJournalRecord(marcBibAndInstanceUpdateJobExecution.getId(), marcBibAndInstanceUpdateSourceRecordId, "instanceEntityID", "in00000000001", null, 0, UPDATE, INSTANCE, COMPLETED, null, null)) - .compose(v -> createJournalRecord(marcBibAndInstanceUpdateJobExecution.getId(), marcBibAndInstanceUpdateSourceRecordId, "marcBibEntityID", null, recordTitle, 0, MODIFY, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(marcBibAndInstanceUpdateJobExecution.getId(), marcBibAndInstanceUpdateSourceRecordId, marcBibEntityId, null, recordTitle, 0, MODIFY, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .onFailure(context::fail); future.onComplete(ar -> context.verify(v -> { @@ -500,7 +518,8 @@ public void shouldReturnOneInstanceIdWhenMarcBibUpdatedAndInstanceUpdatedRecordP .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(marcBibAndInstanceUpdateJobExecution.getId())) - .body("entries[0].sourceRecordId", is(marcBibAndInstanceUpdateSourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(marcBibAndInstanceUpdateSourceRecordId)) .body("entries[0].error", emptyOrNullString()) .body("entries[0].relatedInstanceInfo.idList.size()", is(1)) .body("entries[0].relatedInstanceInfo.hridList.size()", is(1)) @@ -532,7 +551,7 @@ public void shouldReturnHoldingsMultipleWhenMultipleHoldingsWereProcessed(TestCo .statusCode(HttpStatus.SC_OK) 
.body("entries.size()", is(1)) .body("totalRecords", is(1)) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].relatedHoldingsInfo[0].actionStatus", is(ActionStatus.CREATED.value())); async.complete(); @@ -559,7 +578,7 @@ public void shouldReturnHoldingsTitleWithHoldingsHrid(TestContext context) { .statusCode(HttpStatus.SC_OK) .body("entries.size()", is(1)) .body("totalRecords", is(1)) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is("Holdings ho00000000001")) .body("entries[0].relatedHoldingsInfo[0].hrid", is("ho00000000001")) .body("entries[0].sourceRecordType", is(MARC_HOLDINGS.value())); @@ -624,13 +643,16 @@ public void shouldReturnLimitedCollectionOnGetWithLimitAndOffset(TestContext con String sourceRecordId2 = UUID.randomUUID().toString(); String sourceRecordId3 = UUID.randomUUID().toString(); String recordTitle1 = "title1"; + String marcBibEntityId1 = UUID.randomUUID().toString(); + String marcBibEntityId2 = UUID.randomUUID().toString(); + String marcBibEntityId3 = UUID.randomUUID().toString(); Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId1, null, null, recordTitle1, 1, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId1, marcBibEntityId1, null, recordTitle1, 1, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId1, null, "in00000000001", null, 1, CREATE, INSTANCE, COMPLETED, null, null)) - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId2, null, null, "title0", 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> 
createJournalRecord(createdJobExecution.getId(), sourceRecordId2, marcBibEntityId2, null, "title0", 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId2, null, null, null, 0, CREATE, INSTANCE, COMPLETED, null, null)) - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId3, null, null, "title3", 3, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId3, marcBibEntityId3, null, "title3", 3, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId3, null, null, null, 3, CREATE, INSTANCE, COMPLETED, null, null)) .onFailure(context::fail); @@ -648,7 +670,8 @@ public void shouldReturnLimitedCollectionOnGetWithLimitAndOffset(TestContext con .body("entries.size()", is(1)) .body("totalRecords", is(3)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId1)) + .body("entries[0].sourceRecordId", is(marcBibEntityId1)) + .body("entries[0].incomingRecordId", is(sourceRecordId1)) .body("entries[0].sourceRecordTitle", is(recordTitle1)) .body("entries[0].sourceRecordOrder", is("1")) .body("entries[0].relatedHoldingsInfo.hrid", is(empty())) @@ -673,7 +696,7 @@ public void shouldReturnAuthorityCreated(TestContext context) { .onFailure(context::fail); future.onComplete(ar -> context.verify(v -> { - RestAssured.given() + ValidatableResponse r = RestAssured.given() .spec(spec) .when() .get(GET_JOB_EXECUTION_JOURNAL_RECORDS_PATH + "/" + createdJobExecution.getId()) @@ -681,7 +704,8 @@ public void shouldReturnAuthorityCreated(TestContext context) { .statusCode(HttpStatus.SC_OK) .body("entries.size()", is(1)) .body("totalRecords", is(1)) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(entityId)) + 
.body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordActionStatus", is(ActionStatus.CREATED.value())) .body("entries[0].relatedAuthorityInfo.actionStatus", is(ActionStatus.CREATED.value())); @@ -695,9 +719,10 @@ public void shouldReturnMarcBibUpdatedByJobAndRecordIdsRecordProcessingLogDTOCol JobExecution createdJobExecution = constructAndPostInitJobExecutionRqDto(1).getJobExecutions().get(0); String sourceRecordId = UUID.randomUUID().toString(); String recordTitle = "test title"; + String marcBibEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, marcBibEntityId, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, UPDATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .onFailure(context::fail); @@ -711,7 +736,8 @@ public void shouldReturnMarcBibUpdatedByJobAndRecordIdsRecordProcessingLogDTOCol .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordOrder", is("0")) .body("entries[0].sourceRecordActionStatus", is(ActionStatus.CREATED.value())); @@ -727,10 +753,11 @@ public void shouldReturnEmptyMarcBibErrorAndInstanceDiscardedWhenInstanceCreatio String sourceRecordId = UUID.randomUUID().toString(); String entityId = UUID.randomUUID().toString(); String entityHrid = 
"001"; + String marcBibEntityId = UUID.randomUUID().toString(); String recordTitle = "test title"; Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, entityId, entityHrid, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, marcBibEntityId, entityHrid, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, entityId, entityHrid, null, 0, CREATE, INSTANCE, ERROR, "error msg", null)) .onFailure(context::fail); @@ -744,7 +771,8 @@ public void shouldReturnEmptyMarcBibErrorAndInstanceDiscardedWhenInstanceCreatio .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordOrder", is("0")) .body("entries[0].error", emptyOrNullString()) @@ -762,9 +790,10 @@ public void shouldReturnNotEmptyMarcBibErrorWhenMarcBibFailedRecordProcessingLog JobExecution createdJobExecution = constructAndPostInitJobExecutionRqDto(1).getJobExecutions().get(0); String sourceRecordId = UUID.randomUUID().toString(); String recordTitle = "test title"; + String marcBibEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, ERROR, "MarcBib error msg", null)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, marcBibEntityId, null, recordTitle, 0, CREATE, MARC_BIBLIOGRAPHIC, ERROR, "MarcBib error msg", null)) .onFailure(context::fail); 
future.onComplete(ar -> context.verify(v -> { @@ -774,11 +803,11 @@ public void shouldReturnNotEmptyMarcBibErrorWhenMarcBibFailedRecordProcessingLog .get(GET_JOB_EXECUTION_JOURNAL_RECORDS_PATH + "/" + createdJobExecution.getId()) .then() .statusCode(HttpStatus.SC_OK) - .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordOrder", is("0")) .body("entries[0].error", is("MarcBib error msg")); @@ -824,7 +853,7 @@ public void shouldReturnMarcBibAndAllEntitiesWithoutErrorsRecordProcessingLogDTO .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordOrder", is("0")) .body("entries[0].error", emptyOrNullString()) @@ -876,7 +905,7 @@ public void shouldReturnDiscardedForHoldingsIfNoHoldingsCreatedRecordProcessingL .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordOrder", is("0")) .body("entries[0].error", emptyOrNullString()) @@ -922,7 +951,7 @@ public void shouldReturnDiscardedForItemsIfNoItemsCreatedRecordProcessingLogDTOC .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) 
+ .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordOrder", is("0")) .body("entries[0].error", emptyOrNullString()) @@ -952,6 +981,7 @@ public void shouldReturnDataForInvoiceLinesRecordProcessingLogDTOCollection(Test String invoiceLineDescription = "Some description"; Future future = Future.succeededFuture() + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, PARSE, null, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, null, 0, CREATE, EDIFACT, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, invoiceId, invoiceHrid, "INVOICE", 0, CREATE, INVOICE, COMPLETED, null, null)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, invoiceLineId1, invoiceVendorNumber + "-1", invoiceLineDescription + "1", 1, CREATE, INVOICE, COMPLETED, null, null)) @@ -1360,7 +1390,7 @@ public void shouldReturnMarcBibAndAllEntitiesWithMultipleItemsAndHoldings(TestCo .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)); async.complete(); })); @@ -1408,7 +1438,7 @@ public void shouldReturnMarcBibAndAllEntitiesWithItemsHoldingsWithoutDiscardedRe .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordOrder", is("0")) .body("entries[0].error", emptyOrNullString()) @@ -1464,7 +1494,7 @@ public void 
shouldReturnMarcBibAndAllEntitiesWithDiscardedItemsHoldingsRecordPro .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordOrder", is("0")) .body("entries[0].error", emptyOrNullString()) @@ -1518,7 +1548,7 @@ public void shouldReturnMarcBibAndAllEntitiesWithMultipleItemsUpdateRecordProces .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordOrder", is("0")) .body("entries[0].error", emptyOrNullString()) @@ -1548,9 +1578,10 @@ public void shouldReturnCentralTenantIdForMarcRecordAndInstanceIfItIsSavedInJour String instanceId = UUID.randomUUID().toString(); String recordTitle = "test title"; String expectedCentralTenantId = "mobius"; + String marcBibEntityId = UUID.randomUUID().toString(); Future future = Future.succeededFuture() - .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, null, null, recordTitle, 0, UPDATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null, expectedCentralTenantId)) + .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, marcBibEntityId, null, recordTitle, 0, UPDATE, MARC_BIBLIOGRAPHIC, COMPLETED, null, null, expectedCentralTenantId)) .compose(v -> createJournalRecord(createdJobExecution.getId(), sourceRecordId, instanceId, "in00000000001", null, 0, UPDATE, INSTANCE, COMPLETED, null, null, expectedCentralTenantId)); future.onComplete(context.asyncAssertSuccess(v -> @@ -1564,7 +1595,8 @@ public void 
shouldReturnCentralTenantIdForMarcRecordAndInstanceIfItIsSavedInJour .body("entries.size()", is(1)) .body("totalRecords", is(1)) .body("entries[0].jobExecutionId", is(createdJobExecution.getId())) - .body("entries[0].sourceRecordId", is(sourceRecordId)) + .body("entries[0].sourceRecordId", is(marcBibEntityId)) + .body("entries[0].incomingRecordId", is(sourceRecordId)) .body("entries[0].sourceRecordTitle", is(recordTitle)) .body("entries[0].sourceRecordOrder", is("0")) .body("entries[0].sourceRecordTenantId", is(expectedCentralTenantId)) diff --git a/mod-source-record-manager-server/src/test/java/org/folio/services/JournalUtilTest.java b/mod-source-record-manager-server/src/test/java/org/folio/services/JournalUtilTest.java index 283ba20c3..f83ef221b 100644 --- a/mod-source-record-manager-server/src/test/java/org/folio/services/JournalUtilTest.java +++ b/mod-source-record-manager-server/src/test/java/org/folio/services/JournalUtilTest.java @@ -533,6 +533,37 @@ public void shouldBuildJournalRecordWhenNoRecord() throws JournalRecordMapperExc Assert.assertNotNull(journalRecord.get(0).getActionDate()); } + @Test + public void shouldBuildMarcBibJournalRecordOnError() throws JournalRecordMapperException { + var testError = "Something Happened"; + var testJobExecutionId = UUID.randomUUID().toString(); + var recordJson = new JsonObject() + .put("id", UUID.randomUUID().toString()) + .put("snapshotId", testJobExecutionId) + .put("order", 1); + + var context = new HashMap(); + context.put(ERROR_KEY, testError); + context.put(MARC_BIBLIOGRAPHIC.value(), recordJson.encode()); + + var eventPayload = new DataImportEventPayload() + .withEventType(DI_ERROR.value()) + .withJobExecutionId(testJobExecutionId) + .withContext(context); + + var journalRecord = JournalUtil.buildJournalRecordsByEvent(eventPayload, + CREATE, INSTANCE, COMPLETED); + + Assert.assertNotNull(journalRecord); + Assert.assertEquals(1, journalRecord.get(0).getSourceRecordOrder().intValue()); + 
Assert.assertEquals(testError, journalRecord.get(0).getError()); + Assert.assertEquals(testJobExecutionId, journalRecord.get(0).getJobExecutionId()); + Assert.assertEquals(INSTANCE, journalRecord.get(0).getEntityType()); + Assert.assertEquals(CREATE, journalRecord.get(0).getActionType()); + Assert.assertEquals(COMPLETED, journalRecord.get(0).getActionStatus()); + Assert.assertNotNull(journalRecord.get(0).getActionDate()); + } + @Test public void shouldBuildJournalRecordForInstanceEvenIfEntityIsNotExists() throws JournalRecordMapperException { String recordId = UUID.randomUUID().toString(); @@ -1043,6 +1074,38 @@ public void shouldBuildJournalRecordWithCentralTenantIdFromPayload() throws Jour Assert.assertEquals(COMPLETED, journalRecords.get(0).getActionStatus()); } + @Test + public void shouldBuildJournalRecordForMarcBibliographicUpdate() throws JournalRecordMapperException { + String incomingRecordId = UUID.randomUUID().toString(); + + Record record = new Record() + .withId(UUID.randomUUID().toString()) + .withSnapshotId(UUID.randomUUID().toString()) + .withOrder(1); + + HashMap context = new HashMap<>() {{ + put(MARC_BIBLIOGRAPHIC.value(), Json.encode(record)); + put(INCOMING_RECORD_ID, incomingRecordId); + }}; + + DataImportEventPayload eventPayload = new DataImportEventPayload() + .withEventType(DI_SRS_MARC_BIB_RECORD_UPDATED.value()) + .withContext(context); + + List journalRecords = JournalUtil.buildJournalRecordsByEvent(eventPayload, + UPDATE, MARC_BIBLIOGRAPHIC, COMPLETED); + + Assert.assertEquals(1, journalRecords.size()); + Assert.assertEquals(incomingRecordId, journalRecords.get(0).getSourceId()); + Assert.assertEquals(1, journalRecords.get(0).getSourceRecordOrder().intValue()); + Assert.assertEquals(record.getId(), journalRecords.get(0).getEntityId()); + Assert.assertEquals(MARC_BIBLIOGRAPHIC, journalRecords.get(0).getEntityType()); + Assert.assertEquals(UPDATE, journalRecords.get(0).getActionType()); + Assert.assertEquals(COMPLETED, 
journalRecords.get(0).getActionStatus()); + Assert.assertEquals(MARC_BIBLIOGRAPHIC, journalRecords.get(0).getEntityType()); + } + + @Test public void shouldBuildJournalRecordForNonMatchWithErrorAndMatchedNumberNotAvailable() throws JournalRecordMapperException { String recordId = UUID.randomUUID().toString(); diff --git a/mod-source-record-manager-server/src/test/java/org/folio/services/validation/JobProfileSnapshotValidationServiceImplTest.java b/mod-source-record-manager-server/src/test/java/org/folio/services/validation/JobProfileSnapshotValidationServiceImplTest.java index 59d9e3ce3..a0d4e6eda 100644 --- a/mod-source-record-manager-server/src/test/java/org/folio/services/validation/JobProfileSnapshotValidationServiceImplTest.java +++ b/mod-source-record-manager-server/src/test/java/org/folio/services/validation/JobProfileSnapshotValidationServiceImplTest.java @@ -5,7 +5,6 @@ import org.folio.rest.jaxrs.model.EntityType; import org.folio.rest.jaxrs.model.JobProfile; import org.folio.rest.jaxrs.model.MappingProfile; -import org.folio.rest.jaxrs.model.MatchProfile; import org.folio.rest.jaxrs.model.ProfileSnapshotWrapper; import org.junit.Test; @@ -14,7 +13,6 @@ import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.ACTION_PROFILE; import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.JOB_PROFILE; import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.MAPPING_PROFILE; -import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.MATCH_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_HOLDING; import static org.junit.Assert.assertFalse; diff --git a/mod-source-record-manager-server/src/test/java/org/folio/verticle/consumers/RawMarcChunksKafkaHandlerTest.java b/mod-source-record-manager-server/src/test/java/org/folio/verticle/consumers/RawMarcChunksKafkaHandlerTest.java index eb838ba8f..ee48e4f21 
100644 --- a/mod-source-record-manager-server/src/test/java/org/folio/verticle/consumers/RawMarcChunksKafkaHandlerTest.java +++ b/mod-source-record-manager-server/src/test/java/org/folio/verticle/consumers/RawMarcChunksKafkaHandlerTest.java @@ -2,7 +2,6 @@ import io.vertx.core.Future; import io.vertx.core.Vertx; -import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; import io.vertx.kafka.client.consumer.KafkaConsumerRecord; import io.vertx.kafka.client.producer.KafkaHeader; @@ -11,24 +10,22 @@ import org.folio.kafka.AsyncRecordHandler; import org.folio.rest.jaxrs.model.JobExecution; import org.folio.services.ChunkProcessingService; -import org.folio.services.EventProcessedService; import org.folio.services.JobExecutionService; -import org.folio.services.MappingRuleCache; import org.folio.services.RecordsPublishingService; import org.folio.services.flowcontrol.RawRecordsFlowControlService; -import org.folio.services.journal.JournalService; +import org.folio.verticle.consumers.util.JobExecutionUtils; +import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; -import org.mockito.ArgumentCaptor; -import org.mockito.Captor; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import java.io.IOException; import java.util.List; import java.util.Optional; +import java.util.UUID; import static org.folio.rest.RestVerticle.OKAPI_HEADER_TENANT; import static org.junit.Assert.assertTrue; @@ -51,19 +48,11 @@ public class RawMarcChunksKafkaHandlerTest { @Mock private KafkaConsumerRecord kafkaRecord; @Mock - private JournalService journalService; - @Mock - private EventProcessedService eventProcessedService; - @Mock private ChunkProcessingService eventDrivenChunkProcessingService; @Mock private RawRecordsFlowControlService flowControlService; @Mock - private MappingRuleCache mappingRuleCache; - @Mock private JobExecutionService jobExecutionService; - @Captor - private 
ArgumentCaptor journalRecordsCaptor; private Vertx vertx = Vertx.vertx(); private AsyncRecordHandler rawMarcChunksKafkaHandler; @@ -75,8 +64,11 @@ public static void setUpClass() throws IOException { @Before public void setUp() { rawMarcChunksKafkaHandler = new RawMarcChunksKafkaHandler(eventDrivenChunkProcessingService, flowControlService, jobExecutionService, vertx); -// when(jobExecutionService.getJobExecutionById(anyString(), anyString())) -// .thenReturn(Future.succeededFuture(Optional.of(new JobExecution()))); + } + + @After + public void invalidateCache() { + JobExecutionUtils.clearCache(); } @Test @@ -91,4 +83,19 @@ public void shouldNotHandleEventWhenJobExecutionWasCancelled() { verify(recordsPublishingService, never()).sendEventsWithRecords(anyList(), anyString(), any(OkapiConnectionParams.class), anyString()); assertTrue(future.succeeded()); } + + @Test + public void shouldNotHandleEventWhenIncorrectJobProfileIsPickedForUploadedFile() { + var jobExecId = UUID.randomUUID().toString(); + when(kafkaRecord.headers()).thenReturn(List.of(KafkaHeader.header(OKAPI_HEADER_TENANT.toLowerCase(), TENANT_ID))); + when(jobExecutionService.getJobExecutionById(any(), any())).thenReturn(Future.succeededFuture(Optional.of(new JobExecution().withId(jobExecId).withStatus(JobExecution.Status.PARSING_IN_PROGRESS)))); + // when error status is cached due to incorrect job profile is selected for uploaded file + JobExecutionUtils.cache.put(jobExecId, JobExecution.Status.ERROR); + // when + Future future = rawMarcChunksKafkaHandler.handle(kafkaRecord); + + // then + verify(recordsPublishingService, never()).sendEventsWithRecords(anyList(), anyString(), any(OkapiConnectionParams.class), anyString()); + assertTrue(future.succeeded()); + } } diff --git a/pom.xml b/pom.xml index 1df066e07..0a32a304c 100644 --- a/pom.xml +++ b/pom.xml @@ -19,12 +19,12 @@ - 35.0.1 - 4.3.7 + 35.2.0 + 4.5.4 5.3.30 5.10.1 4.3.1 - 2.27.2 + 3.0.1 ${project.basedir} ${basedir}/ramls UTF-8 @@ -42,7 +42,7 @@ 
org.codehaus.mojo versions-maven-plugin - 2.8.1 + 2.16.2 false @@ -84,7 +84,7 @@ org.apache.maven.plugins maven-resources-plugin - 3.2.0 + 3.3.1 filter-descriptor-inputs @@ -137,7 +137,7 @@ org.apache.maven.plugins maven-release-plugin - 2.5.3 + 3.0.1 clean verify v@{project.version} diff --git a/ramls/raml-storage b/ramls/raml-storage index 28a5e2034..938d9f8ac 160000 --- a/ramls/raml-storage +++ b/ramls/raml-storage @@ -1 +1 @@ -Subproject commit 28a5e2034f1cf11fa62d2b6f5df2ee6dcc18d921 +Subproject commit 938d9f8aca487c70bb1a9a8c118df40266022ab2