Commit
MODSOURMAN-1022: migrate from DI_SRS_MARC_BIB_RECORD_CREATED to DI_INCOMING_MARC_BIB_RECORD_PARSED event (#650)
yaroslav-epam committed Dec 7, 2023
1 parent 542d786 commit 9126400
Showing 12 changed files with 66 additions and 63 deletions.
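Functionally this is a rename of the data-import Kafka event that carries a parsed incoming MARC bib record: every import, consumer subscription, and test payload switches from DI_SRS_MARC_BIB_RECORD_CREATED to DI_INCOMING_MARC_BIB_RECORD_PARSED, as the hunks below show. For orientation, here is a minimal sketch of how an updated test builds its payload with the new event type; the builder methods, the event constant, and the "diku" tenant value are taken from the diff, while the wrapper class name, the org.folio package for DataImportEventPayload, and the getEventType() getter are assumptions made for illustration only.

import java.util.UUID;
import org.folio.DataImportEventPayload; // package assumed from typical FOLIO layout
import static org.folio.DataImportEventTypes.DI_INCOMING_MARC_BIB_RECORD_PARSED;

// Illustrative sketch: mirrors how the updated handler tests construct a payload
// carrying the renamed event type.
public class IncomingMarcBibEventSketch {
  public static void main(String[] args) {
    DataImportEventPayload payload = new DataImportEventPayload()
      .withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value()) // was DI_SRS_MARC_BIB_RECORD_CREATED.value()
      .withTenant("diku")
      .withJobExecutionId(UUID.randomUUID().toString());

    System.out.println(payload.getEventType()); // expected: DI_INCOMING_MARC_BIB_RECORD_PARSED (getter assumed)
  }
}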
NEWS.md: 3 changes (3 additions & 0 deletions)
@@ -1,3 +1,6 @@
+## 20.2.0-SNAPSHOT 2023-XX-XX
+* [MODSOURMAN-1022](https://issues.folio.org/browse/MODSOURMAN-1022) Remove step of initial saving of incoming records to SRS
+
## 20.1.0 2023-10-13
* Update status when user attempts to update shared auth record from member tenant ([MODDATAIMP-926](https://issues.folio.org/browse/MODDATAIMP-926))
* Add cache to get and store consortium data configurations ([MODINV-872](https://issues.folio.org/browse/MODINV-872))

@@ -19,7 +19,7 @@
import static org.folio.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_CREATED;
import static org.folio.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_MODIFIED_READY_FOR_POST_PROCESSING;
import static org.folio.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_NOT_MATCHED;
-import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
+import static org.folio.DataImportEventTypes.DI_INCOMING_MARC_BIB_RECORD_PARSED;
import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MATCHED;
import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MATCHED_READY_FOR_POST_PROCESSING;
import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED;
@@ -87,7 +87,7 @@ public class DataImportConsumerVerticle extends AbstractVerticle {
DI_SRS_MARC_AUTHORITY_RECORD_DELETED,
DI_SRS_MARC_AUTHORITY_RECORD_MODIFIED_READY_FOR_POST_PROCESSING,
DI_SRS_MARC_AUTHORITY_RECORD_NOT_MATCHED,
-DI_SRS_MARC_BIB_RECORD_CREATED,
+DI_INCOMING_MARC_BIB_RECORD_PARSED,
DI_SRS_MARC_BIB_RECORD_MATCHED,
DI_SRS_MARC_BIB_RECORD_MATCHED_READY_FOR_POST_PROCESSING,
DI_SRS_MARC_BIB_RECORD_MODIFIED,

@@ -51,7 +51,7 @@
import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig;
import static org.folio.ActionProfile.Action.CREATE;
import static org.folio.DataImportEventTypes.DI_COMPLETED;
-import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
+import static org.folio.DataImportEventTypes.DI_INCOMING_MARC_BIB_RECORD_PARSED;
import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_ENV;
import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_HOST;
import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_MAX_REQUEST_SIZE;
@@ -169,7 +169,7 @@ public void setUp() {
public void shouldSendEventWithProcessedEventPayloadWhenProcessingCoreHandlerSucceeded() throws InterruptedException {
// given
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withTenant(TENANT_ID)
.withOkapiUrl(mockServer.baseUrl())
.withToken("test-token")

@@ -50,7 +50,7 @@
import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith;
import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig;
import static org.folio.ActionProfile.Action.CREATE;
-import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
+import static org.folio.DataImportEventTypes.DI_INCOMING_MARC_BIB_RECORD_PARSED;
import static org.folio.rest.jaxrs.model.EntityType.INSTANCE;
import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.ACTION_PROFILE;
@@ -198,7 +198,7 @@ public void shouldReturnFailedFutureWhenProcessingCoreHandlerFailed(TestContext
// given
Async async = context.async();
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withTenant("diku")
.withOkapiUrl(mockServer.baseUrl())
.withToken("test-token")

@@ -67,7 +67,7 @@
import static org.folio.ActionProfile.FolioRecord.HOLDINGS;
import static org.folio.ActionProfile.FolioRecord.MARC_BIBLIOGRAPHIC;
import static org.folio.DataImportEventTypes.DI_INVENTORY_HOLDING_CREATED;
-import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
+import static org.folio.DataImportEventTypes.DI_INCOMING_MARC_BIB_RECORD_PARSED;
import static org.folio.inventory.dataimport.handlers.actions.CreateHoldingEventHandler.ACTION_HAS_NO_MAPPING_MSG;
import static org.folio.inventory.dataimport.util.DataImportConstants.UNIQUE_ID_ERROR_MESSAGE;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.ACTION_PROFILE;
@@ -754,7 +754,7 @@ public void shouldNotProcessEventWhenRecordToHoldingsFutureFails() throws Execut
payloadContext.put(EntityType.HOLDINGS.value(), holdingAsJson.encode());

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));

@@ -69,7 +69,7 @@
import static org.folio.ActionProfile.FolioRecord.MARC_BIBLIOGRAPHIC;
import static org.folio.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED;
import static org.folio.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING;
-import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
+import static org.folio.DataImportEventTypes.DI_INCOMING_MARC_BIB_RECORD_PARSED;
import static org.folio.inventory.dataimport.util.DataImportConstants.UNIQUE_ID_ERROR_MESSAGE;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.ACTION_PROFILE;
import static org.folio.rest.jaxrs.model.ProfileSnapshotWrapper.ContentType.JOB_PROFILE;
@@ -454,7 +454,7 @@ public void shouldReturnFailedFutureIfCurrentActionProfileHasNoMappingProfile()
context.put(MARC_BIBLIOGRAPHIC.value(), Json.encode(new Record().withParsedRecord(new ParsedRecord().withContent(PARSED_CONTENT))));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withContext(context)
.withCurrentNode(new ProfileSnapshotWrapper()
.withContentType(ACTION_PROFILE)

@@ -63,7 +63,7 @@
import java.util.function.Consumer;

import static org.folio.DataImportEventTypes.DI_INVENTORY_ITEM_CREATED;
-import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
+import static org.folio.DataImportEventTypes.DI_INCOMING_MARC_BIB_RECORD_PARSED;
import static org.folio.inventory.dataimport.handlers.actions.CreateItemEventHandler.ACTION_HAS_NO_MAPPING_MSG;
import static org.folio.inventory.dataimport.util.DataImportConstants.UNIQUE_ID_ERROR_MESSAGE;
import static org.folio.inventory.domain.items.ItemStatusName.AVAILABLE;
@@ -209,7 +209,7 @@ public void shouldCreateItemAndFillInHoldingsRecordIdFromHoldingsEntityAndFillIn
payloadContext.put(HOLDINGS_IDENTIFIERS, Json.encode(List.of(PERMANENT_LOCATION_ID)));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -277,7 +277,7 @@ public void shouldCreateMultipleItems()
payloadContext.put(HOLDINGS_IDENTIFIERS, Json.encode(List.of(PERMANENT_LOCATION_ID, permanentLocationId2)));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -356,7 +356,7 @@ public void shouldCreateMultipleItemsAndSkipItemsWithInvalidHoldingsIdentifiers(
payloadContext.put(HOLDINGS_IDENTIFIERS, Json.encode(Lists.newArrayList(null, PERMANENT_LOCATION_ID, permanentLocationId2, "fake")));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -435,7 +435,7 @@ public void shouldCreateMultipleItemsAndPopulatePartialErrorsForFailedItems()
payloadContext.put(HOLDINGS_IDENTIFIERS, Json.encode(List.of(PERMANENT_LOCATION_ID, permanentLocationId2)));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -504,7 +504,7 @@ public void shouldPopulateSameHoldingsItForAllItemsIfOnlyOneHoldingExist()
payloadContext.put(HOLDINGS_IDENTIFIERS, Json.encode(List.of(PERMANENT_LOCATION_ID)));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -554,7 +554,7 @@ public void shouldCreateItemAndFillInHoldingsRecordIdFromParsedRecordContent()
payloadContext.put(ERRORS, Json.encode(new PartialError(null, "testError")));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -601,7 +601,7 @@ public void shouldCreateItemAndFillInHoldingsRecordIdFromMatchedHolding()
payloadContext.put(ERRORS, Json.encode(new PartialError(null, "testError")));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -671,7 +671,7 @@ public void shouldNotReturnFailedFutureIfInventoryStorageErrorExists()
payloadContext.put(HOLDINGS_IDENTIFIERS, Json.encode(List.of(PERMANENT_LOCATION_ID, permanentLocationId2)));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -728,7 +728,7 @@ public void shouldCompleteFutureAndReturnErrorsWhenMappedItemWithoutStatus()
payloadContext.put(MULTIPLE_HOLDINGS_FIELD, "945");

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -783,7 +783,7 @@ public void shouldCompleteAndReturnErrorWhenMappedItemWithUnrecognizedStatusName
payloadContext.put(MULTIPLE_HOLDINGS_FIELD, "945");

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -847,7 +847,7 @@ public void shouldCompleteAndReturnErrorWhenCreatedItemHasExistingBarcode()
payloadContext.put(MULTIPLE_HOLDINGS_FIELD, "945");

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -901,7 +901,7 @@ public void shouldCompleteReturnErrorWhenMappedItemWithoutPermanentLoanType()
payloadContext.put(MULTIPLE_HOLDINGS_FIELD, "945");

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -964,7 +964,7 @@ public void shouldReturnFailedFutureIfDuplicatedErrorExists()
payloadContext.put(MULTIPLE_HOLDINGS_FIELD, "945");

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -1021,7 +1021,7 @@ public void shouldNotRequestWhenCreatedItemHasEmptyBarcode()
)));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(new ProfileSnapshotWrapper()
@@ -1050,7 +1050,7 @@ public void shouldReturnFailedFutureWhenHasNoMarcRecord()

// given
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withContext(new HashMap<>())
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));

@@ -1073,7 +1073,7 @@ public void shouldReturnFailedFutureWhenCouldNotFindHoldingsRecordIdInEventPaylo
payloadContext.put(HOLDINGS_IDENTIFIERS, Json.encode(List.of(PERMANENT_LOCATION_ID)));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -1109,7 +1109,7 @@ public void shouldReturnFailedFutureWhenCouldNotFindPoLineIdInEventPayload()
payloadContext.put(EntityType.PO_LINE.value(), new JsonObject().encode());

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -1137,7 +1137,7 @@ public void shouldFailWhenNoItemsCreated()
payloadContext.put(HOLDINGS_IDENTIFIERS, Json.encode(List.of(PERMANENT_LOCATION_ID)));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
@@ -1161,7 +1161,7 @@ public void shouldReturnFailedFutureWhenCurrentActionProfileHasNoMappingProfile(
payloadContext.put(EntityType.MARC_BIBLIOGRAPHIC.value(), Json.encode(record));

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withContext(payloadContext)
.withCurrentNode(new ProfileSnapshotWrapper()
.withContent(JsonObject.mapFrom(actionProfile).getMap())
@@ -1179,7 +1179,7 @@ public void shouldReturnFailedFutureWhenCurrentActionProfileHasNoMappingProfile(
public void shouldReturnTrueWhenHandlerIsEligibleForActionProfile() {
// given
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));

// when
@@ -1205,7 +1205,7 @@ public void shouldReturnFalseWhenHandlerIsNotEligibleForActionProfile() {
.withContent(actionProfile);

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withCurrentNode(profileSnapshotWrapper);

// when
@@ -1227,7 +1227,7 @@ public void shouldNotProcessEventWhenRecordToItemFutureFails() throws ExecutionE
payloadContext.put(EntityType.HOLDINGS.value(), holdingAsJson.encode());

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withJobExecutionId(UUID.randomUUID().toString())
.withContext(payloadContext)
.withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));

@@ -46,7 +46,7 @@
import java.util.function.Consumer;

import static org.folio.ActionProfile.Action.MODIFY;
-import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED;
+import static org.folio.DataImportEventTypes.DI_INCOMING_MARC_BIB_RECORD_PARSED;
import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING;
import static org.folio.rest.jaxrs.model.EntityType.INSTANCE;
import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC;
@@ -343,7 +343,7 @@ public void shouldReturnFalseWhenHandlerIsNotEligibleForProfile() {
.withContent(actionProfile);

DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
-.withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
+.withEventType(DI_INCOMING_MARC_BIB_RECORD_PARSED.value())
.withCurrentNode(profileSnapshotWrapper);

// when