Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/release/2024_R1_v20.2.0_Quesneli…
Browse files Browse the repository at this point in the history
…a' into release/2024_R1_v20.2.0_Quesnelia

# Conflicts:
#	NEWS.md
#	src/main/java/org/folio/inventory/dataimport/util/AdditionalFieldsUtil.java
#	src/test/java/org/folio/inventory/dataimport/util/AdditionalFieldsUtilTest.java
#	src/test/resources/marc/reorderedParsedRecord.json
#	src/test/resources/marc/reorderingResultRecord.json
  • Loading branch information
psmagin committed May 24, 2024
2 parents 79b4e4e + 8219ae1 commit e34a4e7
Show file tree
Hide file tree
Showing 19 changed files with 1,073 additions and 292 deletions.
18 changes: 17 additions & 1 deletion NEWS.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,21 @@
## 20.2.5 2024-05-24
* Don't group fields with same tags together ([MODINV-1026](https://issues.folio.org/browse/MODINV-1026))
* [MODINV-1026](https://folio-org.atlassian.net/browse/MODINV-1026) Don't group fields with same tags together

## 20.2.4 2024-05-09
* [MODINV-1024](https://folio-org.atlassian.net/browse/MODINV-1024) Fix error while updating Instances and Items with electronic access without URI field populated.

## 20.2.3 2024-04-25
* [MODINV-1022](https://folio-org.atlassian.net/browse/MODINV-1022) Circulation History in Item record removed when Item is marked Missing/Long missing/ Unavailable

## 20.2.2 2024-04-22
* [MODINV-1009](https://folio-org.atlassian.net/browse/MODINV-1009) Circulation History in Item record removed when Item is marked Missing/Long missing/ Unavailable
* [MODINV-1012](https://folio-org.atlassian.net/browse/MODINV-1012) Invalid values (as it is) created in JSON when value is not matching accepted options provided in Instance field mapping for Nature of Content term

## 20.2.1 2024-04-12
* [MODINV-1001](https://folio-org.atlassian.net/browse/MODINV-1001) Fix sorting for Items on Instance details page
* [MODINV-999](https://folio-org.atlassian.net/browse/MODINV-999) "PMSystem" displayed as source in "quickmarc" view when record was created by "Non-matches" action of job profile
* [MODINV-1003](https://folio-org.atlassian.net/browse/MODINV-1003) The result table is not displayed in the file details log
* [MODINV-997](https://folio-org.atlassian.net/browse/MODINV-997) Keep order of MARC fields while Creating/Deriving/Editing MARC records

## 20.2.0 2024-03-20
* Inventory cannot process Holdings with virtual fields ([MODINV-941](https://issues.folio.org/browse/MODINV-941))
Expand Down
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
<modelVersion>4.0.0</modelVersion>
<artifactId>mod-inventory</artifactId>
<groupId>org.folio</groupId>
<version>20.2.1-SNAPSHOT</version>
<version>20.2.5-SNAPSHOT</version>
<licenses>
<license>
<name>Apache License 2.0</name>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,7 @@ public class DataImportKafkaHandler implements AsyncRecordHandler<String, String
private static final Logger LOGGER = LogManager.getLogger(DataImportKafkaHandler.class);
private static final String RECORD_ID_HEADER = "recordId";
private static final String CHUNK_ID_HEADER = "chunkId";
private static final String USER_ID_HEADER = "userId";
private static final String PROFILE_SNAPSHOT_ID_KEY = "JOB_PROFILE_SNAPSHOT_ID";

private final Vertx vertx;
Expand Down Expand Up @@ -123,10 +124,12 @@ public Future<String> handle(KafkaConsumerRecord<String, String> record) {
Map<String, String> headersMap = KafkaHeaderUtils.kafkaHeadersToMap(record.headers());
String recordId = headersMap.get(RECORD_ID_HEADER);
String chunkId = headersMap.get(CHUNK_ID_HEADER);
String userId = headersMap.get(USER_ID_HEADER);
String jobExecutionId = eventPayload.getJobExecutionId();
LOGGER.info("Data import event payload has been received with event type: {}, recordId: {} by jobExecution: {} and chunkId: {}", eventPayload.getEventType(), recordId, jobExecutionId, chunkId);
eventPayload.getContext().put(RECORD_ID_HEADER, recordId);
eventPayload.getContext().put(CHUNK_ID_HEADER, chunkId);
eventPayload.getContext().put(USER_ID_HEADER, userId);

Context context = EventHandlingUtil.constructContext(eventPayload.getTenant(), eventPayload.getToken(), eventPayload.getOkapiUrl());
String jobProfileSnapshotId = eventPayload.getContext().get(PROFILE_SNAPSHOT_ID_KEY);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import org.folio.inventory.dataimport.services.OrderHelperService;
import org.folio.inventory.dataimport.util.AdditionalFieldsUtil;
import org.folio.inventory.dataimport.util.ParsedRecordUtil;
import org.folio.inventory.dataimport.util.ValidationUtil;
import org.folio.inventory.domain.instances.Instance;
import org.folio.inventory.domain.instances.InstanceCollection;
import org.folio.inventory.domain.relationship.RecordToEntity;
Expand All @@ -41,8 +42,7 @@
import static org.folio.ActionProfile.FolioRecord.MARC_BIBLIOGRAPHIC;
import static org.folio.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED;
import static org.folio.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING;
import static org.folio.inventory.dataimport.util.AdditionalFieldsUtil.SUBFIELD_I;
import static org.folio.inventory.dataimport.util.AdditionalFieldsUtil.TAG_999;
import static org.folio.inventory.dataimport.util.AdditionalFieldsUtil.*;
import static org.folio.inventory.dataimport.util.DataImportConstants.UNIQUE_ID_ERROR_MESSAGE;
import static org.folio.inventory.dataimport.util.LoggerUtil.logParametersEventHandler;
import static org.folio.inventory.domain.instances.Instance.HRID_KEY;
Expand Down Expand Up @@ -93,6 +93,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d

Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
Record targetRecord = Json.decodeValue(payloadContext.get(EntityType.MARC_BIBLIOGRAPHIC.value()), Record.class);
var sourceContent = targetRecord.getParsedRecord().getContent().toString();

if (!Boolean.parseBoolean(payloadContext.get("acceptInstanceId")) && AdditionalFieldsUtil.getValue(targetRecord, TAG_999, SUBFIELD_I).isPresent()) {
LOGGER.error(INSTANCE_CREATION_999_ERROR_MESSAGE);
Expand All @@ -118,19 +119,33 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d
.compose(v -> {
InstanceCollection instanceCollection = storage.getInstanceCollection(context);
JsonObject instanceAsJson = prepareInstance(dataImportEventPayload, instanceId, jobExecutionId);
List<String> errors = EventHandlingUtil.validateJsonByRequiredFields(instanceAsJson, requiredFields);
if (!errors.isEmpty()) {
String msg = format("Mapped Instance is invalid: %s, by jobExecutionId: '%s' and recordId: '%s' and chunkId: '%s' ", errors,
List<String> requiredFieldsErrors = EventHandlingUtil.validateJsonByRequiredFields(instanceAsJson, requiredFields);
if (!requiredFieldsErrors.isEmpty()) {
String msg = format("Mapped Instance is invalid: %s, by jobExecutionId: '%s' and recordId: '%s' and chunkId: '%s' ", requiredFieldsErrors,
jobExecutionId, recordId, chunkId);
LOGGER.warn(msg);
return Future.failedFuture(msg);
}

Instance mappedInstance = Instance.fromJson(instanceAsJson);

List<String> invalidUUIDsErrors = ValidationUtil.validateUUIDs(mappedInstance);
if (!invalidUUIDsErrors.isEmpty()) {
String msg = format("Mapped Instance is invalid: %s, by jobExecutionId: '%s' and recordId: '%s' and chunkId: '%s' ", invalidUUIDsErrors,
jobExecutionId, recordId, chunkId);
LOGGER.warn(msg);
return Future.failedFuture(msg);
}

return addInstance(mappedInstance, instanceCollection)
.compose(createdInstance -> getPrecedingSucceedingTitlesHelper().createPrecedingSucceedingTitles(mappedInstance, context).map(createdInstance))
.compose(createdInstance -> executeFieldsManipulation(createdInstance, targetRecord))
.compose(createdInstance -> saveRecordInSrsAndHandleResponse(dataImportEventPayload, targetRecord, createdInstance, instanceCollection, dataImportEventPayload.getTenant()));
.compose(createdInstance -> {
var targetContent = targetRecord.getParsedRecord().getContent().toString();
var content = reorderMarcRecordFields(sourceContent, targetContent);
targetRecord.setParsedRecord(targetRecord.getParsedRecord().withContent(content));
return saveRecordInSrsAndHandleResponse(dataImportEventPayload, targetRecord, createdInstance, instanceCollection, dataImportEventPayload.getTenant());
});
})
.onSuccess(ar -> {
dataImportEventPayload.getContext().put(INSTANCE.value(), Json.encode(ar));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
import org.folio.inventory.dataimport.cache.MappingMetadataCache;
import org.folio.inventory.dataimport.handlers.matching.util.EventHandlingUtil;
import org.folio.inventory.dataimport.util.AdditionalFieldsUtil;
import org.folio.inventory.dataimport.util.ValidationUtil;
import org.folio.inventory.domain.instances.Instance;
import org.folio.inventory.domain.instances.InstanceCollection;
import org.folio.inventory.exceptions.NotFoundException;
Expand Down Expand Up @@ -194,6 +195,14 @@ private void processInstanceUpdate(DataImportEventPayload dataImportEventPayload
org.folio.rest.jaxrs.model.Record targetRecord = Json.decodeValue(marcBibAsJson, org.folio.rest.jaxrs.model.Record.class);

Instance mappedInstance = Instance.fromJson(instanceAsJson);
List<String> invalidUUIDsErrors = ValidationUtil.validateUUIDs(mappedInstance);
if (!invalidUUIDsErrors.isEmpty()) {
String msg = format("Mapped Instance is invalid: %s, by jobExecutionId: '%s' and recordId: '%s' and chunkId: '%s' ", invalidUUIDsErrors,
jobExecutionId, recordId, chunkId);
LOGGER.warn(msg);
return Future.failedFuture(msg);
}

return updateInstanceAndRetryIfOlExists(mappedInstance, instanceCollection, dataImportEventPayload)
.compose(updatedInstance -> getPrecedingSucceedingTitlesHelper().getExistingPrecedingSucceedingTitles(mappedInstance, context))
.map(precedingSucceedingTitles -> precedingSucceedingTitles.stream()
Expand Down
Loading

0 comments on commit e34a4e7

Please sign in to comment.