
Merge branch 'master' into MODSOURCE-test
# Conflicts:
#	mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java
TsaghikKhachatryan committed Feb 1, 2024
2 parents 0f1a60b + 0a0800f commit 1b12198
Showing 104 changed files with 2,889 additions and 1,070 deletions.
17 changes: 13 additions & 4 deletions NEWS.md
@@ -1,20 +1,29 @@
## 2023-xx-xx v5.7.0-SNAPSHOT
## 2024-xx-xx v5.8.0-SNAPSHOT
* [MODSOURCE-733](https://issues.folio.org/browse/MODSOURCE-733) Reduce Memory Allocation of Strings
* [MODSOURCE-506](https://issues.folio.org/browse/MODSOURCE-506) Remove rawRecord field from source record
* [MODSOURCE-709](https://issues.folio.org/browse/MODSOURCE-709) MARC authority record is not created when use Job profile with match profile by absent subfield/field
* [MODSOURCE-677](https://issues.folio.org/browse/MODSOURCE-677) Import is completed with errors when control field that differs from 001 is used for marc-to-marc matching
* [MODSOURCE-722](https://issues.folio.org/browse/MODSOURCE-722) deleteMarcIndexersOldVersions: relation "marc_records_tracking" does not exist
* [MODSOURMAN-1106](https://issues.folio.org/browse/MODSOURMAN-1106) The status of Instance is '-' in the Import log after uploading file. The numbers of updated SRS and Instance are not displayed in the Summary table.
* [MODSOURCE-717](https://issues.folio.org/browse/MODSOURCE-717) MARC modifications not processed when placed after Holdings Update action in a job profile

## 2023-10-13 v5.7.0
* [MODSOURCE-648](https://issues.folio.org/browse/MODSOURCE-648) Upgrade mod-source-record-storage to Java 17
* [MODSOURCE-601](https://issues.folio.org/browse/MODSOURCE-601) Optimize Insert & Update of marc_records_lb table
* [MODSOURCE-635](https://issues.folio.org/browse/MODSOURCE-635) Delete marc_indexers records associated with "OLD" source records
* [MODSOURCE-636](https://issues.folio.org/browse/MODSOURCE-636) Implement async migration service
* [MODSOURCE-674](https://issues.folio.org/browse/MODSOURCE-674) Ensure only one background job can be triggered to clean up outdated marc indexers
* [MODSOURCE-530](https://issues.folio.org/browse/MODSOURCE-530) Fix duplicate records in incoming file causes problems after overlay process with no error reported
* [MODSOURCE-690](https://issues.folio.org/browse/MODSOURCE-690) Make changes in SRS post processing handler to update MARC for shared Instance
* [MODSOURCE-646](https://issues.folio.org/browse/MODSOURCE-646) Make changes to perform MARC To MARC Matching in Local Tenant & Central Tenant
* [MODSOURCE-667](https://issues.folio.org/browse/MODSOURCE-667) Upgrade folio-kafka-wrapper to 3.0.0 version

### Asynchronous migration job API
| METHOD | URL | DESCRIPTION |
|--------|-----------------------------------------|-------------------------------------------------|
| POST | /source-storage/migrations/jobs | Initialize asynchronous migration job |
| GET | /source-storage/migrations/jobs/{jobId} | Get asynchronous migration job entity by its id |
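A minimal sketch of calling these endpoints with `java.net.http`. The Okapi host, tenant, token, and POST body below are placeholder assumptions — the table above documents only the paths and methods, not the request schema.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class MigrationJobClientSketch {
  public static void main(String[] args) throws Exception {
    HttpClient client = HttpClient.newHttpClient();

    // Initialize an asynchronous migration job (body schema not shown in this changelog).
    HttpRequest initJob = HttpRequest.newBuilder()
        .uri(URI.create("https://okapi.example.org/source-storage/migrations/jobs"))
        .header("x-okapi-tenant", "diku")   // placeholder tenant
        .header("x-okapi-token", "<token>") // placeholder token
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString("{}"))
        .build();
    HttpResponse<String> created = client.send(initJob, HttpResponse.BodyHandlers.ofString());
    System.out.println("init status: " + created.statusCode());

    // Poll the job by id; the jobId would come from the creation response.
    String jobId = "<jobId>";
    HttpRequest getJob = HttpRequest.newBuilder()
        .uri(URI.create("https://okapi.example.org/source-storage/migrations/jobs/" + jobId))
        .header("x-okapi-tenant", "diku")
        .header("x-okapi-token", "<token>")
        .GET()
        .build();
    System.out.println(client.send(getJob, HttpResponse.BodyHandlers.ofString()).body());
  }
}
```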

## 2023-03-xx v5.6.3-SNAPSHOT
* [MODSOURCE-615](https://issues.folio.org/browse/MODSOURCE-615) Importing 10,000 MARC authority records > Completes with errors due to timeout - Indices added.

## 2023-03-18 v5.6.2
* [MODSOURCE-585](https://issues.folio.org/browse/MODSOURCE-585) Data import matching takes incorrect SRS records into consideration
* [MODDATAIMP-786](https://issues.folio.org/browse/MODDATAIMP-786) Update data-import-util library to v1.11.0
15 changes: 10 additions & 5 deletions descriptors/ModuleDescriptor-template.json
@@ -54,7 +54,7 @@
},
{
"id": "source-storage-records",
"version": "3.1",
"version": "3.2",
"handlers": [
{
"methods": [
@@ -92,6 +92,15 @@
"source-storage.records.put"
]
},
{
"methods": [
"PUT"
],
"pathPattern": "/source-storage/records/{id}/generation",
"permissionsRequired": [
"source-storage.records.put"
]
},
{
"methods": [
"DELETE"
@@ -441,10 +450,6 @@
"name": "DB_MAXPOOLSIZE",
"value": "15"
},
{
"name": "test.mode",
"value": "true"
},
{
"name": "KAFKA_HOST",
"value": "10.0.2.15"
2 changes: 1 addition & 1 deletion mod-source-record-storage-client/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>org.folio</groupId>
<artifactId>mod-source-record-storage</artifactId>
<version>5.7.0-SNAPSHOT</version>
<version>5.8.0-SNAPSHOT</version>
</parent>

<properties>
50 changes: 32 additions & 18 deletions mod-source-record-storage-server/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>org.folio</groupId>
<artifactId>mod-source-record-storage</artifactId>
<version>5.7.0-SNAPSHOT</version>
<version>5.8.0-SNAPSHOT</version>
</parent>

<dependencies>
@@ -134,7 +134,7 @@
<dependency>
<groupId>org.folio</groupId>
<artifactId>folio-liquibase-util</artifactId>
<version>1.7.0-SNAPSHOT</version>
<version>1.7.0</version>
<type>jar</type>
<exclusions>
<exclusion>
@@ -179,7 +179,7 @@
<dependency>
<groupId>org.folio</groupId>
<artifactId>data-import-processing-core</artifactId>
<version>4.1.0-SNAPSHOT</version>
<version>4.2.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
@@ -188,7 +188,7 @@
<dependency>
<groupId>org.folio</groupId>
<artifactId>folio-kafka-wrapper</artifactId>
<version>3.0.0-SNAPSHOT</version>
<version>3.1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>net.mguenther.kafka</groupId>
@@ -210,6 +210,32 @@
</exclusion>
</exclusions>
</dependency>
<dependency> <!-- remove this dependency when using kafka-junit >= 3.6.0 -->
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>${kafkaclients.version}</version>
<classifier>test</classifier>
<scope>test</scope>
</dependency>
<dependency> <!-- remove this dependency when using kafka-junit >= 3.6.0 -->
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.13</artifactId>
<version>${kafkaclients.version}</version>
<scope>test</scope>
</dependency>
<dependency> <!-- remove this dependency when using kafka-junit >= 3.6.0 -->
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.13</artifactId>
<version>${kafkaclients.version}</version>
<classifier>test</classifier>
<scope>test</scope>
</dependency>
<dependency> <!-- remove this dependency when using kafka-junit >= 3.6.0 -->
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-tools</artifactId>
<version>${kafkaclients.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
@@ -256,22 +282,10 @@
<aspectj.version>1.9.19</aspectj.version>
<postgres.version>42.5.1</postgres.version>
<postgres.image>postgres:12-alpine</postgres.image>
<lombok.version>1.18.24</lombok.version>
<lombok.version>1.18.30</lombok.version>
<generate_routing_context>/source-storage/stream/records,/source-storage/stream/source-records,/source-storage/stream/marc-record-identifiers</generate_routing_context>
</properties>

<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-bom</artifactId>
<version>2.17.2</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>

<build>
<resources>
<resource>
@@ -455,7 +469,7 @@
<dependency>
<groupId>org.apache.maven</groupId>
<artifactId>maven-core</artifactId>
<version>3.3.9</version>
<version>3.8.1</version>
</dependency>
</dependencies>
<configuration>
mod-source-record-storage-server/src/main/java/org/folio/AuthorityDomainKafkaTopic.java (new file)
@@ -0,0 +1,26 @@
package org.folio;


import org.folio.kafka.services.KafkaTopic;

public enum AuthorityDomainKafkaTopic implements KafkaTopic {

AUTHORITY("authority");

private static final String AUTHORITIES_PREFIX = "authorities";
private final String topic;

AuthorityDomainKafkaTopic(String topic) {
this.topic = topic;
}

@Override
public String moduleName() {
return AUTHORITIES_PREFIX;
}

@Override
public String topicName() {
return topic;
}
}
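A brief sketch of how this enum's two methods might feed into a fully qualified topic name. The environment and tenant values are placeholders, and in practice the composition is performed by folio-kafka-wrapper rather than by hand as shown here.

```java
import org.folio.AuthorityDomainKafkaTopic;

public class AuthorityTopicNameSketch {
  public static void main(String[] args) {
    AuthorityDomainKafkaTopic topic = AuthorityDomainKafkaTopic.AUTHORITY;
    String env = "folio";   // placeholder Kafka environment prefix
    String tenant = "diku"; // placeholder tenant id
    // Yields e.g. "folio.diku.authorities.authority"
    String fullTopicName = String.join(".", env, tenant, topic.moduleName(), topic.topicName());
    System.out.println(fullTopicName);
  }
}
```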
@@ -8,10 +8,8 @@
import org.springframework.context.annotation.Configuration;

import org.folio.kafka.KafkaConfig;
import org.springframework.scheduling.annotation.EnableScheduling;

@Configuration
@EnableScheduling
@ComponentScan(basePackages = {
"org.folio.client",
"org.folio.rest.impl",
mod-source-record-storage-server/src/main/java/org/folio/consumers/AuthorityDomainKafkaHandler.java (new file)
@@ -0,0 +1,103 @@
package org.folio.consumers;

import static org.folio.dao.util.RecordDaoUtil.filterRecordByExternalId;

import io.vertx.core.Future;
import io.vertx.core.json.JsonObject;
import io.vertx.kafka.client.consumer.KafkaConsumerRecord;
import java.util.Collections;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.folio.dao.util.RecordType;
import org.folio.kafka.AsyncRecordHandler;
import org.folio.rest.jooq.enums.RecordState;
import org.folio.services.RecordService;
import org.folio.services.util.KafkaUtil;
import org.springframework.stereotype.Component;

@Component
public class AuthorityDomainKafkaHandler implements AsyncRecordHandler<String, String> {

private static final Logger log = LogManager.getLogger();

private static final String DOMAIN_EVENT_TYPE_HEADER = "domain-event-type";
private static final String DELETE_DOMAIN_EVENT_TYPE = "DELETE";
private static final String DELETE_EVENT_SUB_TYPE_FLD = "deleteEventSubType";
private static final String TENANT_FLD = "tenant";

private final RecordService recordService;

public AuthorityDomainKafkaHandler(RecordService recordService) {
this.recordService = recordService;
}

@Override
public Future<String> handle(KafkaConsumerRecord<String, String> consumerRecord) {
log.trace("handle:: Handling kafka record: '{}'", consumerRecord);
String authorityId = consumerRecord.key();
if (isUnexpectedDomainEvent(consumerRecord)) {
log.trace("handle:: Expected only {} domain type. Skipping authority domain kafka record [ID: '{}']",
DELETE_DOMAIN_EVENT_TYPE, authorityId);
return Future.succeededFuture(authorityId);
}

var eventPayload = new JsonObject(consumerRecord.value());
var tenantId = eventPayload.getString(TENANT_FLD);
var eventSubType = EventSubType.valueOf(eventPayload.getString(DELETE_EVENT_SUB_TYPE_FLD));

logInput(authorityId, eventSubType, tenantId);
return (switch (eventSubType) {
case SOFT_DELETE -> performSoftDelete(authorityId, tenantId);
case HARD_DELETE -> performHardDelete(authorityId, tenantId);
}).onFailure(throwable -> logError(authorityId, eventSubType, tenantId));
}

private Future<String> performSoftDelete(String authorityId, String tenantId) {
var condition = filterRecordByExternalId(authorityId);
return recordService.getRecords(condition, RecordType.MARC_AUTHORITY, Collections.emptyList(), 0, 1, tenantId)
.compose(recordCollection -> {
if (recordCollection.getRecords().isEmpty()) {
log.debug("handle:: No records found [externalId: '{}', tenantId: '{}']", authorityId, tenantId);
return Future.succeededFuture();
}
var matchedId = recordCollection.getRecords().get(0).getMatchedId();

return recordService.updateRecordsState(matchedId, RecordState.DELETED, RecordType.MARC_AUTHORITY, tenantId);
}).map(authorityId);
}

private Future<String> performHardDelete(String authorityId, String tenantId) {
return recordService.deleteRecordsByExternalId(authorityId, tenantId).map(authorityId);
}

private void logError(String authorityId, EventSubType subType, String tenantId) {
log.error("handle:: Failed to {} records [externalId: '{}', tenantId: '{}']", subType.getValueReadable(),
authorityId, tenantId);
}

private void logInput(String authorityId, EventSubType subType, String tenantId) {
log.info("handle:: Trying to {} records [externalId: '{}', tenantId '{}']",
subType.getValueReadable(), authorityId, tenantId);
}

private boolean isUnexpectedDomainEvent(KafkaConsumerRecord<String, String> consumerRecord) {
return !KafkaUtil.headerExists(DOMAIN_EVENT_TYPE_HEADER, DELETE_DOMAIN_EVENT_TYPE, consumerRecord.headers());
}

public enum EventSubType {

SOFT_DELETE("soft-delete"),
HARD_DELETE("hard-delete");

private final String valueReadable;

EventSubType(String valueReadable) {
this.valueReadable = valueReadable;
}

public String getValueReadable() {
return valueReadable;
}
}

}
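For orientation, a sketch of the event shape this handler expects, inferred only from the fields read above: the Kafka record key carries the authority id, the "domain-event-type" header must equal DELETE, and the JSON value carries the tenant and the delete sub-type. The id and tenant values below are placeholders.

```java
import io.vertx.core.json.JsonObject;

public class AuthorityDeleteEventSketch {
  public static void main(String[] args) {
    // JSON value of the Kafka record, matching TENANT_FLD and DELETE_EVENT_SUB_TYPE_FLD above.
    JsonObject payload = new JsonObject()
      .put("tenant", "diku")                     // placeholder tenant id
      .put("deleteEventSubType", "SOFT_DELETE"); // or "HARD_DELETE"

    // Record key = authority id (placeholder), header "domain-event-type" = "DELETE",
    // record value = payload.encode().
    String authorityId = "00000000-0000-0000-0000-000000000000";
    System.out.println(authorityId + " -> " + payload.encode());
  }
}
```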
DataImportKafkaHandler.java
@@ -4,13 +4,13 @@
import io.vertx.core.Promise;
import io.vertx.core.Vertx;
import io.vertx.core.json.Json;
import io.vertx.core.json.jackson.DatabindCodec;
import io.vertx.kafka.client.consumer.KafkaConsumerRecord;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.folio.DataImportEventPayload;
import org.folio.dataimport.util.OkapiConnectionParams;
import org.folio.dbschema.ObjectMapperTool;
import org.folio.kafka.AsyncRecordHandler;
import org.folio.processing.events.EventManager;
import org.folio.processing.exceptions.EventProcessingException;
@@ -29,7 +29,7 @@

@Component
@Qualifier("DataImportKafkaHandler")
public class DataImportKafkaHandler implements AsyncRecordHandler<String, String> {
public class DataImportKafkaHandler implements AsyncRecordHandler<String, byte[]> {

private static final Logger LOGGER = LogManager.getLogger();

@@ -48,14 +48,14 @@ public DataImportKafkaHandler(Vertx vertx, JobProfileSnapshotCache profileSnapsh
}

@Override
public Future<String> handle(KafkaConsumerRecord<String, String> targetRecord) {
public Future<String> handle(KafkaConsumerRecord<String, byte[]> targetRecord) {
LOGGER.trace("handle:: Handling kafka record: {}", targetRecord);
String recordId = extractHeaderValue(RECORD_ID_HEADER, targetRecord.headers());
String chunkId = extractHeaderValue(CHUNK_ID_HEADER, targetRecord.headers());
String userId = extractHeaderValue(USER_ID_HEADER, targetRecord.headers());
try {
Promise<String> promise = Promise.promise();
Event event = ObjectMapperTool.getMapper().readValue(targetRecord.value(), Event.class);
Event event = DatabindCodec.mapper().readValue(targetRecord.value(), Event.class);
DataImportEventPayload eventPayload = Json.decodeValue(event.getEventPayload(), DataImportEventPayload.class);
LOGGER.debug("handle:: Data import event payload has been received with event type: '{}' by jobExecutionId: '{}' and recordId: '{}' and chunkId: '{}' and userId: '{}'",
eventPayload.getEventType(), eventPayload.getJobExecutionId(), recordId, chunkId, userId);