Skip to content

Commit

Permalink
MODSOURMAN-1021: add endpoint to get IncomingRecord by id with DAO an…
Browse files Browse the repository at this point in the history
…d service layers functionality (#816)
  • Loading branch information
yaroslav-epam authored Nov 6, 2023
1 parent 3b67c20 commit 6ce3a60
Show file tree
Hide file tree
Showing 13 changed files with 201 additions and 21 deletions.
1 change: 1 addition & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
## 2023-xx-xx v3.8.0-SNAPSHOT
* [MODSOURMAN-1020](https://issues.folio.org/browse/MODSOURMAN-1020) Add table to save incoming records for DI logs
* [MODSOURMAN-1021](https://issues.folio.org/browse/MODSOURMAN-1021) Provide endpoint for getting parsed content for DI log

## 2023-10-13 v3.7.0
* [MODSOURMAN-1045](https://issues.folio.org/browse/MODSOURMAN-1045) Allow create action with non-matches for instance without match profile
Expand Down
15 changes: 15 additions & 0 deletions descriptors/ModuleDescriptor-template.json
Original file line number Diff line number Diff line change
Expand Up @@ -501,6 +501,15 @@
"permissionsRequired": [
"metadata-provider.jobexecutions.get"
]
},
{
"methods": [
"GET"
],
"pathPattern": "/metadata-provider/incomingRecords/{recordId}",
"permissionsRequired": [
"metadata-provider.incomingrecords.get"
]
}
]
},
Expand Down Expand Up @@ -651,6 +660,11 @@
"displayName": "Metadata Provider - get jobExecution logs",
"description": "Get JobExecutionLogDto"
},
{
"permissionName": "metadata-provider.incomingrecords.get",
"displayName": "Metadata Provider - get incoming record",
"description": "Get IncomingRecord"
},
{
"permissionName": "change-manager.jobexecutions.post",
"displayName": "Change Manager - create jobExecutions",
Expand Down Expand Up @@ -718,6 +732,7 @@
"subPermissions": [
"metadata-provider.jobexecutions.get",
"metadata-provider.logs.get",
"metadata-provider.incomingrecords.get",
"change-manager.jobexecutions.post",
"change-manager.jobexecutions.put",
"change-manager.jobexecutions.get",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,21 @@
import org.folio.rest.jaxrs.model.IncomingRecord;

import java.util.List;
import java.util.Optional;

/**
* DAO interface for the {@link IncomingRecord} entity
*/
public interface IncomingRecordDao {

/**
 * Searches for an {@link IncomingRecord} by id.
 *
 * @param id       incomingRecord id (expected to be a UUID string)
 * @param tenantId tenant id identifying the DB schema to read from
 * @return future with an optional incomingRecord; empty optional when no record with the given id exists
 */
Future<Optional<IncomingRecord>> getById(String id, String tenantId);

/**
* Saves {@link IncomingRecord} entities into DB
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
import org.springframework.stereotype.Repository;

import java.util.List;
import java.util.Optional;
import java.util.UUID;

import static java.lang.String.format;
Expand All @@ -24,14 +25,30 @@ public class IncomingRecordDaoImpl implements IncomingRecordDao {

private static final Logger LOGGER = LogManager.getLogger();
public static final String INCOMING_RECORDS_TABLE = "incoming_records";
private static final String GET_BY_ID_SQL = "SELECT * FROM %s.%s WHERE id = $1";
private static final String INSERT_SQL = "INSERT INTO %s.%s (id, job_execution_id, incoming_record) VALUES ($1, $2, $3)";

@Autowired
private PostgresClientFactory pgClientFactory;

/**
 * Fetches a single {@link IncomingRecord} by its UUID from the tenant's schema.
 *
 * @param id       incoming record id; must be a valid UUID string
 * @param tenantId tenant whose schema is queried
 * @return future with the record wrapped in an Optional, or an empty Optional when no row matches;
 *         the future fails when the id is not a valid UUID or the query cannot be executed
 */
@Override
public Future<Optional<IncomingRecord>> getById(String id, String tenantId) {
  LOGGER.debug("getById:: Get IncomingRecord by id = {} from the {} table", id, INCOMING_RECORDS_TABLE);
  Promise<RowSet<Row>> selectPromise = Promise.promise();
  try {
    // Resolve the tenant-specific schema name and run the lookup on a read connection.
    String sql = format(GET_BY_ID_SQL, convertToPsqlStandard(tenantId), INCOMING_RECORDS_TABLE);
    pgClientFactory.createInstance(tenantId).selectRead(sql, Tuple.of(UUID.fromString(id)), selectPromise);
  } catch (Exception e) {
    // Covers malformed UUID input and client-creation failures; surface them through the future.
    LOGGER.warn("getById:: Error getting IncomingRecord by id", e);
    selectPromise.fail(e);
  }
  return selectPromise.future().map(resultSet -> {
    if (resultSet.rowCount() == 0) {
      return Optional.empty();
    }
    return Optional.of(mapRowToIncomingRecord(resultSet.iterator().next()));
  });
}

@Override
public Future<List<RowSet<Row>>> saveBatch(List<IncomingRecord> incomingRecords, String tenantId) {
LOGGER.info("saveBatch:: Save IncomingRecord entity to the {} table", INCOMING_RECORDS_TABLE);
LOGGER.debug("saveBatch:: Save IncomingRecord entity to the {} table", INCOMING_RECORDS_TABLE);
Promise<List<RowSet<Row>>> promise = Promise.promise();
try {
String query = format(INSERT_SQL, convertToPsqlStandard(tenantId), INCOMING_RECORDS_TABLE);
Expand All @@ -45,6 +62,16 @@ public Future<List<RowSet<Row>>> saveBatch(List<IncomingRecord> incomingRecords,
return promise.future().onFailure(e -> LOGGER.warn("saveBatch:: Error saving JournalRecord entity", e));
}

/**
 * Converts a DB {@link Row} into an {@link IncomingRecord}: the id and
 * job_execution_id come from dedicated UUID columns, the remaining fields
 * are read out of the "incoming_record" JSON column.
 */
private IncomingRecord mapRowToIncomingRecord(Row row) {
  JsonObject content = row.getJsonObject("incoming_record");
  return new IncomingRecord()
    .withId(String.valueOf(row.getUUID("id")))
    .withJobExecutionId(String.valueOf(row.getUUID("job_execution_id")))
    .withRecordType(IncomingRecord.RecordType.fromValue(content.getString("recordType")))
    .withOrder(content.getInteger("order"))
    .withRawRecordContent(content.getString("rawRecordContent"))
    .withParsedRecordContent(content.getString("parsedRecordContent"));
}

private Tuple prepareInsertQueryParameters(IncomingRecord incomingRecord) {
return Tuple.of(UUID.fromString(incomingRecord.getId()), UUID.fromString(incomingRecord.getJobExecutionId()),
JsonObject.mapFrom(incomingRecord));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import org.folio.rest.jaxrs.model.MetadataProviderJobLogEntriesJobExecutionIdGetEntityType;
import org.folio.rest.jaxrs.resource.MetadataProvider;
import org.folio.rest.tools.utils.TenantTool;
import org.folio.services.IncomingRecordService;
import org.folio.services.JobExecutionService;
import org.folio.services.JournalRecordService;
import org.folio.spring.SpringContextUtil;
Expand Down Expand Up @@ -51,6 +52,8 @@ public class MetadataProviderImpl implements MetadataProvider {
private JobExecutionService jobExecutionService;
@Autowired
private JournalRecordService journalRecordService;
@Autowired
private IncomingRecordService incomingRecordService;
private String tenantId;

public MetadataProviderImpl(Vertx vertx, String tenantId) { //NOSONAR
Expand Down Expand Up @@ -197,6 +200,26 @@ public void getMetadataProviderJobExecutionsUsers(String totalRecords, int offse
});
}

/**
 * Handles GET /metadata-provider/incomingRecords/{recordId}.
 * Responds 200 with the IncomingRecord JSON when found, 404 with a plain-text
 * message when absent; failures are converted to an error response via ExceptionHelper.
 */
@Override
public void getMetadataProviderIncomingRecordsByRecordId(String recordId, Map<String, String> okapiHeaders, Handler<AsyncResult<Response>> asyncResultHandler, Context vertxContext) {
vertxContext.runOnContext(v -> {
try {
LOGGER.debug("getMetadataProviderIncomingRecordsByRecordId:: tenantId {}", tenantId);
incomingRecordService.getById(recordId, tenantId)
// Present record -> 200 with JSON body; absent -> 404 with explanatory text.
.map(incomingRecordOptional -> incomingRecordOptional
.map(GetMetadataProviderIncomingRecordsByRecordIdResponse::respond200WithApplicationJson)
.orElseGet(() -> GetMetadataProviderIncomingRecordsByRecordIdResponse
.respond404WithTextPlain(format("IncomingRecord by id: '%s' was not found", recordId))))
.map(Response.class::cast)
// A failed future (e.g. DAO error) is mapped to an error response instead of propagating.
.otherwise(ExceptionHelper::mapExceptionToResponse)
.onComplete(asyncResultHandler);
} catch (Exception e) {
// Synchronous failures still produce an HTTP response rather than hanging the request.
LOGGER.warn("getMetadataProviderIncomingRecordsByRecordId:: Failed to retrieve IncomingRecord by id", e);
asyncResultHandler.handle(Future.succeededFuture(ExceptionHelper.mapExceptionToResponse(e)));
}
});
}

private JobExecutionFilter buildJobExecutionFilter(List<String> statusAny, List<String> profileIdNotAny, String statusNot,
List<String> uiStatusAny, String hrIdPattern, String fileNamePattern,
List<String> fileNameNotAny, List<String> profileIdAny, List<String> subordinationTypeNotAny,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,24 @@
package org.folio.services;

import io.vertx.core.Future;
import org.folio.rest.jaxrs.model.IncomingRecord;

import java.util.List;
import java.util.Optional;

/**
* {@link IncomingRecord} Service interface
*/
public interface IncomingRecordService {

/**
 * Searches for an {@link IncomingRecord} by id.
 *
 * @param id       incomingRecord id (expected to be a UUID string)
 * @param tenantId tenant id identifying the DB schema to read from
 * @return future with an optional incomingRecord; empty optional when no record is found
 */
Future<Optional<IncomingRecord>> getById(String id, String tenantId);

/**
* Saves {@link IncomingRecord}s into DB
*
Expand Down
Original file line number Diff line number Diff line change
@@ -1,18 +1,25 @@
package org.folio.services;

import io.vertx.core.Future;
import org.folio.dao.IncomingRecordDao;
import org.folio.rest.jaxrs.model.IncomingRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.Optional;

@Service
public class IncomingRecordServiceImpl implements IncomingRecordService {

@Autowired
private IncomingRecordDao incomingRecordDao;

@Override
// Thin delegation: lookup semantics (including the empty-Optional "not found" case)
// are defined by IncomingRecordDao#getById.
public Future<Optional<IncomingRecord>> getById(String id, String tenantId) {
return incomingRecordDao.getById(id, tenantId);
}

@Override
public void saveBatch(List<IncomingRecord> incomingRecords, String tenantId) {
incomingRecordDao.saveBatch(incomingRecords, tenantId);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
import org.folio.dao.util.PostgresClientFactory;
import org.folio.rest.impl.AbstractRestTest;
import org.folio.rest.jaxrs.model.IncomingRecord;
import org.folio.rest.jaxrs.model.InitJobExecutionsRsDto;
import org.folio.rest.jaxrs.model.JobExecution;
import org.junit.Before;
import org.junit.Test;
Expand Down Expand Up @@ -37,21 +36,41 @@ public void setUp(TestContext context) throws IOException {
}

@Test
public void saveBatch(TestContext context) {
public void shouldGetById(TestContext context) {
Async async = context.async();

InitJobExecutionsRsDto response = constructAndPostInitJobExecutionRqDto(1);
List<JobExecution> createdJobExecutions = response.getJobExecutions();
final String jobExecutionId = createdJobExecutions.get(0).getId();
List<JobExecution> createdJobExecutions = constructAndPostInitJobExecutionRqDto(1).getJobExecutions();
String jobExecutionId = createdJobExecutions.get(0).getId();

String id = UUID.randomUUID().toString();
IncomingRecord incomingRecord = buildIncomingRecord(id, jobExecutionId);

incomingRecordDao.saveBatch(List.of(incomingRecord), TENANT_ID)
.compose(r ->
incomingRecordDao.getById(id, TENANT_ID)
.onComplete(ar -> {
context.assertTrue(ar.succeeded());
context.assertTrue(ar.result().isPresent());
IncomingRecord result = ar.result().get();
context.assertEquals(id, result.getId());
context.assertEquals(jobExecutionId, result.getJobExecutionId());
context.assertEquals("rawRecord", result.getRawRecordContent());
context.assertEquals("parsedRecord", result.getParsedRecordContent());
async.complete();
}));
}

@Test
public void shouldSaveBatch(TestContext context) {
Async async = context.async();

List<JobExecution> createdJobExecutions = constructAndPostInitJobExecutionRqDto(1).getJobExecutions();
String jobExecutionId = createdJobExecutions.get(0).getId();

String id1 = UUID.randomUUID().toString();
String id2 = UUID.randomUUID().toString();
IncomingRecord incomingRecord1 = new IncomingRecord()
.withId(id1).withJobExecutionId(jobExecutionId).withRecordType(IncomingRecord.RecordType.MARC_BIB).withOrder(0)
.withRawRecordContent("rawRecord").withParsedRecordContent("parsedRecord");
IncomingRecord incomingRecord2 = new IncomingRecord()
.withId(id2).withJobExecutionId(jobExecutionId).withRecordType(IncomingRecord.RecordType.MARC_BIB).withOrder(0)
.withRawRecordContent("rawRecord").withParsedRecordContent("parsedRecord");
IncomingRecord incomingRecord1 = buildIncomingRecord(id1, jobExecutionId);
IncomingRecord incomingRecord2 = buildIncomingRecord(id2, jobExecutionId);

incomingRecordDao.saveBatch(List.of(incomingRecord1, incomingRecord2), TENANT_ID)
.onComplete(ar -> {
Expand All @@ -60,4 +79,10 @@ public void saveBatch(TestContext context) {
async.complete();
});
}

/**
 * Builds a MARC_BIB {@link IncomingRecord} test fixture with order 0 and
 * fixed "rawRecord"/"parsedRecord" content.
 */
private static IncomingRecord buildIncomingRecord(String id, String jobExecutionId) {
  return new IncomingRecord()
    .withId(id)
    .withJobExecutionId(jobExecutionId)
    .withRecordType(IncomingRecord.RecordType.MARC_BIB)
    .withOrder(0)
    .withRawRecordContent("rawRecord")
    .withParsedRecordContent("parsedRecord");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@
import static com.github.tomakehurst.wiremock.client.WireMock.post;
import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith;
import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig;
import static org.folio.dao.IncomingRecordDaoImpl.INCOMING_RECORDS_TABLE;
import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER;
import static org.folio.dataimport.util.RestUtil.OKAPI_URL_HEADER;
import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace;
Expand Down Expand Up @@ -504,13 +505,14 @@ private void clearTable(TestContext context) {
PostgresClient pgClient = PostgresClient.getInstance(vertx, TENANT_ID);
pgClient.delete(CHUNKS_TABLE_NAME, new Criterion(), event1 ->
pgClient.delete(JOURNAL_RECORDS_TABLE, new Criterion(), event2 ->
pgClient.delete(JOB_EXECUTION_PROGRESS_TABLE, new Criterion(), event3 ->
pgClient.delete(JOB_EXECUTIONS_TABLE_NAME, new Criterion(), event4 -> {
if (event3.failed()) {
context.fail(event3.cause());
}
async.complete();
}))));
pgClient.delete(INCOMING_RECORDS_TABLE, new Criterion(), event3 ->
pgClient.delete(JOB_EXECUTION_PROGRESS_TABLE, new Criterion(), event4 ->
pgClient.delete(JOB_EXECUTIONS_TABLE_NAME, new Criterion(), event5 -> {
if (event4.failed()) {
context.fail(event4.cause());
}
async.complete();
})))));
}

protected InitJobExecutionsRsDto constructAndPostInitJobExecutionRqDto(int filesNumber) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,15 @@
import io.vertx.ext.unit.TestContext;
import io.vertx.ext.unit.junit.VertxUnitRunner;
import org.apache.http.HttpStatus;
import org.folio.dao.IncomingRecordDaoImpl;
import org.folio.dao.JournalRecordDaoImpl;
import org.folio.dao.util.PostgresClientFactory;
import org.folio.rest.impl.AbstractRestTest;
import org.folio.rest.impl.changeManager.ChangeManagerAPITest;
import org.folio.rest.jaxrs.model.DeleteJobExecutionsReq;
import org.folio.rest.jaxrs.model.DeleteJobExecutionsResp;
import org.folio.rest.jaxrs.model.EntityType;
import org.folio.rest.jaxrs.model.IncomingRecord;
import org.folio.rest.jaxrs.model.InitJobExecutionsRsDto;
import org.folio.rest.jaxrs.model.JobExecution;
import org.folio.rest.jaxrs.model.JobExecutionDto;
Expand Down Expand Up @@ -95,6 +97,7 @@ public class MetadataProviderJobExecutionAPITest extends AbstractRestTest {
private static final String GET_JOB_EXECUTION_SUMMARY_PATH = "/metadata-provider/jobSummary";
private static final String GET_JOB_EXECUTION_JOB_PROFILES_PATH = "/metadata-provider/jobExecutions/jobProfiles";
private static final String GET_UNIQUE_USERS_INFO = "/metadata-provider/jobExecutions/users";
private static final String GET_INCOMING_RECORDS_BY_ID = "/metadata-provider/incomingRecords/";

private final JsonObject userResponse = new JsonObject()
.put("users",
Expand All @@ -108,6 +111,9 @@ public class MetadataProviderJobExecutionAPITest extends AbstractRestTest {
@Spy
@InjectMocks
private JournalRecordDaoImpl journalRecordDao;
@Spy
@InjectMocks
private IncomingRecordDaoImpl incomingRecordDao;
private AutoCloseable mocks;

@Before
Expand Down Expand Up @@ -1695,4 +1701,39 @@ public void shouldNotReturnUsersForParentJobExecutions() {
.body("totalRecords", is(0));
}

/**
 * GET for a random, never-saved record id must respond 404.
 */
@Test
public void shouldReturnNotFoundIncomingRecordById() {
RestAssured.given()
.spec(spec)
.when()
.get(GET_INCOMING_RECORDS_BY_ID + UUID.randomUUID())
.then()
.statusCode(HttpStatus.SC_NOT_FOUND);
}

/**
 * Verifies that GET /metadata-provider/incomingRecords/{id} returns the stored
 * IncomingRecord with the matching id and jobExecutionId.
 */
@Test
public void shouldReturnIncomingRecordById(TestContext context) {
  Async async = context.async();
  List<JobExecution> createdJobExecutions = constructAndPostInitJobExecutionRqDto(1).getJobExecutions();
  JobExecution jobExecution = createdJobExecutions.get(0);
  String jobExecutionId = jobExecution.getId();
  String id = UUID.randomUUID().toString();

  IncomingRecord incomingRecord = new IncomingRecord()
    .withId(id).withJobExecutionId(jobExecutionId).withRecordType(IncomingRecord.RecordType.MARC_BIB).withOrder(0)
    .withRawRecordContent("rawRecord").withParsedRecordContent("parsedRecord");

  incomingRecordDao.saveBatch(List.of(incomingRecord), TENANT_ID)
    .onComplete(saveAr -> {
      // Fail fast if the fixture insert itself failed; otherwise the GET below
      // would report a confusing 404 instead of the real cause.
      context.assertTrue(saveAr.succeeded());
      RestAssured.given()
        .spec(spec)
        .when()
        .get(GET_INCOMING_RECORDS_BY_ID + id)
        .then()
        .statusCode(HttpStatus.SC_OK)
        .body("id", is(id))
        .body("jobExecutionId", is(jobExecutionId));
      async.complete();
    });
}
}
Loading

0 comments on commit 6ce3a60

Please sign in to comment.