diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java index 41ec12464ddc..8982f23ade35 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java @@ -33,6 +33,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; public abstract class BaseRuntimeElementDefinition { @@ -40,7 +41,7 @@ public abstract class BaseRuntimeElementDefinition { private final Class myImplementingClass; private final String myName; private final boolean myStandardType; - private Map, Constructor> myConstructors = Collections.synchronizedMap(new HashMap<>()); + private final Map, Constructor> myConstructors = new ConcurrentHashMap<>(); private List myExtensions = new ArrayList<>(); private List myExtensionsModifier = new ArrayList<>(); private List myExtensionsNonModifier = new ArrayList<>(); @@ -84,27 +85,24 @@ private Constructor getConstructor(@Nullable Object theArgument) { argumentType = theArgument.getClass(); } - Constructor retVal = myConstructors.get(argumentType); - if (retVal == null) { + Constructor retVal = myConstructors.computeIfAbsent(argumentType, type -> { for (Constructor next : getImplementingClass().getConstructors()) { - if (argumentType == VOID_CLASS) { + if (type == VOID_CLASS) { if (next.getParameterTypes().length == 0) { - retVal = (Constructor) next; - break; - } - } else if (next.getParameterTypes().length == 1) { - if (next.getParameterTypes()[0].isAssignableFrom(argumentType)) { - retVal = (Constructor) next; - break; + return (Constructor) next; } + } else if (next.getParameterTypes().length == 1 && next.getParameterTypes()[0].isAssignableFrom(type)) { + return (Constructor) next; } } - if (retVal == null) { - throw new 
ConfigurationException(Msg.code(1695) + "Class " + getImplementingClass() - + " has no constructor with a single argument of type " + argumentType); - } - myConstructors.put(argumentType, retVal); + return null; + }); + + if (retVal == null) { + throw new ConfigurationException(Msg.code(1695) + "Class " + getImplementingClass() + + " has no constructor with a single argument of type " + argumentType); } + return retVal; } diff --git a/hapi-fhir-checkstyle/src/checkstyle/hapi-base-checkstyle.xml b/hapi-fhir-checkstyle/src/checkstyle/hapi-base-checkstyle.xml index c74e8b7e01c4..11c9b669ad40 100644 --- a/hapi-fhir-checkstyle/src/checkstyle/hapi-base-checkstyle.xml +++ b/hapi-fhir-checkstyle/src/checkstyle/hapi-base-checkstyle.xml @@ -64,4 +64,12 @@ + + + + + + + + diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6285-batch2-reindex-version2-added.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6285-batch2-reindex-version2-added.yaml new file mode 100644 index 000000000000..2e48e6e7bd91 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6285-batch2-reindex-version2-added.yaml @@ -0,0 +1,17 @@ +--- +type: fix +issue: 6285 +title: "Updated the Reindex Batch2 job to allow + for an additional step that will check to ensure + that no pending 'reindex' work is needed. + This was done to prevent a bug in which + value set expansion would not return all + the existing CodeSystem Concepts after + a reindex call, due to some of the concepts + being deferred to future job runs. + + As such, `$reindex` operations on CodeSystems + will no longer result in incorrect value set + expansion when such an expansion is called + 'too soon' after a $reindex operation. 
+" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6323-concurrent-constructor-access.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6323-concurrent-constructor-access.yaml new file mode 100644 index 000000000000..51b32ae0b6b9 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6323-concurrent-constructor-access.yaml @@ -0,0 +1,5 @@ +--- +type: perf +issue: 6323 +title: "A synchronization choke point was removed from the model object initialization code, reducing the risk of +multi-thread contention." diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java index fbd583d0090e..ebe9c4aa5746 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java @@ -21,7 +21,6 @@ import ca.uhn.fhir.batch2.api.IJobCoordinator; import ca.uhn.fhir.batch2.api.IJobPartitionProvider; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx; import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; import ca.uhn.fhir.batch2.model.JobInstanceStartRequest; import ca.uhn.fhir.context.FhirVersionEnum; @@ -158,6 +157,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; import static java.util.Objects.isNull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -1315,7 +1315,7 @@ protected void requestReindexForRelatedResources( myJobPartitionProvider.getPartitionedUrls(theRequestDetails, urls).forEach(params::addPartitionedUrl); JobInstanceStartRequest request = new JobInstanceStartRequest(); - request.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + 
request.setJobDefinitionId(JOB_REINDEX); request.setParameters(params); myJobCoordinator.startInstance(theRequestDetails, request); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java index 5e8f6b053bb8..270238da3fb5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java @@ -27,7 +27,11 @@ import ca.uhn.fhir.context.support.LookupCodeRequest; import ca.uhn.fhir.context.support.ValidationSupportContext; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; +import ca.uhn.fhir.jpa.api.dao.ReindexOutcome; +import ca.uhn.fhir.jpa.api.dao.ReindexParameters; +import ca.uhn.fhir.jpa.api.model.ReindexJobStatus; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.jpa.model.entity.ResourceTable; @@ -176,6 +180,47 @@ protected void preDelete(T theResourceToDelete, ResourceTable theEntityToDelete, myTermDeferredStorageSvc.deleteCodeSystemForResource(theEntityToDelete); } + /** + * If there are more code systems to process + * than {@link JpaStorageSettings#getDeferIndexingForCodesystemsOfSize()}, + * then these codes will have their processing deferred (for a later time). + * + * This can result in future reindex steps *skipping* these code systems (if + * they're still deferred) and thus incorrect expansions resulting. + * + * So we override the reindex method for CodeSystems specifically to + * force reindex batch jobs to wait until all code systems are processed before + * moving on. 
+ */ + @SuppressWarnings("rawtypes") + @Override + public ReindexOutcome reindex( + IResourcePersistentId thePid, + ReindexParameters theReindexParameters, + RequestDetails theRequest, + TransactionDetails theTransactionDetails) { + ReindexOutcome outcome = super.reindex(thePid, theReindexParameters, theRequest, theTransactionDetails); + + if (outcome.getWarnings().isEmpty()) { + outcome.setHasPendingWork(true); + } + return outcome; + } + + @Override + public ReindexJobStatus getReindexJobStatus() { + boolean isQueueEmpty = myTermDeferredStorageSvc.isStorageQueueEmpty(true); + + ReindexJobStatus status = new ReindexJobStatus(); + status.setHasReindexWorkPending(!isQueueEmpty); + if (status.isHasReindexWorkPending()) { + // force a run + myTermDeferredStorageSvc.saveDeferred(); + } + + return status; + } + @Override public ResourceTable updateEntity( RequestDetails theRequest, diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java index af01a692a324..433f3f5d01c9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java @@ -593,7 +593,7 @@ private void addConceptInHierarchy( if (theStatisticsTracker.getUpdatedConceptCount() <= myStorageSettings.getDeferIndexingForCodesystemsOfSize()) { saveConcept(conceptToAdd); Long nextConceptPid = conceptToAdd.getId(); - Validate.notNull(nextConceptPid); + Objects.requireNonNull(nextConceptPid); } else { myDeferredStorageSvc.addConceptToStorageQueue(conceptToAdd); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java index f5e1b95d49c2..47aafd066dda 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java @@ -22,7 +22,6 @@ import ca.uhn.fhir.batch2.api.IJobCoordinator; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobInstanceStartRequest; -import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao; @@ -79,6 +78,8 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas private static final long SAVE_ALL_DEFERRED_WARN_MINUTES = 1; private static final long SAVE_ALL_DEFERRED_ERROR_MINUTES = 5; private boolean myAllowDeferredTasksTimeout = true; + private static final List BATCH_JOBS_TO_CARE_ABOUT = + List.of(TERM_CODE_SYSTEM_DELETE_JOB_NAME, TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME); private final List myDeferredCodeSystemsDeletions = Collections.synchronizedList(new ArrayList<>()); private final Queue myDeferredCodeSystemVersionsDeletions = new ConcurrentLinkedQueue<>(); private final List myDeferredConcepts = Collections.synchronizedList(new ArrayList<>()); @@ -436,7 +437,7 @@ public boolean isStorageQueueEmpty(boolean theIncludeExecutingJobs) { return retVal; } - private boolean isJobsExecuting() { + public boolean isJobsExecuting() { cleanseEndedJobs(); return !myJobExecutions.isEmpty(); @@ -448,15 +449,18 @@ private void cleanseEndedJobs() { * This is mostly a fail-safe * because "cancelled" jobs are never removed. 
*/ - List executions = new ArrayList<>(myJobExecutions); List idsToDelete = new ArrayList<>(); - for (String id : executions) { - // TODO - might want to consider a "fetch all instances" - JobInstance instance = myJobCoordinator.getInstance(id); - if (StatusEnum.getEndedStatuses().contains(instance.getStatus())) { + for (String jobId : BATCH_JOBS_TO_CARE_ABOUT) { + List jobInstanceInEndedState = myJobCoordinator.getInstancesbyJobDefinitionIdAndEndedStatus( + jobId, + true, // ended = true (COMPLETED, FAILED, CANCELLED jobs only) + Math.max(myJobExecutions.size(), 1), // at most this many + 0); + for (JobInstance instance : jobInstanceInEndedState) { idsToDelete.add(instance.getInstanceId()); } } + for (String id : idsToDelete) { myJobExecutions.remove(id); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java index 62fb8d8957a6..6b8c3ad0dab3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java @@ -78,6 +78,8 @@ default boolean isStorageQueueEmpty() { void logQueueForUnitTest(); + boolean isJobsExecuting(); + /** * Only to be used from tests - Disallow test timeouts on deferred tasks */ diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderCustomSearchParamDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderCustomSearchParamDstu3Test.java index 9dcf6439dbb9..6e1511543c84 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderCustomSearchParamDstu3Test.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderCustomSearchParamDstu3Test.java @@ -1,14 +1,11 @@ package 
ca.uhn.fhir.jpa.provider.dstu3; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx; import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; -import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.IBundleProvider; @@ -46,9 +43,11 @@ import java.util.List; import java.util.Map; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.fail; @@ -202,7 +201,7 @@ public void testCreatingParamMarksCorrectResourcesForReindexing() { mySearchParameterDao.create(fooSp, mySrd); runInTransaction(()->{ - List allJobs = myBatch2JobHelper.findJobsByDefinition(ReindexAppCtx.JOB_REINDEX); + List allJobs = myBatch2JobHelper.findJobsByDefinition(JOB_REINDEX); assertEquals(1, allJobs.size()); assertEquals(1, allJobs.get(0).getParameters(ReindexJobParameters.class).getPartitionedUrls().size()); assertEquals("Patient?", allJobs.get(0).getParameters(ReindexJobParameters.class).getPartitionedUrls().get(0).getUrl()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboUniqueParamTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboUniqueParamTest.java index 
05f4c5a5e24a..b46d8b964d63 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboUniqueParamTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboUniqueParamTest.java @@ -55,6 +55,7 @@ import java.util.UUID; import java.util.stream.Collectors; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.INDEX_STATUS_INDEXED; import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED; import static org.assertj.core.api.Assertions.assertThat; @@ -1075,7 +1076,7 @@ private void executeReindex(String... theUrls) { parameters.addUrl(url); } JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(parameters); Batch2JobStartResponse res = myJobCoordinator.startInstance(mySrd, startRequest); ourLog.info("Started reindex job with id {}", res.getInstanceId()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4IndexStorageOptimizedTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4IndexStorageOptimizedTest.java index d94b00893373..98eeb191a218 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4IndexStorageOptimizedTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4IndexStorageOptimizedTest.java @@ -23,7 +23,7 @@ import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.model.util.SearchParamHash; import ca.uhn.fhir.jpa.model.util.UcumServiceUtil; -import ca.uhn.fhir.jpa.reindex.ReindexStepTest; +import ca.uhn.fhir.jpa.reindex.ReindexStepV1Test; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import 
ca.uhn.fhir.rest.param.BaseParam; @@ -57,6 +57,7 @@ import java.util.List; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -312,7 +313,7 @@ private void executeReindex(String... theUrls) { parameters.addUrl(url); } JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(parameters); Batch2JobStartResponse res = myJobCoordinator.startInstance(mySrd, startRequest); ourLog.info("Started reindex job with id {}", res.getInstanceId()); @@ -321,7 +322,7 @@ private void executeReindex(String... theUrls) { // Additional existing tests with enabled IndexStorageOptimized @Nested - public class IndexStorageOptimizedReindexStepTest extends ReindexStepTest { + public class IndexStorageOptimizedReindexStepTestV1 extends ReindexStepV1Test { @BeforeEach void setUp() { myStorageSettings.setIndexStorageOptimized(true); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java index 52735147cd9c..9f5ed69f7a96 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java @@ -2,12 +2,15 @@ import ca.uhn.fhir.batch2.api.IJobDataSink; import ca.uhn.fhir.batch2.api.RunOutcome; +import ca.uhn.fhir.batch2.api.StepExecutionDetails; import ca.uhn.fhir.batch2.api.VoidModel; import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; import ca.uhn.fhir.batch2.jobs.chunk.TypedPidJson; import 
ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeStep; +import ca.uhn.fhir.batch2.jobs.reindex.v1.ReindexStepV1; import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexStep; +import ca.uhn.fhir.batch2.model.JobInstance; +import ca.uhn.fhir.batch2.model.WorkChunk; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.ValidationSupportContext; import ca.uhn.fhir.context.support.ValueSetExpansionOptions; @@ -18,13 +21,13 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum; import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao; -import ca.uhn.fhir.jpa.reindex.ReindexTestHelper; import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum; import ca.uhn.fhir.jpa.interceptor.ForceOffsetSearchModeInterceptor; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test; +import ca.uhn.fhir.jpa.reindex.ReindexTestHelper; import ca.uhn.fhir.jpa.search.PersistedJpaSearchFirstPageBundleProvider; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.subscription.triggering.ISubscriptionTriggeringSvc; @@ -146,7 +149,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test @Autowired private ISubscriptionTriggeringSvc mySubscriptionTriggeringSvc; @Autowired - private ReindexStep myReindexStep; + private ReindexStepV1 myReindexStepV1; @Autowired private DeleteExpungeStep myDeleteExpungeStep; @Autowired @@ -1018,7 +1021,6 @@ public void testReferenceToForcedId_DeletesDisabled() { // insert to: HFJ_RESOURCE, HFJ_RES_VER, HFJ_RES_LINK assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); - } @ParameterizedTest @@ -1031,7 +1033,6 @@ public void 
testReferenceToForcedId_DeletesDisabled() { }) public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) { // Setup - ResourceIdListWorkChunkJson data = new ResourceIdListWorkChunkJson(); IIdType patientId = createPatient(withActiveTrue()); IIdType orgId = createOrganization(withName("MY ORG")); @@ -1056,7 +1057,14 @@ public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexPar // execute myCaptureQueriesListener.clear(); - RunOutcome outcome = myReindexStep.doReindex(data, mock(IJobDataSink.class), "123", "456", params); + JobInstance instance = new JobInstance(); + StepExecutionDetails stepExecutionDetails = new StepExecutionDetails<>( + params, + data, + instance, + mock(WorkChunk.class) + ); + RunOutcome outcome = myReindexStepV1.run(stepExecutionDetails, mock(IJobDataSink.class)); // validate assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread()).hasSize(theExpectedSelectCount); @@ -1064,7 +1072,6 @@ public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexPar assertThat(myCaptureQueriesListener.getInsertQueriesForCurrentThread()).isEmpty(); assertThat(myCaptureQueriesListener.getDeleteQueriesForCurrentThread()).isEmpty(); assertEquals(10, outcome.getRecordsProcessed()); - } @Test @@ -1095,7 +1102,14 @@ public void testReindexJob_ComboParamIndexesInUse() { // execute myCaptureQueriesListener.clear(); - RunOutcome outcome = myReindexStep.doReindex(data, mock(IJobDataSink.class), "123", "456", params); + JobInstance instance = new JobInstance(); + StepExecutionDetails stepExecutionDetails = new StepExecutionDetails<>( + params, + data, + instance, + mock(WorkChunk.class) + ); + RunOutcome outcome = myReindexStepV1.run(stepExecutionDetails, mock(IJobDataSink.class)); assertEquals(20, outcome.getRecordsProcessed()); // validate @@ -1103,10 +1117,8 @@ public void 
testReindexJob_ComboParamIndexesInUse() { assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size()); assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size()); assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size()); - } - public void assertNoPartitionSelectors() { List selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread(); for (SqlQuery next : selectQueries) { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java index bff403a5ea40..a9f3cc575194 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java @@ -1,6 +1,5 @@ package ca.uhn.fhir.jpa.dao.r4; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.context.phonetic.PhoneticEncoderEnum; @@ -22,7 +21,6 @@ import ca.uhn.fhir.rest.param.ReferenceOrListParam; import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.StringParam; -import ca.uhn.fhir.rest.param.TokenOrListParam; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; @@ -70,6 +68,7 @@ import java.util.List; import java.util.stream.Collectors; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; import static org.apache.commons.lang3.StringUtils.countMatches; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -441,11 +440,11 @@ public 
void testCreateSearchParameterOnSearchParameterDoesntCauseEndlessReindexL fooSp.setXpathUsage(org.hl7.fhir.r4.model.SearchParameter.XPathUsageType.NORMAL); fooSp.setStatus(org.hl7.fhir.r4.model.Enumerations.PublicationStatus.ACTIVE); - List initialJobs = myBatch2JobHelper.findJobsByDefinition(ReindexAppCtx.JOB_REINDEX); + List initialJobs = myBatch2JobHelper.findJobsByDefinition(JOB_REINDEX); mySearchParameterDao.create(fooSp, mySrd); - List finalJobs = myBatch2JobHelper.findJobsByDefinition(ReindexAppCtx.JOB_REINDEX); + List finalJobs = myBatch2JobHelper.findJobsByDefinition(JOB_REINDEX); List newJobs = finalJobs.stream().filter(t -> !initialJobs.contains(t)).collect(Collectors.toList()); assertThat(newJobs.size()).as("number of jobs created").isEqualTo(1); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/tx/ReindexStepTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/tx/ReindexStepV1Test.java similarity index 92% rename from hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/tx/ReindexStepTest.java rename to hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/tx/ReindexStepV1Test.java index 69ed1e1507cf..539c4590a4c1 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/tx/ReindexStepTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/tx/ReindexStepV1Test.java @@ -6,7 +6,7 @@ import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrl; import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexStep; +import ca.uhn.fhir.batch2.jobs.reindex.v1.ReindexStepV1; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -26,7 +26,7 @@ import static org.mockito.Mockito.when; @ExtendWith(MockitoExtension.class) -public class ReindexStepTest { +public class 
ReindexStepV1Test { @Mock private HapiTransactionService myHapiTransactionService; @@ -34,7 +34,7 @@ public class ReindexStepTest { private IJobDataSink myDataSink; @InjectMocks - private ReindexStep myReindexStep; + private ReindexStepV1 myReindexStepV1; @Captor private ArgumentCaptor builderArgumentCaptor; @@ -51,7 +51,7 @@ public void testMethodReindex_withRequestPartitionId_willExecuteWithPartitionId( when(myHapiTransactionService.buildExecutionBuilder(any())).thenCallRealMethod(); // when - myReindexStep.doReindex(data, myDataSink, "index-id", "chunk-id", reindexJobParameters); + myReindexStepV1.doReindex(data, myDataSink, "index-id", "chunk-id", reindexJobParameters); // then assertMethodArgumentRequestPartitionId(expectedPartitionId); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java index 987f34308737..0bee635ccc0e 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java @@ -1,8 +1,5 @@ package ca.uhn.fhir.jpa.provider.r4; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx; import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.i18n.Msg; @@ -60,9 +57,11 @@ import java.util.Map; import java.util.stream.Collectors; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.fail; @@ -244,7 +243,7 @@ public void testCreatingParamMarksCorrectResourcesForReindexing() { runInTransaction(() -> { myBatch2JobHelper.forceRunMaintenancePass(); - List allJobs = myBatch2JobHelper.findJobsByDefinition(ReindexAppCtx.JOB_REINDEX); + List allJobs = myBatch2JobHelper.findJobsByDefinition(JOB_REINDEX); assertEquals(1, allJobs.size()); assertEquals(1, allJobs.get(0).getParameters(ReindexJobParameters.class).getPartitionedUrls().size()); assertEquals("Patient?", allJobs.get(0).getParameters(ReindexJobParameters.class).getPartitionedUrls().get(0).getUrl()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepV1Test.java similarity index 80% rename from hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java rename to hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepV1Test.java index d7ed24e67898..8075789db8d0 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepV1Test.java @@ -2,10 +2,13 @@ import ca.uhn.fhir.batch2.api.IJobDataSink; import ca.uhn.fhir.batch2.api.RunOutcome; +import ca.uhn.fhir.batch2.api.StepExecutionDetails; import ca.uhn.fhir.batch2.api.VoidModel; import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexStep; +import ca.uhn.fhir.batch2.jobs.reindex.v1.ReindexStepV1; +import ca.uhn.fhir.batch2.model.JobInstance; +import ca.uhn.fhir.batch2.model.WorkChunk; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.model.entity.ResourceTable; 
import ca.uhn.fhir.jpa.test.BaseJpaR4Test; @@ -25,13 +28,14 @@ import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -public class ReindexStepTest extends BaseJpaR4Test { +public class ReindexStepV1Test extends BaseJpaR4Test { @Autowired - private ReindexStep myReindexStep; + private ReindexStepV1 myReindexStepV1; @Mock private IJobDataSink myDataSink; @@ -46,9 +50,7 @@ public void after() { @Test public void testReindex_NoActionNeeded() { - // Setup - Long id0 = createPatient(withActiveTrue(), withFamily("SIMPSON")).getIdPartAsLong(); Long id1 = createPatient(withActiveTrue(), withFamily("FLANDERS")).getIdPartAsLong(); @@ -57,9 +59,19 @@ public void testReindex_NoActionNeeded() { data.addTypedPid("Patient", id1); // Execute - + ReindexJobParameters params = new ReindexJobParameters(); myCaptureQueriesListener.clear(); - RunOutcome outcome = myReindexStep.doReindex(data, myDataSink, "index-id", "chunk-id", new ReindexJobParameters()); + JobInstance instance = new JobInstance(); + instance.setInstanceId("index-id"); + WorkChunk chunk = new WorkChunk(); + chunk.setId("chunk-id"); + StepExecutionDetails stepExecutionDetails = new StepExecutionDetails<>( + params, + data, + instance, + chunk + ); + RunOutcome outcome = myReindexStepV1.run(stepExecutionDetails, myDataSink); // Verify assertEquals(2, outcome.getRecordsProcessed()); @@ -72,12 +84,9 @@ public void testReindex_NoActionNeeded() { assertEquals(0, myCaptureQueriesListener.getRollbackCount()); } - @Test public void testReindex_NoActionNeeded_IndexMissingFieldsEnabled() { - // Setup - myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.ENABLED); Long id0 = createPatient(withActiveTrue(), 
withFamily("SIMPSON")).getIdPartAsLong(); @@ -88,9 +97,16 @@ public void testReindex_NoActionNeeded_IndexMissingFieldsEnabled() { data.addTypedPid("Patient", id1); // Execute - + ReindexJobParameters params = new ReindexJobParameters(); myCaptureQueriesListener.clear(); - RunOutcome outcome = myReindexStep.doReindex(data, myDataSink, "index-id", "chunk-id", new ReindexJobParameters()); + JobInstance instance = new JobInstance(); + StepExecutionDetails stepExecutionDetails = new StepExecutionDetails<>( + params, + data, + instance, + mock(WorkChunk.class) + ); + RunOutcome outcome = myReindexStepV1.run(stepExecutionDetails, myDataSink); // Verify assertEquals(2, outcome.getRecordsProcessed()); @@ -121,9 +137,16 @@ public void testReindex_IndexesWereMissing() { }); // Execute - + ReindexJobParameters params = new ReindexJobParameters(); myCaptureQueriesListener.clear(); - RunOutcome outcome = myReindexStep.doReindex(data, myDataSink, "index-id", "chunk-id", new ReindexJobParameters()); + JobInstance instance = new JobInstance(); + StepExecutionDetails stepExecutionDetails = new StepExecutionDetails<>( + params, + data, + instance, + mock(WorkChunk.class) + ); + RunOutcome outcome = myReindexStepV1.run(stepExecutionDetails, myDataSink); // Verify assertEquals(2, outcome.getRecordsProcessed()); @@ -136,12 +159,9 @@ public void testReindex_IndexesWereMissing() { assertEquals(0, myCaptureQueriesListener.getRollbackCount()); } - @Test public void testReindex_IndexesAddedAndRemoved_IndexMissingFieldsEnabled() { - // Setup - myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.ENABLED); boolean markResourcesForReindexingUponSearchParameterChange = myStorageSettings.isMarkResourcesForReindexingUponSearchParameterChange(); myStorageSettings.setMarkResourcesForReindexingUponSearchParameterChange(false); // if this were true, it would set up a lot of reindex jobs extraneous to the one we're trying to test @@ -189,9 +209,16 @@ public void 
testReindex_IndexesAddedAndRemoved_IndexMissingFieldsEnabled() { mySearchParamRegistry.forceRefresh(); // Execute - + ReindexJobParameters params = new ReindexJobParameters(); myCaptureQueriesListener.clear(); - RunOutcome outcome = myReindexStep.doReindex(data, myDataSink, "index-id", "chunk-id", new ReindexJobParameters()); + JobInstance instance = new JobInstance(); + StepExecutionDetails stepExecutionDetails = new StepExecutionDetails<>( + params, + data, + instance, + mock(WorkChunk.class) + ); + RunOutcome outcome = myReindexStepV1.run(stepExecutionDetails, myDataSink); // Verify assertEquals(2, outcome.getRecordsProcessed()); @@ -207,9 +234,7 @@ public void testReindex_IndexesAddedAndRemoved_IndexMissingFieldsEnabled() { @Test public void testReindex_OneResourceReindexFailedButOthersSucceeded() { - // Setup - Long id0 = createPatient(withActiveTrue(), withFamily("SIMPSON")).getIdPartAsLong(); Long id1 = createPatient(withActiveTrue(), withFamily("FLANDERS")).getIdPartAsLong(); Long idPatientToInvalidate = createPatient().getIdPartAsLong(); @@ -234,9 +259,19 @@ public void testReindex_OneResourceReindexFailedButOthersSucceeded() { }); // Execute - + ReindexJobParameters params = new ReindexJobParameters(); myCaptureQueriesListener.clear(); - RunOutcome outcome = myReindexStep.doReindex(data, myDataSink, "index-id", "chunk-id", new ReindexJobParameters()); + JobInstance instance = new JobInstance(); + instance.setInstanceId("index-id"); + WorkChunk workChunk = new WorkChunk(); + workChunk.setId("workid"); + StepExecutionDetails stepExecutionDetails = new StepExecutionDetails<>( + params, + data, + instance, + workChunk + ); + RunOutcome outcome = myReindexStepV1.run(stepExecutionDetails, myDataSink); // Verify assertEquals(4, outcome.getRecordsProcessed()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexJobTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskTest.java similarity index 
95% rename from hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexJobTest.java rename to hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskTest.java index fa9d93fb4ab2..c04a40f1ac7f 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexJobTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskTest.java @@ -2,7 +2,6 @@ import ca.uhn.fhir.batch2.api.IJobCoordinator; import ca.uhn.fhir.batch2.api.IJobPersistence; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx; import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobInstanceStartRequest; @@ -39,6 +38,7 @@ import java.util.List; import java.util.stream.Stream; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -47,7 +47,7 @@ import static org.junit.jupiter.api.Assertions.fail; @SuppressWarnings("SqlDialectInspection") -public class ReindexJobTest extends BaseJpaR4Test { +public class ReindexTaskTest extends BaseJpaR4Test { @Autowired private IJobCoordinator myJobCoordinator; @@ -101,7 +101,7 @@ public void testOptimizeStorage_CurrentVersion() { // execute JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters( new ReindexJobParameters() .setOptimizeStorage(ReindexParameters.OptimizeStorageModeEnum.CURRENT_VERSION) @@ -158,7 +158,7 @@ public void testOptimizeStorage_AllVersions() { // execute JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + 
startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters( new ReindexJobParameters() .setOptimizeStorage(ReindexParameters.OptimizeStorageModeEnum.ALL_VERSIONS) @@ -217,7 +217,7 @@ public void testOptimizeStorage_AllVersions_CopyProvenanceEntityData() { // execute JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters( new ReindexJobParameters() .setOptimizeStorage(ReindexParameters.OptimizeStorageModeEnum.ALL_VERSIONS) @@ -252,7 +252,7 @@ public void testOptimizeStorage_DeletedRecords() { // execute JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters( new ReindexJobParameters() .setOptimizeStorage(ReindexParameters.OptimizeStorageModeEnum.CURRENT_VERSION) @@ -294,7 +294,7 @@ public void testReindex_ByUrl() { // execute JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(parameters); Batch2JobStartResponse res = myJobCoordinator.startInstance(mySrd, startRequest); myBatch2JobHelper.awaitJobCompletion(res); @@ -325,7 +325,7 @@ public void testReindex_byMultipleUrls_indexesMatchingResources() { // execute JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(parameters); Batch2JobStartResponse res = myJobCoordinator.startInstance(startRequest); JobInstance jobInstance = myBatch2JobHelper.awaitJobCompletion(res); @@ -356,7 +356,7 @@ public void testReindexDeletedResources_byUrl_willRemoveDeletedResourceEntriesFr 
parameters.addUrl("Observation?status=final"); JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(parameters); Batch2JobStartResponse res = myJobCoordinator.startInstance(mySrd, startRequest); myBatch2JobHelper.awaitJobCompletion(res); @@ -387,7 +387,7 @@ public void testReindex_Everything() { // execute JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(new ReindexJobParameters()); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest); myBatch2JobHelper.awaitJobCompletion(startResponse); @@ -406,7 +406,7 @@ public void testReindex_DuplicateResourceBeforeEnforceUniqueShouldSaveWarning() DaoMethodOutcome searchParameter = myReindexTestHelper.createUniqueCodeSearchParameter(); JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(new ReindexJobParameters()); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(new SystemRequestDetails(), startRequest); JobInstance myJob = myBatch2JobHelper.awaitJobCompletion(startResponse); @@ -436,7 +436,7 @@ public void testReindex_ComboUnique_HashesShouldBePopulated() { // Run a reindex JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(new ReindexJobParameters()); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(new SystemRequestDetails(), startRequest); JobInstance myJob = myBatch2JobHelper.awaitJobCompletion(startResponse.getInstanceId(), 999); 
@@ -469,7 +469,7 @@ public void testReindex_ComboNonUnique_HashesShouldBePopulated() { // Run a reindex JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(new ReindexJobParameters()); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(new SystemRequestDetails(), startRequest); JobInstance myJob = myBatch2JobHelper.awaitJobCompletion(startResponse.getInstanceId(), 999); @@ -500,7 +500,7 @@ public void testReindex_ExceptionThrownDuringWrite() { // execute JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(new ReindexJobParameters()); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest); JobInstance outcome = myBatch2JobHelper.awaitJobCompletion(startResponse); @@ -528,7 +528,7 @@ public void testReindex_FailureThrownDuringWrite() { // execute JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(new ReindexJobParameters()); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(new SystemRequestDetails(), startRequest); JobInstance outcome = myBatch2JobHelper.awaitJobFailure(startResponse); @@ -541,7 +541,7 @@ public void testReindex_FailureThrownDuringWrite() { @Test public void testReindex_withReindexingUponSearchParameterChangeEnabled_reindexJobCompleted() { - List jobInstances = myJobPersistence.fetchInstancesByJobDefinitionId(ReindexAppCtx.JOB_REINDEX, 10, 0); + List jobInstances = myJobPersistence.fetchInstancesByJobDefinitionId(JOB_REINDEX, 10, 0); assertThat(jobInstances).isEmpty(); // make sure the resources auto-reindex after the 
search parameter update is enabled @@ -552,7 +552,7 @@ public void testReindex_withReindexingUponSearchParameterChangeEnabled_reindexJo myReindexTestHelper.createCodeSearchParameter(); // check that reindex job was created - jobInstances = myJobPersistence.fetchInstancesByJobDefinitionId(ReindexAppCtx.JOB_REINDEX, 10, 0); + jobInstances = myJobPersistence.fetchInstancesByJobDefinitionId(JOB_REINDEX, 10, 0); assertThat(jobInstances).hasSize(1); // check that the job is completed (not stuck in QUEUED status) diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexJobWithPartitioningTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskWithPartitioningTest.java similarity index 97% rename from hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexJobWithPartitioningTest.java rename to hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskWithPartitioningTest.java index 8a643006439b..35657c3ab47a 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexJobWithPartitioningTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskWithPartitioningTest.java @@ -1,9 +1,8 @@ package ca.uhn.fhir.jpa.reindex; import ca.uhn.fhir.batch2.api.IJobCoordinator; -import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlJobParameters; import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrl; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx; +import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlJobParameters; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobInstanceStartRequest; import ca.uhn.fhir.interceptor.model.RequestPartitionId; @@ -25,9 +24,10 @@ import java.util.List; import java.util.stream.Stream; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; import static org.assertj.core.api.Assertions.assertThat; 
@TestInstance(TestInstance.Lifecycle.PER_CLASS) -public class ReindexJobWithPartitioningTest extends BaseJpaR4Test { +public class ReindexTaskWithPartitioningTest extends BaseJpaR4Test { @Autowired private IJobCoordinator myJobCoordinator; @@ -133,7 +133,7 @@ public void testReindex_withPartitionedUrls_indexesMatchingResources(List all = new HashSet<>(); + for (CodeSystem.ConceptDefinitionComponent set : cs.getConcept()) { + all.add(set.getCode()); + } + for (ValueSet.ValueSetExpansionContainsComponent v : expanded.getExpansion().getContains()) { + all.remove(v.getCode()); + } + assertTrue(all.isEmpty(), String.join(", ", all)); + assertEquals(cs.getConcept().size(), expanded.getExpansion().getTotal()); + } finally { + // set back to standard values + myStorageSettings.setDeferIndexingForCodesystemsOfSize(deferredIndexingDefault); + ReindexUtils.setRetryDelay(null); + } + } } diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/DuplicateIndexR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/DuplicateIndexR5Test.java index d88d136d0890..2a4387b45975 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/DuplicateIndexR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/DuplicateIndexR5Test.java @@ -1,6 +1,5 @@ package ca.uhn.fhir.jpa.dao.r5; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx; import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; import ca.uhn.fhir.batch2.model.JobInstanceStartRequest; import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse; @@ -19,6 +18,7 @@ import org.hl7.fhir.r5.model.SearchParameter; import org.junit.jupiter.api.Test; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; import static org.junit.jupiter.api.Assertions.assertEquals; public class DuplicateIndexR5Test extends BaseJpaR5Test { @@ -149,7 +149,7 @@ private void reindexAllPatients() { ReindexJobParameters parameters = new 
ReindexJobParameters(); parameters.addUrl("Patient?"); JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); startRequest.setParameters(parameters); Batch2JobStartResponse res = myJobCoordinator.startInstance(mySrd, startRequest); myBatch2JobHelper.awaitJobCompletion(res.getInstanceId()); diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java index f6f26c10fc9c..4ee5ea481c4d 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java @@ -1,7 +1,6 @@ package ca.uhn.fhir.jpa.provider.r5; import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrl; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx; import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; import ca.uhn.fhir.batch2.model.JobInstanceStartRequest; import ca.uhn.fhir.interceptor.model.RequestPartitionId; @@ -67,6 +66,7 @@ import java.util.Set; import java.util.stream.Collectors; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; import static org.apache.commons.lang3.StringUtils.leftPad; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -374,7 +374,7 @@ public void searchForNewerResources_fullTextSearchWithFilterAndCount_shouldRetur ReindexJobParameters jobParameters = new ReindexJobParameters(); jobParameters.addPartitionedUrl(new PartitionedUrl().setRequestPartitionId(RequestPartitionId.allPartitions())); JobInstanceStartRequest request = new JobInstanceStartRequest(); - request.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + request.setJobDefinitionId(JOB_REINDEX); 
request.setParameters(jobParameters); Batch2JobStartResponse response = myJobCoordinator.startInstance(new SystemRequestDetails(), request); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java index c2ca070788a2..8619d3db0a80 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.embedded; import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; +import ca.uhn.fhir.jpa.util.DatabaseSupportUtil; import ca.uhn.fhir.test.utilities.docker.DockerRequiredCondition; import ca.uhn.fhir.util.VersionEnum; import org.junit.jupiter.api.extension.AfterAllCallback; @@ -54,7 +55,7 @@ public HapiEmbeddedDatabasesExtension() { myEmbeddedDatabases.add(new H2EmbeddedDatabase()); myEmbeddedDatabases.add(new PostgresEmbeddedDatabase()); myEmbeddedDatabases.add(new MsSqlEmbeddedDatabase()); - if (OracleCondition.canUseOracle()) { + if (DatabaseSupportUtil.canUseOracle()) { myEmbeddedDatabases.add(new OracleEmbeddedDatabase()); } else { String message = @@ -136,7 +137,7 @@ public Stream provideArguments(ExtensionContext context) { arguments.add(Arguments.of(DriverTypeEnum.POSTGRES_9_4)); arguments.add(Arguments.of(DriverTypeEnum.MSSQL_2012)); - if (OracleCondition.canUseOracle()) { + if (DatabaseSupportUtil.canUseOracle()) { arguments.add(Arguments.of(DriverTypeEnum.ORACLE_12C)); } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/MsSqlEmbeddedDatabase.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/MsSqlEmbeddedDatabase.java index 7ab8ed648ea3..cc498305293a 100644 --- 
a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/MsSqlEmbeddedDatabase.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/MsSqlEmbeddedDatabase.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.embedded; import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; +import ca.uhn.fhir.jpa.util.DatabaseSupportUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.MSSQLServerContainer; @@ -43,9 +44,19 @@ public class MsSqlEmbeddedDatabase extends JpaEmbeddedDatabase { private final MSSQLServerContainer myContainer; public MsSqlEmbeddedDatabase() { - DockerImageName msSqlImage = DockerImageName.parse("mcr.microsoft.com/azure-sql-edge:latest") - .asCompatibleSubstituteFor("mcr.microsoft.com/mssql/server"); - myContainer = new MSSQLServerContainer(msSqlImage).acceptLicense(); + + // azure-sql-edge docker image does not support kernel 6.7+ + // as a result, mssql container fails to start most of the time + // mssql/server:2019 image support kernel 6.7+, so use it for amd64 architecture + // See: https://github.com/microsoft/mssql-docker/issues/868 + if (DatabaseSupportUtil.canUseMsSql2019()) { + myContainer = new MSSQLServerContainer("mcr.microsoft.com/mssql/server:2019-latest").acceptLicense(); + } else { + DockerImageName msSqlImage = DockerImageName.parse("mcr.microsoft.com/azure-sql-edge:latest") + .asCompatibleSubstituteFor("mcr.microsoft.com/mssql/server"); + myContainer = new MSSQLServerContainer(msSqlImage).acceptLicense(); + } + myContainer.start(); super.initialize( DriverTypeEnum.MSSQL_2012, diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/OracleCondition.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/OracleCondition.java index ddefa1a127cd..6528e8f6bcc5 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/OracleCondition.java +++ 
b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/OracleCondition.java @@ -19,8 +19,7 @@ */ package ca.uhn.fhir.jpa.embedded; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.SystemUtils; +import ca.uhn.fhir.jpa.util.DatabaseSupportUtil; import org.junit.jupiter.api.extension.ConditionEvaluationResult; import org.junit.jupiter.api.extension.ExecutionCondition; import org.junit.jupiter.api.extension.ExtensionContext; @@ -33,25 +32,8 @@ public class OracleCondition implements ExecutionCondition { @Override public ConditionEvaluationResult evaluateExecutionCondition(ExtensionContext theExtensionContext) { - return canUseOracle() + return DatabaseSupportUtil.canUseOracle() ? ConditionEvaluationResult.enabled(ENABLED_MSG) : ConditionEvaluationResult.disabled(DISABLED_MSG); } - - public static boolean canUseOracle() { - if (!isMac()) { - return true; - } - return isColimaConfigured(); - } - - private static boolean isMac() { - return SystemUtils.IS_OS_MAC || SystemUtils.IS_OS_MAC_OSX; - } - - private static boolean isColimaConfigured() { - return StringUtils.isNotBlank(System.getenv("TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE")) - && StringUtils.isNotBlank(System.getenv("DOCKER_HOST")) - && System.getenv("DOCKER_HOST").contains("colima"); - } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java index 2fb5a9b87f63..8b98ddfe30e0 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java @@ -19,6 +19,7 @@ */ package ca.uhn.fhir.jpa.test; +import ca.uhn.fhir.batch2.api.IJobCoordinator; import ca.uhn.fhir.batch2.api.IJobMaintenanceService; import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider; import ca.uhn.fhir.context.FhirContext; @@ -559,6 
+560,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil @Autowired protected IJobMaintenanceService myJobMaintenanceService; + @Autowired + protected IJobCoordinator myJobCoordinator; @RegisterExtension private final PreventDanglingInterceptorsExtension myPreventDanglingInterceptorsExtension = new PreventDanglingInterceptorsExtension(()-> myInterceptorRegistry); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PatientReindexTestHelper.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PatientReindexTestHelper.java index 346ecfc519ff..bb063387b89a 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PatientReindexTestHelper.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PatientReindexTestHelper.java @@ -38,6 +38,7 @@ import java.util.List; import java.util.stream.Stream; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; @@ -170,7 +171,7 @@ private void validatePersistedPatients(int theExpectedNumPatients, long theExpec private JobInstanceStartRequest createPatientReindexRequest(int theBatchSize) { JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); - startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + startRequest.setJobDefinitionId(JOB_REINDEX); ReindexJobParameters reindexJobParameters = new ReindexJobParameters(); reindexJobParameters.setBatchSize(Math.max(theBatchSize,1)); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/DatabaseSupportUtil.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/DatabaseSupportUtil.java new file mode 100644 index 000000000000..fd1d362a95a5 --- /dev/null +++ 
b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/DatabaseSupportUtil.java @@ -0,0 +1,34 @@ +package ca.uhn.fhir.jpa.util; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.SystemUtils; + +public final class DatabaseSupportUtil { + + private DatabaseSupportUtil() {} + + public static boolean canUseMsSql2019() { + return isSupportAmd64Architecture(); + } + + public static boolean canUseOracle() { + return isSupportAmd64Architecture(); + } + + private static boolean isSupportAmd64Architecture() { + if (!isMac()) { + return true; + } + return isColimaConfigured(); + } + + private static boolean isMac() { + return SystemUtils.IS_OS_MAC || SystemUtils.IS_OS_MAC_OSX; + } + + private static boolean isColimaConfigured() { + return StringUtils.isNotBlank(System.getenv("TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE")) + && StringUtils.isNotBlank(System.getenv("DOCKER_HOST")) + && System.getenv("DOCKER_HOST").contains("colima"); + } +} diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImplTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImplTest.java index eed255e7842b..2365300b65af 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImplTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImplTest.java @@ -17,11 +17,15 @@ import org.springframework.transaction.PlatformTransactionManager; import java.util.ArrayList; +import java.util.List; import java.util.Optional; +import static ca.uhn.fhir.batch2.jobs.termcodesystem.TermCodeSystemJobConfig.TERM_CODE_SYSTEM_DELETE_JOB_NAME; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyLong; +import static 
org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.times; @@ -67,8 +71,15 @@ public void testStorageNotEmptyWhileJobsExecuting() { ReflectionTestUtils.setField(mySvc, "myJobExecutions", mockExecutions); - when(myJobCoordinator.getInstance(eq(jobId))) - .thenReturn(instance); + when(myJobCoordinator.getInstancesbyJobDefinitionIdAndEndedStatus( + eq(TERM_CODE_SYSTEM_DELETE_JOB_NAME), + eq(true), + anyInt(), + eq(0) + )) + .thenReturn(List.of()) // first nothing + .thenReturn(List.of(instance)); // then the list with the instance + assertFalse(mySvc.isStorageQueueEmpty(true)); instance.setStatus(StatusEnum.COMPLETED); assertTrue(mySvc.isStorageQueueEmpty(true)); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexAppCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexAppCtx.java index a49d054bf397..ba335b1f60b6 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexAppCtx.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexAppCtx.java @@ -21,69 +21,37 @@ import ca.uhn.fhir.batch2.api.IJobCoordinator; import ca.uhn.fhir.batch2.api.IJobPartitionProvider; -import ca.uhn.fhir.batch2.api.IJobStepWorker; -import ca.uhn.fhir.batch2.api.VoidModel; -import ca.uhn.fhir.batch2.jobs.chunk.ChunkRangeJson; -import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; -import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator; -import ca.uhn.fhir.batch2.jobs.step.GenerateRangeChunksStep; -import ca.uhn.fhir.batch2.jobs.step.LoadIdsStep; -import ca.uhn.fhir.batch2.model.JobDefinition; +import ca.uhn.fhir.batch2.jobs.reindex.svcs.ReindexJobService; +import ca.uhn.fhir.batch2.jobs.reindex.v1.ReindexV1Config; +import ca.uhn.fhir.batch2.jobs.reindex.v2.ReindexV2Config; import 
ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc; -import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; @Configuration +@Import({ReindexV1Config.class, ReindexV2Config.class}) public class ReindexAppCtx { - public static final String JOB_REINDEX = "REINDEX"; + @Autowired + private HapiTransactionService myHapiTransactionService; - @Bean - public JobDefinition reindexJobDefinition(IBatch2DaoSvc theBatch2DaoSvc) { - return JobDefinition.newBuilder() - .setJobDefinitionId(JOB_REINDEX) - .setJobDescription("Reindex resources") - .setJobDefinitionVersion(1) - .setParametersType(ReindexJobParameters.class) - .setParametersValidator(reindexJobParametersValidator(theBatch2DaoSvc)) - .gatedExecution() - .addFirstStep( - "generate-ranges", - "Generate data ranges to reindex", - ChunkRangeJson.class, - reindexGenerateRangeChunksStep()) - .addIntermediateStep( - "load-ids", - "Load IDs of resources to reindex", - ResourceIdListWorkChunkJson.class, - reindexLoadIdsStep(theBatch2DaoSvc)) - .addLastStep("reindex", "Perform the resource reindex", reindexStep()) - .build(); - } + @Autowired + private IFhirSystemDao mySystemDao; - @Bean - public IJobStepWorker reindexGenerateRangeChunksStep() { - return new GenerateRangeChunksStep<>(); - } - - @Bean - public IJobStepWorker reindexLoadIdsStep( - IBatch2DaoSvc theBatch2DaoSvc) { - return new LoadIdsStep<>(theBatch2DaoSvc); - } + @Autowired + private DaoRegistry myRegistry; - @Bean - public 
ReindexJobParametersValidator reindexJobParametersValidator(IBatch2DaoSvc theBatch2DaoSvc) { - return new ReindexJobParametersValidator( - new UrlListValidator(ProviderConstants.OPERATION_REINDEX, theBatch2DaoSvc)); - } + @Autowired + private IIdHelperService> myIdHelperService; - @Bean - public ReindexStep reindexStep() { - return new ReindexStep(); - } + /* Shared services */ @Bean public ReindexProvider reindexProvider( @@ -92,4 +60,9 @@ public ReindexProvider reindexProvider( IJobPartitionProvider theJobPartitionHandler) { return new ReindexProvider(theFhirContext, theJobCoordinator, theJobPartitionHandler); } + + @Bean + public ReindexJobService jobService() { + return new ReindexJobService(myRegistry); + } } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java index 5889a75d4e93..a8f8e08707ca 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java @@ -44,6 +44,7 @@ import static ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters.OPTIMIZE_STORAGE; import static ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters.REINDEX_SEARCH_PARAMETERS; +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; public class ReindexProvider { @@ -127,7 +128,7 @@ public IBaseParameters reindex( myJobPartitionProvider.getPartitionedUrls(theRequestDetails, urls).forEach(params::addPartitionedUrl); JobInstanceStartRequest request = new JobInstanceStartRequest(); - request.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); + request.setJobDefinitionId(JOB_REINDEX); request.setParameters(params); Batch2JobStartResponse response = myJobCoordinator.startInstance(theRequestDetails, request); diff --git 
a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexStep.java deleted file mode 100644 index 2644f3af89f1..000000000000 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexStep.java +++ /dev/null @@ -1,203 +0,0 @@ -/*- - * #%L - * hapi-fhir-storage-batch2-jobs - * %% - * Copyright (C) 2014 - 2024 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ -package ca.uhn.fhir.batch2.jobs.reindex; - -import ca.uhn.fhir.batch2.api.IJobDataSink; -import ca.uhn.fhir.batch2.api.IJobStepWorker; -import ca.uhn.fhir.batch2.api.JobExecutionFailedException; -import ca.uhn.fhir.batch2.api.RunOutcome; -import ca.uhn.fhir.batch2.api.StepExecutionDetails; -import ca.uhn.fhir.batch2.api.VoidModel; -import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; -import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; -import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; -import ca.uhn.fhir.jpa.api.dao.ReindexOutcome; -import ca.uhn.fhir.jpa.api.dao.ReindexParameters; -import ca.uhn.fhir.jpa.api.svc.IIdHelperService; -import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; -import ca.uhn.fhir.parser.DataFormatException; -import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.api.server.SystemRequestDetails; -import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; -import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; -import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; -import ca.uhn.fhir.util.StopWatch; -import jakarta.annotation.Nonnull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.support.TransactionCallback; - -import java.util.List; -import java.util.concurrent.TimeUnit; - -public class ReindexStep implements IJobStepWorker { - - public static final int REINDEX_MAX_RETRIES = 10; - - private static final Logger ourLog = LoggerFactory.getLogger(ReindexStep.class); - - @Autowired - private HapiTransactionService myHapiTransactionService; - - @Autowired - private IFhirSystemDao mySystemDao; - - @Autowired - private DaoRegistry myDaoRegistry; - - @Autowired - private IIdHelperService myIdHelperService; - - @Nonnull - @Override - public RunOutcome run( - 
@Nonnull StepExecutionDetails theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink) - throws JobExecutionFailedException { - - ResourceIdListWorkChunkJson data = theStepExecutionDetails.getData(); - ReindexJobParameters jobParameters = theStepExecutionDetails.getParameters(); - - return doReindex( - data, - theDataSink, - theStepExecutionDetails.getInstance().getInstanceId(), - theStepExecutionDetails.getChunkId(), - jobParameters); - } - - @Nonnull - public RunOutcome doReindex( - ResourceIdListWorkChunkJson data, - IJobDataSink theDataSink, - String theInstanceId, - String theChunkId, - ReindexJobParameters theJobParameters) { - RequestDetails requestDetails = new SystemRequestDetails(); - requestDetails.setRetry(true); - requestDetails.setMaxRetries(REINDEX_MAX_RETRIES); - TransactionDetails transactionDetails = new TransactionDetails(); - ReindexJob reindexJob = new ReindexJob( - data, requestDetails, transactionDetails, theDataSink, theInstanceId, theChunkId, theJobParameters); - - myHapiTransactionService - .withRequest(requestDetails) - .withTransactionDetails(transactionDetails) - .withRequestPartitionId(data.getRequestPartitionId()) - .execute(reindexJob); - - return new RunOutcome(data.size()); - } - - private class ReindexJob implements TransactionCallback { - private final ResourceIdListWorkChunkJson myData; - private final RequestDetails myRequestDetails; - private final TransactionDetails myTransactionDetails; - private final IJobDataSink myDataSink; - private final String myChunkId; - private final String myInstanceId; - private final ReindexJobParameters myJobParameters; - - public ReindexJob( - ResourceIdListWorkChunkJson theData, - RequestDetails theRequestDetails, - TransactionDetails theTransactionDetails, - IJobDataSink theDataSink, - String theInstanceId, - String theChunkId, - ReindexJobParameters theJobParameters) { - myData = theData; - myRequestDetails = theRequestDetails; - myTransactionDetails = theTransactionDetails; - 
myDataSink = theDataSink; - myInstanceId = theInstanceId; - myChunkId = theChunkId; - myJobParameters = theJobParameters; - myDataSink.setWarningProcessor(new ReindexWarningProcessor()); - } - - @Override - public Void doInTransaction(@Nonnull TransactionStatus theStatus) { - - List persistentIds = myData.getResourcePersistentIds(myIdHelperService); - - ourLog.info( - "Starting reindex work chunk with {} resources - Instance[{}] Chunk[{}]", - persistentIds.size(), - myInstanceId, - myChunkId); - StopWatch sw = new StopWatch(); - - // Prefetch Resources from DB - - boolean reindexSearchParameters = - myJobParameters.getReindexSearchParameters() != ReindexParameters.ReindexSearchParametersEnum.NONE; - mySystemDao.preFetchResources(persistentIds, reindexSearchParameters); - ourLog.info( - "Prefetched {} resources in {} - Instance[{}] Chunk[{}]", - persistentIds.size(), - sw, - myInstanceId, - myChunkId); - - ReindexParameters parameters = new ReindexParameters() - .setReindexSearchParameters(myJobParameters.getReindexSearchParameters()) - .setOptimizeStorage(myJobParameters.getOptimizeStorage()) - .setOptimisticLock(myJobParameters.getOptimisticLock()); - - // Reindex - - sw.restart(); - for (int i = 0; i < myData.size(); i++) { - - String nextResourceType = myData.getResourceType(i); - IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextResourceType); - IResourcePersistentId resourcePersistentId = persistentIds.get(i); - try { - - ReindexOutcome outcome = - dao.reindex(resourcePersistentId, parameters, myRequestDetails, myTransactionDetails); - outcome.getWarnings().forEach(myDataSink::recoveredError); - - } catch (BaseServerResponseException | DataFormatException e) { - String resourceForcedId = myIdHelperService - .translatePidIdToForcedIdWithCache(resourcePersistentId) - .orElse(resourcePersistentId.toString()); - String resourceId = nextResourceType + "/" + resourceForcedId; - ourLog.debug("Failure during reindexing {}", resourceId, e); - 
myDataSink.recoveredError("Failure reindexing " + resourceId + ": " + e.getMessage()); - } - } - - ourLog.info( - "Finished reindexing {} resources in {} - {}/sec - Instance[{}] Chunk[{}]", - persistentIds.size(), - sw, - sw.formatThroughput(persistentIds.size(), TimeUnit.SECONDS), - myInstanceId, - myChunkId); - - return null; - } - } -} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexUtils.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexUtils.java new file mode 100644 index 000000000000..e54b32a90ccc --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexUtils.java @@ -0,0 +1,39 @@ +package ca.uhn.fhir.batch2.jobs.reindex; + +import com.google.common.annotations.VisibleForTesting; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; + +public class ReindexUtils { + + /** + * The reindex job definition id + */ + public static final String JOB_REINDEX = "REINDEX"; + + public static final int REINDEX_MAX_RETRIES = 10; + + private static final Duration RETRY_DELAY = Duration.of(30, ChronoUnit.SECONDS); + + private static Duration myDelay; + + /** + * Returns the retry delay for reindex jobs that require polling. + */ + public static Duration getRetryLaterDelay() { + if (myDelay != null) { + return myDelay; + } + return RETRY_DELAY; + } + + /** + * Sets the retry delay to use for reindex jobs. + * Do not use this in production code! Only test code. 
+ */ + @VisibleForTesting + public static void setRetryDelay(Duration theDuration) { + myDelay = theDuration; + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/models/ReindexResults.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/models/ReindexResults.java new file mode 100644 index 000000000000..0eee14d3ec12 --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/models/ReindexResults.java @@ -0,0 +1,29 @@ +package ca.uhn.fhir.batch2.jobs.reindex.models; + +import ca.uhn.fhir.model.api.IModelJson; +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.HashMap; + +public class ReindexResults implements IModelJson { + + /** + * A map of resource type : whether or not that resource type still has reindex work pending; + * true = more work needed. false (or omitted) = reindex is done + */ + @JsonProperty("resource2NeedsWork") + private HashMap myResourceToHasWorkToComplete; + + public ReindexResults() {} + + public HashMap getResourceToHasWorkToComplete() { + if (myResourceToHasWorkToComplete == null) { + myResourceToHasWorkToComplete = new HashMap<>(); + } + return myResourceToHasWorkToComplete; + } + + public void addResourceTypeToCompletionStatus(String theResourceType, boolean theRequiresMoreWork) { + getResourceToHasWorkToComplete().put(theResourceType, theRequiresMoreWork); + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/svcs/ReindexJobService.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/svcs/ReindexJobService.java new file mode 100644 index 000000000000..39a4273a8b79 --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/svcs/ReindexJobService.java @@ -0,0 +1,38 @@ +package ca.uhn.fhir.batch2.jobs.reindex.svcs; + +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import
ca.uhn.fhir.jpa.api.model.ReindexJobStatus; + +import java.util.Map; + +public class ReindexJobService { + + private final DaoRegistry myDaoRegistry; + + public ReindexJobService(DaoRegistry theRegistry) { + myDaoRegistry = theRegistry; + } + + /** + * Checks if any of the resource types in the map have any pending reindex work waiting. + * This will return true after the first such encounter, and only return false if no + * reindex work is required for any resource. + * @param theResourceTypesToCheckFlag map of resourceType:whether or not to check + * @return true if there's reindex work pending, false otherwise + */ + public boolean anyResourceHasPendingReindexWork(Map theResourceTypesToCheckFlag) { + for (String resourceType : theResourceTypesToCheckFlag.keySet()) { + boolean toCheck = theResourceTypesToCheckFlag.get(resourceType); + if (toCheck) { + IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceType); + + ReindexJobStatus status = dao.getReindexJobStatus(); + if (status.isHasReindexWorkPending()) { + return true; + } + } + } + return false; + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexJobParametersValidatorV1.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexJobParametersValidatorV1.java new file mode 100644 index 000000000000..a54ef49f9eb0 --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexJobParametersValidatorV1.java @@ -0,0 +1,57 @@ +/*- + * #%L + * hapi-fhir-storage-batch2-jobs + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.batch2.jobs.reindex.v1; + +import ca.uhn.fhir.batch2.api.IJobParametersValidator; +import ca.uhn.fhir.batch2.jobs.parameters.IUrlListValidator; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; + +import java.util.ArrayList; +import java.util.List; + +@Deprecated(forRemoval = true, since = "7.6.0") +public class ReindexJobParametersValidatorV1 implements IJobParametersValidator { + + private final IUrlListValidator myUrlListValidator; + + public ReindexJobParametersValidatorV1(IUrlListValidator theUrlListValidator) { + myUrlListValidator = theUrlListValidator; + } + + @Nullable + @Override + public List validate(RequestDetails theRequestDetails, @Nonnull ReindexJobParameters theParameters) { + List errors = myUrlListValidator.validateUrls(theParameters.getUrls()); + + if (errors == null || errors.isEmpty()) { + // only check if there's no other errors (new list to fix immutable issues) + errors = new ArrayList<>(); + for (String url : theParameters.getUrls()) { + if (url.contains(" ") || url.contains("\n") || url.contains("\t")) { + errors.add("Invalid URL. 
URL cannot contain spaces : " + url); + } + } + } + return errors; + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexStepV1.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexStepV1.java new file mode 100644 index 000000000000..c9adb93b91c6 --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexStepV1.java @@ -0,0 +1,117 @@ +/*- + * #%L + * hapi-fhir-storage-batch2-jobs + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.batch2.jobs.reindex.v1; + +import ca.uhn.fhir.batch2.api.IJobDataSink; +import ca.uhn.fhir.batch2.api.IJobStepWorker; +import ca.uhn.fhir.batch2.api.JobExecutionFailedException; +import ca.uhn.fhir.batch2.api.RunOutcome; +import ca.uhn.fhir.batch2.api.StepExecutionDetails; +import ca.uhn.fhir.batch2.api.VoidModel; +import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; +import ca.uhn.fhir.batch2.jobs.reindex.models.ReindexResults; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; +import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.REINDEX_MAX_RETRIES; + +@Deprecated(forRemoval = true, since = "7.6.0") +public class ReindexStepV1 implements IJobStepWorker { + + private static final Logger ourLog = LoggerFactory.getLogger(ReindexStepV1.class); + + private final HapiTransactionService myHapiTransactionService; + + private final IFhirSystemDao mySystemDao; + + private final DaoRegistry myDaoRegistry; + + private final IIdHelperService> myIdHelperService; + + public ReindexStepV1( + HapiTransactionService theHapiTransactionService, + IFhirSystemDao theSystemDao, + DaoRegistry theRegistry, + IIdHelperService> theIdHelperService) { + myDaoRegistry = theRegistry; + myHapiTransactionService = theHapiTransactionService; + mySystemDao = theSystemDao; + myIdHelperService = theIdHelperService; + } + + @Nonnull + @Override + public RunOutcome run( + @Nonnull StepExecutionDetails 
theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { + + ResourceIdListWorkChunkJson data = theStepExecutionDetails.getData(); + ReindexJobParameters jobParameters = theStepExecutionDetails.getParameters(); + + doReindex( + data, + theDataSink, + theStepExecutionDetails.getInstance().getInstanceId(), + theStepExecutionDetails.getChunkId(), + jobParameters); + + return new RunOutcome(data.size()); + } + + public ReindexResults doReindex( + ResourceIdListWorkChunkJson data, + IJobDataSink theDataSink, + String theInstanceId, + String theChunkId, + ReindexJobParameters theJobParameters) { + RequestDetails requestDetails = new SystemRequestDetails(); + requestDetails.setRetry(true); + requestDetails.setMaxRetries(REINDEX_MAX_RETRIES); + + TransactionDetails transactionDetails = new TransactionDetails(); + ReindexTaskV1.JobParameters jp = new ReindexTaskV1.JobParameters(); + jp.setData(data) + .setRequestDetails(requestDetails) + .setTransactionDetails(transactionDetails) + .setDataSink(theDataSink) + .setInstanceId(theInstanceId) + .setChunkId(theChunkId) + .setJobParameters(theJobParameters); + + ReindexTaskV1 reindexJob = new ReindexTaskV1(jp, myDaoRegistry, mySystemDao, myIdHelperService); + + return myHapiTransactionService + .withRequest(requestDetails) + .withTransactionDetails(transactionDetails) + .withRequestPartitionId(data.getRequestPartitionId()) + .execute(reindexJob); + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexTaskV1.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexTaskV1.java new file mode 100644 index 000000000000..f6dd6db4a8cb --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexTaskV1.java @@ -0,0 +1,202 @@ +package ca.uhn.fhir.batch2.jobs.reindex.v1; + +import ca.uhn.fhir.batch2.api.IJobDataSink; +import 
ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexWarningProcessor; +import ca.uhn.fhir.batch2.jobs.reindex.models.ReindexResults; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.api.dao.ReindexOutcome; +import ca.uhn.fhir.jpa.api.dao.ReindexParameters; +import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.parser.DataFormatException; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; +import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; +import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; +import ca.uhn.fhir.util.StopWatch; +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.support.TransactionCallback; + +import java.util.List; +import java.util.concurrent.TimeUnit; + +@Deprecated(forRemoval = true, since = "7.6.0") +public class ReindexTaskV1 implements TransactionCallback { + private static final Logger ourLog = LoggerFactory.getLogger(ReindexTaskV1.class); + + public static class JobParameters { + private ResourceIdListWorkChunkJson myData; + private RequestDetails myRequestDetails; + private TransactionDetails myTransactionDetails; + private IJobDataSink myDataSink; + private String myChunkId; + private String myInstanceId; + private ReindexJobParameters myJobParameters; + + public ResourceIdListWorkChunkJson getData() { + return myData; + } + + public JobParameters setData(ResourceIdListWorkChunkJson theData) { + myData = theData; + return this; + } + + public RequestDetails getRequestDetails() { + return myRequestDetails; + } + + public JobParameters 
setRequestDetails(RequestDetails theRequestDetails) { + myRequestDetails = theRequestDetails; + return this; + } + + public TransactionDetails getTransactionDetails() { + return myTransactionDetails; + } + + public JobParameters setTransactionDetails(TransactionDetails theTransactionDetails) { + myTransactionDetails = theTransactionDetails; + return this; + } + + public IJobDataSink getDataSink() { + return myDataSink; + } + + public JobParameters setDataSink(IJobDataSink theDataSink) { + myDataSink = theDataSink; + return this; + } + + public String getChunkId() { + return myChunkId; + } + + public JobParameters setChunkId(String theChunkId) { + myChunkId = theChunkId; + return this; + } + + public String getInstanceId() { + return myInstanceId; + } + + public JobParameters setInstanceId(String theInstanceId) { + myInstanceId = theInstanceId; + return this; + } + + public ReindexJobParameters getJobParameters() { + return myJobParameters; + } + + public JobParameters setJobParameters(ReindexJobParameters theJobParameters) { + myJobParameters = theJobParameters; + return this; + } + } + + private final DaoRegistry myDaoRegistry; + private final IFhirSystemDao mySystemDao; + + private final IIdHelperService> myIdHelperService; + + private final ResourceIdListWorkChunkJson myData; + private final RequestDetails myRequestDetails; + private final TransactionDetails myTransactionDetails; + private final IJobDataSink myDataSink; + private final String myChunkId; + private final String myInstanceId; + private final ReindexJobParameters myJobParameters; + + public ReindexTaskV1( + JobParameters theJobParameters, + DaoRegistry theRegistry, + IFhirSystemDao theSystemDao, + IIdHelperService> theIdHelperService) { + myDaoRegistry = theRegistry; + mySystemDao = theSystemDao; + myIdHelperService = theIdHelperService; + + myData = theJobParameters.getData(); + myRequestDetails = theJobParameters.getRequestDetails(); + myTransactionDetails = 
theJobParameters.getTransactionDetails(); + myDataSink = theJobParameters.getDataSink(); + myInstanceId = theJobParameters.getInstanceId(); + myChunkId = theJobParameters.getChunkId(); + myJobParameters = theJobParameters.getJobParameters(); + myDataSink.setWarningProcessor(new ReindexWarningProcessor()); + } + + @Override + public ReindexResults doInTransaction(@Nonnull TransactionStatus theStatus) { + List> persistentIds = myData.getResourcePersistentIds(myIdHelperService); + + ourLog.info( + "Starting reindex work chunk with {} resources - Instance[{}] Chunk[{}]", + persistentIds.size(), + myInstanceId, + myChunkId); + StopWatch sw = new StopWatch(); + ReindexResults reindexResults = new ReindexResults(); + + // Prefetch Resources from DB + boolean reindexSearchParameters = + myJobParameters.getReindexSearchParameters() != ReindexParameters.ReindexSearchParametersEnum.NONE; + mySystemDao.preFetchResources(persistentIds, reindexSearchParameters); + ourLog.info( + "Prefetched {} resources in {} - Instance[{}] Chunk[{}]", + persistentIds.size(), + sw, + myInstanceId, + myChunkId); + + ReindexParameters parameters = new ReindexParameters() + .setReindexSearchParameters(myJobParameters.getReindexSearchParameters()) + .setOptimizeStorage(myJobParameters.getOptimizeStorage()) + .setOptimisticLock(myJobParameters.getOptimisticLock()); + + // Reindex + + sw.restart(); + for (int i = 0; i < myData.size(); i++) { + + String nextResourceType = myData.getResourceType(i); + IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextResourceType); + IResourcePersistentId resourcePersistentId = persistentIds.get(i); + try { + + ReindexOutcome outcome = + dao.reindex(resourcePersistentId, parameters, myRequestDetails, myTransactionDetails); + + outcome.getWarnings().forEach(myDataSink::recoveredError); + reindexResults.addResourceTypeToCompletionStatus(nextResourceType, outcome.isHasPendingWork()); + + } catch (BaseServerResponseException | DataFormatException e) { + String 
resourceForcedId = myIdHelperService + .translatePidIdToForcedIdWithCache(resourcePersistentId) + .orElse(resourcePersistentId.toString()); + String resourceId = nextResourceType + "/" + resourceForcedId; + ourLog.error("Failure during reindexing {}", resourceId, e); + myDataSink.recoveredError("Failure reindexing " + resourceId + ": " + e.getMessage()); + } + } + + ourLog.info( + "Finished reindexing {} resources in {} - {}/sec - Instance[{}] Chunk[{}]", + persistentIds.size(), + sw, + sw.formatThroughput(persistentIds.size(), TimeUnit.SECONDS), + myInstanceId, + myChunkId); + + return reindexResults; + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexV1Config.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexV1Config.java new file mode 100644 index 000000000000..8bda0695a5c5 --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v1/ReindexV1Config.java @@ -0,0 +1,101 @@ +package ca.uhn.fhir.batch2.jobs.reindex.v1; + +import ca.uhn.fhir.batch2.api.IJobStepWorker; +import ca.uhn.fhir.batch2.api.VoidModel; +import ca.uhn.fhir.batch2.jobs.chunk.ChunkRangeJson; +import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; +import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; +import ca.uhn.fhir.batch2.jobs.reindex.svcs.ReindexJobService; +import ca.uhn.fhir.batch2.jobs.step.GenerateRangeChunksStep; +import ca.uhn.fhir.batch2.jobs.step.LoadIdsStep; +import ca.uhn.fhir.batch2.model.JobDefinition; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc; +import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; 
+import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; + +@Deprecated(forRemoval = true, since = "7.6.0") +@Configuration +public class ReindexV1Config { + @Autowired + private ReindexJobService myReindexJobService; + + @Autowired + private HapiTransactionService myHapiTransactionService; + + @Autowired + private IFhirSystemDao mySystemDao; + + @Autowired + private DaoRegistry myRegistry; + + @Autowired + private IIdHelperService> myIdHelperService; + + @Autowired + @Qualifier("reindexGenerateRangeChunkStepV1") + private IJobStepWorker myReindexGenerateRangeChunkStep; + + @Autowired + @Qualifier("reindexLoadIdsStepV1") + private IJobStepWorker myReindexLoadIdsStep; + + @Autowired + private ReindexJobParametersValidatorV1 myReindexJobParametersValidatorV1; + + // Version 1 + @Bean + public JobDefinition reindexJobDefinitionV1() { + return JobDefinition.newBuilder() + .setJobDefinitionId(JOB_REINDEX) + .setJobDescription("Reindex resources") + .setJobDefinitionVersion(1) + .setParametersType(ReindexJobParameters.class) + .setParametersValidator(myReindexJobParametersValidatorV1) + .gatedExecution() + .addFirstStep( + "generate-ranges", + "Generate data ranges to reindex", + ChunkRangeJson.class, + myReindexGenerateRangeChunkStep) + .addIntermediateStep( + "load-ids", + "Load IDs of resources to reindex", + ResourceIdListWorkChunkJson.class, + myReindexLoadIdsStep) + .addLastStep("reindex-start", "Start the resource reindex", reindexStepV1()) + .build(); + } + + @Bean + public ReindexStepV1 reindexStepV1() { + return new ReindexStepV1(myHapiTransactionService, mySystemDao, myRegistry, myIdHelperService); + } + + @Bean("reindexGenerateRangeChunkStepV1") + public IJobStepWorker reindexGenerateRangeChunksStep() { 
+ return new GenerateRangeChunksStep<>(); + } + + @Bean("reindexLoadIdsStepV1") + public IJobStepWorker reindexLoadIdsStep( + IBatch2DaoSvc theBatch2DaoSvc) { + return new LoadIdsStep<>(theBatch2DaoSvc); + } + + @Bean + public ReindexJobParametersValidatorV1 reindexJobParametersValidatorV1(IBatch2DaoSvc theBatch2DaoSvc) { + return new ReindexJobParametersValidatorV1( + new UrlListValidator(ProviderConstants.OPERATION_REINDEX, theBatch2DaoSvc)); + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/CheckPendingReindexWorkStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/CheckPendingReindexWorkStep.java new file mode 100644 index 000000000000..9a9dfd294f9b --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/CheckPendingReindexWorkStep.java @@ -0,0 +1,42 @@ +package ca.uhn.fhir.batch2.jobs.reindex.v2; + +import ca.uhn.fhir.batch2.api.IJobDataSink; +import ca.uhn.fhir.batch2.api.IJobStepWorker; +import ca.uhn.fhir.batch2.api.JobExecutionFailedException; +import ca.uhn.fhir.batch2.api.RetryChunkLaterException; +import ca.uhn.fhir.batch2.api.RunOutcome; +import ca.uhn.fhir.batch2.api.StepExecutionDetails; +import ca.uhn.fhir.batch2.api.VoidModel; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils; +import ca.uhn.fhir.batch2.jobs.reindex.models.ReindexResults; +import ca.uhn.fhir.batch2.jobs.reindex.svcs.ReindexJobService; +import ca.uhn.fhir.i18n.Msg; +import jakarta.annotation.Nonnull; + +public class CheckPendingReindexWorkStep implements IJobStepWorker { + + private final ReindexJobService myReindexJobService; + + public CheckPendingReindexWorkStep(ReindexJobService theReindexJobService) { + myReindexJobService = theReindexJobService; + } + + @Nonnull + @Override + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink 
theDataSink) + throws JobExecutionFailedException { + + ReindexResults results = theStepExecutionDetails.getData(); + + if (!results.getResourceToHasWorkToComplete().isEmpty()) { + if (myReindexJobService.anyResourceHasPendingReindexWork(results.getResourceToHasWorkToComplete())) { + throw new RetryChunkLaterException(Msg.code(2553), ReindexUtils.getRetryLaterDelay()); + } + } + + return RunOutcome.SUCCESS; + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParametersValidator.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexJobParametersValidatorV2.java similarity index 85% rename from hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParametersValidator.java rename to hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexJobParametersValidatorV2.java index b7560a8ef571..9bfbbfe65643 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParametersValidator.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexJobParametersValidatorV2.java @@ -17,10 +17,11 @@ * limitations under the License. 
* #L% */ -package ca.uhn.fhir.batch2.jobs.reindex; +package ca.uhn.fhir.batch2.jobs.reindex.v2; import ca.uhn.fhir.batch2.api.IJobParametersValidator; import ca.uhn.fhir.batch2.jobs.parameters.IUrlListValidator; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; import ca.uhn.fhir.rest.api.server.RequestDetails; import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; @@ -28,11 +29,11 @@ import java.util.ArrayList; import java.util.List; -public class ReindexJobParametersValidator implements IJobParametersValidator { +public class ReindexJobParametersValidatorV2 implements IJobParametersValidator { private final IUrlListValidator myUrlListValidator; - public ReindexJobParametersValidator(IUrlListValidator theUrlListValidator) { + public ReindexJobParametersValidatorV2(IUrlListValidator theUrlListValidator) { myUrlListValidator = theUrlListValidator; } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexStepV2.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexStepV2.java new file mode 100644 index 000000000000..adfad38c1c15 --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexStepV2.java @@ -0,0 +1,118 @@ +package ca.uhn.fhir.batch2.jobs.reindex.v2; + +import ca.uhn.fhir.batch2.api.IJobDataSink; +import ca.uhn.fhir.batch2.api.IJobStepWorker; +import ca.uhn.fhir.batch2.api.JobExecutionFailedException; +import ca.uhn.fhir.batch2.api.RetryChunkLaterException; +import ca.uhn.fhir.batch2.api.RunOutcome; +import ca.uhn.fhir.batch2.api.StepExecutionDetails; +import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils; +import ca.uhn.fhir.batch2.jobs.reindex.models.ReindexResults; +import ca.uhn.fhir.batch2.jobs.reindex.svcs.ReindexJobService; +import ca.uhn.fhir.i18n.Msg; +import 
ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; +import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; +import jakarta.annotation.Nonnull; + +import java.util.HashMap; +import java.util.Map; + +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.REINDEX_MAX_RETRIES; + +public class ReindexStepV2 + implements IJobStepWorker { + + private final ReindexJobService myReindexJobService; + private final HapiTransactionService myHapiTransactionService; + + private final IFhirSystemDao mySystemDao; + + private final DaoRegistry myDaoRegistry; + + private final IIdHelperService> myIdHelperService; + + public ReindexStepV2( + ReindexJobService theJobService, + HapiTransactionService theHapiTransactionService, + IFhirSystemDao theSystemDao, + DaoRegistry theRegistry, + IIdHelperService> theIdHelperService) { + myDaoRegistry = theRegistry; + myHapiTransactionService = theHapiTransactionService; + mySystemDao = theSystemDao; + myIdHelperService = theIdHelperService; + myReindexJobService = theJobService; + } + + @Nonnull + @Override + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { + ResourceIdListWorkChunkJson data = theStepExecutionDetails.getData(); + ReindexJobParameters jobParameters = theStepExecutionDetails.getParameters(); + + // This is not strictly necessary; + // but we'll ensure that no outstanding "reindex work" + // is waiting to be completed, so that when we do + // our reindex work here, it won't skip over that data + Map resourceTypesToCheckFlag = new HashMap<>(); + data.getTypedPids().forEach(id -> { + // we don't really care 
about duplicates; we check by resource type + resourceTypesToCheckFlag.put(id.getResourceType(), true); + }); + if (myReindexJobService.anyResourceHasPendingReindexWork(resourceTypesToCheckFlag)) { + + throw new RetryChunkLaterException(Msg.code(2552), ReindexUtils.getRetryLaterDelay()); + } + + ReindexResults results = doReindex( + data, + theDataSink, + theStepExecutionDetails.getInstance().getInstanceId(), + theStepExecutionDetails.getChunkId(), + jobParameters); + + theDataSink.accept(results); + + return new RunOutcome(data.size()); + } + + public ReindexResults doReindex( + ResourceIdListWorkChunkJson data, + IJobDataSink theDataSink, + String theInstanceId, + String theChunkId, + ReindexJobParameters theJobParameters) { + RequestDetails requestDetails = new SystemRequestDetails(); + requestDetails.setRetry(true); + requestDetails.setMaxRetries(REINDEX_MAX_RETRIES); + + TransactionDetails transactionDetails = new TransactionDetails(); + ReindexTaskV2.JobParameters jp = new ReindexTaskV2.JobParameters(); + jp.setData(data) + .setRequestDetails(requestDetails) + .setTransactionDetails(transactionDetails) + .setDataSink(theDataSink) + .setInstanceId(theInstanceId) + .setChunkId(theChunkId) + .setJobParameters(theJobParameters); + + ReindexTaskV2 reindexJob = new ReindexTaskV2(jp, myDaoRegistry, mySystemDao, myIdHelperService); + + return myHapiTransactionService + .withRequest(requestDetails) + .withTransactionDetails(transactionDetails) + .withRequestPartitionId(data.getRequestPartitionId()) + .execute(reindexJob); + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexTaskV2.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexTaskV2.java new file mode 100644 index 000000000000..0e77b74f6e6f --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexTaskV2.java @@ -0,0 +1,201 @@ +package ca.uhn.fhir.batch2.jobs.reindex.v2; + 
+import ca.uhn.fhir.batch2.api.IJobDataSink; +import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexWarningProcessor; +import ca.uhn.fhir.batch2.jobs.reindex.models.ReindexResults; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.api.dao.ReindexOutcome; +import ca.uhn.fhir.jpa.api.dao.ReindexParameters; +import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.parser.DataFormatException; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; +import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; +import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; +import ca.uhn.fhir.util.StopWatch; +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.support.TransactionCallback; + +import java.util.List; +import java.util.concurrent.TimeUnit; + +public class ReindexTaskV2 implements TransactionCallback { + private static final Logger ourLog = LoggerFactory.getLogger(ReindexTaskV2.class); + + public static class JobParameters { + private ResourceIdListWorkChunkJson myData; + private RequestDetails myRequestDetails; + private TransactionDetails myTransactionDetails; + private IJobDataSink myDataSink; + private String myChunkId; + private String myInstanceId; + private ReindexJobParameters myJobParameters; + + public ResourceIdListWorkChunkJson getData() { + return myData; + } + + public JobParameters setData(ResourceIdListWorkChunkJson theData) { + myData = theData; + return this; + } + + public RequestDetails getRequestDetails() { + return myRequestDetails; + } + + public JobParameters 
setRequestDetails(RequestDetails theRequestDetails) { + myRequestDetails = theRequestDetails; + return this; + } + + public TransactionDetails getTransactionDetails() { + return myTransactionDetails; + } + + public JobParameters setTransactionDetails(TransactionDetails theTransactionDetails) { + myTransactionDetails = theTransactionDetails; + return this; + } + + public IJobDataSink getDataSink() { + return myDataSink; + } + + public JobParameters setDataSink(IJobDataSink theDataSink) { + myDataSink = theDataSink; + return this; + } + + public String getChunkId() { + return myChunkId; + } + + public JobParameters setChunkId(String theChunkId) { + myChunkId = theChunkId; + return this; + } + + public String getInstanceId() { + return myInstanceId; + } + + public JobParameters setInstanceId(String theInstanceId) { + myInstanceId = theInstanceId; + return this; + } + + public ReindexJobParameters getJobParameters() { + return myJobParameters; + } + + public JobParameters setJobParameters(ReindexJobParameters theJobParameters) { + myJobParameters = theJobParameters; + return this; + } + } + + private final DaoRegistry myDaoRegistry; + private final IFhirSystemDao mySystemDao; + + private final IIdHelperService> myIdHelperService; + + private final ResourceIdListWorkChunkJson myData; + private final RequestDetails myRequestDetails; + private final TransactionDetails myTransactionDetails; + private final IJobDataSink myDataSink; + private final String myChunkId; + private final String myInstanceId; + private final ReindexJobParameters myJobParameters; + + public ReindexTaskV2( + JobParameters theJobParameters, + DaoRegistry theRegistry, + IFhirSystemDao theSystemDao, + IIdHelperService> theIdHelperService) { + myDaoRegistry = theRegistry; + mySystemDao = theSystemDao; + myIdHelperService = theIdHelperService; + + myData = theJobParameters.getData(); + myRequestDetails = theJobParameters.getRequestDetails(); + myTransactionDetails = 
theJobParameters.getTransactionDetails(); + myDataSink = theJobParameters.getDataSink(); + myInstanceId = theJobParameters.getInstanceId(); + myChunkId = theJobParameters.getChunkId(); + myJobParameters = theJobParameters.getJobParameters(); + myDataSink.setWarningProcessor(new ReindexWarningProcessor()); + } + + @Override + public ReindexResults doInTransaction(@Nonnull TransactionStatus theStatus) { + List> persistentIds = myData.getResourcePersistentIds(myIdHelperService); + + ourLog.info( + "Starting reindex work chunk with {} resources - Instance[{}] Chunk[{}]", + persistentIds.size(), + myInstanceId, + myChunkId); + StopWatch sw = new StopWatch(); + ReindexResults reindexResults = new ReindexResults(); + + // Prefetch Resources from DB + boolean reindexSearchParameters = + myJobParameters.getReindexSearchParameters() != ReindexParameters.ReindexSearchParametersEnum.NONE; + mySystemDao.preFetchResources(persistentIds, reindexSearchParameters); + ourLog.info( + "Prefetched {} resources in {} - Instance[{}] Chunk[{}]", + persistentIds.size(), + sw, + myInstanceId, + myChunkId); + + ReindexParameters parameters = new ReindexParameters() + .setReindexSearchParameters(myJobParameters.getReindexSearchParameters()) + .setOptimizeStorage(myJobParameters.getOptimizeStorage()) + .setOptimisticLock(myJobParameters.getOptimisticLock()); + + // Reindex + + sw.restart(); + for (int i = 0; i < myData.size(); i++) { + + String nextResourceType = myData.getResourceType(i); + IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextResourceType); + IResourcePersistentId resourcePersistentId = persistentIds.get(i); + try { + + ReindexOutcome outcome = + dao.reindex(resourcePersistentId, parameters, myRequestDetails, myTransactionDetails); + + outcome.getWarnings().forEach(myDataSink::recoveredError); + reindexResults.addResourceTypeToCompletionStatus(nextResourceType, outcome.isHasPendingWork()); + + } catch (BaseServerResponseException | DataFormatException e) { + String 
resourceForcedId = myIdHelperService + .translatePidIdToForcedIdWithCache(resourcePersistentId) + .orElse(resourcePersistentId.toString()); + String resourceId = nextResourceType + "/" + resourceForcedId; + ourLog.error("Failure during reindexing {}", resourceId, e); + myDataSink.recoveredError("Failure reindexing " + resourceId + ": " + e.getMessage()); + } + } + + ourLog.info( + "Finished reindexing {} resources in {} - {}/sec - Instance[{}] Chunk[{}]", + persistentIds.size(), + sw, + sw.formatThroughput(persistentIds.size(), TimeUnit.SECONDS), + myInstanceId, + myChunkId); + + return reindexResults; + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexV2Config.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexV2Config.java new file mode 100644 index 000000000000..6cc4553c64b7 --- /dev/null +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/v2/ReindexV2Config.java @@ -0,0 +1,110 @@ +package ca.uhn.fhir.batch2.jobs.reindex.v2; + +import ca.uhn.fhir.batch2.api.IJobStepWorker; +import ca.uhn.fhir.batch2.api.VoidModel; +import ca.uhn.fhir.batch2.jobs.chunk.ChunkRangeJson; +import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; +import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; +import ca.uhn.fhir.batch2.jobs.reindex.models.ReindexResults; +import ca.uhn.fhir.batch2.jobs.reindex.svcs.ReindexJobService; +import ca.uhn.fhir.batch2.jobs.step.GenerateRangeChunksStep; +import ca.uhn.fhir.batch2.jobs.step.LoadIdsStep; +import ca.uhn.fhir.batch2.model.JobDefinition; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc; +import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import 
ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX; + +@Configuration +public class ReindexV2Config { + + @Autowired + private ReindexJobService myReindexJobService; + + @Autowired + private HapiTransactionService myHapiTransactionService; + + @Autowired + private IFhirSystemDao mySystemDao; + + @Autowired + private DaoRegistry myRegistry; + + @Autowired + private IIdHelperService> myIdHelperService; + + @Autowired + @Qualifier("reindexGenerateRangeChunkStepV2") + private IJobStepWorker myReindexGenerateRangeChunkStep; + + @Autowired + @Qualifier("reindexLoadIdsStepV2") + private IJobStepWorker myReindexLoadIdsStep; + + @Autowired + private ReindexJobParametersValidatorV2 myReindexJobParametersValidator; + + // Version 2 + @Bean + public JobDefinition reindexJobDefinitionV2() { + return JobDefinition.newBuilder() + .setJobDefinitionId(JOB_REINDEX) + .setJobDescription("Reindex resources") + .setJobDefinitionVersion(2) + .setParametersType(ReindexJobParameters.class) + .setParametersValidator(myReindexJobParametersValidator) + .gatedExecution() + .addFirstStep( + "generate-ranges", + "Generate data ranges to reindex", + ChunkRangeJson.class, + myReindexGenerateRangeChunkStep) + .addIntermediateStep( + "load-ids", + "Load IDs of resources to reindex", + ResourceIdListWorkChunkJson.class, + myReindexLoadIdsStep) + .addIntermediateStep( + "reindex-start", "Perform the resource reindex", ReindexResults.class, reindexStepV2()) + .addLastStep("reindex-pending-work", "Waits for reindex work to complete.", pendingWorkStep()) + .build(); + } + + @Bean + public CheckPendingReindexWorkStep 
pendingWorkStep() { + return new CheckPendingReindexWorkStep(myReindexJobService); + } + + @Bean + public ReindexStepV2 reindexStepV2() { + return new ReindexStepV2( + myReindexJobService, myHapiTransactionService, mySystemDao, myRegistry, myIdHelperService); + } + + @Bean("reindexGenerateRangeChunkStepV2") + public IJobStepWorker reindexGenerateRangeChunksStep() { + return new GenerateRangeChunksStep<>(); + } + + @Bean("reindexLoadIdsStepV2") + public IJobStepWorker reindexLoadIdsStep( + IBatch2DaoSvc theBatch2DaoSvc) { + return new LoadIdsStep<>(theBatch2DaoSvc); + } + + @Bean + public ReindexJobParametersValidatorV2 reindexJobParametersValidatorV2(IBatch2DaoSvc theBatch2DaoSvc) { + return new ReindexJobParametersValidatorV2( + new UrlListValidator(ProviderConstants.OPERATION_REINDEX, theBatch2DaoSvc)); + } +} diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParametersValidatorTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexTaskParametersValidatorTest.java similarity index 86% rename from hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParametersValidatorTest.java rename to hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexTaskParametersValidatorTest.java index 0fe1c00e4610..f03db3068897 100644 --- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParametersValidatorTest.java +++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexTaskParametersValidatorTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.batch2.jobs.reindex; import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator; +import ca.uhn.fhir.batch2.jobs.reindex.v1.ReindexJobParametersValidatorV1; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; @@ -13,13 +14,13 @@ 
import static org.assertj.core.api.Assertions.assertThat; @ExtendWith(MockitoExtension.class) -public class ReindexJobParametersValidatorTest { +public class ReindexTaskParametersValidatorTest { @Mock private UrlListValidator myListValidator; @InjectMocks - private ReindexJobParametersValidator myValidator; + private ReindexJobParametersValidatorV1 myValidator; @ParameterizedTest @ValueSource(strings = { "\n", " ", "\t" }) diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/RetryChunkLaterException.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/RetryChunkLaterException.java index ec35cb770d22..0d132fc71911 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/RetryChunkLaterException.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/RetryChunkLaterException.java @@ -38,11 +38,18 @@ public class RetryChunkLaterException extends RuntimeException { private final Duration myNextPollDuration; public RetryChunkLaterException() { - this(ONE_MINUTE); + this("", ONE_MINUTE); } + /** + * For HAPI exceptions, use {@link RetryChunkLaterException#RetryChunkLaterException(String, Duration)} + */ public RetryChunkLaterException(Duration theDuration) { - super(); + this("", theDuration); + } + + public RetryChunkLaterException(String theCode, Duration theDuration) { + super(theCode); this.myNextPollDuration = theDuration; } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java index b7969f1ae1c5..0ecacb9bdf6b 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java @@ -62,7 +62,7 @@ public RequestPartitionId getRequestPartitionId() { return myRequestPartitionId; } - 
private List getTypedPids() { + public List getTypedPids() { if (myTypedPids == null) { myTypedPids = new ArrayList<>(); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java index 7082fed82409..d918f5572cec 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java @@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOutcome; +import ca.uhn.fhir.jpa.api.model.ReindexJobStatus; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.jpa.model.entity.TagTypeEnum; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -315,12 +316,24 @@ default T readByPid(IResourcePersistentId thePid, boolean theDeletedOk) { * @param theResourcePersistentId The ID * @return */ + @SuppressWarnings("rawtypes") ReindexOutcome reindex( IResourcePersistentId theResourcePersistentId, ReindexParameters theReindexParameters, RequestDetails theRequest, TransactionDetails theTransactionDetails); + /** + * Returns ReindexJobStatus information object that tells the caller + * if a reindex job is still in progress or not. + * + * If the implementing DAO requires additional work during reindexing, + * this is the method to override. 
+ */ + default ReindexJobStatus getReindexJobStatus() { + return ReindexJobStatus.NO_WORK_NEEDED; + } + void removeTag( IIdType theId, TagTypeEnum theTagType, String theSystem, String theCode, RequestDetails theRequestDetails); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexOutcome.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexOutcome.java index 8c7eb7fa5b99..744e1def01f8 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexOutcome.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexOutcome.java @@ -29,6 +29,11 @@ public class ReindexOutcome { private List myWarnings; + /** + * True if there is additional (async) work to wait on. + */ + private boolean myHasPendingWork; + public List getWarnings() { return defaultIfNull(myWarnings, Collections.emptyList()); } @@ -39,4 +44,12 @@ public void addWarning(String theWarning) { } myWarnings.add(theWarning); } + + public boolean isHasPendingWork() { + return myHasPendingWork; + } + + public void setHasPendingWork(boolean theHasPendingWork) { + myHasPendingWork = theHasPendingWork; + } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/ReindexJobStatus.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/ReindexJobStatus.java new file mode 100644 index 000000000000..227756c5e44b --- /dev/null +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/ReindexJobStatus.java @@ -0,0 +1,16 @@ +package ca.uhn.fhir.jpa.api.model; + +public class ReindexJobStatus { + + public static ReindexJobStatus NO_WORK_NEEDED = new ReindexJobStatus(); + + private boolean myHasReindexWorkPending; + + public boolean isHasReindexWorkPending() { + return myHasReindexWorkPending; + } + + public void setHasReindexWorkPending(boolean theHasReindexWorkPending) { + myHasReindexWorkPending = theHasReindexWorkPending; + } +}