diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java index e20c9d591e99..8eaa217078e7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java @@ -153,7 +153,7 @@ public List fetchAllConformanceResources() { if (myCodeSystems != null) { retVal.addAll(myCodeSystems.values()); } - if (myStructureDefinitionResources != null) { + if (myStructureDefinitions != null) { retVal.addAll(myStructureDefinitions.values()); } if (myValueSets != null) { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java index 0703f080c7e1..3959d3d6a29b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java @@ -19,9 +19,22 @@ */ package ca.uhn.fhir.rest.param; +import java.util.Collections; import java.util.Map; public class HistorySearchDateRangeParam extends DateRangeParam { + /** + * Constructor + * + * @since 8.0.0 + */ + public HistorySearchDateRangeParam() { + this(Collections.emptyMap(), new DateRangeParam(), null); + } + + /** + * Constructor + */ public HistorySearchDateRangeParam( Map theParameters, DateRangeParam theDateRange, Integer theOffset) { super(theDateRange); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java index 2f3dac411e47..84ed3e2303b1 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java @@ 
-29,7 +29,7 @@ public static String formatFileSize(long theBytes) { if (theBytes <= 0) { return "0 " + UNITS[0]; } - int digitGroups = (int) (Math.log10(theBytes) / Math.log10(1024)); + int digitGroups = (int) (Math.log10((double) theBytes) / Math.log10(1024)); digitGroups = Math.min(digitGroups, UNITS.length - 1); return new DecimalFormat("###0.#").format(theBytes / Math.pow(1024, digitGroups)) + " " + UNITS[digitGroups]; } diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-fix-timezone-issue-for-history-at-query.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-fix-timezone-issue-for-history-at-query.yaml new file mode 100644 index 000000000000..f6a1e9b750fd --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-fix-timezone-issue-for-history-at-query.yaml @@ -0,0 +1,7 @@ +--- +type: fix +issue: 6409 +title: "When performing a `_history` query using the `_at` parameter, the time value + is now converted to a zoned-date before being passed to the database. This should + avoid conflicts around date changes on some databases. + " diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-optimize-versioned-tag-loading.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-optimize-versioned-tag-loading.yaml new file mode 100644 index 000000000000..37faa975eeb8 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-optimize-versioned-tag-loading.yaml @@ -0,0 +1,7 @@ +--- +type: perf +issue: 6409 +title: "When searching in versioned tag mode, the JPA server now avoids a redundant + lookup of the un-versioned tags, avoiding an extra unnecessary database query + in some cases. 
+ " diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-stop-using-provenance-table.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-stop-using-provenance-table.yaml new file mode 100644 index 000000000000..2f845786de91 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-stop-using-provenance-table.yaml @@ -0,0 +1,11 @@ +--- +type: perf +issue: 6409 +title: "The JPA server will no longer use the HFJ_RES_VER_PROV table to store and index values from + the `Resource.meta.source` element. Beginning in HAPI FHIR 6.8.0 (and Smile CDR 2023.08.R01), a + new pair of columns have been used to store data for this element, so this change only affects + data which was stored in HAPI FHIR prior to version 6.8.0 (released August 2023). If you have + FHIR resources which were stored in a JPA server prior to this version, and you use the + Resource.meta.source element and/or the `_source` search parameter, you should perform a complete + reindex of your server to ensure that data is not lost. See the upgrade notes for more information. + " diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md index 58969977a1b5..b8fa6a5af5d7 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md @@ -1,4 +1,20 @@ +# Upgrade Notes + +The JPA server stores values for the field `Resource.meta.source` in dedicated columns in its database so that they can be indexes and searched for as needed, using the `_source` Search Parameter. + +Prior to HAPI FHIR 6.8.0 (and Smile CDR 2023.08.R01), these values were stored in a dedicated table called `HFJ_RES_VER_PROV`. 
Beginning in HAPI FHIR 6.8.0 (Smile CDR 2023.08.R01), two new columns were added to the `HFJ_RES_VER` +table which store the same data and make it available for searches. + +As of HAPI FHIR 8.0.0, the legacy table is no longer searched by default. If you do not have Resource.meta.source data stored in HAPI FHIR that was last created/updated prior to version 6.8.0, this change will not affect you and no action needs to be taken. + +If you do have such data, you should take the following steps: + +* Enable the JpaStorageSettings setting `setAccessMetaSourceInformationFromProvenanceTable(true)` to configure the server to continue using the legacy table. + +* Perform a server resource reindex by invoking the [$reindex Operation (server)](https://smilecdr.com/docs/fhir_repository/search_parameter_reindexing.html#reindex-server) with the `optimizeStorage` parameter set to `ALL_VERSIONS`. + +* When this reindex operation has successfully completed, the setting above can be disabled. Disabling this setting avoids an extra database round-trip when loading data, so this change will have a positive performance impact on your server. + # Fulltext Search with _lastUpdated Filter -Fulltext searches have been updated to support `_lastUpdated` search parameter. A reindexing of Search Parameters -is required to migrate old data to support the `_lastUpdated` search parameter. +Fulltext searches have been updated to support `_lastUpdated` search parameter. If you are using Advanced Hibernate Search indexing and wish to use the `_lastUpdated` search parameter with this feature, a full reindex of your repository is required. 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java index a17e3425f466..b889b27e10ff 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java @@ -121,11 +121,12 @@ import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryProvenancePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder; -import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder; @@ -699,8 +700,15 @@ public TokenPredicateBuilder newTokenPredicateBuilder(SearchQueryBuilder theSear @Bean @Scope("prototype") - public SourcePredicateBuilder newSourcePredicateBuilder(SearchQueryBuilder theSearchBuilder) { - return new SourcePredicateBuilder(theSearchBuilder); + public ResourceHistoryPredicateBuilder newResourceHistoryPredicateBuilder(SearchQueryBuilder theSearchBuilder) { + return new ResourceHistoryPredicateBuilder(theSearchBuilder); + } + + @Bean + @Scope("prototype") + public ResourceHistoryProvenancePredicateBuilder newResourceHistoryProvenancePredicateBuilder( + 
SearchQueryBuilder theSearchBuilder) { + return new ResourceHistoryProvenancePredicateBuilder(theSearchBuilder); } @Bean diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java index 80d8665362a7..8bac9ba0312c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java @@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; -import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao; import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.model.config.PartitionSettings; @@ -89,9 +88,6 @@ public class SearchConfig { @Autowired private DaoRegistry myDaoRegistry; - @Autowired - private IResourceSearchViewDao myResourceSearchViewDao; - @Autowired private FhirContext myContext; @@ -169,7 +165,6 @@ public ISearchBuilder newSearchBuilder(String theResourceName, Class extends B @Autowired private IJobCoordinator myJobCoordinator; + @Autowired + private IResourceHistoryProvenanceDao myResourceHistoryProvenanceDao; + private IInstanceValidatorModule myInstanceValidator; private String myResourceName; private Class myResourceType; @@ -562,7 +567,7 @@ private DaoMethodOutcome doCreateForPostOrPut( thePerformIndexing); // Store the resource forced ID if necessary - JpaPid jpaPid = JpaPid.fromId(updatedEntity.getResourceId()); + JpaPid jpaPid = updatedEntity.getPersistentId(); // Populate the resource with its actual final stored ID from the entity theResource.setId(entity.getIdDt()); @@ -570,9 +575,7 @@ private DaoMethodOutcome doCreateForPostOrPut( // Pre-cache the resource ID jpaPid.setAssociatedResourceId(entity.getIdType(myFhirContext)); String fhirId = 
entity.getFhirId(); - if (fhirId == null) { - fhirId = Long.toString(entity.getId()); - } + assert fhirId != null; myIdHelperService.addResolvedPidToFhirIdAfterCommit( jpaPid, theRequestPartitionId, getResourceName(), fhirId, null); theTransactionDetails.addResolvedResourceId(jpaPid.getAssociatedResourceId(), jpaPid); @@ -1016,7 +1019,7 @@ public void beforeCommit(boolean readOnly) { protected ResourceTable updateEntityForDelete( RequestDetails theRequest, TransactionDetails theTransactionDetails, ResourceTable theEntity) { - myResourceSearchUrlSvc.deleteByResId(theEntity.getId()); + myResourceSearchUrlSvc.deleteByResId(theEntity.getPersistentId()); Date updateTime = new Date(); return updateEntity(theRequest, null, theEntity, updateTime, true, true, theTransactionDetails, false, true); } @@ -1261,7 +1264,7 @@ public IBundleProvider history( return myPersistedJpaBundleProviderFactory.history( theRequest, myResourceName, - entity.getId(), + entity.getPersistentId(), theSince, theUntil, theOffset, @@ -1291,7 +1294,7 @@ public IBundleProvider history( return myPersistedJpaBundleProviderFactory.history( theRequest, myResourceName, - entity.getId(), + JpaPid.fromId(entity.getId()), theHistorySearchDateRangeParam.getLowerBoundAsInstant(), theHistorySearchDateRangeParam.getUpperBoundAsInstant(), theHistorySearchDateRangeParam.getOffset(), @@ -1391,8 +1394,8 @@ protected void doMetaAddOperation( doMetaAdd(theMetaAdd, latestVersion, theRequest, transactionDetails); // Also update history entry - ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( - entity.getId(), entity.getVersion()); + ResourceHistoryTable history = + myResourceHistoryTableDao.findForIdAndVersion(entity.getId(), entity.getVersion()); doMetaAdd(theMetaAdd, history, theRequest, transactionDetails); } @@ -1439,8 +1442,8 @@ public void doMetaDeleteOperation( } else { doMetaDelete(theMetaDel, latestVersion, theRequest, transactionDetails); // Also update history 
entry - ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( - entity.getId(), entity.getVersion()); + ResourceHistoryTable history = + myResourceHistoryTableDao.findForIdAndVersion(entity.getId(), entity.getVersion()); doMetaDelete(theMetaDel, history, theRequest, transactionDetails); } @@ -1705,7 +1708,7 @@ private void reindexOptimizeStorage( ResourceTable entity, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageMode) { ResourceHistoryTable historyEntity = entity.getCurrentVersionEntity(); if (historyEntity != null) { - reindexOptimizeStorageHistoryEntity(entity, historyEntity); + reindexOptimizeStorageHistoryEntityThenDetachIt(entity, historyEntity); if (theOptimizeStorageMode == ReindexParameters.OptimizeStorageModeEnum.ALL_VERSIONS) { int pageSize = 100; for (int page = 0; ((long) page * pageSize) < entity.getVersion(); page++) { @@ -1715,39 +1718,44 @@ private void reindexOptimizeStorage( // different pages as the underlying data gets updated. 
PageRequest pageRequest = PageRequest.of(page, pageSize, Sort.by("myId")); Slice historyEntities = - myResourceHistoryTableDao.findForResourceIdAndReturnEntitiesAndFetchProvenance( + myResourceHistoryTableDao.findAllVersionsExceptSpecificForResourcePid( pageRequest, entity.getId(), historyEntity.getVersion()); for (ResourceHistoryTable next : historyEntities) { - reindexOptimizeStorageHistoryEntity(entity, next); + reindexOptimizeStorageHistoryEntityThenDetachIt(entity, next); } } } } } - private void reindexOptimizeStorageHistoryEntity(ResourceTable entity, ResourceHistoryTable historyEntity) { - boolean changed = false; + /** + * Note that the entity will be detached after being saved if it has changed + * in order to avoid growing the number of resources in memory to be too big + */ + private void reindexOptimizeStorageHistoryEntityThenDetachIt( + ResourceTable entity, ResourceHistoryTable historyEntity) { if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC || historyEntity.getEncoding() == ResourceEncodingEnum.JSON) { byte[] resourceBytes = historyEntity.getResource(); if (resourceBytes != null) { String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding()); - if (myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText)) { - changed = true; - } + myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText); } } - if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) { - if (historyEntity.getProvenance() != null) { - historyEntity.setSourceUri(historyEntity.getProvenance().getSourceUri()); - historyEntity.setRequestId(historyEntity.getProvenance().getRequestId()); - changed = true; + if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) { + if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) { + Long id = historyEntity.getId(); + Optional provenanceEntityOpt = + 
myResourceHistoryProvenanceDao.findById(id); + if (provenanceEntityOpt.isPresent()) { + ResourceHistoryProvenanceEntity provenanceEntity = provenanceEntityOpt.get(); + historyEntity.setSourceUri(provenanceEntity.getSourceUri()); + historyEntity.setRequestId(provenanceEntity.getRequestId()); + myResourceHistoryProvenanceDao.delete(provenanceEntity); + } } } - if (changed) { - myResourceHistoryTableDao.save(historyEntity); - } } private BaseHasResource readEntity( diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java index d21dff545d32..0aad2196720e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java @@ -206,7 +206,7 @@ public

void preFetchResources( * However, for realistic average workloads, this should reduce the number of round trips. */ if (!idChunk.isEmpty()) { - List entityChunk = prefetchResourceTableHistoryAndProvenance(idChunk); + List entityChunk = prefetchResourceTableAndHistory(idChunk); if (thePreFetchIndexes) { @@ -244,14 +244,13 @@ public

void preFetchResources( } @Nonnull - private List prefetchResourceTableHistoryAndProvenance(List idChunk) { + private List prefetchResourceTableAndHistory(List idChunk) { assert idChunk.size() < SearchConstants.MAX_PAGE_SIZE : "assume pre-chunked"; Query query = myEntityManager.createQuery("select r, h " + " FROM ResourceTable r " + " LEFT JOIN fetch ResourceHistoryTable h " + " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId " - + " left join fetch h.myProvenance " + " WHERE r.myId IN ( :IDS ) "); query.setParameter("IDS", idChunk); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java index c5a5dba6d94b..2493e97e4eca 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java @@ -219,7 +219,7 @@ private ISearchQueryExecutor doSearch( // indicate param was already processed, otherwise queries DB to process it theParams.setOffset(null); - return SearchQueryExecutors.from(longs); + return SearchQueryExecutors.from(JpaPid.fromLongList(longs)); } private int getMaxFetchSize(SearchParameterMap theParams, Integer theMax) { @@ -386,7 +386,6 @@ public List search( @SuppressWarnings("rawtypes") private List toList(ISearchQueryExecutor theSearchResultStream, long theMaxSize) { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(theSearchResultStream, 0), false) - .map(JpaPid::fromId) .limit(theMaxSize) .collect(Collectors.toList()); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java index dcf578ee5c9d..4d679594475a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java @@ -40,7 +40,6 @@ import jakarta.persistence.criteria.CriteriaBuilder; import jakarta.persistence.criteria.CriteriaQuery; import jakarta.persistence.criteria.Expression; -import jakarta.persistence.criteria.JoinType; import jakarta.persistence.criteria.Predicate; import jakarta.persistence.criteria.Root; import jakarta.persistence.criteria.Subquery; @@ -125,8 +124,6 @@ public List fetchEntities( addPredicatesToQuery(cb, thePartitionId, criteriaQuery, from, theHistorySearchStyle); - from.fetch("myProvenance", JoinType.LEFT); - /* * The sort on myUpdated is the important one for _history operations, but there are * cases where multiple pages of results all have the exact same myUpdated value (e.g. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java index fe3ae4824df2..3f7334572913 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java @@ -19,15 +19,15 @@ */ package ca.uhn.fhir.jpa.dao; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.BaseTag; import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; -import ca.uhn.fhir.jpa.model.entity.ResourceTag; import jakarta.annotation.Nullable; import org.hl7.fhir.instance.model.api.IBaseResource; import java.util.Collection; -public interface IJpaStorageResourceParser extends IStorageResourceParser { +public interface IJpaStorageResourceParser extends IStorageResourceParser { /** * Convert a storage entity into a FHIR resource model instance. 
This method may return null if the entity is not @@ -36,7 +36,7 @@ public interface IJpaStorageResourceParser extends IStorageResourceParser { R toResource( Class theResourceType, IBaseResourceEntity theEntity, - Collection theTagList, + Collection theTagList, boolean theForHistoryOperation); /** diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java index 09d46a557e43..4069b91b7c50 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java @@ -26,9 +26,9 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.IDao; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; import ca.uhn.fhir.jpa.entity.PartitionEntity; -import ca.uhn.fhir.jpa.entity.ResourceSearchView; import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry; import ca.uhn.fhir.jpa.esr.IExternallyStoredResourceService; import ca.uhn.fhir.jpa.model.config.PartitionSettings; @@ -37,9 +37,9 @@ import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable; -import ca.uhn.fhir.jpa.model.entity.ResourceTag; import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.jpa.model.entity.TagTypeEnum; import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc; @@ -71,12 +71,13 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.Date; 
import java.util.List; +import java.util.Optional; import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.decodeResource; import static java.util.Objects.nonNull; +import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class JpaStorageResourceParser implements IJpaStorageResourceParser { @@ -92,6 +93,9 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { @Autowired private IResourceHistoryTableDao myResourceHistoryTableDao; + @Autowired + private IResourceHistoryProvenanceDao myResourceHistoryProvenanceDao; + @Autowired private PartitionSettings myPartitionSettings; @@ -115,14 +119,14 @@ public IBaseResource toResource(IBasePersistedResource theEntity, boolean theFor public R toResource( Class theResourceType, IBaseResourceEntity theEntity, - Collection theTagList, + Collection theTagList, boolean theForHistoryOperation) { // 1. get resource, it's encoding and the tags if any byte[] resourceBytes; String resourceText; ResourceEncodingEnum resourceEncoding; - @Nullable Collection tagList = Collections.emptyList(); + @Nullable Collection tagList; long version; String provenanceSourceUri = null; String provenanceRequestId = null; @@ -132,25 +136,42 @@ public R toResource( resourceBytes = history.getResource(); resourceText = history.getResourceTextVc(); resourceEncoding = history.getEncoding(); - switch (myStorageSettings.getTagStorageMode()) { - case VERSIONED: - default: - if (history.isHasTags()) { - tagList = history.getTags(); - } - break; - case NON_VERSIONED: - if (history.getResourceTable().isHasTags()) { - tagList = history.getResourceTable().getTags(); - } - break; - case INLINE: - tagList = null; + + // For search results we get the list of tags passed in because we load it + // in bulk for all resources we're going to return, but for read results + // we don't get the list passed in so we need to load it here. 
+ tagList = theTagList; + if (tagList == null) { + switch (myStorageSettings.getTagStorageMode()) { + case VERSIONED: + default: + if (history.isHasTags()) { + tagList = history.getTags(); + } + break; + case NON_VERSIONED: + if (history.getResourceTable().isHasTags()) { + tagList = history.getResourceTable().getTags(); + } + break; + case INLINE: + tagList = null; + } } + version = history.getVersion(); - if (history.getProvenance() != null) { - provenanceRequestId = history.getProvenance().getRequestId(); - provenanceSourceUri = history.getProvenance().getSourceUri(); + provenanceSourceUri = history.getSourceUri(); + provenanceRequestId = history.getRequestId(); + if (isBlank(provenanceSourceUri) && isBlank(provenanceRequestId)) { + if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) { + Optional provenanceOpt = + myResourceHistoryProvenanceDao.findById(history.getId()); + if (provenanceOpt.isPresent()) { + ResourceHistoryProvenanceEntity provenance = provenanceOpt.get(); + provenanceRequestId = provenance.getRequestId(); + provenanceSourceUri = provenance.getSourceUri(); + } + } } } else if (theEntity instanceof ResourceTable) { ResourceTable resource = (ResourceTable) theEntity; @@ -159,14 +180,13 @@ public R toResource( history = resource.getCurrentVersionEntity(); } else { version = theEntity.getVersion(); - history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version); + history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), version); ((ResourceTable) theEntity).setCurrentVersionEntity(history); while (history == null) { if (version > 1L) { version--; - history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( - theEntity.getId(), version); + history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), version); } else { return null; } @@ -181,36 +201,28 @@ public R toResource( case NON_VERSIONED: if (resource.isHasTags()) { 
tagList = resource.getTags(); + } else { + tagList = List.of(); } break; case INLINE: + default: tagList = null; break; } version = history.getVersion(); - if (history.getProvenance() != null) { - provenanceRequestId = history.getProvenance().getRequestId(); - provenanceSourceUri = history.getProvenance().getSourceUri(); - } - } else if (theEntity instanceof ResourceSearchView) { - // This is the search View - ResourceSearchView view = (ResourceSearchView) theEntity; - resourceBytes = view.getResource(); - resourceText = view.getResourceTextVc(); - resourceEncoding = view.getEncoding(); - version = view.getVersion(); - provenanceRequestId = view.getProvenanceRequestId(); - provenanceSourceUri = view.getProvenanceSourceUri(); - switch (myStorageSettings.getTagStorageMode()) { - case VERSIONED: - case NON_VERSIONED: - if (theTagList != null) { - tagList = theTagList; + provenanceSourceUri = history.getSourceUri(); + provenanceRequestId = history.getRequestId(); + if (isBlank(provenanceSourceUri) && isBlank(provenanceRequestId)) { + if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) { + Optional provenanceOpt = + myResourceHistoryProvenanceDao.findById(history.getId()); + if (provenanceOpt.isPresent()) { + ResourceHistoryProvenanceEntity provenance = provenanceOpt.get(); + provenanceRequestId = provenance.getRequestId(); + provenanceSourceUri = provenance.getSourceUri(); } - break; - case INLINE: - tagList = null; - break; + } } } else { // something wrong @@ -277,7 +289,7 @@ private R parseResource( } else if (theResourceEncoding != ResourceEncodingEnum.DEL) { IParser parser = new TolerantJsonParser( - getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getId()); + getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getResourceId()); try { retVal = parser.parseResource(theResourceType, theDecodedResourceText); @@ -519,8 +531,8 @@ public void updateResourceMetadata(IBaseResourceEntity theEntitySource, 
IBaseRes theResourceTarget.setId(id); if (theResourceTarget instanceof IResource) { - ResourceMetadataKeyEnum.VERSION.put((IResource) theResourceTarget, id.getVersionIdPart()); - ResourceMetadataKeyEnum.UPDATED.put((IResource) theResourceTarget, theEntitySource.getUpdated()); + ResourceMetadataKeyEnum.VERSION.put(theResourceTarget, id.getVersionIdPart()); + ResourceMetadataKeyEnum.UPDATED.put(theResourceTarget, theEntitySource.getUpdated()); } else { IBaseMetaType meta = theResourceTarget.getMeta(); meta.setVersionId(id.getVersionIdPart()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java index 765dc33f09b5..f189daf38475 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java @@ -38,10 +38,8 @@ public interface IResourceHistoryTableDao extends JpaRepository findAllVersionsForResourceIdInOrder(@Param("resId") Long theId); - @Query( - "SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :id AND t.myResourceVersion = :version") - ResourceHistoryTable findForIdAndVersionAndFetchProvenance( - @Param("id") long theId, @Param("version") long theVersion); + @Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :id AND t.myResourceVersion = :version") + ResourceHistoryTable findForIdAndVersion(@Param("id") long theId, @Param("version") long theVersion); @Query( "SELECT t.myId FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion") @@ -49,8 +47,8 @@ Slice findForResourceId( Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion); @Query( - "SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE 
t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion") - Slice findForResourceIdAndReturnEntitiesAndFetchProvenance( + "SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion") + Slice findAllVersionsExceptSpecificForResourcePid( Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion); @Query("" + "SELECT v.myId FROM ResourceHistoryTable v " @@ -91,4 +89,10 @@ void updateVersion( @Query( "UPDATE ResourceHistoryTable r SET r.myResourceTextVc = null, r.myResource = :text, r.myEncoding = 'JSONC' WHERE r.myId = :pid") void updateNonInlinedContents(@Param("text") byte[] theText, @Param("pid") long thePid); + + @Query("SELECT v FROM ResourceHistoryTable v " + "JOIN FETCH v.myResourceTable t " + + "WHERE v.myResourceId IN (:pids) " + + "AND t.myVersion = v.myResourceVersion") + List findCurrentVersionsByResourcePidsAndFetchResourceTable( + @Param("pids") List theVersionlessPids); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java index 01f2bb3e8f31..4b72b9730d86 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java @@ -25,9 +25,15 @@ import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; +import java.util.Collection; + public interface IResourceHistoryTagDao extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("DELETE FROM ResourceHistoryTag t WHERE t.myResourceHistoryPid = :historyPid") void deleteByPid(@Param("historyPid") Long theResourceHistoryTablePid); + + @Query( + "SELECT t FROM ResourceHistoryTag t INNER JOIN FETCH t.myTag WHERE t.myResourceHistory.myId IN (:historyPids)") + Collection 
findByVersionIds(@Param("historyPids") Collection theIdList); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java index 98e9471a18ca..e82ca24df13e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.dao.data; import ca.uhn.fhir.jpa.entity.SearchResult; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import com.google.errorprone.annotations.CanIgnoreReturnValue; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; @@ -28,6 +29,7 @@ import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; +import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -55,4 +57,16 @@ int deleteBySearchIdInRange( @Query("SELECT count(r) FROM SearchResult r WHERE r.mySearchPid = :search") int countForSearch(@Param("search") Long theSearchPid); + + /** + * Converts a response from {@link #findWithSearchPid(Long, Pageable)} to + * a List of JpaPid objects + */ + static List toJpaPidList(List theArrays) { + List retVal = new ArrayList<>(theArrays.size()); + for (Long next : theArrays) { + retVal.add(JpaPid.fromId(next)); + } + return retVal; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java index f8d63002793a..81d394319fcb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java @@ -321,10 +321,12 @@ protected int expungeEverythingByTypeWithoutPurging( 
nativeQuery.setMaxResults(800); List pids = nativeQuery.getResultList(); - nativeQuery = myEntityManager.createQuery("DELETE FROM " + theEntityType.getSimpleName() - + " WHERE " + idProperty + " IN (:pids)"); - nativeQuery.setParameter("pids", pids); - nativeQuery.executeUpdate(); + if (!pids.isEmpty()) { + nativeQuery = myEntityManager.createQuery("DELETE FROM " + theEntityType.getSimpleName() + + " WHERE " + idProperty + " IN (:pids)"); + nativeQuery.setParameter("pids", pids); + nativeQuery.executeUpdate(); + } return pids.size(); }); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java index 860113cec067..ab47125ca870 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java @@ -45,6 +45,7 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao; import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.util.MemoryCacheService; @@ -71,6 +72,7 @@ import java.util.Collections; import java.util.List; +import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; @Service @@ -159,8 +161,7 @@ public List findHistoricalVersionsOfNonDeletedResources( Slice ids; if (theJpaPid != null && theJpaPid.getId() != null) { if (theJpaPid.getVersion() != null) { - ids = toSlice(myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( - theJpaPid.getId(), theJpaPid.getVersion())); + ids = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theJpaPid.getId(), theJpaPid.getVersion())); } else { ids = 
myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResourceId(page, theJpaPid.getId()); } @@ -239,9 +240,10 @@ private void expungeHistoricalVersion( callHooks(theRequestDetails, theRemainingCount, version, id); - if (version.getProvenance() != null) { - myResourceHistoryProvenanceTableDao.deleteByPid( - version.getProvenance().getId()); + if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) { + Optional provenanceOpt = + myResourceHistoryProvenanceTableDao.findById(theNextVersionId); + provenanceOpt.ifPresent(entity -> myResourceHistoryProvenanceTableDao.deleteByPid(entity.getId())); } myResourceHistoryTagDao.deleteByPid(version.getId()); @@ -302,8 +304,8 @@ protected void expungeCurrentVersionOfResource( RequestDetails theRequestDetails, Long theResourceId, AtomicInteger theRemainingCount) { ResourceTable resource = myResourceTableDao.findById(theResourceId).orElseThrow(IllegalStateException::new); - ResourceHistoryTable currentVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( - resource.getId(), resource.getVersion()); + ResourceHistoryTable currentVersion = + myResourceHistoryTableDao.findForIdAndVersion(resource.getId(), resource.getVersion()); if (currentVersion != null) { expungeHistoricalVersion(theRequestDetails, currentVersion.getId(), theRemainingCount); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java index 8b0b17b7f9fd..42285f6f071c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java @@ -19,6 +19,7 @@ */ package ca.uhn.fhir.jpa.dao.search; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor; import 
org.hibernate.search.engine.search.query.SearchScroll; import org.hibernate.search.engine.search.query.SearchScrollResult; @@ -57,12 +58,12 @@ public boolean hasNext() { } @Override - public Long next() { + public JpaPid next() { Long result = myCurrentIterator.next(); // was this the last in the current scroll page? if (!myCurrentIterator.hasNext()) { advanceNextScrollPage(); } - return result; + return JpaPid.fromId(result); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java deleted file mode 100644 index e53fa57ecad3..000000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2024 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ -package ca.uhn.fhir.jpa.entity; - -import ca.uhn.fhir.context.FhirVersionEnum; -import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; -import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; -import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum; -import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; -import ca.uhn.fhir.jpa.model.entity.ResourceTable; -import ca.uhn.fhir.model.primitive.IdDt; -import ca.uhn.fhir.model.primitive.InstantDt; -import ca.uhn.fhir.rest.api.Constants; -import jakarta.annotation.Nullable; -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.EnumType; -import jakarta.persistence.Enumerated; -import jakarta.persistence.Id; -import jakarta.persistence.Lob; -import jakarta.persistence.Temporal; -import jakarta.persistence.TemporalType; -import org.hibernate.annotations.Immutable; -import org.hibernate.annotations.Subselect; - -import java.io.Serializable; -import java.util.Date; - -@SuppressWarnings("SqlDialectInspection") -@Entity -@Immutable -// Ideally, all tables and columns should be in UPPERCASE if we ever choose to use a case-sensitive collation for MSSQL -// and there's a risk that queries on lowercase database objects fail. 
-@Subselect("SELECT h.PID as PID, " - + " r.RES_ID as RES_ID, " - + " h.RES_TYPE as RES_TYPE, " - + " h.RES_VERSION as RES_VERSION, " - // FHIR version - + " h.RES_VER as RES_VER, " - // resource version - + " h.HAS_TAGS as HAS_TAGS, " - + " h.RES_DELETED_AT as RES_DELETED_AT, " - + " h.RES_PUBLISHED as RES_PUBLISHED, " - + " h.RES_UPDATED as RES_UPDATED, " - + " h.RES_TEXT as RES_TEXT, " - + " h.RES_TEXT_VC as RES_TEXT_VC, " - + " h.RES_ENCODING as RES_ENCODING, " - + " h.PARTITION_ID as PARTITION_ID, " - + " p.SOURCE_URI as PROV_SOURCE_URI," - + " p.REQUEST_ID as PROV_REQUEST_ID," - + " r.FHIR_ID as FHIR_ID " - + "FROM HFJ_RESOURCE r " - + " INNER JOIN HFJ_RES_VER h ON r.RES_ID = h.RES_ID and r.RES_VER = h.RES_VER" - + " LEFT OUTER JOIN HFJ_RES_VER_PROV p ON p.RES_VER_PID = h.PID ") -public class ResourceSearchView implements IBaseResourceEntity, Serializable { - - private static final long serialVersionUID = 1L; - - @Id - @Column(name = "PID") - private Long myId; - - @Column(name = "RES_ID") - private Long myResourceId; - - @Column(name = "RES_TYPE", length = Constants.MAX_RESOURCE_NAME_LENGTH) - private String myResourceType; - - @Column(name = "RES_VERSION") - @Enumerated(EnumType.STRING) - private FhirVersionEnum myFhirVersion; - - @Column(name = "RES_VER") - private Long myResourceVersion; - - @Column(name = "PROV_REQUEST_ID", length = Constants.REQUEST_ID_LENGTH) - private String myProvenanceRequestId; - - @Column(name = "PROV_SOURCE_URI", length = ResourceHistoryTable.SOURCE_URI_LENGTH) - private String myProvenanceSourceUri; - - @Column(name = "HAS_TAGS") - private boolean myHasTags; - - @Column(name = "RES_DELETED_AT") - @Temporal(TemporalType.TIMESTAMP) - private Date myDeleted; - - @Temporal(TemporalType.TIMESTAMP) - @Column(name = "RES_PUBLISHED") - private Date myPublished; - - @Temporal(TemporalType.TIMESTAMP) - @Column(name = "RES_UPDATED") - private Date myUpdated; - - @Column(name = "RES_TEXT") - @Lob() - private byte[] myResource; - - 
@Column(name = "RES_TEXT_VC") - private String myResourceTextVc; - - @Column(name = "RES_ENCODING") - @Enumerated(EnumType.STRING) - private ResourceEncodingEnum myEncoding; - - @Column(name = "FHIR_ID", length = ResourceTable.MAX_FORCED_ID_LENGTH) - private String myFhirId; - - @Column(name = "PARTITION_ID") - private Integer myPartitionId; - - public ResourceSearchView() { - // public constructor for Hibernate - } - - public String getResourceTextVc() { - return myResourceTextVc; - } - - public String getProvenanceRequestId() { - return myProvenanceRequestId; - } - - public String getProvenanceSourceUri() { - return myProvenanceSourceUri; - } - - @Override - public Date getDeleted() { - return myDeleted; - } - - public void setDeleted(Date theDate) { - myDeleted = theDate; - } - - @Override - public FhirVersionEnum getFhirVersion() { - return myFhirVersion; - } - - public void setFhirVersion(FhirVersionEnum theFhirVersion) { - myFhirVersion = theFhirVersion; - } - - public String getFhirId() { - return myFhirId; - } - - @Override - public Long getId() { - return myResourceId; - } - - @Override - public IdDt getIdDt() { - if (myFhirId == null) { - Long id = myResourceId; - return new IdDt(myResourceType + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + getVersion()); - } else { - return new IdDt(getResourceType() + '/' + getFhirId() + '/' + Constants.PARAM_HISTORY + '/' + getVersion()); - } - } - - @Override - public InstantDt getPublished() { - if (myPublished != null) { - return new InstantDt(myPublished); - } else { - return null; - } - } - - public void setPublished(Date thePublished) { - myPublished = thePublished; - } - - @Override - public Long getResourceId() { - return myResourceId; - } - - @Override - public String getResourceType() { - return myResourceType; - } - - @Override - public InstantDt getUpdated() { - return new InstantDt(myUpdated); - } - - @Override - public Date getUpdatedDate() { - return myUpdated; - } - - @Override - public long 
getVersion() { - return myResourceVersion; - } - - @Override - public boolean isHasTags() { - return myHasTags; - } - - @Override - @Nullable - public PartitionablePartitionId getPartitionId() { - if (myPartitionId != null) { - return new PartitionablePartitionId(myPartitionId, null); - } else { - return null; - } - } - - public byte[] getResource() { - return myResource; - } - - public ResourceEncodingEnum getEncoding() { - return myEncoding; - } -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index 77f4f7e3b1ce..bc5eaf628783 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -22,6 +22,7 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.entity.PartitionEntity; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import org.apache.commons.lang3.Validate; @@ -36,17 +37,27 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc { @Autowired IPartitionLookupSvc myPartitionConfigSvc; + @Autowired + PartitionSettings myPartitionSettings; + public RequestPartitionHelperSvc() {} @Override public RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId) { List names = null; + List partitionIds = null; for (int i = 0; i < theRequestPartitionId.getPartitionIds().size(); i++) { PartitionEntity partition; Integer id = theRequestPartitionId.getPartitionIds().get(i); if (id == null) { partition = null; + if (myPartitionSettings.getDefaultPartitionId() != null) { + if (partitionIds == null) { + partitionIds = new 
ArrayList<>(theRequestPartitionId.getPartitionIds()); + } + partitionIds.set(i, myPartitionSettings.getDefaultPartitionId()); + } } else { try { partition = myPartitionConfigSvc.getPartitionById(id); @@ -88,8 +99,12 @@ public RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId th } if (names != null) { + List partitionIdsToUse = theRequestPartitionId.getPartitionIds(); + if (partitionIds != null) { + partitionIdsToUse = partitionIds; + } return RequestPartitionId.forPartitionIdsAndNames( - names, theRequestPartitionId.getPartitionIds(), theRequestPartitionId.getPartitionDate()); + names, partitionIdsToUse, theRequestPartitionId.getPartitionDate()); } return theRequestPartitionId; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java index a03d51791dd1..24498b0fd26e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java @@ -24,11 +24,13 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.entity.SearchTypeEnum; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.search.SearchStatusEnum; import ca.uhn.fhir.jpa.search.builder.tasks.SearchTask; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.param.HistorySearchStyleEnum; +import jakarta.annotation.Nullable; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; @@ -56,7 +58,7 @@ public PersistedJpaBundleProvider newInstance(RequestDetails theRequest, Search public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage( RequestDetails theRequestDetails, 
SearchTask theTask, - ISearchBuilder theSearchBuilder, + ISearchBuilder theSearchBuilder, RequestPartitionId theRequestPartitionId) { return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean( JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, @@ -69,7 +71,7 @@ public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage( public IBundleProvider history( RequestDetails theRequest, String theResourceType, - Long theResourcePid, + @Nullable JpaPid theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, @@ -88,7 +90,7 @@ public IBundleProvider history( public IBundleProvider history( RequestDetails theRequest, String theResourceType, - Long theResourcePid, + @Nullable JpaPid theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, @@ -103,7 +105,9 @@ public IBundleProvider history( search.setLastUpdated(theRangeStartInclusive, theRangeEndInclusive); search.setUuid(UUID.randomUUID().toString()); search.setResourceType(resourceName); - search.setResourceId(theResourcePid); + if (theResourcePid != null) { + search.setResourceId(theResourcePid.getId()); + } search.setSearchType(SearchTypeEnum.HISTORY); search.setStatus(SearchStatusEnum.FINISHED); search.setHistorySearchStyle(searchParameterType); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java index cc05a2c3a602..3498e961a6f3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java @@ -23,6 +23,7 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import 
ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; @@ -81,8 +82,8 @@ public void deleteEntriesOlderThan(Date theCutoffDate) { * Once a resource is updated or deleted, we can trust that future match checks will find the committed resource in the db. * The use of the constraint table is done, and we can delete it to keep the table small. */ - public void deleteByResId(long theResId) { - myResourceSearchUrlDao.deleteByResId(theResId); + public void deleteByResId(JpaPid theResId) { + myResourceSearchUrlDao.deleteByResId(theResId.getId()); } public void deleteByResIds(List theResIds) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java index 5ea034496cb4..452a7a11c2f3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java @@ -19,10 +19,12 @@ */ package ca.uhn.fhir.jpa.search.builder; +import ca.uhn.fhir.jpa.model.dao.JpaPid; + import java.io.Closeable; import java.util.Iterator; -public interface ISearchQueryExecutor extends Iterator, Closeable { +public interface ISearchQueryExecutor extends Iterator, Closeable { /** * Narrow the signature - no IOException allowed. */ diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/JpaPidRowMapper.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/JpaPidRowMapper.java new file mode 100644 index 000000000000..b36c02bfeefe --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/JpaPidRowMapper.java @@ -0,0 +1,47 @@ +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. 
+ * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.search.builder; + +import ca.uhn.fhir.jpa.model.dao.JpaPid; +import org.springframework.jdbc.core.RowMapper; + +import java.sql.ResultSet; +import java.sql.SQLException; + +public class JpaPidRowMapper implements RowMapper { + + private final boolean mySelectPartitionId; + + public JpaPidRowMapper(boolean theSelectPartitionId) { + mySelectPartitionId = theSelectPartitionId; + } + + @Override + public JpaPid mapRow(ResultSet theResultSet, int theRowNum) throws SQLException { + if (mySelectPartitionId) { + Integer partitionId = theResultSet.getObject(1, Integer.class); + Long resourceId = theResultSet.getLong(2); + return JpaPid.fromId(resourceId, partitionId); + } else { + Long resourceId = theResultSet.getLong(1); + return JpaPid.fromId(resourceId); + } + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java index e7d688624d22..7f7c7dbbd29c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java @@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.dao.BaseStorageDao; import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import 
ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel; import ca.uhn.fhir.jpa.model.entity.TagTypeEnum; import ca.uhn.fhir.jpa.model.util.UcumServiceUtil; @@ -44,13 +45,13 @@ import ca.uhn.fhir.jpa.search.builder.predicate.CoordsPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ICanMakeMissingParamPredicate; +import ca.uhn.fhir.jpa.search.builder.predicate.ISourcePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ParsedLocationParam; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder; -import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder; @@ -151,6 +152,7 @@ public class QueryStack { private final PartitionSettings myPartitionSettings; private final JpaStorageSettings myStorageSettings; private final EnumSet myReusePredicateBuilderTypes; + private final RequestDetails myRequestDetails; private Map myJoinMap; private Map myParamNameToPredicateBuilderMap; // used for _offset queries with sort, should be removed once the fix is applied to the async path too. 
@@ -161,6 +163,7 @@ public class QueryStack { * Constructor */ public QueryStack( + RequestDetails theRequestDetails, SearchParameterMap theSearchParameters, JpaStorageSettings theStorageSettings, FhirContext theFhirContext, @@ -168,6 +171,7 @@ public QueryStack( ISearchParamRegistry theSearchParamRegistry, PartitionSettings thePartitionSettings) { this( + theRequestDetails, theSearchParameters, theStorageSettings, theFhirContext, @@ -181,6 +185,7 @@ public QueryStack( * Constructor */ private QueryStack( + RequestDetails theRequestDetails, SearchParameterMap theSearchParameters, JpaStorageSettings theStorageSettings, FhirContext theFhirContext, @@ -188,6 +193,7 @@ private QueryStack( ISearchParamRegistry theSearchParamRegistry, PartitionSettings thePartitionSettings, EnumSet theReusePredicateBuilderTypes) { + myRequestDetails = theRequestDetails; myPartitionSettings = thePartitionSettings; assert theSearchParameters != null; assert theStorageSettings != null; @@ -1035,7 +1041,6 @@ private Condition createPredicateFilter( searchParam, Collections.singletonList(new UriParam(theFilter.getValue())), theFilter.getOperation(), - theRequest, theRequestPartitionId); } else if (typeEnum == RestSearchParameterTypeEnum.STRING) { return theQueryStack3.createPredicateString( @@ -1220,7 +1225,6 @@ private Condition createPredicateHas( ResourceLinkPredicateBuilder resourceLinkTableJoin = mySqlBuilder.addReferencePredicateBuilderReversed(this, theSourceJoinColumn); - Condition partitionPredicate = resourceLinkTableJoin.createPartitionIdPredicate(theRequestPartitionId); List paths = resourceLinkTableJoin.createResourceLinkPaths( targetResourceType, paramReference, new ArrayList<>()); @@ -1242,7 +1246,12 @@ private Condition createPredicateHas( .setRequest(theRequest) .setRequestPartitionId(theRequestPartitionId)); - andPredicates.add(toAndPredicate(partitionPredicate, pathPredicate, typePredicate, linkedPredicate)); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + 
andPredicates.add(toAndPredicate(pathPredicate, typePredicate, linkedPredicate)); + } else { + Condition partitionPredicate = resourceLinkTableJoin.createPartitionIdPredicate(theRequestPartitionId); + andPredicates.add(toAndPredicate(partitionPredicate, pathPredicate, typePredicate, linkedPredicate)); + } } return toAndPredicate(andPredicates); @@ -1889,7 +1898,6 @@ private Condition createIndexPredicate( theParamDefinition, theOrValues, theOperation, - theRequest, theRequestPartitionId, theSqlBuilder); break; @@ -1954,13 +1962,13 @@ private Condition createPredicateSource( .findFirst(); if (isMissingSourceOptional.isPresent()) { - SourcePredicateBuilder join = + ISourcePredicateBuilder join = getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.LEFT_OUTER); orPredicates.add(join.createPredicateMissingSourceUri()); return toOrPredicate(orPredicates); } // for all other cases we use "INNER JOIN" to match search parameters - SourcePredicateBuilder join = getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.INNER); + ISourcePredicateBuilder join = getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.INNER); for (IQueryParameterType nextParameter : theList) { SourceParam sourceParameter = new SourceParam(nextParameter.getValueAsQueryToken(myFhirContext)); @@ -1980,13 +1988,22 @@ private Condition createPredicateSource( return toOrPredicate(orPredicates); } - private SourcePredicateBuilder getSourcePredicateBuilder( + private ISourcePredicateBuilder getSourcePredicateBuilder( @Nullable DbColumn[] theSourceJoinColumn, SelectQuery.JoinType theJoinType) { + if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) { + return createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.SOURCE, + theSourceJoinColumn, + Constants.PARAM_SOURCE, + () -> mySqlBuilder.addResourceHistoryProvenancePredicateBuilder( + theSourceJoinColumn, theJoinType)) + .getResult(); + } return createOrReusePredicateBuilder( 
PredicateBuilderTypeEnum.SOURCE, theSourceJoinColumn, Constants.PARAM_SOURCE, - () -> mySqlBuilder.addSourcePredicateBuilder(theSourceJoinColumn, theJoinType)) + () -> mySqlBuilder.addResourceHistoryPredicateBuilder(theSourceJoinColumn, theJoinType)) .getResult(); } @@ -2321,7 +2338,6 @@ public Condition createPredicateUri( RuntimeSearchParam theSearchParam, List theList, SearchFilterParser.CompareOperation theOperation, - RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { return createPredicateUri( theSourceJoinColumn, @@ -2330,7 +2346,6 @@ public Condition createPredicateUri( theSearchParam, theList, theOperation, - theRequestDetails, theRequestPartitionId, mySqlBuilder); } @@ -2342,7 +2357,6 @@ public Condition createPredicateUri( RuntimeSearchParam theSearchParam, List theList, SearchFilterParser.CompareOperation theOperation, - RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId, SearchQueryBuilder theSqlBuilder) { @@ -2361,13 +2375,14 @@ public Condition createPredicateUri( } else { UriPredicateBuilder join = theSqlBuilder.addUriPredicateBuilder(theSourceJoinColumn); - Condition predicate = join.addPredicate(theList, paramName, theOperation, theRequestDetails); + Condition predicate = join.addPredicate(theList, paramName, theOperation, myRequestDetails); return join.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); } } public QueryStack newChildQueryFactoryWithFullBuilderReuse() { return new QueryStack( + myRequestDetails, mySearchParameters, myStorageSettings, myFhirContext, @@ -2452,7 +2467,6 @@ public Condition searchForIdsWithAndOr(SearchForIdsParams theSearchForIdsParams) */ private Condition createPredicateResourcePID( DbColumn[] theSourceJoinColumn, List> theAndOrParams) { - DbColumn pidColumn = getResourceIdColumn(theSourceJoinColumn); if (pidColumn == null) { @@ -2662,7 +2676,6 @@ private Condition createPredicateSearchParameter( nextParamDef, nextAnd, 
SearchFilterParser.CompareOperation.eq, - theRequest, theRequestPartitionId)); } break; @@ -2871,12 +2884,13 @@ public void addPredicateCompositeNonUnique(List theIndexStrings, Request // expand out the pids public void addPredicateEverythingOperation( - String theResourceName, List theTypeSourceResourceNames, Long... theTargetPids) { + String theResourceName, List theTypeSourceResourceNames, JpaPid... theTargetPids) { ResourceLinkPredicateBuilder table = mySqlBuilder.addReferencePredicateBuilder(this, null); Condition predicate = table.createEverythingPredicate(theResourceName, theTypeSourceResourceNames, theTargetPids); mySqlBuilder.addPredicate(predicate); mySqlBuilder.getSelect().setIsDistinct(true); + addGrouping(); } public IQueryParameterType newParameterInstance( diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java index 088703dbb828..790a8d5ddc4b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java @@ -33,7 +33,6 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; @@ -43,20 +42,23 @@ import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser; import ca.uhn.fhir.jpa.dao.IResultIterator; import ca.uhn.fhir.jpa.dao.ISearchBuilder; -import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import 
ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException; -import ca.uhn.fhir.jpa.entity.ResourceSearchView; import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; -import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; +import ca.uhn.fhir.jpa.model.entity.BaseTag; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag; import ca.uhn.fhir.jpa.model.entity.ResourceTag; import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.search.SearchConstants; import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor; import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql; @@ -98,11 +100,14 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; +import ca.uhn.fhir.system.HapiSystemProperties; import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.StringUtil; import ca.uhn.fhir.util.UrlUtil; import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; +import com.google.common.collect.MultimapBuilder; import com.healthmarketscience.sqlbuilder.Condition; import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; @@ -113,6 +118,7 @@ import jakarta.persistence.Tuple; import jakarta.persistence.TypedQuery; import jakarta.persistence.criteria.CriteriaBuilder; +import org.apache.commons.collections4.ListUtils; import 
org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.math.NumberUtils; @@ -124,11 +130,8 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.RowMapper; import org.springframework.transaction.support.TransactionSynchronizationManager; -import java.sql.ResultSet; -import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -168,14 +171,19 @@ public class SearchBuilder implements ISearchBuilder { public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE; public static final String RESOURCE_ID_ALIAS = "resource_id"; + public static final String PARTITION_ID_ALIAS = "partition_id"; public static final String RESOURCE_VERSION_ALIAS = "resource_version"; private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class); private static final JpaPid NO_MORE = JpaPid.fromId(-1L); - private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid"; - private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; + private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue"; private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType"; + private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; + private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId"; + private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion"; + public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0]; + public static boolean myUseMaxPageSize50ForTest = false; public static Integer myMaxPageSizeForTests = null; protected final 
IInterceptorBroadcaster myInterceptorBroadcaster; protected final IResourceTagDao myResourceTagDao; @@ -187,7 +195,6 @@ public class SearchBuilder implements ISearchBuilder { private final ISearchParamRegistry mySearchParamRegistry; private final PartitionSettings myPartitionSettings; private final DaoRegistry myDaoRegistry; - private final IResourceSearchViewDao myResourceSearchViewDao; private final FhirContext myContext; private final IIdHelperService myIdHelperService; private final JpaStorageSettings myStorageSettings; @@ -213,6 +220,15 @@ public class SearchBuilder implements ISearchBuilder { @Autowired private IJpaStorageResourceParser myJpaStorageResourceParser; + @Autowired + private IResourceHistoryTableDao myResourceHistoryTableDao; + + @Autowired + private IResourceHistoryTagDao myResourceHistoryTagDao; + + @Autowired + private IRequestPartitionHelperSvc myPartitionHelperSvc; + /** * Constructor */ @@ -228,7 +244,6 @@ public SearchBuilder( IInterceptorBroadcaster theInterceptorBroadcaster, IResourceTagDao theResourceTagDao, DaoRegistry theDaoRegistry, - IResourceSearchViewDao theResourceSearchViewDao, FhirContext theContext, IIdHelperService theIdHelperService, Class theResourceType) { @@ -244,7 +259,6 @@ public SearchBuilder( myInterceptorBroadcaster = theInterceptorBroadcaster; myResourceTagDao = theResourceTagDao; myDaoRegistry = theDaoRegistry; - myResourceSearchViewDao = theResourceSearchViewDao; myContext = theContext; myIdHelperService = theIdHelperService; } @@ -339,7 +353,8 @@ public Long createCountQuery( if (queries.isEmpty()) { return 0L; } else { - return queries.get(0).next(); + JpaPid jpaPid = queries.get(0).next(); + return jpaPid.getId(); } } @@ -462,7 +477,7 @@ private List createQuery( ourLog.trace("Query needs db after HSearch. Chunking."); // Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc. // We break the pids into chunks that fit in the 1k limit for jdbc bind params. 
- new QueryChunker() + new QueryChunker() .chunk( fulltextExecutor, SearchBuilder.getMaximumPageSize(), @@ -568,7 +583,7 @@ private List queryHibernateSearchForEverythingPids(RequestDetails theReq private void doCreateChunkedQueries( SearchParameterMap theParams, - List thePids, + List thePids, Integer theOffset, SortSpec sort, boolean theCount, @@ -584,7 +599,7 @@ private void doCreateChunkedQueries( /** * Combs through the params for any _id parameters and extracts the PIDs for them */ - private void extractTargetPidsFromIdParams(Set theTargetPids) { + private void extractTargetPidsFromIdParams(Set theTargetPids) { // get all the IQueryParameterType objects // for _id -> these should all be StringParam values HashSet ids = new HashSet<>(); @@ -621,8 +636,8 @@ private void extractTargetPidsFromIdParams(Set theTargetPids) { ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled()); // add the pids to targetPids - for (IResourceLookup pid : idToIdentity.values()) { - theTargetPids.add((Long) pid.getPersistentId().getId()); + for (IResourceLookup pid : idToIdentity.values()) { + theTargetPids.add(pid.getPersistentId()); } } @@ -633,11 +648,17 @@ private void createChunkedQuery( Integer theMaximumResults, boolean theCountOnlyFlag, RequestDetails theRequest, - List thePidList, + List thePidList, List theSearchQueryExecutors) { if (myParams.getEverythingMode() != null) { createChunkedQueryForEverythingSearch( - theParams, theOffset, theMaximumResults, theCountOnlyFlag, thePidList, theSearchQueryExecutors); + theRequest, + theParams, + theOffset, + theMaximumResults, + theCountOnlyFlag, + thePidList, + theSearchQueryExecutors); } else { createChunkedQueryNormalSearch( theParams, sort, theOffset, theCountOnlyFlag, theRequest, thePidList, theSearchQueryExecutors); @@ -650,7 +671,7 @@ private void createChunkedQueryNormalSearch( Integer theOffset, boolean theCountOnlyFlag, RequestDetails theRequest, - List thePidList, + List thePidList, List 
theSearchQueryExecutors) { SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( myContext, @@ -662,7 +683,13 @@ private void createChunkedQueryNormalSearch( myDialectProvider, theCountOnlyFlag); QueryStack queryStack3 = new QueryStack( - theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings); + theRequest, + theParams, + myStorageSettings, + myContext, + sqlBuilder, + mySearchParamRegistry, + myPartitionSettings); if (theParams.keySet().size() > 1 || theParams.getSort() != null @@ -768,11 +795,12 @@ private void executeSearch( } private void createChunkedQueryForEverythingSearch( + RequestDetails theRequest, SearchParameterMap theParams, Integer theOffset, Integer theMaximumResults, boolean theCountOnlyFlag, - List thePidList, + List thePidList, List theSearchQueryExecutors) { SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( @@ -786,11 +814,17 @@ private void createChunkedQueryForEverythingSearch( theCountOnlyFlag); QueryStack queryStack3 = new QueryStack( - theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings); + theRequest, + theParams, + myStorageSettings, + myContext, + sqlBuilder, + mySearchParamRegistry, + myPartitionSettings); JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theMaximumResults); - Set targetPids = new HashSet<>(); + Set targetPids = new HashSet<>(); if (myParams.get(IAnyResource.SP_RES_ID) != null) { extractTargetPidsFromIdParams(targetPids); @@ -816,16 +850,8 @@ private void createChunkedQueryForEverythingSearch( String sql = allTargetsSql.getSql(); Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]); - List output = jdbcTemplate.query(sql, args, new RowMapper() { - @Override - public Long mapRow(ResultSet rs, int rowNum) throws SQLException { - if (myPartitionSettings.isPartitioningEnabled()) { - return rs.getLong(2); - } else { - return rs.getLong(1); - } - } - }); + List output = + jdbcTemplate.query(sql, args, new 
JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled())); // we add a search executor to fetch unlinked patients first theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output)); @@ -837,7 +863,7 @@ public Long mapRow(ResultSet rs, int rowNum) throws SQLException { } queryStack3.addPredicateEverythingOperation( - myResourceName, typeSourceResources, targetPids.toArray(new Long[0])); + myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY)); // Add PID list predicate for full text search and/or lastn operation addPidListPredicate(thePidList, sqlBuilder); @@ -858,7 +884,7 @@ public Long mapRow(ResultSet rs, int rowNum) throws SQLException { executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder); } - private void addPidListPredicate(List thePidList, SearchQueryBuilder theSqlBuilder) { + private void addPidListPredicate(List thePidList, SearchQueryBuilder theSqlBuilder) { if (thePidList != null && !thePidList.isEmpty()) { theSqlBuilder.addResourceIdsPredicate(thePidList); } @@ -1142,31 +1168,55 @@ private void doLoadPids( Collection theIncludedPids, List theResourceListToPopulate, boolean theForHistoryOperation, - Map thePosition) { + Map thePosition) { - Map resourcePidToVersion = null; + Map resourcePidToVersion = null; for (JpaPid next : thePids) { if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) { if (resourcePidToVersion == null) { resourcePidToVersion = new HashMap<>(); } - resourcePidToVersion.put((next).getId(), next.getVersion()); + resourcePidToVersion.put(next, next.getVersion()); } } - List versionlessPids = JpaPid.toLongList(thePids); + List versionlessPids = new ArrayList<>(thePids); if (versionlessPids.size() < getMaximumPageSize()) { versionlessPids = normalizeIdListForInClause(versionlessPids); } - // -- get the resource from the searchView - Collection resourceSearchViewList = - myResourceSearchViewDao.findByResourceIds(versionlessPids); + // Load the resource bodies + 
List resourceSearchViewList = + myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable( + JpaPid.toLongList(versionlessPids)); + + /* + * If we have specific versions to load, replace the history entries with the + * correct ones + * + * TODO: this could definitely be made more efficient, probably by not loading the wrong + * version entity first, and by batching the fetches. But this is a fairly infrequently + * used feature, and loading history entities by PK is a very efficient query so it's + * not the end of the world + */ + if (resourcePidToVersion != null) { + for (int i = 0; i < resourceSearchViewList.size(); i++) { + ResourceHistoryTable next = resourceSearchViewList.get(i); + JpaPid resourceId = next.getPersistentId(); + Long version = resourcePidToVersion.get(resourceId); + resourceId.setVersion(version); + if (version != null && !version.equals(next.getVersion())) { + ResourceHistoryTable replacement = + myResourceHistoryTableDao.findForIdAndVersion(next.getResourceId(), version); + resourceSearchViewList.set(i, replacement); + } + } + } // -- preload all tags with tag definition if any - Map> tagMap = getResourceTagMap(resourceSearchViewList); + Map> tagMap = getResourceTagMap(resourceSearchViewList); - for (IBaseResourceEntity next : resourceSearchViewList) { + for (ResourceHistoryTable next : resourceSearchViewList) { if (next.getDeleted() != null) { continue; } @@ -1174,29 +1224,17 @@ private void doLoadPids( Class resourceType = myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); - JpaPid resourceId = JpaPid.fromId(next.getResourceId()); + JpaPid resourceId = next.getPersistentId(); - /* - * If a specific version is requested via an include, we'll replace the current version - * with the specific desired version. This is not the most efficient thing, given that - * we're loading the current version and then turning around and throwing it away again. 
- * This could be optimized and probably should be, but it's not critical given that - * this only applies to includes, which don't tend to be massive in numbers. - */ if (resourcePidToVersion != null) { - Long version = resourcePidToVersion.get(next.getResourceId()); + Long version = resourcePidToVersion.get(resourceId); resourceId.setVersion(version); - if (version != null && !version.equals(next.getVersion())) { - IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceType); - next = (IBaseResourceEntity) - dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null); - } } IBaseResource resource = null; if (next != null) { resource = myJpaStorageResourceParser.toResource( - resourceType, next, tagMap.get(next.getId()), theForHistoryOperation); + resourceType, next, tagMap.get(JpaPid.fromId(next.getResourceId())), theForHistoryOperation); } if (resource == null) { if (next != null) { @@ -1211,7 +1249,7 @@ private void doLoadPids( continue; } - Integer index = thePosition.get(resourceId); + Integer index = thePosition.get(resourceId.getId()); if (index == null) { ourLog.warn("Got back unexpected resource PID {}", resourceId); continue; @@ -1227,40 +1265,93 @@ private void doLoadPids( } } - private Map> getResourceTagMap( - Collection theResourceSearchViewList) { + private Map> getResourceTagMap(Collection theHistoryTables) { - List idList = new ArrayList<>(theResourceSearchViewList.size()); + switch (myStorageSettings.getTagStorageMode()) { + case VERSIONED: + return getPidToTagMapVersioned(theHistoryTables); + case NON_VERSIONED: + return getPidToTagMapUnversioned(theHistoryTables); + case INLINE: + default: + return Map.of(); + } + } + + @Nonnull + private Map> getPidToTagMapVersioned( + Collection theHistoryTables) { + List idList = new ArrayList<>(theHistoryTables.size()); // -- find all resource has tags - for (IBaseResourceEntity resource : theResourceSearchViewList) { - if (resource.isHasTags()) idList.add(resource.getId()); + for 
(ResourceHistoryTable resource : theHistoryTables) { + if (resource.isHasTags()) { + idList.add(resource.getId()); + } + } + + Map> tagMap = new HashMap<>(); + + // -- no tags + if (idList.isEmpty()) { + return tagMap; + } + + // -- get all tags for the idList + Collection tagList = myResourceHistoryTagDao.findByVersionIds(idList); + + // -- build the map, key = resourceId, value = list of ResourceTag + JpaPid resourceId; + Collection tagCol; + for (ResourceHistoryTag tag : tagList) { + + resourceId = JpaPid.fromId(tag.getResourceId()); + tagCol = tagMap.get(resourceId); + if (tagCol == null) { + tagCol = new ArrayList<>(); + tagCol.add(tag); + tagMap.put(resourceId, tagCol); + } else { + tagCol.add(tag); + } } - return getPidToTagMap(idList); + return tagMap; } @Nonnull - private Map> getPidToTagMap(List thePidList) { - Map> tagMap = new HashMap<>(); + private Map> getPidToTagMapUnversioned( + Collection theHistoryTables) { + List idList = new ArrayList<>(theHistoryTables.size()); + + // -- find all resource has tags + for (ResourceHistoryTable resource : theHistoryTables) { + if (resource.isHasTags()) { + idList.add(JpaPid.fromId(resource.getResourceId())); + } + } + + Map> tagMap = new HashMap<>(); // -- no tags - if (thePidList.isEmpty()) return tagMap; + if (idList.isEmpty()) { + return tagMap; + } // -- get all tags for the idList - Collection tagList = myResourceTagDao.findByResourceIds(thePidList); + Collection tagList = myResourceTagDao.findByResourceIds(JpaPid.toLongList(idList)); // -- build the map, key = resourceId, value = list of ResourceTag JpaPid resourceId; - Collection tagCol; + Collection tagCol; for (ResourceTag tag : tagList) { resourceId = JpaPid.fromId(tag.getResourceId()); - tagCol = tagMap.get(resourceId.getId()); + tagCol = tagMap.get(resourceId); if (tagCol == null) { tagCol = new ArrayList<>(); tagCol.add(tag); - tagMap.put(resourceId.getId(), tagCol); + tagMap.put(resourceId, tagCol); } else { tagCol.add(tag); } @@ -1284,9 +1375,9 @@ 
public void loadResourcesByPid( // when running asserts assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids; - Map position = new HashMap<>(); + Map position = new HashMap<>(); for (JpaPid next : thePids) { - position.put(next, theResourceListToPopulate.size()); + position.put(next.getId(), theResourceListToPopulate.size()); theResourceListToPopulate.add(null); } @@ -1402,7 +1493,11 @@ public Set loadIncludes(SearchBuilderLoadIncludesParameters theP return new HashSet<>(); } String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; + String searchPartitionIdFieldName = + reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; + String findPartitionIdFieldName = + reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; String findResourceTypeFieldName = reverseMode ? MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; String findVersionFieldName = null; if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { @@ -1444,9 +1539,11 @@ public Set loadIncludes(SearchBuilderLoadIncludesParameters theP if (matchAll) { loadIncludesMatchAll( findPidFieldName, + findPartitionIdFieldName, findResourceTypeFieldName, findVersionFieldName, searchPidFieldName, + searchPartitionIdFieldName, wantResourceType, reverseMode, hasDesiredResourceTypes, @@ -1461,13 +1558,16 @@ public Set loadIncludes(SearchBuilderLoadIncludesParameters theP nextInclude, fhirContext, findPidFieldName, + findPartitionIdFieldName, findVersionFieldName, searchPidFieldName, + searchPartitionIdFieldName, reverseMode, nextRoundMatches, entityManager, maxCount, - pidsToInclude); + pidsToInclude, + request); } } @@ -1533,13 +1633,16 @@ private void loadIncludesMatchSpecific( Include nextInclude, FhirContext fhirContext, String findPidFieldName, + String findPartitionFieldName, 
String findVersionFieldName, String searchPidFieldName, + String searchPartitionFieldName, boolean reverseMode, List nextRoundMatches, EntityManager entityManager, Integer maxCount, - HashSet pidsToInclude) { + HashSet pidsToInclude, + RequestDetails theRequest) { List paths; // Start replace @@ -1578,6 +1681,13 @@ private void loadIncludesMatchSpecific( if (findVersionFieldName != null) { fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; } + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + fieldsToLoad += ", r."; + fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) + ? "partition_id" + : "target_res_partition_id"; + fieldsToLoad += " as " + PARTITION_ID_ALIAS; + } // Query for includes lookup has 2 cases // Case 1: Where target_resource_id is available in hfj_res_link table for local references @@ -1589,30 +1699,45 @@ private void loadIncludesMatchSpecific( String searchPidFieldSqlColumn = searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; - StringBuilder localReferenceQuery = new StringBuilder("SELECT " + fieldsToLoad + " FROM hfj_res_link r " - + " WHERE r.src_path = :src_path AND " - + " r.target_resource_id IS NOT NULL AND " - + " r." - + searchPidFieldSqlColumn + " IN (:target_pids) "); + StringBuilder localReferenceQuery = new StringBuilder(); + localReferenceQuery.append("SELECT ").append(fieldsToLoad); + localReferenceQuery.append(" FROM hfj_res_link r "); + localReferenceQuery.append("WHERE r.src_path = :src_path"); + if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { + localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); + } + localReferenceQuery + .append(" AND r.") + .append(searchPidFieldSqlColumn) + .append(" IN (:target_pids) "); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) + ? 
"target_res_partition_id" + : "partition_id"; + localReferenceQuery + .append("AND r.") + .append(partitionFieldToSearch) + .append(" = :search_partition_id "); + } localReferenceQueryParams.put("src_path", nextPath); // we loop over target_pids later. if (targetResourceTypes != null) { if (targetResourceTypes.size() == 1) { - localReferenceQuery.append(" AND r.target_resource_type = :target_resource_type "); + localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); localReferenceQueryParams.put( "target_resource_type", targetResourceTypes.iterator().next()); } else { - localReferenceQuery.append(" AND r.target_resource_type in (:target_resource_types) "); + localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); localReferenceQueryParams.put("target_resource_types", targetResourceTypes); } } // Case 2: Pair> canonicalQuery = - buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode); + buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest); - String sql = localReferenceQuery + " UNION " + canonicalQuery.getLeft(); + String sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); Map limitParams = new HashMap<>(); if (maxCount != null) { @@ -1637,10 +1762,15 @@ private void loadIncludesMatchSpecific( sql = sb.toString(); } - List> partitions = partition(nextRoundMatches, getMaximumPageSize()); + List> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); for (Collection nextPartition : partitions) { Query q = entityManager.createNativeQuery(sql, Tuple.class); q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + q.setParameter( + "search_partition_id", + nextPartition.iterator().next().getPartitionId()); + } localReferenceQueryParams.forEach(q::setParameter); canonicalQuery.getRight().forEach(q::setParameter); 
limitParams.forEach(q::setParameter); @@ -1655,7 +1785,14 @@ private void loadIncludesMatchSpecific( resourceVersion = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); } - pidsToInclude.add(JpaPid.fromIdAndVersion(resourceId, resourceVersion)); + Integer partitionId = null; + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); + } + + JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); + pid.setPartitionId(partitionId); + pidsToInclude.add(pid); } } } @@ -1664,9 +1801,11 @@ private void loadIncludesMatchSpecific( private void loadIncludesMatchAll( String findPidFieldName, + String findPartitionFieldName, String findResourceTypeFieldName, String findVersionFieldName, String searchPidFieldName, + String searchPartitionFieldName, String wantResourceType, boolean reverseMode, boolean hasDesiredResourceTypes, @@ -1683,10 +1822,17 @@ private void loadIncludesMatchAll( if (findVersionFieldName != null) { sqlBuilder.append(", r.").append(findVersionFieldName); } + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + sqlBuilder.append(", r.").append(findPartitionFieldName); + } sqlBuilder.append(" FROM ResourceLink r WHERE "); - sqlBuilder.append("r."); - sqlBuilder.append(searchPidFieldName); // (rev mode) target_resource_id | source_resource_id + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + sqlBuilder.append("r.").append(searchPartitionFieldName); + sqlBuilder.append(" = :target_partition_id AND "); + } + + sqlBuilder.append("r.").append(searchPidFieldName); sqlBuilder.append(" IN (:target_pids)"); /* @@ -1726,10 +1872,14 @@ private void loadIncludesMatchAll( } String sql = sqlBuilder.toString(); - List> partitions = partition(nextRoundMatches, getMaximumPageSize()); + List> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); for (Collection nextPartition : partitions) { TypedQuery q = 
entityManager.createQuery(sql, Object[].class); q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + q.setParameter( + "target_partition_id", nextPartition.iterator().next().getPartitionId()); + } if (wantResourceType != null) { q.setParameter("want_resource_type", wantResourceType); } @@ -1752,12 +1902,19 @@ private void loadIncludesMatchAll( Long resourceId = (Long) ((Object[]) nextRow)[0]; String resourceType = (String) ((Object[]) nextRow)[1]; String resourceCanonicalUrl = (String) ((Object[]) nextRow)[2]; + Integer partitionId = null; + int offset = 0; if (findVersionFieldName != null) { version = (Long) ((Object[]) nextRow)[3]; + offset++; + } + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + partitionId = ((Integer) ((Object[]) nextRow)[3 + offset]); } if (resourceId != null) { JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); + pid.setPartitionId(partitionId); pidsToInclude.add(pid); } else if (resourceCanonicalUrl != null) { if (canonicalUrls == null) { @@ -1771,23 +1928,30 @@ private void loadIncludesMatchAll( String message = "Search with _include=* can be inefficient when references using canonical URLs are detected. 
Use more specific _include values instead."; firePerformanceWarning(request, message); - loadCanonicalUrls(canonicalUrls, entityManager, pidsToInclude, reverseMode); + loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode); } } } private void loadCanonicalUrls( + RequestDetails theRequestDetails, Set theCanonicalUrls, EntityManager theEntityManager, HashSet thePidsToInclude, boolean theReverse) { StringBuilder sqlBuilder; - Set identityHashesForTypes = calculateIndexUriIdentityHashesForResourceTypes(null, theReverse); - List> canonicalUrlPartitions = - partition(theCanonicalUrls, getMaximumPageSize() - identityHashesForTypes.size()); + CanonicalUrlTargets canonicalUrlTargets = + calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse); + List> canonicalUrlPartitions = ListUtils.partition( + List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.myHashIdentityValues.size()); sqlBuilder = new StringBuilder(); - sqlBuilder.append("SELECT i.myResourcePid "); + sqlBuilder.append("SELECT "); + if (myPartitionSettings.isPartitioningEnabled()) { + sqlBuilder.append("i.myPartitionIdValue, "); + } + sqlBuilder.append("i.myResourcePid "); + sqlBuilder.append("FROM ResourceIndexedSearchParamUri i "); sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) "); sqlBuilder.append("AND i.myUri IN (:uris)"); @@ -1795,13 +1959,23 @@ private void loadCanonicalUrls( String canonicalResSql = sqlBuilder.toString(); for (Collection nextCanonicalUrlList : canonicalUrlPartitions) { - TypedQuery canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Long.class); - canonicalResIdQuery.setParameter("hash_identity", identityHashesForTypes); + TypedQuery canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class); + canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.myHashIdentityValues); canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList); - List 
resIds = canonicalResIdQuery.getResultList(); - for (var next : resIds) { + List results = canonicalResIdQuery.getResultList(); + for (var next : results) { if (next != null) { - thePidsToInclude.add(JpaPid.fromId(next)); + Integer partitionId = null; + Long pid; + if (next.length == 1) { + pid = (Long) next[0]; + } else { + partitionId = (Integer) ((Object[]) next)[0]; + pid = (Long) ((Object[]) next)[1]; + } + if (pid != null) { + thePidsToInclude.add(JpaPid.fromId(pid, partitionId)); + } } } } @@ -1841,46 +2015,80 @@ private static Set computeTargetResourceTypes(Include nextInclude, Runti @Nonnull private Pair> buildCanonicalUrlQuery( - String theVersionFieldName, Set theTargetResourceTypes, boolean theReverse) { + String theVersionFieldName, + Set theTargetResourceTypes, + boolean theReverse, + RequestDetails theRequest) { String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; if (theVersionFieldName != null) { // canonical-uri references aren't versioned, but we need to match the column count for the UNION fieldsToLoadFromSpidxUriTable += ", NULL"; } + + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + if (theReverse) { + fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; + } else { + fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; + } + } + // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. // But sp_name isn't indexed, so we use hash_identity instead. 
- Set identityHashesForTypes = - calculateIndexUriIdentityHashesForResourceTypes(theTargetResourceTypes, theReverse); + CanonicalUrlTargets canonicalUrlTargets = + calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); Map canonicalUriQueryParams = new HashMap<>(); - StringBuilder canonicalUrlQuery = new StringBuilder( - "SELECT " + fieldsToLoadFromSpidxUriTable + " FROM hfj_res_link r " + " JOIN hfj_spidx_uri rUri ON ( "); + StringBuilder canonicalUrlQuery = new StringBuilder(); + canonicalUrlQuery + .append("SELECT ") + .append(fieldsToLoadFromSpidxUriTable) + .append(' '); + canonicalUrlQuery.append("FROM hfj_res_link r "); + // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 - if (theTargetResourceTypes != null && theTargetResourceTypes.size() == 1) { - canonicalUrlQuery.append(" rUri.hash_identity = :uri_identity_hash "); + canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); + canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.myPartitionIds); + } + if (canonicalUrlTargets.myHashIdentityValues.size() == 1) { + canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); canonicalUriQueryParams.put( - "uri_identity_hash", identityHashesForTypes.iterator().next()); + "uri_identity_hash", + canonicalUrlTargets.myHashIdentityValues.iterator().next()); } else { - canonicalUrlQuery.append(" rUri.hash_identity in (:uri_identity_hashes) "); - canonicalUriQueryParams.put("uri_identity_hashes", identityHashesForTypes); + canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); + canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.myHashIdentityValues); } + canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); + canonicalUrlQuery.append(")"); - canonicalUrlQuery.append(" AND 
r.target_resource_url = rUri.sp_uri )"); - canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND "); - canonicalUrlQuery.append(" r.target_resource_id IS NULL "); - canonicalUrlQuery.append(" AND "); + canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); + canonicalUrlQuery.append(" r.target_resource_id IS NULL"); + canonicalUrlQuery.append(" AND"); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + if (theReverse) { + canonicalUrlQuery.append(" rUri.partition_id"); + } else { + canonicalUrlQuery.append(" r.partition_id"); + } + canonicalUrlQuery.append(" = :search_partition_id"); + canonicalUrlQuery.append(" AND"); + } if (theReverse) { - canonicalUrlQuery.append("rUri.res_id"); + canonicalUrlQuery.append(" rUri.res_id"); } else { - canonicalUrlQuery.append("r.src_resource_id"); + canonicalUrlQuery.append(" r.src_resource_id"); } - canonicalUrlQuery.append(" IN (:target_pids) "); + canonicalUrlQuery.append(" IN (:target_pids)"); return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); } @Nonnull - Set calculateIndexUriIdentityHashesForResourceTypes(Set theTargetResourceTypes, boolean theReverse) { + CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( + RequestDetails theRequestDetails, Set theTargetResourceTypes, boolean theReverse) { Set targetResourceTypes = theTargetResourceTypes; if (targetResourceTypes == null) { /* @@ -1949,34 +2157,92 @@ Set calculateIndexUriIdentityHashesForResourceTypes(Set theTargetR } assert !targetResourceTypes.isEmpty(); - return targetResourceTypes.stream() - .map(type -> BaseResourceIndexedSearchParam.calculateHashIdentity( - myPartitionSettings, myRequestPartitionId, type, "url")) - .collect(Collectors.toSet()); + Set hashIdentityValues = new HashSet<>(); + Set partitionIds = new HashSet<>(); + for (String type : targetResourceTypes) { + + RequestPartitionId readPartition; + if (myPartitionSettings.isPartitioningEnabled()) { + readPartition = + 
myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type); + } else { + readPartition = RequestPartitionId.defaultPartition(); + } + if (readPartition.hasPartitionIds()) { + partitionIds.addAll(readPartition.getPartitionIds()); + } + + Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( + myPartitionSettings, readPartition, type, "url"); + hashIdentityValues.add(hashIdentity); + } + + return new CanonicalUrlTargets(hashIdentityValues, partitionIds); } - private List> partition(Collection theNextRoundMatches, int theMaxLoad) { - if (theNextRoundMatches.size() <= theMaxLoad) { - return Collections.singletonList(theNextRoundMatches); - } else { + static class CanonicalUrlTargets { - List> retVal = new ArrayList<>(); - Collection current = null; - for (T next : theNextRoundMatches) { - if (current == null) { - current = new ArrayList<>(theMaxLoad); - retVal.add(current); - } + @Nonnull + final Set myHashIdentityValues; + + @Nonnull + final Set myPartitionIds; + + public CanonicalUrlTargets(@Nonnull Set theHashIdentityValues, @Nonnull Set thePartitionIds) { + myHashIdentityValues = theHashIdentityValues; + myPartitionIds = thePartitionIds; + } + } - current.add(next); + /** + * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing + * those pids where: + *

    + *
  • No single list is more than {@literal theMaxLoad} entries
  • + *
  • Each list only contains JpaPids with the same partition ID
  • + *
+ */ + static List> partitionBySizeAndPartitionId(List theNextRoundMatches, int theMaxLoad) { - if (current.size() >= theMaxLoad) { - current = null; + if (theNextRoundMatches.size() <= theMaxLoad) { + boolean allSamePartition = true; + for (int i = 1; i < theNextRoundMatches.size(); i++) { + if (!Objects.equals( + theNextRoundMatches.get(i - 1).getPartitionId(), + theNextRoundMatches.get(i).getPartitionId())) { + allSamePartition = false; + break; } } + if (allSamePartition) { + return Collections.singletonList(theNextRoundMatches); + } + } - return retVal; + // Break into partitioned sublists + ListMultimap lists = + MultimapBuilder.hashKeys().arrayListValues().build(); + for (JpaPid nextRoundMatch : theNextRoundMatches) { + String partitionId = nextRoundMatch.getPartitionId() != null + ? nextRoundMatch.getPartitionId().toString() + : ""; + lists.put(partitionId, nextRoundMatch); } + + List> retVal = new ArrayList<>(); + for (String key : lists.keySet()) { + List> nextPartition = Lists.partition(lists.get(key), theMaxLoad); + retVal.addAll(nextPartition); + } + + // In unit test mode, we sort the results just for unit test predictability + if (HapiSystemProperties.isUnitTestModeEnabled()) { + retVal = retVal.stream() + .map(t -> t.stream().sorted().collect(Collectors.toList())) + .collect(Collectors.toList()); + } + + return retVal; } private void attemptComboUniqueSpProcessing( @@ -2422,15 +2688,14 @@ private void fetchNext() { break; } - Long nextLong = myResultsIterator.next(); + JpaPid nextPid = myResultsIterator.next(); if (myHavePerfTraceFoundIdHook) { - callPerformanceTracingHook(nextLong); + callPerformanceTracingHook(nextPid); } - if (nextLong != null) { - JpaPid next = JpaPid.fromId(nextLong); - if (myPidSet.add(next) && doNotSkipNextPidForEverything()) { - myNext = next; + if (nextPid != null) { + if (myPidSet.add(nextPid) && doNotSkipNextPidForEverything()) { + myNext = nextPid; myNonSkipCount++; break; } else { @@ -2518,10 +2783,10 @@ private boolean 
doNotSkipNextPidForEverything() { return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); } - private void callPerformanceTracingHook(Long theNextLong) { + private void callPerformanceTracingHook(JpaPid theNextPid) { HookParams params = new HookParams() .add(Integer.class, System.identityHashCode(this)) - .add(Object.class, theNextLong); + .add(Object.class, theNextPid); myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java index 74420709fef3..f9920f1d0237 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java @@ -24,7 +24,6 @@ import jakarta.annotation.Nonnull; import org.apache.commons.lang3.Validate; -import java.util.Iterator; import java.util.List; public class SearchQueryExecutors { @@ -46,7 +45,7 @@ public boolean hasNext() { } @Override - public Long next() { + public JpaPid next() { myCount += 1; return theExecutor.next(); } @@ -54,37 +53,7 @@ public Long next() { } @Nonnull - public static ISearchQueryExecutor from(List rawPids) { + public static ISearchQueryExecutor from(List rawPids) { return new ResolvedSearchQueryExecutor(rawPids); } - - public static ISearchQueryExecutor from(Iterator theIterator) { - return new JpaPidQueryAdaptor(theIterator); - } - - public static ISearchQueryExecutor from(Iterable theIterable) { - return new JpaPidQueryAdaptor(theIterable.iterator()); - } - - static class JpaPidQueryAdaptor implements ISearchQueryExecutor { - final Iterator myIterator; - - JpaPidQueryAdaptor(Iterator theIterator) { - myIterator = theIterator; - } - - @Override - public void close() {} - - @Override - public boolean 
hasNext() { - return myIterator.hasNext(); - } - - @Override - public Long next() { - JpaPid next = myIterator.next(); - return next == null ? null : next.getId(); - } - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/ResolvedSearchQueryExecutor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/ResolvedSearchQueryExecutor.java index 6a7fd75b2c19..d41a1579587d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/ResolvedSearchQueryExecutor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/ResolvedSearchQueryExecutor.java @@ -19,6 +19,7 @@ */ package ca.uhn.fhir.jpa.search.builder.models; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor; import jakarta.annotation.Nonnull; @@ -26,18 +27,18 @@ import java.util.List; public class ResolvedSearchQueryExecutor implements ISearchQueryExecutor { - private final Iterator myIterator; + private final Iterator myIterator; - public ResolvedSearchQueryExecutor(Iterable theIterable) { + public ResolvedSearchQueryExecutor(Iterable theIterable) { this(theIterable.iterator()); } - public ResolvedSearchQueryExecutor(Iterator theIterator) { + public ResolvedSearchQueryExecutor(Iterator theIterator) { myIterator = theIterator; } @Nonnull - public static ResolvedSearchQueryExecutor from(List rawPids) { + public static ResolvedSearchQueryExecutor from(List rawPids) { return new ResolvedSearchQueryExecutor(rawPids); } @@ -47,7 +48,7 @@ public boolean hasNext() { } @Override - public Long next() { + public JpaPid next() { return myIterator.next(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java index 997ea3296b66..efbe336ce13f 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java @@ -21,6 +21,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.util.QueryParameterUtils; import com.healthmarketscience.sqlbuilder.Condition; @@ -31,6 +32,7 @@ import jakarta.annotation.Nullable; import org.apache.commons.lang3.Validate; +import java.util.Collection; import java.util.List; import java.util.stream.Collectors; @@ -96,15 +98,16 @@ public Condition createPartitionIdPredicate(RequestPartitionId theRequestPartiti } } - public Condition createPredicateResourceIds(boolean theInverse, List theResourceIds) { + public Condition createPredicateResourceIds(boolean theInverse, Collection theResourceIds) { Validate.notNull(theResourceIds, "theResourceIds must not be null"); - // Handle the _id parameter by adding it to the tail - Condition inResourceIds = - QueryParameterUtils.toEqualToOrInPredicate(getResourceIdColumn(), generatePlaceholders(theResourceIds)); + Condition inResourceIds = QueryParameterUtils.toEqualToOrInPredicate( + getResourceIdColumn(), generatePlaceholders(JpaPid.toLongList(theResourceIds))); if (theInverse) { inResourceIds = new NotCondition(inResourceIds); } + + // Handle the _id parameter by adding it to the tail return inResourceIds; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchViewDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ISourcePredicateBuilder.java similarity index 53% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchViewDao.java rename to 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ISourcePredicateBuilder.java index 4fe985a5a27b..db284b9989ac 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchViewDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ISourcePredicateBuilder.java @@ -17,17 +17,20 @@ * limitations under the License. * #L% */ -package ca.uhn.fhir.jpa.dao.data; +package ca.uhn.fhir.jpa.search.builder.predicate; -import ca.uhn.fhir.jpa.entity.ResourceSearchView; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.data.jpa.repository.Query; -import org.springframework.data.repository.query.Param; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.model.api.IQueryParameterType; +import com.healthmarketscience.sqlbuilder.Condition; -import java.util.Collection; +public interface ISourcePredicateBuilder { -public interface IResourceSearchViewDao extends JpaRepository, IHapiFhirJpaRepository { + Condition createPredicateMissingSourceUri(); - @Query("SELECT v FROM ResourceSearchView v WHERE v.myResourceId in (:pids)") - Collection findByResourceIds(@Param("pids") Collection pids); + Condition createPredicateSourceUri(String theSourceUri); + + Condition createPredicateRequestId(String theRequestId); + + Condition createPredicateSourceUriWithModifiers( + IQueryParameterType theQueryParameter, JpaStorageSettings theStorageSetting, String theSourceUri); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SourcePredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryPredicateBuilder.java similarity index 93% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SourcePredicateBuilder.java rename to 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryPredicateBuilder.java index 42998c2316bb..b918dc401ccf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SourcePredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryPredicateBuilder.java @@ -40,7 +40,7 @@ import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftAndRightMatchLikeExpression; import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftMatchLikeExpression; -public class SourcePredicateBuilder extends BaseJoiningPredicateBuilder { +public class ResourceHistoryPredicateBuilder extends BaseJoiningPredicateBuilder implements ISourcePredicateBuilder { private final DbColumn myColumnSourceUri; private final DbColumn myColumnRequestId; @@ -49,10 +49,10 @@ public class SourcePredicateBuilder extends BaseJoiningPredicateBuilder { /** * Constructor */ - public SourcePredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { - super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_RES_VER_PROV")); + public ResourceHistoryPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { + super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_RES_VER")); - myResourceIdColumn = getTable().addColumn("RES_PID"); + myResourceIdColumn = getTable().addColumn("RES_ID"); myColumnSourceUri = getTable().addColumn("SOURCE_URI"); myColumnRequestId = getTable().addColumn("REQUEST_ID"); } @@ -62,14 +62,17 @@ public DbColumn getResourceIdColumn() { return myResourceIdColumn; } + @Override public Condition createPredicateSourceUri(String theSourceUri) { return BinaryCondition.equalTo(myColumnSourceUri, generatePlaceholder(theSourceUri)); } + @Override public Condition createPredicateMissingSourceUri() { return UnaryCondition.isNull(myColumnSourceUri); } + @Override public Condition 
createPredicateSourceUriWithModifiers( IQueryParameterType theQueryParameter, JpaStorageSettings theStorageSetting, String theSourceUri) { if (theQueryParameter.getMissing() != null && !theQueryParameter.getMissing()) { @@ -117,6 +120,7 @@ private Condition createPredicateSourceContains(JpaStorageSettings theStorageSet } } + @Override public Condition createPredicateRequestId(String theRequestId) { return BinaryCondition.equalTo(myColumnRequestId, generatePlaceholder(theRequestId)); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryProvenancePredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryProvenancePredicateBuilder.java new file mode 100644 index 000000000000..cc77e7969ba9 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryProvenancePredicateBuilder.java @@ -0,0 +1,128 @@ +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.search.builder.predicate; + +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; +import ca.uhn.fhir.jpa.util.QueryParameterUtils; +import ca.uhn.fhir.model.api.IQueryParameterType; +import ca.uhn.fhir.rest.param.UriParam; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; +import ca.uhn.fhir.util.StringUtil; +import ca.uhn.fhir.util.UrlUtil; +import com.healthmarketscience.sqlbuilder.BinaryCondition; +import com.healthmarketscience.sqlbuilder.Condition; +import com.healthmarketscience.sqlbuilder.FunctionCall; +import com.healthmarketscience.sqlbuilder.UnaryCondition; +import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn; + +import java.util.List; + +import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftAndRightMatchLikeExpression; +import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftMatchLikeExpression; + +public class ResourceHistoryProvenancePredicateBuilder extends BaseJoiningPredicateBuilder + implements ISourcePredicateBuilder { + + private final DbColumn myColumnSourceUri; + private final DbColumn myColumnRequestId; + private final DbColumn myResourceIdColumn; + + /** + * Constructor + */ + public ResourceHistoryProvenancePredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { + super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_RES_VER_PROV")); + + myResourceIdColumn = getTable().addColumn("RES_PID"); + myColumnSourceUri = getTable().addColumn("SOURCE_URI"); + myColumnRequestId = getTable().addColumn("REQUEST_ID"); + } + + @Override + public DbColumn getResourceIdColumn() { + return myResourceIdColumn; + } + + @Override + public Condition createPredicateSourceUri(String theSourceUri) { + return BinaryCondition.equalTo(myColumnSourceUri, 
generatePlaceholder(theSourceUri)); + } + + @Override + public Condition createPredicateMissingSourceUri() { + return UnaryCondition.isNull(myColumnSourceUri); + } + + @Override + public Condition createPredicateSourceUriWithModifiers( + IQueryParameterType theQueryParameter, JpaStorageSettings theStorageSetting, String theSourceUri) { + if (theQueryParameter.getMissing() != null && !theQueryParameter.getMissing()) { + return UnaryCondition.isNotNull(myColumnSourceUri); + } else if (theQueryParameter instanceof UriParam && theQueryParameter.getQueryParameterQualifier() != null) { + UriParam uriParam = (UriParam) theQueryParameter; + switch (uriParam.getQualifier()) { + case ABOVE: + return createPredicateSourceAbove(theSourceUri); + case BELOW: + return createPredicateSourceBelow(theSourceUri); + case CONTAINS: + return createPredicateSourceContains(theStorageSetting, theSourceUri); + default: + throw new InvalidRequestException(Msg.code(2569) + + String.format( + "Unsupported qualifier specified, qualifier=%s", + theQueryParameter.getQueryParameterQualifier())); + } + } else { + return createPredicateSourceUri(theSourceUri); + } + } + + private Condition createPredicateSourceAbove(String theSourceUri) { + List aboveUriCandidates = UrlUtil.getAboveUriCandidates(theSourceUri); + List aboveUriPlaceholders = generatePlaceholders(aboveUriCandidates); + return QueryParameterUtils.toEqualToOrInPredicate(myColumnSourceUri, aboveUriPlaceholders); + } + + private Condition createPredicateSourceBelow(String theSourceUri) { + String belowLikeExpression = createLeftMatchLikeExpression(theSourceUri); + return BinaryCondition.like(myColumnSourceUri, generatePlaceholder(belowLikeExpression)); + } + + private Condition createPredicateSourceContains(JpaStorageSettings theStorageSetting, String theSourceUri) { + if (theStorageSetting.isAllowContainsSearches()) { + FunctionCall upperFunction = new FunctionCall("UPPER"); + upperFunction.addCustomParams(myColumnSourceUri); + String 
normalizedString = StringUtil.normalizeStringForSearchIndexing(theSourceUri); + String containsLikeExpression = createLeftAndRightMatchLikeExpression(normalizedString); + return BinaryCondition.like(upperFunction, generatePlaceholder(containsLikeExpression)); + } else { + throw new MethodNotAllowedException(Msg.code(2570) + ":contains modifier is disabled on this server"); + } + } + + @Override + public Condition createPredicateRequestId(String theRequestId) { + return BinaryCondition.equalTo(myColumnRequestId, generatePlaceholder(theRequestId)); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java index b58b0acf2ab2..dd2b7ccae0dc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java @@ -25,6 +25,8 @@ import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser; import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.search.builder.sql.ColumnTupleObject; +import ca.uhn.fhir.jpa.search.builder.sql.JpaPidValueTuples; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.model.api.IQueryParameterType; @@ -129,27 +131,34 @@ public Condition createPredicateResourceId( assert operation == SearchFilterParser.CompareOperation.eq || operation == SearchFilterParser.CompareOperation.ne; - List resourceIds = JpaPid.toLongList(allOrPids); if (theSourceJoinColumn == null) { BaseJoiningPredicateBuilder queryRootTable = super.getOrCreateQueryRootTable(true); Condition predicate; switch (operation) { default: case eq: - predicate = 
queryRootTable.createPredicateResourceIds(false, resourceIds); + predicate = queryRootTable.createPredicateResourceIds(false, allOrPids); break; case ne: - predicate = queryRootTable.createPredicateResourceIds(true, resourceIds); + predicate = queryRootTable.createPredicateResourceIds(true, allOrPids); break; } predicate = queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); return predicate; } else { - DbColumn resIdColumn = getResourceIdColumn(theSourceJoinColumn); - return QueryParameterUtils.toEqualToOrInPredicate( - resIdColumn, - generatePlaceholders(resourceIds), - operation == SearchFilterParser.CompareOperation.ne); + if (getSearchQueryBuilder().isIncludePartitionIdInJoins()) { + ColumnTupleObject left = new ColumnTupleObject(theSourceJoinColumn); + JpaPidValueTuples right = JpaPidValueTuples.from(getSearchQueryBuilder(), allOrPids); + return QueryParameterUtils.toInPredicate( + left, right, operation == SearchFilterParser.CompareOperation.ne); + } else { + DbColumn resIdColumn = getResourceIdColumn(theSourceJoinColumn); + List resourceIds = JpaPid.toLongList(allOrPids); + return QueryParameterUtils.toEqualToOrInPredicate( + resIdColumn, + generatePlaceholders(resourceIds), + operation == SearchFilterParser.CompareOperation.ne); + } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java index 106900cad4c5..4dc5aac3fa7a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java @@ -43,6 +43,8 @@ import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl; import ca.uhn.fhir.jpa.search.builder.QueryStack; import 
ca.uhn.fhir.jpa.search.builder.models.MissingQueryParameterPredicateParams; +import ca.uhn.fhir.jpa.search.builder.sql.ColumnTupleObject; +import ca.uhn.fhir.jpa.search.builder.sql.JpaPidValueTuples; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams; @@ -65,6 +67,7 @@ import com.healthmarketscience.sqlbuilder.BinaryCondition; import com.healthmarketscience.sqlbuilder.ComboCondition; import com.healthmarketscience.sqlbuilder.Condition; +import com.healthmarketscience.sqlbuilder.InCondition; import com.healthmarketscience.sqlbuilder.NotCondition; import com.healthmarketscience.sqlbuilder.SelectQuery; import com.healthmarketscience.sqlbuilder.UnaryCondition; @@ -79,7 +82,6 @@ import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; @@ -814,14 +816,20 @@ private InvalidRequestException newInvalidResourceTypeException(String theResour @Nonnull public Condition createEverythingPredicate( - String theResourceName, List theSourceResourceNames, Long... theTargetPids) { + String theResourceName, List theSourceResourceNames, JpaPid... 
theTargetPids) { Condition condition; if (theTargetPids != null && theTargetPids.length >= 1) { // if resource ids are provided, we'll create the predicate // with ids in or equal to this value - condition = QueryParameterUtils.toEqualToOrInPredicate( - myColumnTargetResourceId, generatePlaceholders(Arrays.asList(theTargetPids))); + if (getSearchQueryBuilder().isIncludePartitionIdInJoins()) { + Object left = ColumnTupleObject.from(getJoinColumnsForTarget()); + JpaPidValueTuples right = JpaPidValueTuples.from(getSearchQueryBuilder(), theTargetPids); + condition = new InCondition(left, right); + } else { + condition = QueryParameterUtils.toEqualToOrInPredicate( + myColumnTargetResourceId, generatePlaceholders(JpaPid.toLongList(theTargetPids))); + } } else { // ... otherwise we look for resource types condition = BinaryCondition.equalTo(myColumnTargetResourceType, generatePlaceholder(theResourceName)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/JpaPidValueTuples.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/JpaPidValueTuples.java new file mode 100644 index 000000000000..4ca7ef8814f4 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/JpaPidValueTuples.java @@ -0,0 +1,87 @@ +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.search.builder.sql; + +import ca.uhn.fhir.jpa.model.dao.JpaPid; +import com.healthmarketscience.common.util.AppendableExt; +import com.healthmarketscience.sqlbuilder.Expression; +import com.healthmarketscience.sqlbuilder.ValidationContext; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; + +/** + * Outputs an SQL tuple for a collection of JpaPids, consisting of + * ((resId,partitionId),(resId,partitionId),(resId,partitionId),...) + */ +public class JpaPidValueTuples extends Expression { + + private final Collection myValues; + + public JpaPidValueTuples(Collection theValues) { + myValues = theValues; + } + + @Override + protected void collectSchemaObjects(ValidationContext vContext) { + // nothing + } + + @Override + public void appendTo(AppendableExt app) throws IOException { + app.append('('); + + String value; + for (Iterator iter = myValues.iterator(); iter.hasNext(); ) { + if (hasParens()) { + app.append("('"); + } + value = iter.next(); + app.append(value); + app.append("','"); + value = iter.next(); + app.append(value); + app.append("')"); + if (iter.hasNext()) { + app.append(','); + } + } + if (hasParens()) { + app.append(')'); + } + } + + public static JpaPidValueTuples from(SearchQueryBuilder theSearchQueryBuilder, JpaPid[] thePids) { + return from(theSearchQueryBuilder, Arrays.asList(thePids)); + } + + public static JpaPidValueTuples from(SearchQueryBuilder theSearchQueryBuilder, Collection thePids) { + List placeholders = new ArrayList<>(thePids.size() * 2); + for (JpaPid next : thePids) { + placeholders.add(theSearchQueryBuilder.generatePlaceholder(next.getPartitionId())); + placeholders.add(theSearchQueryBuilder.generatePlaceholder(next.getId())); + } + return new JpaPidValueTuples(placeholders); + } +} diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java index 188c6f970a7d..c6637b3b3b66 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java @@ -35,11 +35,12 @@ import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryProvenancePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder; -import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder; @@ -282,9 +283,20 @@ public QuantityNormalizedPredicateBuilder addQuantityNormalizedPredicateBuilder( /** * Add and return a predicate builder (or a root query if no root query exists yet) for selecting on a _source search parameter */ - public SourcePredicateBuilder addSourcePredicateBuilder( + public ResourceHistoryProvenancePredicateBuilder addResourceHistoryProvenancePredicateBuilder( @Nullable DbColumn[] theSourceJoinColumn, SelectQuery.JoinType theJoinType) { - SourcePredicateBuilder retVal = 
mySqlBuilderFactory.newSourcePredicateBuilder(this); + ResourceHistoryProvenancePredicateBuilder retVal = + mySqlBuilderFactory.newResourceHistoryProvenancePredicateBuilder(this); + addTable(retVal, theSourceJoinColumn, theJoinType); + return retVal; + } + + /** + * Add and return a predicate builder (or a root query if no root query exists yet) for selecting on a _source search parameter + */ + public ResourceHistoryPredicateBuilder addResourceHistoryPredicateBuilder( + @Nullable DbColumn[] theSourceJoinColumn, SelectQuery.JoinType theJoinType) { + ResourceHistoryPredicateBuilder retVal = mySqlBuilderFactory.newResourceHistoryPredicateBuilder(this); addTable(retVal, theSourceJoinColumn, theJoinType); return retVal; } @@ -823,9 +835,11 @@ private boolean isNotEqualsComparator(DateRangeParam theDateRange) { return false; } - public void addResourceIdsPredicate(List thePidList) { + public void addResourceIdsPredicate(List thePidList) { + List pidList = thePidList.stream().map(JpaPid::getId).collect(Collectors.toList()); + DbColumn resourceIdColumn = getOrCreateFirstPredicateBuilder().getResourceIdColumn(); - InCondition predicate = new InCondition(resourceIdColumn, generatePlaceholders(thePidList)); + InCondition predicate = new InCondition(resourceIdColumn, generatePlaceholders(pidList)); addPredicate(predicate); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java index f636ab7eb4cb..bea9f964a544 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java @@ -21,6 +21,7 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import 
ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor; import ca.uhn.fhir.jpa.util.ScrollableResultsIterator; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -42,7 +43,7 @@ public class SearchQueryExecutor implements ISearchQueryExecutor { - private static final Long NO_MORE = -1L; + private static final JpaPid NO_MORE = JpaPid.fromId(-1L); private static final SearchQueryExecutor NO_VALUE_EXECUTOR = new SearchQueryExecutor(); private static final Object[] EMPTY_OBJECT_ARRAY = new Object[0]; private static final Logger ourLog = LoggerFactory.getLogger(SearchQueryExecutor.class); @@ -53,7 +54,7 @@ public class SearchQueryExecutor implements ISearchQueryExecutor { private boolean myQueryInitialized; private ScrollableResultsIterator myResultSet; - private Long myNext; + private JpaPid myNext; /** * Constructor @@ -86,10 +87,10 @@ public boolean hasNext() { } @Override - public Long next() { + public JpaPid next() { fetchNext(); Validate.isTrue(hasNext(), "Can not call next() right now, no data remains"); - Long next = myNext; + JpaPid next = myNext; myNext = null; return next; } @@ -155,17 +156,17 @@ private void fetchNext() { } } - private long getNextPid(ScrollableResultsIterator theResultSet) { + private JpaPid getNextPid(ScrollableResultsIterator theResultSet) { Object nextRow = Objects.requireNonNull(theResultSet.next()); // We should typically get two columns back, the first is the partition ID and the second // is the resource ID. But if we're doing a count query, we'll get a single column in an array // or maybe even just a single non array value depending on how the platform handles it. 
if (nextRow instanceof Number) { - return ((Number) nextRow).longValue(); + return JpaPid.fromId(((Number) nextRow).longValue()); } else { Object[] nextRowAsArray = (Object[]) nextRow; if (nextRowAsArray.length == 1) { - return (Long) nextRowAsArray[0]; + return JpaPid.fromId((Long) nextRowAsArray[0]); } else { int i; // TODO MB add a strategy object to GeneratedSql to describe the result set. @@ -181,9 +182,11 @@ private long getNextPid(ScrollableResultsIterator theResultSet) { // - partition_id, res_id, coord-dist // Assume res_id is first Long in row, and is in first two columns if (nextRowAsArray[0] instanceof Long) { - return (long) nextRowAsArray[0]; + return JpaPid.fromId((Long) nextRowAsArray[0]); } else { - return (long) nextRowAsArray[1]; + Integer partitionId = (Integer) nextRowAsArray[0]; + Long pid = (Long) nextRowAsArray[1]; + return JpaPid.fromId(pid, partitionId); } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java index 23b13b1e82e9..76f8aa9232c7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java @@ -27,11 +27,12 @@ import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryProvenancePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import 
ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder; -import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder; @@ -109,8 +110,13 @@ public TagPredicateBuilder newTagPredicateBuilder(SearchQueryBuilder theSearchSq return myApplicationContext.getBean(TagPredicateBuilder.class, theSearchSqlBuilder); } - public SourcePredicateBuilder newSourcePredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { - return myApplicationContext.getBean(SourcePredicateBuilder.class, theSearchSqlBuilder); + public ResourceHistoryPredicateBuilder newResourceHistoryPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { + return myApplicationContext.getBean(ResourceHistoryPredicateBuilder.class, theSearchSqlBuilder); + } + + public ResourceHistoryProvenancePredicateBuilder newResourceHistoryProvenancePredicateBuilder( + SearchQueryBuilder theSearchSqlBuilder) { + return myApplicationContext.getBean(ResourceHistoryProvenancePredicateBuilder.class, theSearchSqlBuilder); } public SearchQueryExecutor newSearchQueryExecutor(GeneratedSql theGeneratedSql, Integer theMaxResultsToFetch) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java index d280d5501736..99001ba9e5c7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java @@ -68,7 +68,7 @@ public List fetchResultPids( ourLog.debug("fetchResultPids for range {}-{} returned 
{} pids", theFrom, theTo, retVal.size()); - return JpaPid.fromLongList(retVal); + return ISearchResultDao.toJpaPidList(retVal); }); } @@ -81,7 +81,7 @@ public List fetchAllResultPids( .execute(() -> { List retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId()); ourLog.trace("fetchAllResultPids returned {} pids", retVal.size()); - return JpaPid.fromLongList(retVal); + return ISearchResultDao.toJpaPidList(retVal); }); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java index 5061b260fa45..c1732aa6c6fe 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java @@ -177,7 +177,7 @@ private Parameters reindexInTransaction(RequestDetails theRequestDetails, IIdTyp List messages = new ArrayList<>(); - JpaPid pid = JpaPid.fromId(entity.getId()); + JpaPid pid = entity.getPersistentId(); ReindexOutcome outcome = dao.reindex(pid, new ReindexParameters(), theRequestDetails, new TransactionDetails()); messages.add("Reindex completed in " + sw); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java index d3507b3a7d1f..38ec8e0812a8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java @@ -27,7 +27,6 @@ import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; -import ca.uhn.fhir.jpa.model.dao.JpaPid; import 
ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -61,16 +60,10 @@ public ResourceReindexer(FhirContext theFhirContext) { myFhirContext = theFhirContext; } - public void readAndReindexResourceByPid(Long theResourcePid) { - ResourceTable resourceTable = - myResourceTableDao.findById(theResourcePid).orElseThrow(IllegalStateException::new); - reindexResourceEntity(resourceTable); - } - public void reindexResourceEntity(ResourceTable theResourceTable) { IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResourceTable.getResourceType()); long expectedVersion = theResourceTable.getVersion(); - IBaseResource resource = dao.readByPid(JpaPid.fromId(theResourceTable.getId()), true); + IBaseResource resource = dao.readByPid(theResourceTable.getPersistentId(), true); if (resource == null) { throw new InternalErrorException(Msg.code(1171) + "Could not find resource version " diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java index 7658dc4f0c8e..6bf97cbcfd8a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java @@ -23,11 +23,11 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; -import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.sched.HapiJob; 
import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs; @@ -79,9 +79,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; /** - * @see ca.uhn.fhir.jpa.reindex.job.ReindexJobConfig * @deprecated Use the Batch2 {@link ca.uhn.fhir.batch2.api.IJobCoordinator#startInstance(JobInstanceStartRequest)} instead. */ +@SuppressWarnings({"removal", "DeprecatedIsStillUsed"}) @Deprecated public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasScheduledJobs { @@ -107,9 +107,6 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc @Autowired private IResourceTableDao myResourceTableDao; - @Autowired - private DaoRegistry myDaoRegistry; - @Autowired private FhirContext myContext; @@ -261,10 +258,10 @@ public void cancelAndPurgeAllJobs() { private int runReindexJobs() { Collection jobs = getResourceReindexJobEntities(); - if (jobs.size() > 0) { + if (!jobs.isEmpty()) { ourLog.info("Running {} reindex jobs: {}", jobs.size(), jobs); } else { - ourLog.debug("Running {} reindex jobs: {}", jobs.size(), jobs); + ourLog.debug("Running 0 reindex jobs"); return 0; } @@ -356,7 +353,7 @@ private int runReindexJob(ResourceReindexJobEntity theJob) { // Submit each resource requiring reindexing List> futures = range.stream() - .map(t -> myTaskExecutor.submit(new ResourceReindexingTask(t, counter))) + .map(t -> myTaskExecutor.submit(new ResourceReindexingTask(JpaPid.fromId(t), counter))) .collect(Collectors.toList()); Date latestDate = null; @@ -429,62 +426,64 @@ private void expungeJobsMarkedAsDeleted() { }); } - private void markResourceAsIndexingFailed(final long theId) { + private void markResourceAsIndexingFailed(final JpaPid theId) { TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); txTemplate.execute((TransactionCallback) theStatus -> { ourLog.info("Marking resource with PID {} as indexing_failed", theId); - 
myResourceTableDao.updateIndexStatus(theId, BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED); + myResourceTableDao.updateIndexStatus(theId.getId(), BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED); - Query q = myEntityManager.createQuery("DELETE FROM ResourceTag t WHERE t.myResourceId = :id"); - q.setParameter("id", theId); + Query q = myEntityManager.createQuery("DELETE FROM ResourceTag t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( - "DELETE FROM ResourceIndexedSearchParamCoords t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamCoords t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamDate t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + q = myEntityManager.createQuery( + "DELETE FROM ResourceIndexedSearchParamDate t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( - "DELETE FROM ResourceIndexedSearchParamNumber t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamNumber t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( - "DELETE FROM ResourceIndexedSearchParamQuantity t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamQuantity t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( - "DELETE FROM ResourceIndexedSearchParamQuantityNormalized t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamQuantityNormalized t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( 
- "DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( - "DELETE FROM ResourceIndexedSearchParamToken t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamToken t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamUri t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + q = myEntityManager.createQuery( + "DELETE FROM ResourceIndexedSearchParamUri t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.mySourceResourcePid = :id"); - q.setParameter("id", theId); + q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.mySourceResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.myTargetResourcePid = :id"); - q.setParameter("id", theId); + q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.myTargetResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); return null; @@ -492,11 +491,11 @@ private void markResourceAsIndexingFailed(final long theId) { } private class ResourceReindexingTask implements Callable { - private final Long myNextId; + private final JpaPid myNextId; private final AtomicInteger myCounter; private Date myUpdated; - ResourceReindexingTask(Long theNextId, AtomicInteger theCounter) { + ResourceReindexingTask(JpaPid theNextId, AtomicInteger theCounter) { myNextId = theNextId; myCounter = theCounter; } @@ -534,7 +533,7 @@ private Throwable readResourceAndReindex() { Throwable reindexFailure; 
reindexFailure = myTxTemplate.execute(t -> { ResourceTable resourceTable = - myResourceTableDao.findById(myNextId).orElseThrow(IllegalStateException::new); + myResourceTableDao.findById(myNextId.getId()).orElseThrow(IllegalStateException::new); myUpdated = resourceTable.getUpdatedDate(); try { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/InClauseNormalizer.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/InClauseNormalizer.java index 8d8227ac4f91..5825f09987d1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/InClauseNormalizer.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/InClauseNormalizer.java @@ -19,6 +19,8 @@ */ package ca.uhn.fhir.jpa.util; +import ca.uhn.fhir.jpa.model.dao.JpaPid; + import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -33,9 +35,9 @@ */ public class InClauseNormalizer { - public static List normalizeIdListForInClause(List theResourceIds) { + public static List normalizeIdListForInClause(List theResourceIds) { - List retVal = theResourceIds; + List retVal = theResourceIds; int listSize = theResourceIds.size(); @@ -56,8 +58,8 @@ public static List normalizeIdListForInClause(List theResourceIds) { return retVal; } - private static List padIdListWithPlaceholders(List theIdList, int preferredListSize) { - List retVal = theIdList; + private static List padIdListWithPlaceholders(List theIdList, int preferredListSize) { + List retVal = theIdList; if (isUnmodifiableList(theIdList)) { retVal = new ArrayList<>(preferredListSize); @@ -65,13 +67,13 @@ private static List padIdListWithPlaceholders(List theIdList, int pr } while (retVal.size() < preferredListSize) { - retVal.add(-1L); + retVal.add(JpaPid.fromId(-1L, null)); } return retVal; } - private static boolean isUnmodifiableList(List theList) { + private static boolean isUnmodifiableList(List theList) { try { theList.addAll(Collections.emptyList()); } catch (Exception e) { diff 
--git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java index 36b0b43a265d..e3fb62fc185c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java @@ -26,6 +26,8 @@ import ca.uhn.fhir.jpa.entity.SearchTypeEnum; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.search.SearchStatusEnum; +import ca.uhn.fhir.jpa.search.builder.sql.ColumnTupleObject; +import ca.uhn.fhir.jpa.search.builder.sql.JpaPidValueTuples; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.model.primitive.InstantDt; @@ -118,6 +120,12 @@ public static Condition toAndPredicate(Condition... theAndPredicates) { return toAndPredicate(Arrays.asList(theAndPredicates)); } + @Nonnull + public static Condition toInPredicate( + ColumnTupleObject theColumns, JpaPidValueTuples theValues, boolean theInverse) { + return new InCondition(theColumns, theValues).setNegate(theInverse); + } + @Nonnull public static Condition toEqualToOrInPredicate( DbColumn theColumn, List theValuePlaceholders, boolean theInverse) { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java index f6564c8c53ef..ced03f9570c2 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java @@ -3,6 +3,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import 
ca.uhn.fhir.rest.server.util.FhirContextSearchParamRegistry; import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import org.junit.jupiter.api.BeforeEach; @@ -45,7 +46,7 @@ public void beforeEach() { @Test void testCalculateIndexUriIdentityHashesForResourceTypes_Include_Null() { - Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(null, false); + Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(new SystemRequestDetails(), null, false).myHashIdentityValues; // There are only 12 resource types that actually can be linked to by the QuestionnaireResponse // resource via canonical references in any parameters assertThat(types).hasSize(1); @@ -54,14 +55,14 @@ void testCalculateIndexUriIdentityHashesForResourceTypes_Include_Null() { @Test void testCalculateIndexUriIdentityHashesForResourceTypes_Include_Nonnull() { Set inputTypes = Set.of("Questionnaire"); - Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(inputTypes, false); + Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(new SystemRequestDetails(), inputTypes, false).myHashIdentityValues; // Just the one that we actually specified assertThat(types).hasSize(1); } @Test void testCalculateIndexUriIdentityHashesForResourceTypes_RevInclude_Null() { - Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(null, true); + Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(new SystemRequestDetails(), null, true).myHashIdentityValues; // Revincludes are really hard to figure out the potential resource types for, so we just need to // use all active resource types assertThat(types).hasSize(146); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutorsTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutorsTest.java index ef6397e8189c..d0b7771fc241 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutorsTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutorsTest.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.jpa.search.builder; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import org.junit.jupiter.api.Test; import java.util.Arrays; @@ -14,7 +15,7 @@ class SearchQueryExecutorsTest { @Test public void adaptFromLongArrayYieldsAllValues() { - List listWithValues = Arrays.asList(1L,2L,3L,4L,5L); + List listWithValues = JpaPid.fromLongList(Arrays.asList(1L,2L,3L,4L,5L)); ISearchQueryExecutor queryExecutor = SearchQueryExecutors.from(listWithValues); @@ -24,7 +25,7 @@ public void adaptFromLongArrayYieldsAllValues() { @Test public void limitedCountDropsTrailingTest() { // given - List vals = Arrays.asList(1L,2L,3L,4L,5L); + List vals = JpaPid.fromLongList(Arrays.asList(1L,2L,3L,4L,5L)); ISearchQueryExecutor target = SearchQueryExecutors.from(vals); ISearchQueryExecutor queryExecutor = SearchQueryExecutors.limited(target, 3); @@ -35,7 +36,7 @@ public void limitedCountDropsTrailingTest() { @Test public void limitedCountExhaustsBeforeLimitOkTest() { // given - List vals = Arrays.asList(1L,2L,3L); + List vals = JpaPid.fromLongList(Arrays.asList(1L,2L,3L)); ISearchQueryExecutor target = SearchQueryExecutors.from(vals); ISearchQueryExecutor queryExecutor = SearchQueryExecutors.limited(target, 5); @@ -46,6 +47,7 @@ public void limitedCountExhaustsBeforeLimitOkTest() { private List drain(ISearchQueryExecutor theQueryExecutor) { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(theQueryExecutor, 0), false) + .map(JpaPid::getId) .collect(Collectors.toList()); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/util/InClauseNormalizerTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/util/InClauseNormalizerTest.java index bd2ed0603ee9..8d9ff3bea522 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/util/InClauseNormalizerTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/util/InClauseNormalizerTest.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.util; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.util.InClauseNormalizer; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; @@ -14,16 +15,16 @@ import static org.assertj.core.api.Assertions.assertThat; public class InClauseNormalizerTest { - private static final Long ourResourceId = 1L; - private static final Long ourPaddingValue = -1L; + private static final JpaPid ourResourceId = JpaPid.fromId(1L); + private static final JpaPid ourPaddingValue = JpaPid.fromId(-1L); @ParameterizedTest @MethodSource("arguments") public void testNormalizeUnmodifiableList_willCreateNewListAndPadToSize(int theInitialListSize, int theExpectedNormalizedListSize) { - List initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId)); + List initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId)); initialList = unmodifiableList(initialList); - List normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList); + List normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList); assertNormalizedList(initialList, normalizedList, theInitialListSize, theExpectedNormalizedListSize); } @@ -31,23 +32,23 @@ public void testNormalizeUnmodifiableList_willCreateNewListAndPadToSize(int theI @ParameterizedTest @MethodSource("arguments") public void testNormalizeListToSizeAndPad(int theInitialListSize, int theExpectedNormalizedListSize) { - List initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId)); + List initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId)); - List normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList); + List normalizedList = 
InClauseNormalizer.normalizeIdListForInClause(initialList); assertNormalizedList(initialList, normalizedList, theInitialListSize, theExpectedNormalizedListSize); } - private void assertNormalizedList(List theInitialList, List theNormalizedList, int theInitialListSize, int theExpectedNormalizedListSize) { - List expectedPaddedSubList = new ArrayList<>(nCopies(theExpectedNormalizedListSize - theInitialListSize, ourPaddingValue)); + private void assertNormalizedList(List theInitialList, List theNormalizedList, int theInitialListSize, int theExpectedNormalizedListSize) { + List expectedPaddedSubList = new ArrayList<>(nCopies(theExpectedNormalizedListSize - theInitialListSize, ourPaddingValue)); assertThat(theNormalizedList).startsWith(listToArray(theInitialList)); assertThat(theNormalizedList).hasSize(theExpectedNormalizedListSize); assertThat(theNormalizedList).endsWith(listToArray(expectedPaddedSubList)); } - static Long[] listToArray(List theList) { - return theList.toArray(new Long[0]); + static JpaPid[] listToArray(List theList) { + return theList.toArray(new JpaPid[0]); } private static Stream arguments(){ diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java index df985ae93c5e..16677f9fccb7 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java @@ -37,7 +37,7 @@ import java.util.Collection; import java.util.Collections; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -77,7 +77,16 @@ public MdmCandidateSearchSvc() {} @Transactional public Collection findCandidates( String theResourceType, IAnyResource theResource, RequestPartitionId theRequestPartitionId) { - Map 
matchedPidsToResources = new HashMap<>(); + + /* + * This is a LinkedHashMap only because a number of Smile MDM unit tests depend on + * the order of candidates being returned in an order consistent with the order they + * were created. Before we added the partition ID to the hashCode() of JpaPid this + * seemed to happen naturally by complete coincidence, but after that change it + * stopped happening. So now a linked hashmap is used instead. + */ + Map matchedPidsToResources = new LinkedHashMap<>(); + List filterSearchParams = myMdmSettings.getMdmRules().getCandidateFilterSearchParams(); List filterCriteria = buildFilterQuery(filterSearchParams, theResourceType); diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dao/JpaPid.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dao/JpaPid.java index c19566ae44aa..a09745812fdf 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dao/JpaPid.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dao/JpaPid.java @@ -21,23 +21,37 @@ import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.rest.api.server.storage.BaseResourcePersistentId; +import jakarta.annotation.Nonnull; +import org.apache.commons.collections4.ComparatorUtils; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Set; +import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; + /** * JPA implementation of IResourcePersistentId. JPA uses a Long as the primary key. This class should be used in any * context where the pid is known to be a Long. 
*/ -public class JpaPid extends BaseResourcePersistentId { +public class JpaPid extends BaseResourcePersistentId implements Comparable { private final Long myId; private PartitionablePartitionId myPartitionablePartitionId; + private static final Comparator COMPARATOR; + + static { + Comparator partitionComparator = + Comparator.comparing(t -> defaultIfNull(t.getPartitionId(), Integer.MIN_VALUE)); + Comparator idComparator = Comparator.comparing(t -> t.myId); + COMPARATOR = ComparatorUtils.chainedComparator(List.of(partitionComparator, idComparator)); + } + private JpaPid(Long theId) { super(null); myId = theId; @@ -67,6 +81,13 @@ public JpaPid setPartitionablePartitionId(PartitionablePartitionId thePartitiona return this; } + public void setPartitionId(Integer thePartitionId) { + if (myPartitionablePartitionId == null) { + myPartitionablePartitionId = new PartitionablePartitionId(); + } + myPartitionablePartitionId.setPartitionId(thePartitionId); + } + public static List toLongList(JpaPid[] thePids) { return toLongList(Arrays.asList(thePids)); } @@ -99,6 +120,12 @@ public static JpaPid fromId(Long theId) { return new JpaPid(theId); } + public static JpaPid fromId(Long theId, Integer thePartitionId) { + JpaPid retVal = new JpaPid(theId); + retVal.setPartitionablePartitionId(PartitionablePartitionId.with(thePartitionId, null)); + return retVal; + } + public static JpaPid fromIdAndVersion(Long theId, Long theVersion) { return new JpaPid(theId, theVersion); } @@ -115,14 +142,13 @@ public static JpaPid fromIdAndVersionAndResourceType(Long theId, Long theVersion public boolean equals(Object theO) { if (this == theO) return true; if (theO == null || getClass() != theO.getClass()) return false; - if (!super.equals(theO)) return false; JpaPid jpaPid = (JpaPid) theO; return myId.equals(jpaPid.myId); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), myId); + return Objects.hash(myId); } @Override @@ -135,9 +161,15 @@ public String toString() { return 
myId.toString(); } + @Override + public int compareTo(@Nonnull JpaPid theOther) { + return COMPARATOR.compare(this, theOther); + } + public Integer getPartitionId() { - // wipmb should we return null instead? - assert getPartitionablePartitionId() != null; + if (getPartitionablePartitionId() == null) { + return null; + } return getPartitionablePartitionId().getPartitionId(); } } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java index 60ffac66cbeb..b22e6e168dc9 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java @@ -36,6 +36,16 @@ import static ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable.SOURCE_URI_LENGTH; +/** + * This entity is deprecated - It stores the source URI and Request ID + * fields so that they can be indexed and searched discretely. In + * HAPI FHIR 6.8.0 we added equivalent columns to {@link ResourceHistoryTable} + * and started populating both those columns and the ones in this table. + * As of HAPI FHIR 8.0.0 we are no longer using this table unless + * the "AccessMetaSourceInformationFromProvenanceTable" on JpaStorageSettings + * is enabled (it's disabled by default). In the future we will remove + * this table entirely. 
+ */ @Table( name = "HFJ_RES_VER_PROV", indexes = { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java index 397b8371a492..d5fa5b0e4eb0 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java @@ -37,7 +37,6 @@ import jakarta.persistence.Lob; import jakarta.persistence.ManyToOne; import jakarta.persistence.OneToMany; -import jakarta.persistence.OneToOne; import jakarta.persistence.Table; import jakarta.persistence.Transient; import jakarta.persistence.UniqueConstraint; @@ -119,10 +118,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl @OptimisticLock(excluded = true) private ResourceEncodingEnum myEncoding; - @OneToOne( - mappedBy = "myResourceHistoryTable", - cascade = {CascadeType.REMOVE}) - private ResourceHistoryProvenanceEntity myProvenance; // TODO: This was added in 6.8.0 - In the future we should drop ResourceHistoryProvenanceEntity @Column(name = "SOURCE_URI", length = SOURCE_URI_LENGTH, nullable = true) private String mySourceUri; @@ -180,10 +175,6 @@ public void setResourceTextVc(String theResourceTextVc) { myResourceTextVc = theResourceTextVc; } - public ResourceHistoryProvenanceEntity getProvenance() { - return myProvenance; - } - public void addTag(ResourceTag theTag) { ResourceHistoryTag tag = new ResourceHistoryTag(this, theTag.getTag(), getPartitionId()); tag.setResourceType(theTag.getResourceType()); diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java index c78e68a4a902..ea6951b49817 100644 --- 
a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java @@ -31,6 +31,8 @@ import jakarta.persistence.ManyToOne; import jakarta.persistence.Table; import jakarta.persistence.UniqueConstraint; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; import org.hibernate.annotations.GenericGenerator; import java.io.Serializable; @@ -121,4 +123,17 @@ public void setResource(ResourceHistoryTable theResourceHistory) { public Long getId() { return myId; } + + @Override + public String toString() { + ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE); + b.append("id", getId()); + if (getPartitionId() != null) { + b.append("partId", getPartitionId().getPartitionId()); + } + b.append("versionId", myResourceHistoryPid); + b.append("resId", getResourceId()); + b.append("tag", getTag().getId()); + return b.build(); + } } diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchNoFtTest.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchNoFtTest.java index dda67d5cee54..7d6865fb4658 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchNoFtTest.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchNoFtTest.java @@ -3557,6 +3557,7 @@ public void testReplaceLinkSearchIndex() { myCaptureQueriesListener.clear(); myObservationDao.update(obs); + myCaptureQueriesListener.logSelectQueries(); assertEquals(10, myCaptureQueriesListener.countSelectQueries()); assertEquals(5, myCaptureQueriesListener.countUpdateQueries()); assertEquals(1, myCaptureQueriesListener.countInsertQueries()); diff --git 
a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java index 065afeeb6395..10961f819217 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java @@ -17,7 +17,6 @@ import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; import ca.uhn.fhir.jpa.dao.DaoTestUtils; -import ca.uhn.fhir.jpa.entity.ResourceSearchView; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; @@ -54,6 +53,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.ClasspathUtil; import com.google.common.collect.Lists; +import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.RandomStringUtils; import org.hl7.fhir.dstu3.model.Age; import org.hl7.fhir.dstu3.model.Attachment; @@ -110,6 +110,7 @@ import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallbackWithoutResult; import org.springframework.transaction.support.TransactionTemplate; +import org.testcontainers.shaded.org.bouncycastle.util.Arrays; import java.util.ArrayList; import java.util.Collections; @@ -613,15 +614,6 @@ public void testCreateResource_populatesResourceTableFhirIdField( .getSingleResult(); assertThat(historyCount).as("only create one history version").isEqualTo(1); - // make sure the search view works too - ResourceSearchView readBackView = myEntityManager - .createQuery("select v from ResourceSearchView v where v.myResourceId = :resId", ResourceSearchView.class) - .setParameter("resId", myMethodOutcome.getPersistentId().getId()) - 
.getSingleResult(); - assertThat(readBackView).as("found search view").isNotNull(); - - assertEquals(myExpectedId, readBackView.getFhirId(), - "fhir_id populated"); } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java index 04c3797f2d4d..c3d4e0ee79f4 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java @@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao; import ca.uhn.fhir.jpa.interceptor.UserRequestRetryVersionConflictsInterceptor; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.search.ResourceSearchUrlSvc; @@ -171,8 +172,8 @@ public void testMethodDeleteByResId_withEntries_willDeleteTheEntryIfExists(){ myResourceSearchUrlDao.saveAll(asList(entry1, entry2)); // when - myResourceSearchUrlSvc.deleteByResId(entry1.getResourcePid()); - myResourceSearchUrlSvc.deleteByResId(nonExistentResourceId); + myResourceSearchUrlSvc.deleteByResId(JpaPid.fromId(entry1.getResourcePid())); + myResourceSearchUrlSvc.deleteByResId(JpaPid.fromId(nonExistentResourceId)); // then List resourcesPids = getStoredResourceSearchUrlEntitiesPids(); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java index ef1b27147438..a6fbc97f2686 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java +++ 
b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java @@ -73,12 +73,12 @@ public void testDeleteMarksResourceAndVersionAsDeleted() { // Current version should be marked as deleted runInTransaction(() -> { - ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1); + ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1); assertNull(resourceTable.getDeleted()); assertNotNull(resourceTable.getPersistentId()); }); runInTransaction(() -> { - ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2); + ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 2); assertNotNull(resourceTable.getDeleted()); }); @@ -215,7 +215,7 @@ public void testResourceIsConsideredDeletedIfOnlyResourceTableEntryIsDeleted() { // Mark the current history version as not-deleted even though the actual resource // table entry is marked deleted runInTransaction(() -> { - ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2); + ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 2); resourceTable.setDeleted(null); myResourceHistoryTableDao.save(resourceTable); }); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java index 351635cbb02f..09338888d278 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java @@ -30,7 +30,7 @@ public void 
testRetrieveNonInlinedResource() { relocateResourceTextToCompressedColumn(pid, 1L); runInTransaction(()->{ - ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(pid, 1); + ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersion(pid, 1); assertNotNull(historyEntity.getResource()); assertNull(historyEntity.getResourceTextVc()); assertEquals(ResourceEncodingEnum.JSONC, historyEntity.getEncoding()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchSqlTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchSqlTest.java index cd9b1955cf82..08e754b84730 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchSqlTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchSqlTest.java @@ -18,6 +18,7 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; import org.slf4j.Logger; @@ -25,6 +26,7 @@ import java.util.List; import java.util.UUID; +import java.util.stream.Collectors; import static ca.uhn.fhir.interceptor.api.Pointcut.STORAGE_PARTITION_IDENTIFY_ANY; import static org.assertj.core.api.Assertions.assertThat; @@ -175,8 +177,10 @@ public void testTwoRegularSearchParams() { } - @Test - public void testSearchByProfile_VersionedMode() { + @ParameterizedTest + @EnumSource(value = JpaStorageSettings.TagStorageModeEnum.class, names = {"NON_VERSIONED", "VERSIONED"}) + public void testSearchByProfile_VersionedAndNonVersionedMode(JpaStorageSettings.TagStorageModeEnum theTagStorageModeEnum) { + myStorageSettings.setTagStorageMode(theTagStorageModeEnum); // Put a tag in so we can search for 
it String code = "http://" + UUID.randomUUID(); @@ -185,24 +189,33 @@ public void testSearchByProfile_VersionedMode() { IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless(); myMemoryCacheService.invalidateAllCaches(); + logAllResourceTags(); + logAllResourceHistoryTags(); + // Search myCaptureQueriesListener.clear(); SearchParameterMap map = SearchParameterMap.newSynchronous() .add(Constants.PARAM_PROFILE, new TokenParam(code)); IBundleProvider outcome = myPatientDao.search(map, mySrd); - assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + assertEquals(3, myCaptureQueriesListener.logSelectQueries().size()); // Query 1 - Find resources: Make sure we search for tag type+system+code always String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false); assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) 
AND (t2.TAG_CODE = ?)))", sql); - // Query 2 - Load resourece contents + // Query 2 - Load resource contents sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false); - assertThat(sql).contains("where rsv1_0.RES_ID in (?)"); - // Query 3 - Load tags and defintions + assertThat(sql).contains("where rht1_0.RES_ID in (?)"); + // Query 3 - Load tags and definitions sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(false, false); - assertThat(sql).contains("from HFJ_RES_TAG rt1_0 join HFJ_TAG_DEF"); + if (theTagStorageModeEnum == JpaStorageSettings.TagStorageModeEnum.VERSIONED) { + assertThat(sql).contains("from HFJ_HISTORY_TAG rht1_0 join HFJ_TAG_DEF"); + } else { + assertThat(sql).contains("from HFJ_RES_TAG rt1_0 join HFJ_TAG_DEF"); + } assertThat(toUnqualifiedVersionlessIds(outcome)).containsExactly(id); + List profileDeclarations = outcome.getResources(0, 1).get(0).getMeta().getProfile().stream().map(t -> t.getValueAsString()).collect(Collectors.toList()); + assertThat(profileDeclarations).containsExactly(code); } @Test @@ -234,7 +247,7 @@ public void testSearchByProfile_InlineMode() { assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?)", sql); // Query 2 - Load resourece contents sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false); - assertThat(sql).contains("where rsv1_0.RES_ID in (?)"); + assertThat(sql).contains("where rht1_0.RES_ID in (?)"); assertThat(toUnqualifiedVersionlessIds(outcome)).containsExactly(id); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java index 82c8ae17cb7a..475238805bf4 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java +++ 
b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java @@ -302,7 +302,7 @@ public void testDeletedResourcesAreReindexed() { table.setDeleted(new Date()); table = myResourceTableDao.saveAndFlush(table); ResourceHistoryTable newHistory = table.toHistory(true); - ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(table.getId(), 1L); + ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersion(table.getId(), 1L); newHistory.setEncoding(currentHistory.getEncoding()); newHistory.setResourceTextVc(currentHistory.getResourceTextVc()); myResourceHistoryTableDao.save(newHistory); @@ -2934,7 +2934,7 @@ public void testResourceInDatabaseContainsInvalidVocabulary() { tx.execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus theStatus) { - ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L); + ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1L); String newContent = myFhirContext.newJsonParser().encodeResourceToString(p); newContent = newContent.replace("male", "foo"); table.setResourceTextVc(newContent); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java index f835c300c199..a4b9be0bcc4c 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java @@ -1,9 +1,5 @@ package ca.uhn.fhir.jpa.dao.r4; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static 
org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.assertNull; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -15,6 +11,8 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.util.BundleBuilder; +import jakarta.annotation.Nonnull; +import org.hl7.fhir.instance.model.api.IBaseCoding; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.BooleanType; @@ -31,7 +29,6 @@ import org.hl7.fhir.r4.model.Reference; import org.hl7.fhir.r4.model.StringType; import org.hl7.fhir.r4.model.Task; -import jakarta.annotation.Nonnull; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; @@ -40,7 +37,6 @@ import org.junit.platform.commons.annotation.Testable; import java.io.IOException; -import java.io.InputStreamReader; import java.util.Arrays; import java.util.Collections; import java.util.Date; @@ -52,7 +48,10 @@ import static ca.uhn.fhir.util.HapiExtensions.EXTENSION_AUTO_VERSION_REFERENCES_AT_PATH; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; @@ -70,8 +69,10 @@ public void afterEach() { @Nested public class AutoVersionReferencesWithSettingAndExtension extends AutoVersionReferencesWithExtension { + @Override @BeforeEach public void before() { + super.before(); beforeAutoVersionReferencesWithSetting(); } } @@ -219,7 +220,7 @@ 
public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferen @Test public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferenceToVersionedReferenceToUpsertWithChange() { - AtomicInteger counter = new AtomicInteger(); + final AtomicInteger counter = new AtomicInteger(); Supplier supplier = () -> { BundleBuilder bb = new BundleBuilder(myFhirContext); @@ -229,12 +230,12 @@ public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferen organization.setActive(true); bb.addTransactionUpdateEntry(organization); - Patient patient = new Patient(); - patient.getMeta().setExtension(patientAutoVersionExtension); - patient.setId("Patient/A"); - patient.setManagingOrganization(new Reference("Organization/O")); - patient.setActive(true); - bb.addTransactionUpdateEntry(patient); + Patient patient1 = new Patient(); + patient1.getMeta().setExtension(patientAutoVersionExtension); + patient1.setId("Patient/A"); + patient1.setManagingOrganization(new Reference("Organization/O")); + patient1.setActive(true); + bb.addTransactionUpdateEntry(patient1); ExplanationOfBenefit eob = new ExplanationOfBenefit(); eob.getMeta().setExtension(explanationOfBenefitAutoVersionExtension); @@ -274,7 +275,7 @@ public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferen public void testInsertVersionedReferenceAtPath() { Patient p = new Patient(); p.setActive(true); - IIdType patientId = myPatientDao.create(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.create(p, mySrd).getId().toUnqualified(); assertEquals("1", patientId.getVersionIdPart()); assertNull(patientId.getBaseUrl()); String patientIdString = patientId.getValue(); @@ -283,10 +284,10 @@ public void testInsertVersionedReferenceAtPath() { Observation observation = new Observation(); observation.getMeta().setExtension(observationAutoVersionExtension); observation.getSubject().setReference(patientId.toVersionless().getValue()); - IIdType observationId = 
myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientIdString, observation.getSubject().getReference()); myCaptureQueriesListener.clear(); @@ -297,13 +298,13 @@ public void testInsertVersionedReferenceAtPath() { observation.setId(observationId); observation.addIdentifier().setSystem("http://foo").setValue("bar"); observation.getSubject().setReference(patientId.toVersionless().getValue()); - myObservationDao.update(observation); + myObservationDao.update(observation, mySrd); // Make sure we're not introducing any extra DB operations assertThat(myCaptureQueriesListener.logSelectQueries()).hasSize(5); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientIdString, observation.getSubject().getReference()); } @@ -338,7 +339,7 @@ public void testInsertVersionedReferenceAtPath_InTransaction_SourceAndTargetBoth assertTrue(observationId.hasVersionIdPart()); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.getValue(), observation.getSubject().getReference()); assertEquals(encounterId.toVersionless().getValue(), observation.getEncounter().getReference()); } @@ -354,11 +355,11 @@ public void testInsertVersionedReferenceAtPath_InTransaction_TargetConditionalCr Encounter encounter = new Encounter(); encounter.setId(IdType.newRandomUuid()); encounter.addIdentifier().setSystem("http://baz").setValue("baz"); - myEncounterDao.create(encounter); + myEncounterDao.create(encounter, mySrd); } // Verify Patient Version - 
assertThat(myPatientDao.search(SearchParameterMap.newSynchronous("active", new TokenParam("false"))) + assertThat(myPatientDao.search(SearchParameterMap.newSynchronous("active", new TokenParam("false")), mySrd) .getResources(0, 1).get(0).getIdElement().getVersionIdPart()).isEqualTo("2"); BundleBuilder builder = new BundleBuilder(myFhirContext); @@ -386,7 +387,7 @@ public void testInsertVersionedReferenceAtPath_InTransaction_TargetConditionalCr IdType observationId = new IdType(outcome.getEntry().get(2).getResponse().getLocation()); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.getValue(), observation.getSubject().getReference()); assertEquals("2", observation.getSubject().getReferenceElement().getVersionIdPart()); assertEquals(encounterId.toVersionless().getValue(), observation.getEncounter().getReference()); @@ -402,11 +403,11 @@ public void testInsertVersionedReferenceAtPath_InTransaction_TargetUpdate() { Patient patient = new Patient(); patient.setId("PATIENT"); patient.setActive(true); - myPatientDao.update(patient).getId(); + myPatientDao.update(patient, mySrd); // Update patient to make a second version patient.setActive(false); - myPatientDao.update(patient); + myPatientDao.update(patient, mySrd); } BundleBuilder builder = new BundleBuilder(myFhirContext); @@ -431,7 +432,7 @@ public void testInsertVersionedReferenceAtPath_InTransaction_TargetUpdate() { assertThat(myCaptureQueriesListener.logSelectQueries()).hasSize(2); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.getValue(), observation.getSubject().getReference()); } @@ -466,7 +467,7 @@ public void testInsertVersionedReferenceAtPath_InTransaction_TargetUpdateConditi 
assertThat(myCaptureQueriesListener.logSelectQueries()).hasSize(3); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.getValue(), observation.getSubject().getReference()); } @@ -479,16 +480,16 @@ public void bundleTransaction_autoVersionReferenceAtPathWithPreexistingPatientRe // create patient ahead of time Patient patient = new Patient(); patient.setId(patientId); - DaoMethodOutcome outcome = myPatientDao.update(patient); + DaoMethodOutcome outcome = myPatientDao.update(patient, mySrd); assertEquals(patientId + "/_history/1", outcome.getResource().getIdElement().getValue()); - Patient returned = myPatientDao.read(idType); + Patient returned = myPatientDao.read(idType, mySrd); assertNotNull(returned); assertEquals(patientId + "/_history/1", returned.getId()); // update to change version patient.setActive(true); - myPatientDao.update(patient); + myPatientDao.update(patient, mySrd); Observation obs = new Observation(); obs.getMeta().setExtension(observationAutoVersionExtension); @@ -505,7 +506,7 @@ public void bundleTransaction_autoVersionReferenceAtPathWithPreexistingPatientRe assertNotNull(returnedTr); // some verification - Observation obRet = myObservationDao.read(obs.getIdElement()); + Observation obRet = myObservationDao.read(obs.getIdElement(), mySrd); assertNotNull(obRet); } @@ -529,9 +530,9 @@ public void testNoNpeMinimal() { assertNotNull(returnedTr); // some verification - Observation obRet = myObservationDao.read(obs.getIdElement()); + Observation obRet = myObservationDao.read(obs.getIdElement(), mySrd); assertNotNull(obRet); - Patient returned = myPatientDao.read(patientRef.getReferenceElement()); + Patient returned = myPatientDao.read(patientRef.getReferenceElement(), mySrd); assertNotNull(returned); } @@ -554,7 +555,7 @@ public void testInsertVersionedReferencesByPath_resourceReferenceNotInTransactio 
assertEquals("2", patient.getIdElement().getVersionIdPart()); // read back and verify that reference is versioned - messageHeader = myMessageHeaderDao.read(messageHeaderId); + messageHeader = myMessageHeaderDao.read(messageHeaderId, mySrd); assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference()); } @@ -599,8 +600,8 @@ public void testInsertVersionedReferencesByPath_conditionalUpdateNoOpInTransacti IdType messageHeaderId = new IdType(outcome.getEntry().get(1).getResponse().getLocation()); // read back and verify that reference is versioned and correct - Patient patient = myPatientDao.read(patientId); - MessageHeader messageHeader = myMessageHeaderDao.read(messageHeaderId); + Patient patient = myPatientDao.read(patientId, mySrd); + MessageHeader messageHeader = myMessageHeaderDao.read(messageHeaderId, mySrd); assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference()); // create bundle second time @@ -609,8 +610,8 @@ public void testInsertVersionedReferencesByPath_conditionalUpdateNoOpInTransacti messageHeaderId = new IdType(outcome.getEntry().get(1).getResponse().getLocation()); // read back and verify that reference is versioned and correct - patient = myPatientDao.read(patientId); - messageHeader = myMessageHeaderDao.read(messageHeaderId); + patient = myPatientDao.read(patientId, mySrd); + messageHeader = myMessageHeaderDao.read(messageHeaderId, mySrd); assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference()); } @@ -637,11 +638,11 @@ private Patient createAndUpdatePatient(String thePatientId) { Patient patient = new Patient(); patient.setId(thePatientId); patient.setActive(true); - myPatientDao.create(patient).getId(); + myPatientDao.create(patient, mySrd); // update patient to make a second version patient.setActive(false); - myPatientDao.update(patient); + myPatientDao.update(patient, mySrd); return patient; } } @@ -652,17 +653,17 @@ public void 
testStoreAndRetrieveVersionedReference() { Patient p = new Patient(); p.setActive(true); - IIdType patientId = myPatientDao.create(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.create(p, mySrd).getId().toUnqualified(); assertEquals("1", patientId.getVersionIdPart()); assertNull(patientId.getBaseUrl()); String patientIdString = patientId.getValue(); Observation observation = new Observation(); observation.getSubject().setReference(patientIdString); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); // Read back - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientIdString, observation.getSubject().getReference()); } @@ -672,21 +673,21 @@ public void testDontOverwriteExistingVersion() { Patient p = new Patient(); p.setActive(true); - myPatientDao.create(p); + myPatientDao.create(p, mySrd); // Update the patient p.setActive(false); - IIdType patientId = myPatientDao.update(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified(); assertEquals("2", patientId.getVersionIdPart()); assertNull(patientId.getBaseUrl()); Observation observation = new Observation(); observation.getSubject().setReference(patientId.withVersion("1").getValue()); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); // Read back - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.withVersion("1").getValue(), observation.getSubject().getReference()); } @@ -698,20 +699,22 @@ public void testSearchAndIncludeVersionedReference_Asynchronous() { // Create the patient Patient p = new Patient(); 
p.addIdentifier().setSystem("http://foo").setValue("1"); - myPatientDao.create(p); + myPatientDao.create(p, mySrd); // Update the patient p.getIdentifier().get(0).setValue("2"); - IIdType patientId = myPatientDao.update(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified(); assertEquals("2", patientId.getVersionIdPart()); Observation observation = new Observation(); observation.getSubject().setReference(patientId.withVersion("1").getValue()); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); + + logAllResourceLinks(); // Search - Non Synchronous for * { - IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL)); + IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL), mySrd); assertEquals(1, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 1); assertThat(resources).hasSize(2); @@ -721,7 +724,7 @@ public void testSearchAndIncludeVersionedReference_Asynchronous() { // Search - Non Synchronous for named include { - IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT)); + IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT), mySrd); assertEquals(1, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 1); assertThat(resources).hasSize(2); @@ -735,46 +738,63 @@ public void testSearchAndIncludeVersionedReference_Asynchronous() { public void testSearchAndIncludeVersionedReference_Synchronous() { myFhirContext.getParserOptions().setStripVersionsFromReferences(false); myStorageSettings.setRespectVersionsForSearchIncludes(true); + myStorageSettings.setTagStorageMode(JpaStorageSettings.TagStorageModeEnum.VERSIONED); // Create 
the patient Patient p = new Patient(); + p.getMeta().addTag("http://tag", "1", null); p.addIdentifier().setSystem("http://foo").setValue("1"); - myPatientDao.create(p); + myPatientDao.create(p, mySrd); - // Update the patient + // Update the patient - Add a second tag p.getIdentifier().get(0).setValue("2"); - IIdType patientId = myPatientDao.update(p).getId().toUnqualified(); + p.getMeta().addTag("http://tag", "2", null); + IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified(); assertEquals("2", patientId.getVersionIdPart()); Observation observation = new Observation(); observation.getSubject().setReference(patientId.withVersion("1").getValue()); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); - // Search - Non Synchronous for * + logAllResourceVersions(); + logAllResourceHistoryTags(); + + // Search - Non-Synchronous for * { - IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL)); + myCaptureQueriesListener.clear(); + IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL), mySrd); assertEquals(2, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 2); + assertEquals(5, myCaptureQueriesListener.logSelectQueries().size()); assertThat(resources).hasSize(2); assertEquals(observationId.getValue(), resources.get(0).getIdElement().getValue()); - assertEquals(patientId.withVersion("1").getValue(), resources.get(1).getIdElement().getValue()); + IBaseResource patient = resources.get(1); + assertEquals(patientId.withVersion("1").getValue(), patient.getIdElement().getValue()); + assertThat(getTagCodes(patient)).asList().containsExactly("1"); + ourLog.info("Patient: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient)); } - // Search - 
Non Synchronous for named include + // Search - Non-Synchronous for named include { - IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT)); + IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT), mySrd); assertEquals(2, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 2); assertThat(resources).hasSize(2); assertEquals(observationId.getValue(), resources.get(0).getIdElement().getValue()); assertEquals(patientId.withVersion("1").getValue(), resources.get(1).getIdElement().getValue()); + assertThat(getTagCodes(resources.get(1))).asList().containsExactly("1"); } } + @Nonnull + private static List getTagCodes(IBaseResource patient) { + return patient.getMeta().getTag().stream().map(IBaseCoding::getCode).collect(Collectors.toList()); + } + @Test public void testSearchAndIncludeVersionedReference_WhenOnlyOneVersionExists() { - HashSet refPaths = new HashSet(); + HashSet refPaths = new HashSet<>(); refPaths.add("Task.basedOn"); myFhirContext.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths); myStorageSettings.setRespectVersionsForSearchIncludes(true); @@ -782,15 +802,15 @@ public void testSearchAndIncludeVersionedReference_WhenOnlyOneVersionExists() { // Create a Condition Condition condition = new Condition(); - IIdType conditionId = myConditionDao.create(condition).getId().toUnqualified(); + IIdType conditionId = myConditionDao.create(condition, mySrd).getId().toUnqualified(); // Create a Task which is basedOn that Condition Task task = new Task(); - task.setBasedOn(Arrays.asList(new Reference(conditionId))); - IIdType taskId = myTaskDao.create(task).getId().toUnqualified(); + task.setBasedOn(List.of(new Reference(conditionId))); + IIdType taskId = myTaskDao.create(task, mySrd).getId().toUnqualified(); // Search for the Task using an _include=Task.basedOn and make sure we get the 
Condition resource in the Response - IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd); assertEquals(2, outcome.size()); List resources = outcome.getResources(0, 2); assertThat(resources.size()).as(resources.stream().map(t -> t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))).isEqualTo(2); @@ -800,10 +820,10 @@ public void testSearchAndIncludeVersionedReference_WhenOnlyOneVersionExists() { // Now, update the Condition to generate another version of it condition.setRecordedDate(new Date(System.currentTimeMillis())); - String conditionIdString = myConditionDao.update(condition).getId().getValue(); + myConditionDao.update(condition, mySrd.getId().getValue(), mySrd); // Search for the Task again and make sure that we get the original version of the Condition resource in the Response - outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd); assertEquals(2, outcome.size()); resources = outcome.getResources(0, 2); assertThat(resources).hasSize(2); @@ -814,7 +834,7 @@ public void testSearchAndIncludeVersionedReference_WhenOnlyOneVersionExists() { @Test public void testSearchAndIncludeVersionedReference_WhenMultipleVersionsExist() { - HashSet refPaths = new HashSet(); + HashSet refPaths = new HashSet<>(); refPaths.add("Task.basedOn"); myFhirContext.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths); myStorageSettings.setRespectVersionsForSearchIncludes(true); @@ -822,23 +842,24 @@ public void testSearchAndIncludeVersionedReference_WhenMultipleVersionsExist() { // Create a Condition Condition condition = new Condition(); - IIdType conditionId = 
myConditionDao.create(condition).getId().toUnqualified(); + IIdType conditionId = myConditionDao.create(condition, mySrd).getId().toUnqualified(); + ourLog.info("conditionId: {}", conditionId); // Now, update the Condition 3 times to generate a 4th version of it condition.setRecordedDate(new Date(System.currentTimeMillis())); - conditionId = myConditionDao.update(condition).getId(); + myConditionDao.update(condition, mySrd); condition.setRecordedDate(new Date(System.currentTimeMillis() + 1000000)); - conditionId = myConditionDao.update(condition).getId(); + myConditionDao.update(condition, mySrd); condition.setRecordedDate(new Date(System.currentTimeMillis() + 2000000)); - conditionId = myConditionDao.update(condition).getId(); + conditionId = myConditionDao.update(condition, mySrd).getId().toUnqualified(); // Create a Task which is basedOn that Condition Task task = new Task(); - task.setBasedOn(Arrays.asList(new Reference(conditionId))); - IIdType taskId = myTaskDao.create(task).getId().toUnqualified(); + task.setBasedOn(List.of(new Reference(conditionId))); + IIdType taskId = myTaskDao.create(task, mySrd).getId().toUnqualified(); // Search for the Task using an _include=Task.basedOn and make sure we get the Condition resource in the Response - IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd); assertEquals(2, outcome.size()); List resources = outcome.getResources(0, 2); assertThat(resources.size()).as(resources.stream().map(t -> t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))).isEqualTo(2); @@ -849,7 +870,7 @@ public void testSearchAndIncludeVersionedReference_WhenMultipleVersionsExist() { @Test public void testSearchAndIncludeVersionedReference_WhenPreviouslyReferencedVersionOne() { - HashSet refPaths = new HashSet(); + HashSet refPaths = new 
HashSet<>(); refPaths.add("Task.basedOn"); myFhirContext.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths); myStorageSettings.setRespectVersionsForSearchIncludes(true); @@ -857,32 +878,32 @@ public void testSearchAndIncludeVersionedReference_WhenPreviouslyReferencedVersi // Create a Condition Condition condition = new Condition(); - IIdType conditionId = myConditionDao.create(condition).getId().toUnqualified(); + IIdType conditionId = myConditionDao.create(condition, mySrd).getId().toUnqualified(); ourLog.info("conditionId: \n{}", conditionId); // Create a Task which is basedOn that Condition Task task = new Task(); - task.setBasedOn(Arrays.asList(new Reference(conditionId))); - IIdType taskId = myTaskDao.create(task).getId().toUnqualified(); + task.setBasedOn(List.of(new Reference(conditionId))); + myTaskDao.create(task, mySrd).getId().toUnqualified(); // Now, update the Condition 3 times to generate a 4th version of it condition.setRecordedDate(new Date(System.currentTimeMillis())); - conditionId = myConditionDao.update(condition).getId(); + conditionId = myConditionDao.update(condition, mySrd).getId(); ourLog.info("UPDATED conditionId: \n{}", conditionId); condition.setRecordedDate(new Date(System.currentTimeMillis() + 1000000)); - conditionId = myConditionDao.update(condition).getId(); + conditionId = myConditionDao.update(condition, mySrd).getId(); ourLog.info("UPDATED conditionId: \n{}", conditionId); condition.setRecordedDate(new Date(System.currentTimeMillis() + 2000000)); - conditionId = myConditionDao.update(condition).getId(); + conditionId = myConditionDao.update(condition, mySrd).getId(); ourLog.info("UPDATED conditionId: \n{}", conditionId); // Now, update the Task to refer to the latest version 4 of the Condition - task.setBasedOn(Arrays.asList(new Reference(conditionId))); - taskId = myTaskDao.update(task).getId(); + task.setBasedOn(List.of(new Reference(conditionId))); + IIdType taskId = myTaskDao.update(task, mySrd).getId(); 
ourLog.info("UPDATED taskId: \n{}", taskId); // Search for the Task using an _include=Task.basedOn and make sure we get the Condition resource in the Response - IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd); assertEquals(2, outcome.size()); List resources = outcome.getResources(0, 2); assertThat(resources.size()).as(resources.stream().map(t -> t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))).isEqualTo(2); @@ -899,20 +920,20 @@ public void testSearchAndIncludeUnersionedReference_Asynchronous() { // Create the patient Patient p = new Patient(); p.addIdentifier().setSystem("http://foo").setValue("1"); - myPatientDao.create(p); + myPatientDao.create(p, mySrd); // Update the patient p.getIdentifier().get(0).setValue("2"); - IIdType patientId = myPatientDao.update(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified(); assertEquals("2", patientId.getVersionIdPart()); Observation observation = new Observation(); observation.getSubject().setReference(patientId.withVersion("1").getValue()); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); // Search - Non Synchronous for * { - IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL)); + IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL), mySrd); assertEquals(1, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 1); assertThat(resources).hasSize(2); @@ -922,7 +943,7 @@ public void testSearchAndIncludeUnersionedReference_Asynchronous() { // Search - Non Synchronous for named include { - 
IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT)); + IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT), mySrd); assertEquals(1, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 1); assertThat(resources).hasSize(2); @@ -940,24 +961,24 @@ public void testSearchAndIncludeUnversionedReference_Synchronous() { // Create the patient Patient p = new Patient(); p.addIdentifier().setSystem("http://foo").setValue("1"); - myPatientDao.create(p); + myPatientDao.create(p, mySrd); // Update the patient p.getIdentifier().get(0).setValue("2"); - IIdType patientId = myPatientDao.update(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified(); assertEquals("2", patientId.getVersionIdPart()); Observation observation = new Observation(); observation.getSubject().setReference(patientId.withVersion("1").getValue()); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); // Read the observation back - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.toVersionless().getValue(), observation.getSubject().getReference()); // Search - Non Synchronous for * { - IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL)); + IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL), mySrd); assertEquals(2, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 2); assertThat(resources).hasSize(2); @@ -967,7 +988,7 @@ public void testSearchAndIncludeUnversionedReference_Synchronous() { // Search - Non Synchronous for named include { - IBundleProvider 
outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT)); + IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT), mySrd); assertEquals(2, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 2); assertThat(resources).hasSize(2); @@ -977,7 +998,7 @@ public void testSearchAndIncludeUnversionedReference_Synchronous() { } @Test - public void testNoNpeOnEoBBundle() { + public void testNoNpeOnEoBBundle() throws IOException { myStorageSettings.setAutoCreatePlaceholderReferenceTargets(true); List strings = Arrays.asList( "ExplanationOfBenefit.patient", @@ -989,9 +1010,7 @@ public void testNoNpeOnEoBBundle() { ); myStorageSettings.setAutoVersionReferenceAtPaths(new HashSet<>(strings)); - Bundle bundle = myFhirContext.newJsonParser().parseResource(Bundle.class, - new InputStreamReader( - FhirResourceDaoR4VersionedReferenceTest.class.getResourceAsStream("/npe-causing-bundle.json"))); + Bundle bundle = loadResourceFromClasspath(Bundle.class, "/npe-causing-bundle.json"); Bundle transaction = mySystemDao.transaction(new SystemRequestDetails(), bundle); @@ -1005,12 +1024,12 @@ public void testAutoVersionPathsWithAutoCreatePlaceholders() { Observation obs = new Observation(); obs.setId("Observation/CDE"); obs.setSubject(new Reference("Patient/ABC")); - DaoMethodOutcome update = myObservationDao.create(obs); + DaoMethodOutcome update = myObservationDao.create(obs, mySrd); Observation resource = (Observation)update.getResource(); String versionedPatientReference = resource.getSubject().getReference(); assertEquals("Patient/ABC", versionedPatientReference); - Patient p = myPatientDao.read(new IdDt("Patient/ABC")); + Patient p = myPatientDao.read(new IdDt("Patient/ABC"), mySrd); assertNotNull(p); myStorageSettings.setAutoVersionReferenceAtPaths("Observation.subject"); @@ -1018,7 +1037,7 @@ public void 
testAutoVersionPathsWithAutoCreatePlaceholders() { obs = new Observation(); obs.setId("Observation/DEF"); obs.setSubject(new Reference("Patient/RED")); - update = myObservationDao.create(obs); + update = myObservationDao.create(obs, mySrd); resource = (Observation)update.getResource(); versionedPatientReference = resource.getSubject().getReference(); @@ -1052,7 +1071,7 @@ public void bundleTransaction_withRequestURLWithPrecedingSlash_processesAsExpect IdType idType = new IdType(bundle.getEntry().get(0) .getResource().getId()); // the bundle above contains an observation, so we'll verify it was created here - Observation obs = myObservationDao.read(idType); + Observation obs = myObservationDao.read(idType, mySrd); assertNotNull(obs); } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java index f36a093e147c..36ab6da5ae38 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java @@ -616,7 +616,7 @@ public void testReindexing() { TransactionTemplate template = new TransactionTemplate(myTxManager); template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); template.execute((TransactionCallback) t -> { - ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), id.getVersionIdPartAsLong()); + ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), id.getVersionIdPartAsLong()); resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON); resourceHistoryTable.setResourceTextVc("{\"resourceType\":\"FOO\"}"); myResourceHistoryTableDao.save(resourceHistoryTable); @@ -661,7 +661,7 @@ public void testReindexingCurrentVersionDeleted() { assertEquals(1, 
myPatientDao.search(searchParamMap).size().intValue()); runInTransaction(() -> { - ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 3); + ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 3); assertNotNull(historyEntry); myResourceHistoryTableDao.delete(historyEntry); }); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java index 358471baaaf7..0e007d9fd800 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java @@ -429,7 +429,7 @@ public void testCreate_ServerId_WithPartition() { assertLocalDateFromDbMatches(myPartitionDate, tags.get(0).getPartitionId().getPartitionDate()); // HFJ_RES_VER - ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, 1L); + ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(patientId, 1L); assertEquals(myPartitionId, version.getPartitionId().getPartitionId().intValue()); assertLocalDateFromDbMatches(myPartitionDate, version.getPartitionId().getPartitionDate()); @@ -439,11 +439,6 @@ public void testCreate_ServerId_WithPartition() { assertEquals(myPartitionId, historyTags.get(0).getPartitionId().getPartitionId().intValue()); assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(0).getPartitionId().getPartitionDate()); - // HFJ_RES_VER_PROV - assertNotNull(version.getProvenance()); - assertEquals(myPartitionId, version.getProvenance().getPartitionId().getPartitionId().intValue()); - assertLocalDateFromDbMatches(myPartitionDate, version.getProvenance().getPartitionId().getPartitionDate()); - // HFJ_SPIDX_STRING List strings = 
myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId); ourLog.info("\n * {}", strings.stream().map(ResourceIndexedSearchParamString::toString).collect(Collectors.joining("\n * "))); @@ -517,7 +512,7 @@ public void testCreate_ServerId_DefaultPartition() { assertLocalDateFromDbMatches(myPartitionDate, tags.get(0).getPartitionId().getPartitionDate()); // HFJ_RES_VER - ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, 1L); + ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(patientId, 1L); assertNull(version.getPartitionId().getPartitionId()); assertLocalDateFromDbMatches(myPartitionDate, version.getPartitionId().getPartitionDate()); @@ -527,11 +522,6 @@ public void testCreate_ServerId_DefaultPartition() { assertNull(historyTags.get(0).getPartitionId().getPartitionId()); assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(0).getPartitionId().getPartitionDate()); - // HFJ_RES_VER_PROV - assertNotNull(version.getProvenance()); - assertNull(version.getProvenance().getPartitionId().getPartitionId()); - assertLocalDateFromDbMatches(myPartitionDate, version.getProvenance().getPartitionId().getPartitionDate()); - // HFJ_SPIDX_STRING List strings = myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId); String stringsDesc = strings.stream().map(ResourceIndexedSearchParamString::toString).sorted().collect(Collectors.joining("\n * ")); @@ -778,7 +768,7 @@ public void testUpdateResourceWithPartition() { // HFJ_RES_VER int version = 2; - ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, version); + ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(patientId, version); assertEquals(myPartitionId, resVer.getPartitionId().getPartitionId().intValue()); assertLocalDateFromDbMatches(myPartitionDate, resVer.getPartitionId().getPartitionDate()); @@ -790,12 +780,6 @@ public void 
testUpdateResourceWithPartition() { assertEquals(myPartitionId, historyTags.get(1).getPartitionId().getPartitionId().intValue()); assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(1).getPartitionId().getPartitionDate()); - // HFJ_RES_VER_PROV - assertNotNull(resVer.getProvenance()); - assertNotNull(resVer.getPartitionId()); - assertEquals(myPartitionId, resVer.getProvenance().getPartitionId().getPartitionId().intValue()); - assertLocalDateFromDbMatches(myPartitionDate, resVer.getProvenance().getPartitionId().getPartitionDate()); - // HFJ_SPIDX_STRING List strings = myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId); ourLog.info("\n * {}", strings.stream().map(ResourceIndexedSearchParamString::toString).collect(Collectors.joining("\n * "))); @@ -856,7 +840,7 @@ public void testUpdateConditionalInPartition() { // HFJ_RES_VER int version = 2; - ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, version); + ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(patientId, version); assertEquals(myPartitionId, resVer.getPartitionId().getPartitionId().intValue()); assertLocalDateFromDbMatches(myPartitionDate, resVer.getPartitionId().getPartitionDate()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java index aaaa937a3ddc..d1e8cfdab173 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java @@ -80,8 +80,10 @@ public void before() throws Exception { myPartitionSettings.setDefaultPartitionId(ALTERNATE_DEFAULT_ID); } + @Override @AfterEach - public void after() { + public void after() throws Exception { + 
super.after(); myInterceptorRegistry.unregisterInterceptor(mySvc); myInterceptorRegistry.unregisterInterceptor(myForceOffsetSearchModeInterceptor); @@ -171,7 +173,7 @@ public void testCreateEncounter_ValidMembershipInCompartment() { public void testCreateOrganization_ValidMembershipInCompartment() { Organization org = new Organization(); org.setName("Foo"); - Long id = myOrganizationDao.create(org).getId().getIdPartAsLong(); + Long id = myOrganizationDao.create(org, mySrd).getId().getIdPartAsLong(); runInTransaction(() -> { ResourceTable observation = myResourceTableDao.findById(id).orElseThrow(() -> new IllegalArgumentException()); @@ -222,8 +224,9 @@ public void testReadPatientHistory_Good() { myCaptureQueriesListener.logSelectQueriesForCurrentThread(); List selectQueriesForCurrentThread = myCaptureQueriesListener.getSelectQueriesForCurrentThread(); - assertEquals(3, selectQueriesForCurrentThread.size()); + assertEquals(2, selectQueriesForCurrentThread.size()); assertThat(selectQueriesForCurrentThread.get(0).getSql(false, false)).contains("PARTITION_ID=?"); + assertThat(selectQueriesForCurrentThread.get(1).getSql(false, false)).doesNotContain("PARTITION_ID="); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java index 6c1598602066..9b8917ce97c9 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java @@ -104,7 +104,7 @@ public void testLatestVersion_PreviousVersionExpunged() { createPatient(withId(id), withActiveTrue(), withFamily("SMITH")); runInTransaction(() -> { - ResourceHistoryTable version2 = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2); + ResourceHistoryTable version2 = 
myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 2); myResourceHistoryTableDao.deleteByPid(version2.getId()); }); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java index dc00f13b0cca..ccc4062f723c 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java @@ -35,7 +35,7 @@ public void testRetrieveDataSavedWithInvalidDecimal() throws IOException { // Manually set the value to be an invalid decimal number runInTransaction(() -> { - ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id, 1); + ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(id, 1); String resourceText = resVer.getResourceTextVc(); resourceText = resourceText.replace("100", "-.100"); resVer.setResourceTextVc(resourceText); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java index 7b7caf3e4eb6..28ee7ab19d1c 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java @@ -3370,7 +3370,7 @@ public void testRetrieveMissingVersionsDoesntCrashHistory() { new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus status) { - ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id1.getIdPartAsLong(), 1); + 
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(id1.getIdPartAsLong(), 1); myResourceHistoryTableDao.delete(version); } }); @@ -3395,7 +3395,7 @@ public void testRetrieveMissingVersionsDoesntCrashSearch() { new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus status) { - ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id1.getIdPartAsLong(), 1); + ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(id1.getIdPartAsLong(), 1); myResourceHistoryTableDao.delete(version); } }); @@ -4257,6 +4257,7 @@ private void testSearchReturnsResults(String search) throws IOException { @Test public void testSearchReturnsSearchDate() throws Exception { Date before = new Date(); + sleepAtLeast(10); //@formatter:off Bundle found = myClient @@ -4267,6 +4268,7 @@ public void testSearchReturnsSearchDate() throws Exception { .execute(); //@formatter:on + sleepAtLeast(10); Date after = new Date(); InstantType updated = found.getMeta().getLastUpdatedElement(); @@ -6807,6 +6809,7 @@ public void testSearchHistoryWithAtAndGtParameters() throws Exception { TestUtil.sleepAtLeast(delayInMs + 100); patient.getNameFirstRep().addGiven("Bob"); myClient.update().resource(patient).execute(); + TestUtil.sleepAtLeast(100); Patient unrelatedPatient = (Patient) myClient.create().resource(new Patient()).execute().getResource(); assertThat(patientId).isNotEqualTo(unrelatedPatient.getIdElement().getIdPartAsLong()); @@ -6832,7 +6835,9 @@ public void testSearchHistoryWithAtAndGtParameters() throws Exception { // Issue 3138 test case, verify behavior of _at verifyAtBehaviourWhenQueriedDateDuringTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2); verifyAtBehaviourWhenQueriedDateAfterTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2); + myCaptureQueriesListener.clear(); 
verifyAtBehaviourWhenQueriedDateBeforeTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2); + myCaptureQueriesListener.logSelectQueries(); // verify behavior of _since verifySinceBehaviourWhenQueriedDateDuringTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2); verifySinceBehaviourWhenQueriedDateAfterTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2); @@ -6854,8 +6859,10 @@ private void verifyAtBehaviourWhenQueriedDateAfterTwoUpdatedDates(Long patientId Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV2, delayInMs); assertTrue(timeBetweenUpdates.after(dateV1)); assertTrue(timeBetweenUpdates.after(dateV2)); - List resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates)); - assertThat(resultIds).hasSize(1); + String url = myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates); + myCaptureQueriesListener.clear(); + List resultIds = searchAndReturnUnqualifiedIdValues(url); + assertThat(resultIds).as(()->describeVersionsAndUrl(url)).hasSize(1); assertThat(resultIds).contains("Patient/" + patientId + "/_history/2"); } @@ -6863,8 +6870,10 @@ private void verifyAtBehaviourWhenQueriedDateBeforeTwoUpdatedDates(Long patientI Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, -delayInMs); assertTrue(timeBetweenUpdates.before(dateV1)); assertTrue(timeBetweenUpdates.before(dateV2)); - List resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates)); - assertThat(resultIds).hasSize(2); + String url = myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates); + myCaptureQueriesListener.clear(); + List resultIds = searchAndReturnUnqualifiedIdValues(url); + assertThat(resultIds).as(()->describeVersionsAndUrl(url)).hasSize(2); assertThat(resultIds).contains("Patient/" + patientId + "/_history/1"); assertThat(resultIds).contains("Patient/" + 
patientId + "/_history/2"); } @@ -6873,11 +6882,22 @@ private void verifySinceBehaviourWhenQueriedDateDuringTwoUpdatedDates(Long patie Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, delayInMs / 2); assertTrue(timeBetweenUpdates.after(dateV1)); assertTrue(timeBetweenUpdates.before(dateV2)); - List resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_since=" + toStr(timeBetweenUpdates)); - assertThat(resultIds).hasSize(1); + String url = myServerBase + "/Patient/" + patientId + "/_history?_since=" + toStr(timeBetweenUpdates); + myCaptureQueriesListener.clear(); + List resultIds = searchAndReturnUnqualifiedIdValues(url); + assertThat(resultIds).as(()->describeVersionsAndUrl(url)).hasSize(1); assertThat(resultIds).contains("Patient/" + patientId + "/_history/2"); } + private String describeVersionsAndUrl(String theUrl) { + return runInTransaction(()->{ + return "URL: " + theUrl + "\n\nHistory Entries:\n * " + + myResourceHistoryTableDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * ")) + + "\n\nSQL Queries:\n * " + + myCaptureQueriesListener.getSelectQueries().stream().map(t->t.getSql(true, false)).collect(Collectors.joining("\n * ")); + }); + } + private void verifySinceBehaviourWhenQueriedDateAfterTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException { Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV2, delayInMs); assertTrue(timeBetweenUpdates.after(dateV1)); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskTest.java index 290dfcdf9b4b..f44d1fdd69b0 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskTest.java @@ -12,6 +12,8 @@ import ca.uhn.fhir.jpa.api.dao.ReindexParameters; 
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse; +import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique; @@ -22,7 +24,6 @@ import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; import jakarta.annotation.PostConstruct; -import jakarta.persistence.Query; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Patient; @@ -67,8 +68,10 @@ public void postConstruct() { @AfterEach public void after() { myInterceptorRegistry.unregisterAllAnonymousInterceptors(); - myStorageSettings.setStoreMetaSourceInformation(new JpaStorageSettings().getStoreMetaSourceInformation()); - myStorageSettings.setPreserveRequestIdInResourceBody(new JpaStorageSettings().isPreserveRequestIdInResourceBody()); + JpaStorageSettings defaults = new JpaStorageSettings(); + myStorageSettings.setStoreMetaSourceInformation(defaults.getStoreMetaSourceInformation()); + myStorageSettings.setPreserveRequestIdInResourceBody(defaults.isPreserveRequestIdInResourceBody()); + myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(defaults.isAccessMetaSourceInformationFromProvenanceTable()); } @Test @@ -171,7 +174,7 @@ public void testOptimizeStorage_AllVersions() { runInTransaction(()->{ assertEquals(20, myResourceHistoryTableDao.count()); for (ResourceHistoryTable history : myResourceHistoryTableDao.findAll()) { - assertNotNull(history.getResourceTextVc()); + assertNotNull(history.getResourceTextVc(), ()->"Null history on: " + history); assertNull(history.getResource()); } }); @@ -237,24 +240,34 @@ public void 
testOptimizeStorage_AllVersions_SingleResourceWithMultipleVersion() public void testOptimizeStorage_AllVersions_CopyProvenanceEntityData() { // Setup myStorageSettings.setStoreMetaSourceInformation(JpaStorageSettings.StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID); + myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(true); myStorageSettings.setPreserveRequestIdInResourceBody(true); for (int i = 0; i < 10; i++) { Patient p = new Patient(); p.setId("PATIENT" + i); - p.getMeta().setSource("http://foo#bar"); p.addIdentifier().setValue(String.valueOf(i)); myPatientDao.update(p, mySrd); - p.addIdentifier().setSystem("http://blah"); + p.setActive(true); myPatientDao.update(p, mySrd); } + runInTransaction(()->{ + List versions = myResourceHistoryTableDao.findAll(); + for (var version : versions) { + ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity(); + provenance.setResourceTable(version.getResourceTable()); + provenance.setResourceHistoryTable(version); + provenance.setSourceUri("http://foo"); + provenance.setRequestId("bar"); + myResourceHistoryProvenanceDao.save(provenance); + } + }); + runInTransaction(()->{ assertEquals(20, myResourceHistoryTableDao.count()); assertEquals(20, myResourceHistoryProvenanceDao.count()); - Query query = myEntityManager.createQuery("UPDATE " + ResourceHistoryTable.class.getSimpleName() + " p SET p.mySourceUri = NULL, p.myRequestId = NULL"); - assertEquals(20, query.executeUpdate()); }); runInTransaction(()-> { @@ -281,6 +294,7 @@ public void testOptimizeStorage_AllVersions_CopyProvenanceEntityData() { // validate runInTransaction(()-> { + assertEquals(0, myResourceHistoryProvenanceDao.count()); for (var next : myResourceHistoryProvenanceDao.findAll()) { assertEquals("bar", next.getRequestId()); assertEquals("http://foo", next.getSourceUri()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java 
b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java index 081492fb569b..5c9cdf6d139f 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java @@ -360,7 +360,7 @@ public List findAllVersionsForResourceIdInOrder(Long theId } @Override - public ResourceHistoryTable findForIdAndVersionAndFetchProvenance(long theId, long theVersion) { + public ResourceHistoryTable findForIdAndVersion(long theId, long theVersion) { throw new UnsupportedOperationException(); } @@ -370,7 +370,7 @@ public Slice findForResourceId(Pageable thePage, Long theId, Long theDontW } @Override - public Slice findForResourceIdAndReturnEntitiesAndFetchProvenance(Pageable thePage, Long theId, Long theDontWantVersion) { + public Slice findAllVersionsExceptSpecificForResourcePid(Pageable thePage, Long theId, Long theDontWantVersion) { throw new UnsupportedOperationException(); } @@ -404,6 +404,11 @@ public void updateNonInlinedContents(byte[] theText, long thePid) { throw new UnsupportedOperationException(); } + @Override + public List findCurrentVersionsByResourcePidsAndFetchResourceTable(List theVersionlessPids) { + throw new UnsupportedOperationException(); + } + @Nonnull @Override public List findAll() { diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml index be1037dfedaf..e9b7954dd811 100644 --- a/hapi-fhir-jpaserver-test-r5/pom.xml +++ b/hapi-fhir-jpaserver-test-r5/pom.xml @@ -47,6 +47,18 @@ + + ca.uhn.hapi.fhir + hapi-tinder-test + ${project.version} + test + + + + com.github.jsqlparser + jsqlparser + test + diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java index a3041031fc09..d1b670004c7e 100644 --- 
a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java @@ -19,6 +19,7 @@ import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; +import ca.uhn.fhir.jpa.dao.TestDaoSearch; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; @@ -140,7 +141,7 @@ import static org.mockito.Mockito.mock; @ExtendWith(SpringExtension.class) -@ContextConfiguration(classes = {TestR5Config.class}) +@ContextConfiguration(classes = {TestR5Config.class, TestDaoSearch.Config.class}) public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuilder { @Autowired protected IJobCoordinator myJobCoordinator; @@ -419,12 +420,15 @@ public FhirContext getFhirContext() { @AfterEach() public void afterCleanupDao() { - myStorageSettings.setExpireSearchResults(new JpaStorageSettings().isExpireSearchResults()); - myStorageSettings.setEnforceReferentialIntegrityOnDelete(new JpaStorageSettings().isEnforceReferentialIntegrityOnDelete()); - myStorageSettings.setExpireSearchResultsAfterMillis(new JpaStorageSettings().getExpireSearchResultsAfterMillis()); - myStorageSettings.setReuseCachedSearchResultsForMillis(new JpaStorageSettings().getReuseCachedSearchResultsForMillis()); - myStorageSettings.setSuppressUpdatesWithNoChange(new JpaStorageSettings().isSuppressUpdatesWithNoChange()); - myStorageSettings.setAllowContainsSearches(new JpaStorageSettings().isAllowContainsSearches()); + JpaStorageSettings defaults = new JpaStorageSettings(); + myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(defaults.isAccessMetaSourceInformationFromProvenanceTable()); + 
myStorageSettings.setAllowContainsSearches(defaults.isAllowContainsSearches()); + myStorageSettings.setEnforceReferentialIntegrityOnDelete(defaults.isEnforceReferentialIntegrityOnDelete()); + myStorageSettings.setExpireSearchResults(defaults.isExpireSearchResults()); + myStorageSettings.setExpireSearchResultsAfterMillis(defaults.getExpireSearchResultsAfterMillis()); + myStorageSettings.setReuseCachedSearchResultsForMillis(defaults.getReuseCachedSearchResultsForMillis()); + myStorageSettings.setSuppressUpdatesWithNoChange(defaults.isSuppressUpdatesWithNoChange()); + myStorageSettings.setAutoCreatePlaceholderReferenceTargets(defaults.isAutoCreatePlaceholderReferenceTargets()); myPagingProvider.setDefaultPageSize(BasePagingProvider.DEFAULT_DEFAULT_PAGE_SIZE); myPagingProvider.setMaximumPageSize(BasePagingProvider.DEFAULT_MAX_PAGE_SIZE); diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java index 6608d2763a69..2719deb84a54 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java @@ -35,7 +35,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Propagation; -import jakarta.annotation.Nonnull; +import java.util.List; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -98,28 +98,28 @@ public void testSamePartitionReference_Create() { Patient p1 = new Patient(); p1.setActive(true); IIdType patient1Id = myPatientDao.create(p1, mySrd).getId().toUnqualifiedVersionless(); - initializeCrossReferencesInterceptor(); + logAllResources(); + // Test myCaptureQueriesListener.clear(); Patient p2 = new Patient(); p2.setActive(true); 
p2.addLink().setOther(new Reference(patient1Id)); - - // Test - myCaptureQueriesListener.clear(); IIdType patient2Id = myPatientDao.create(p2, mySrd).getId().toUnqualifiedVersionless(); // Verify - myCaptureQueriesListener.logSelectQueries(); assertEquals(1, myCaptureQueriesListener.countCommits()); assertEquals(0, myCaptureQueriesListener.countRollbacks()); + myCaptureQueriesListener.clear(); SearchParameterMap params = SearchParameterMap .newSynchronous(Constants.PARAM_ID, new TokenParam(patient2Id.getValue())) .addInclude(Patient.INCLUDE_LINK); IBundleProvider search = myPatientDao.search(params, mySrd); - assertThat(toUnqualifiedVersionlessIdValues(search)).containsExactly(patient2Id.getValue(), patient1Id.getValue()); + List values = toUnqualifiedVersionlessIdValues(search); + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).containsExactly(patient2Id.getValue(), patient1Id.getValue()); assertThat(search.getAllResources()).hasSize(2); search.getAllResources().forEach(p -> assertTrue(((Patient) p).getActive())); } @@ -190,7 +190,7 @@ public void testCrossPartitionReference_Create() { } private void initializeCrossReferencesInterceptor() { - when(myCrossPartitionReferencesDetectedInterceptor.handle(any(),any())).thenAnswer(t->{ + when(myCrossPartitionReferencesDetectedInterceptor.handle(any(), any())).thenAnswer(t -> { CrossPartitionReferenceDetails theDetails = t.getArgument(1, CrossPartitionReferenceDetails.class); IIdType targetId = theDetails.getPathAndRef().getRef().getReferenceElement(); RequestPartitionId referenceTargetPartition = myPartitionHelperSvc.determineReadPartitionForRequestForRead(theDetails.getRequestDetails(), targetId.getResourceType(), targetId); @@ -232,11 +232,12 @@ public RequestPartitionId selectPartitionRead(ReadPartitionIdRequestDetails theR private static RequestPartitionId selectPartition(String resourceType) { switch (resourceType) { case "Patient": + case "RelatedPerson": return PARTITION_PATIENT; case 
"Observation": return PARTITION_OBSERVATION; default: - throw new InternalErrorException("Don't know how to handle resource type"); + throw new InternalErrorException("Don't know how to handle resource type: " + resourceType); } } diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java index 6637cddd6c82..5f89ed6271fd 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java @@ -52,7 +52,7 @@ public void testCreate() { runInTransaction(()->{ ResourceTable resource = myResourceTableDao.getReferenceById(id.getIdPartAsLong()); assertNotNull(resource); - ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L); + ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1L); assertNotNull(history); assertEquals(ResourceEncodingEnum.ESR, history.getEncoding()); assertEquals(MY_PROVIDER_ID + ":" + ADDRESS_123, history.getResourceTextVc()); diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5HistoryDisabledTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5HistoryDisabledTest.java index 1bf29740ff7f..4a45fca83940 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5HistoryDisabledTest.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5HistoryDisabledTest.java @@ -17,6 +17,7 @@ import org.hl7.fhir.r5.model.Patient; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import 
jakarta.annotation.Nonnull; @@ -298,33 +299,6 @@ public void testUpdate_NonVersionedTagsMode_TagsCanBeDeleted() { }); } - @Test - public void testUpdate_ProvenanceIsUpdatedInPlace() { - // Setup - myStorageSettings.setStoreMetaSourceInformation(JpaStorageSettings.StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID); - Patient p = new Patient(); - p.getMeta().setSource("source-1"); - p.setActive(true); - when(mySrd.getRequestId()).thenReturn("request-id-1"); - IIdType id1 = myPatientDao.create(p, mySrd).getId(); - runInTransaction(()-> assertEquals(1, myResourceHistoryProvenanceDao.count())); - - // Test - p = new Patient(); - p.setId(id1); - p.addIdentifier().setValue("foo"); - p.getMeta().setSource("source-2"); - p.setActive(true); - when(mySrd.getRequestId()).thenReturn("request-id-2"); - DaoMethodOutcome outcome = myPatientDao.update(p, mySrd); - - // Verify - assertEquals("source-2#request-id-2", ((Patient) outcome.getResource()).getMeta().getSource()); - p = myPatientDao.read(outcome.getId(), mySrd); - assertEquals("source-2#request-id-2", p.getMeta().getSource()); - runInTransaction(()-> assertEquals(1, myResourceHistoryProvenanceDao.count())); - } - @Nonnull private static List toTagTokens(IBaseResource resource) { List tags = resource.getMeta() diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/BaseDbpmJpaR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/BaseDbpmJpaR5Test.java new file mode 100644 index 000000000000..a8b1531e657c --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/BaseDbpmJpaR5Test.java @@ -0,0 +1,53 @@ +package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode; + +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.jpa.dao.r5.BaseJpaR5Test; +import ca.uhn.fhir.jpa.entity.PartitionEntity; +import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc; +import 
ca.uhn.fhir.jpa.util.TestPartitionSelectorInterceptor; +import org.junit.jupiter.api.AfterEach; +import org.springframework.beans.factory.annotation.Autowired; + +import static org.junit.jupiter.api.Assertions.assertFalse; + +public class BaseDbpmJpaR5Test extends BaseJpaR5Test { + + public static final String PARTITION_NAME_1 = "Partition_1"; + public static final String PARTITION_NAME_2 = "Partition_2"; + public static final int PARTITION_1 = 1; + public static final int PARTITION_2 = 2; + + protected final TestPartitionSelectorInterceptor myPartitionSelectorInterceptor = new TestPartitionSelectorInterceptor(); + + @Autowired + private IPartitionLookupSvc myPartitionConfigSvc; + + @Override + @AfterEach + protected void afterResetInterceptors() { + super.afterResetInterceptors(); + myPartitionSettings.setPartitioningEnabled(false); + myInterceptorRegistry.unregisterInterceptor(myPartitionSelectorInterceptor); + } + + protected void registerPartitionInterceptorAndCreatePartitions() { + assertFalse(myInterceptorRegistry.hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ), ()->myInterceptorRegistry.getAllRegisteredInterceptors().toString()); + myInterceptorRegistry.registerInterceptor(myPartitionSelectorInterceptor); + + myPartitionConfigSvc.createPartition(new PartitionEntity().setId(PARTITION_1).setName(PARTITION_NAME_1), null); + myPartitionConfigSvc.createPartition(new PartitionEntity().setId(PARTITION_2).setName(PARTITION_NAME_2), null); + + // Load to pre-cache and avoid adding SQL queries + preFetchPartitionsIntoCache(); + } + + protected void preFetchPartitionsIntoCache() { + if (myPartitionSettings.isPartitioningEnabled()) { + myPartitionConfigSvc.getPartitionById(PARTITION_1); + myPartitionConfigSvc.getPartitionById(PARTITION_2); + myPartitionConfigSvc.getPartitionByName(PARTITION_NAME_1); + myPartitionConfigSvc.getPartitionByName(PARTITION_NAME_2); + } + } + +} diff --git 
a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningDisabledTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningDisabledTest.java new file mode 100644 index 000000000000..f5323ff9cd15 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningDisabledTest.java @@ -0,0 +1,19 @@ +package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode; + +import ca.uhn.fhir.jpa.util.TestPartitionSelectorInterceptor; +import org.junit.jupiter.api.Nested; + +/** + * This is a test verifying that we emit the right SQL for HAPI FHIR running in + * full legacy mode - No partitioning, no partition IDs in PKs. + */ +public class DbpmDisabledPartitioningDisabledTest extends BaseDbpmJpaR5Test { + + @Nested + public class MyTestDefinitions extends TestDefinitions { + MyTestDefinitions() { + super(DbpmDisabledPartitioningDisabledTest.this, new TestPartitionSelectorInterceptor(), false, false); + } + } + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledNullDefaultPartitionTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledNullDefaultPartitionTest.java new file mode 100644 index 000000000000..307d542d031d --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledNullDefaultPartitionTest.java @@ -0,0 +1,32 @@ +package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; + +/** + * This is a test verifying that we emit the right SQL when running in + * legacy partition mode with DEFAULT partition value of null (the default if + * not configured otherwise) - Partition IDs are in use, but they aren't + * included in primary keys or 
joins. + */ +public class DbpmDisabledPartitioningEnabledNullDefaultPartitionTest extends BaseDbpmJpaR5Test { + + @Override + @BeforeEach + public void before() throws Exception { + super.before(); + myPartitionSettings.setPartitioningEnabled(true); + myPartitionSettings.setDefaultPartitionId(null); + + registerPartitionInterceptorAndCreatePartitions(); + } + + @Nested + public class MyTestDefinitions extends TestDefinitions { + MyTestDefinitions() { + super(DbpmDisabledPartitioningEnabledNullDefaultPartitionTest.this, myPartitionSelectorInterceptor, true, false); + } + } + + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledTest.java new file mode 100644 index 000000000000..70585212356b --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledTest.java @@ -0,0 +1,31 @@ +package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; + +/** + * This is a test verifying that we emit the right SQL when running in + * legacy partition mode - Partition IDs are in use, but they aren't + * included in primary keys or joins. 
+ */ +public class DbpmDisabledPartitioningEnabledTest extends BaseDbpmJpaR5Test { + + @Override + @BeforeEach + public void before() throws Exception { + super.before(); + myPartitionSettings.setPartitioningEnabled(true); + myPartitionSettings.setDefaultPartitionId(0); + + registerPartitionInterceptorAndCreatePartitions(); + } + + @Nested + public class MyTestDefinitions extends TestDefinitions { + MyTestDefinitions() { + super(DbpmDisabledPartitioningEnabledTest.this, myPartitionSelectorInterceptor, true, false); + } + } + + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/TestDefinitions.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/TestDefinitions.java new file mode 100644 index 000000000000..5eb53eeb62b0 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/TestDefinitions.java @@ -0,0 +1,1882 @@ +package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode; + +import ca.uhn.fhir.batch2.api.IJobDataSink; +import ca.uhn.fhir.batch2.api.VoidModel; +import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeStep; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; +import ca.uhn.fhir.interceptor.executor.InterceptorService; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; +import ca.uhn.fhir.jpa.dao.TestDaoSearch; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; +import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao; +import 
ca.uhn.fhir.jpa.dao.data.IResourceTableDao; +import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity; +import ca.uhn.fhir.jpa.model.entity.ResourceLink; +import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; +import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; +import ca.uhn.fhir.jpa.term.api.ITermReadSvc; +import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet; +import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; +import ca.uhn.fhir.jpa.util.MemoryCacheService; +import ca.uhn.fhir.jpa.util.SqlQuery; +import ca.uhn.fhir.jpa.util.TestPartitionSelectorInterceptor; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.SortSpec; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.param.HasParam; +import ca.uhn.fhir.rest.param.HistorySearchDateRangeParam; +import ca.uhn.fhir.rest.param.ReferenceParam; +import ca.uhn.fhir.rest.param.TokenOrListParam; +import ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.rest.param.TokenParamModifier; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.test.utilities.ITestDataBuilder; +import jakarta.annotation.Nonnull; +import net.sf.jsqlparser.JSQLParserException; +import net.sf.jsqlparser.parser.CCJSqlParserUtil; +import net.sf.jsqlparser.statement.insert.Insert; +import org.assertj.core.api.Assertions; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r5.model.Bundle; +import org.hl7.fhir.r5.model.CodeSystem; +import org.hl7.fhir.r5.model.DateTimeType; +import org.hl7.fhir.r5.model.Encounter; +import 
org.hl7.fhir.r5.model.Enumerations; +import org.hl7.fhir.r5.model.IdType; +import org.hl7.fhir.r5.model.Meta; +import org.hl7.fhir.r5.model.Observation; +import org.hl7.fhir.r5.model.Organization; +import org.hl7.fhir.r5.model.Patient; +import org.hl7.fhir.r5.model.Questionnaire; +import org.hl7.fhir.r5.model.QuestionnaireResponse; +import org.hl7.fhir.r5.model.Reference; +import org.hl7.fhir.r5.model.ValueSet; +import org.intellij.lang.annotations.Language; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.Mock; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.stream.Collectors; + +import static ca.uhn.fhir.jpa.dao.r5.dbpartitionmode.DbpmDisabledPartitioningEnabledTest.PARTITION_1; +import static ca.uhn.fhir.jpa.dao.r5.dbpartitionmode.DbpmDisabledPartitioningEnabledTest.PARTITION_2; +import static ca.uhn.fhir.rest.api.Constants.PARAM_HAS; +import static ca.uhn.fhir.rest.api.Constants.PARAM_SOURCE; +import static ca.uhn.fhir.rest.api.Constants.PARAM_TAG; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; +import static org.hl7.fhir.instance.model.api.IAnyResource.SP_RES_ID; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +/** + * This class is a set of test that are run as {@literal @Nested} by several + * test classes. 
It verifies that we emit appropriate SQL for various + * scenarios including non-partitioned mode, partitioned mode, and + * database partitioning mode. + */ +abstract class TestDefinitions implements ITestDataBuilder { + + private final TestPartitionSelectorInterceptor myPartitionSelectorInterceptor; + private final boolean myIncludePartitionIdsInSql; + private final BaseDbpmJpaR5Test myParentTest; + private final boolean myIncludePartitionIdsInPks; + @Autowired + protected ITermCodeSystemStorageSvc myTermCodeSystemStorageSvc; + @Autowired + protected ITermDeferredStorageSvc myTerminologyDeferredStorageSvc; + @Autowired + protected ITermReadSvc myTermSvc; + @Autowired + private TestDaoSearch myTestDaoSearch; + @Autowired + private InterceptorService myInterceptorService; + @Autowired + protected CircularQueueCaptureQueriesListener myCaptureQueriesListener; + @Autowired + private IFhirResourceDaoPatient myPatientDao; + @Autowired + private IFhirResourceDaoObservation myObservationDao; + @Autowired + private IFhirResourceDao myCodeSystemDao; + @Autowired + private IFhirResourceDao myValueSetDao; + @Autowired + private IFhirResourceDao myEncounterDao; + @Autowired + private IFhirResourceDao myOrganizationDao; + @Autowired + private IFhirResourceDao myQuestionnaireDao; + @Autowired + private IFhirResourceDao myQuestionnaireResponseDao; + @Autowired + private IFhirSystemDao mySystemDao; + @Autowired + private IResourceTableDao myResourceTableDao; + @Autowired + private IResourceHistoryTableDao myResourceHistoryTableDao; + @Autowired + private IResourceHistoryProvenanceDao myResourceHistoryProvenanceTableDao; + @Autowired + private IResourceLinkDao myResourceLinkDao; + @Autowired + private FhirContext myFhirCtx; + @Autowired + private DaoRegistry myDaoRegistry; + @Autowired + private PartitionSettings myPartitionSettings; + @Autowired + private MemoryCacheService myMemoryCache; + @Autowired + private JpaStorageSettings myStorageSettings; + @Autowired + private 
DeleteExpungeStep myDeleteExpungeStep; + + @Mock + private IJobDataSink myVoidSink; + @Autowired + private ExpungeEverythingService myExpungeEverythingService; + + public TestDefinitions(@Nonnull BaseDbpmJpaR5Test theParentTest, @Nonnull TestPartitionSelectorInterceptor thePartitionSelectorInterceptor, boolean theIncludePartitionIdsInSql, boolean theIncludePartitionIdsInPks) { + myParentTest = theParentTest; + myPartitionSelectorInterceptor = thePartitionSelectorInterceptor; + myIncludePartitionIdsInSql = theIncludePartitionIdsInSql; + myIncludePartitionIdsInPks = theIncludePartitionIdsInPks; + assert myIncludePartitionIdsInSql && myIncludePartitionIdsInPks || myIncludePartitionIdsInSql || !myIncludePartitionIdsInPks; + } + + @AfterEach + public void after() { + { + JpaStorageSettings defaults = new JpaStorageSettings(); + myStorageSettings.setTagStorageMode(defaults.getTagStorageMode()); + myStorageSettings.setIndexOnContainedResources(defaults.isIndexOnContainedResources()); + } + { + PartitionSettings defaults = new PartitionSettings(); + myPartitionSettings.setConditionalCreateDuplicateIdentifiersEnabled(defaults.isConditionalCreateDuplicateIdentifiersEnabled()); + } + } + + @Test + public void testCreate_Conditional() throws JSQLParserException { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_2); + createPatient(withActiveTrue()); // Just to pre-fetch the partition details + myPartitionSettings.setConditionalCreateDuplicateIdentifiersEnabled(true); + + // Test + myCaptureQueriesListener.clear(); + Patient patient = new Patient(); + patient.addIdentifier().setSystem("http://foo").setValue("bar"); + DaoMethodOutcome outcome = myPatientDao.create(patient, "Patient?identifier=http://foo|bar", new SystemRequestDetails()); + long id = outcome.getId().getIdPartAsLong(); + + // Verify + assertTrue(outcome.getCreated()); + + // Verify Select Queries + + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInSql) { + 
assertThat(getSelectSql(0)).startsWith("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE ((t0.PARTITION_ID = '2') AND (t0.HASH_SYS_AND_VALUE = '-2780914544385068076'))"); + } else { + assertThat(getSelectSql(0)).startsWith("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_SYS_AND_VALUE = '-2780914544385068076')"); + } + assertEquals(1, myCaptureQueriesListener.countSelectQueries()); + + // Verify Insert Queries + + myCaptureQueriesListener.logInsertQueries(); + assertEquals(5, myCaptureQueriesListener.countInsertQueries()); + assertEquals("HFJ_RESOURCE", parseInsertStatementTableName(getInsertSql(0))); + assertEquals("HFJ_RES_VER", parseInsertStatementTableName(getInsertSql(1))); + for (int i = 0; i < 4; i++) { + String insertSql = getInsertSql(i); + Map insertColumns = parseInsertStatementParams(insertSql); + String tableName = parseInsertStatementTableName(getInsertSql(i)); + if (myIncludePartitionIdsInSql) { + assertEquals("'2'", insertColumns.get("PARTITION_ID"), insertSql); + assertEquals("'" + id + "'", insertColumns.get("RES_ID"), insertSql); + } else { + if ("HFJ_RES_SEARCH_URL".equals(tableName)) { + assertEquals("'-1'", insertColumns.get("PARTITION_ID"), insertSql); + } else { + assertEquals("NULL", insertColumns.get("PARTITION_ID"), insertSql); + } + assertEquals("'" + id + "'", insertColumns.get("RES_ID"), insertSql); + } + } + + // Verify no other queries + + assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); + assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); + } + + @ParameterizedTest + @EnumSource(PartitionSettings.CrossPartitionReferenceMode.class) + public void testCreate_ReferenceToResourceInOtherPartition(PartitionSettings.CrossPartitionReferenceMode theAllowReferencesToCrossPartition) { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_2); + myPartitionSettings.setAllowReferencesAcrossPartitions(theAllowReferencesToCrossPartition); + IIdType patientId = 
createPatient(withActiveTrue()); + + // Test + ourLog.info("Starting to test testCreate_ReferenceToResourceInOtherPartition"); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + try { + IIdType obsId = createObservation(withSubject(patientId)); + if (myIncludePartitionIdsInSql && theAllowReferencesToCrossPartition == PartitionSettings.CrossPartitionReferenceMode.NOT_ALLOWED) { + runInTransaction(()->{ + List resources = myResourceTableDao.findAll(); + String failMessage = "Resources:\n * " + resources.stream().map(ResourceTable::toString).collect(Collectors.joining("\n * ")); + List resourceLinks = myResourceLinkDao.findAll(); + failMessage += "\n\nResource Links:\n * " + resourceLinks.stream().map(ResourceLink::toString).collect(Collectors.joining("\n * ")); + failMessage += "\n\nRegistered Interceptors:\n * " + myInterceptorService.getAllRegisteredInterceptors().stream().map(Object::toString).collect(Collectors.joining("\n * ")); + fail(failMessage); + }); + } else { + assertNotNull(obsId); + } + } catch (InvalidRequestException e) { + if (myIncludePartitionIdsInSql) { + assertEquals(PartitionSettings.CrossPartitionReferenceMode.NOT_ALLOWED, theAllowReferencesToCrossPartition); + assertThat(e.getMessage()).contains("not found, specified in path: Observation.subject"); + } else { + fail(); + } + } + } + + @Test + public void testDelete() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + IIdType orgId = createOrganization(withName("ORG")).toUnqualifiedVersionless(); + IIdType id = createPatient(withActiveTrue(), withFamily("HOMER"), withOrganization(orgId)).toUnqualifiedVersionless(); + long idLong = id.getIdPartAsLong(); + + // Test + myCaptureQueriesListener.clear(); + myPatientDao.delete(id, new SystemRequestDetails()); + + // Verify + + // Verify Select + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).endsWith(" from HFJ_RESOURCE rt1_0 where 
(rt1_0.RES_ID,rt1_0.PARTITION_ID) in (('" + idLong + "','1'))"); + } else { + assertThat(getSelectSql(0)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.RES_ID='" + idLong + "'"); + } + assertEquals(4, myCaptureQueriesListener.countSelectQueries()); + + // Verify Insert + myCaptureQueriesListener.logInsertQueries(); + assertThat(getInsertSql(0)).startsWith("insert into HFJ_RES_VER "); + assertEquals(1, myCaptureQueriesListener.countInsertQueries()); + + // Verify Update + myCaptureQueriesListener.logUpdateQueries(); + if (myIncludePartitionIdsInPks) { + assertThat(getUpdateSql(0)).contains("where RES_ID='" + idLong + "' and PARTITION_ID='1' and RES_VER='1'"); + } else { + assertThat(getUpdateSql(0)).contains("where RES_ID='" + idLong + "' and RES_VER='1'"); + } + assertEquals(1, myCaptureQueriesListener.countUpdateQueries()); + + // Verify Delete + myCaptureQueriesListener.logDeleteQueries(); + String deleteWhere; + assertEquals("delete from HFJ_RES_SEARCH_URL where (RES_ID='" + idLong + "')", getDeleteSql(0)); + if (myIncludePartitionIdsInPks) { + deleteWhere = "(RES_ID,PARTITION_ID)=('" + idLong + "','1')"; + } else { + deleteWhere = "RES_ID='" + idLong + "'"; + } + assertEquals("delete from HFJ_SPIDX_STRING where " + deleteWhere, getDeleteSql(1)); + assertEquals("delete from HFJ_SPIDX_TOKEN where " + deleteWhere, getDeleteSql(2)); + if (myIncludePartitionIdsInPks) { + assertEquals("delete from HFJ_RES_LINK where (SRC_RESOURCE_ID,PARTITION_ID)=('" + idLong + "','1')", getDeleteSql(3)); + } else { + assertEquals("delete from HFJ_RES_LINK where SRC_RESOURCE_ID='" + idLong + "'", getDeleteSql(3)); + } + assertEquals(4, myCaptureQueriesListener.countDeleteQueries()); + } + + @Test + public void testHistory_Instance() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + Patient p = new Patient(); + p.addIdentifier().setSystem("http://foo").setValue("1"); + IIdType id = myPatientDao.create(p, newRequest()).getId(); + assertEquals("1", 
id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("2"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("2", id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("3"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("3", id.getVersionIdPart()); + id = id.toUnqualifiedVersionless(); + + // Test + myCaptureQueriesListener.clear(); + IBundleProvider outcome; + outcome = myPatientDao.history(id, new HistorySearchDateRangeParam(), newRequest()); + + // Verify + List actualIds = toUnqualifiedIdValues(outcome); + myCaptureQueriesListener.logSelectQueries(); + assertThat(actualIds).asList().containsExactlyInAnyOrder("Patient/" + id.getIdPart() + "/_history/3", "Patient/" + id.getIdPart() + "/_history/2", "Patient/" + id.getIdPart() + "/_history/1"); + + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith("from HFJ_RESOURCE rt1_0 where rt1_0.PARTITION_ID='1' and rt1_0.RES_ID='" + id.getIdPartAsLong() + "'"); + } else { + assertThat(getSelectSql(0)).endsWith("from HFJ_RESOURCE rt1_0 where rt1_0.RES_ID='" + id.getIdPartAsLong() + "'"); + } + + if (myIncludePartitionIdsInSql) { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0 where rht1_0.PARTITION_ID in ('1') and rht1_0.RES_ID='" + id.getIdPartAsLong() + "'", getSelectSql(1)); + } else { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0 where rht1_0.RES_ID='" + id.getIdPartAsLong() + "'", getSelectSql(1)); + } + + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(2)).contains(" from HFJ_RES_VER rht1_0 where rht1_0.PARTITION_ID in ('1') and rht1_0.RES_ID='" + id.getIdPartAsLong() + "'"); + } else { + assertThat(getSelectSql(2)).contains(" from HFJ_RES_VER rht1_0 where rht1_0.RES_ID='" + id.getIdPartAsLong() + "' "); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testHistory_Type() { + // Setup + 
myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + Patient p = new Patient(); + p.addIdentifier().setSystem("http://foo").setValue("1"); + IIdType id = myPatientDao.create(p, newRequest()).getId(); + assertEquals("1", id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("2"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("2", id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("3"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("3", id.getVersionIdPart()); + id = id.toUnqualifiedVersionless(); + + // Test + myCaptureQueriesListener.clear(); + IBundleProvider outcome; + outcome = myPatientDao.history(null, null, null, newRequest()); + + // Verify + List actualIds = toUnqualifiedIdValues(outcome); + myCaptureQueriesListener.logSelectQueries(); + assertThat(actualIds).asList().containsExactlyInAnyOrder("Patient/" + id.getIdPart() + "/_history/3", "Patient/" + id.getIdPart() + "/_history/2", "Patient/" + id.getIdPart() + "/_history/1"); + + if (myIncludePartitionIdsInSql) { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0 where rht1_0.PARTITION_ID in ('1') and rht1_0.RES_TYPE='Patient'", getSelectSql(0)); + } else { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0 where rht1_0.RES_TYPE='Patient'", getSelectSql(0)); + } + + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(1)).contains(" from HFJ_RES_VER rht1_0 where rht1_0.PARTITION_ID in ('1') and rht1_0.RES_TYPE='Patient' "); + } else { + assertThat(getSelectSql(1)).contains(" from HFJ_RES_VER rht1_0 where rht1_0.RES_TYPE='Patient' "); + } + + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testHistory_Server() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + Patient p = new Patient(); + p.addIdentifier().setSystem("http://foo").setValue("1"); + IIdType id = myPatientDao.create(p, newRequest()).getId(); + assertEquals("1", 
id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("2"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("2", id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("3"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("3", id.getVersionIdPart()); + id = id.toUnqualifiedVersionless(); + + // Test + myCaptureQueriesListener.clear(); + IBundleProvider outcome; + outcome = mySystemDao.history(null, null, null, newRequest()); + + // Verify + List actualIds = toUnqualifiedIdValues(outcome); + myCaptureQueriesListener.logSelectQueries(); + assertThat(actualIds).asList().containsExactlyInAnyOrder("Patient/" + id.getIdPart() + "/_history/3", "Patient/" + id.getIdPart() + "/_history/2", "Patient/" + id.getIdPart() + "/_history/1"); + + if (myIncludePartitionIdsInSql) { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0 where rht1_0.PARTITION_ID in ('1')", getSelectSql(0)); + } else { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0", getSelectSql(0)); + } + + assertThat(getSelectSql(1)).contains(" from HFJ_RES_VER rht1_0 "); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(1)).contains(" where rht1_0.PARTITION_ID in ('1') "); + } else { + assertThat(getSelectSql(1)).doesNotContain(" where "); + } + + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testOperation_ExpungeEverything() { + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + createPatient(withActiveTrue()); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_2); + createPatient(withFamily("SIMPSON"), withBirthdate("2024-01-01")); + + // Test + myCaptureQueriesListener.clear(); + myExpungeEverythingService.expungeEverything(new SystemRequestDetails()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + String sql; + + // Select HFJ_SPIDX_TOKEN + List selectTokenQueries = getSqlSelectQueriesWithString(" HFJ_SPIDX_TOKEN "); + if 
(myIncludePartitionIdsInPks) { + sql = "select rispt1_0.SP_ID,rispt1_0.PARTITION_ID from HFJ_SPIDX_TOKEN rispt1_0 fetch first '800' rows only"; + } else { + sql = "select rispt1_0.SP_ID from HFJ_SPIDX_TOKEN rispt1_0 fetch first '800' rows only"; + } + assertThat(selectTokenQueries.get(0).getSql(true, false)).isEqualTo(sql); + assertThat(selectTokenQueries.get(1).getSql(true, false)).isEqualTo(sql); + assertEquals(2, selectTokenQueries.size()); + + // Delete HFJ_SPIDX_TOKEN + List deleteTokenQueries = getSqlDeleteQueriesWithString(" HFJ_SPIDX_TOKEN "); + if (myIncludePartitionIdsInPks) { + assertThat(deleteTokenQueries.get(0).getSql(true, false)).startsWith("delete from HFJ_SPIDX_TOKEN where (SP_ID,PARTITION_ID) in "); + } else { + assertThat(deleteTokenQueries.get(0).getSql(true, false)).startsWith("delete from HFJ_SPIDX_TOKEN where SP_ID in "); + } + assertEquals(1, deleteTokenQueries.size(), ()-> "SQL:\n * " + deleteTokenQueries.stream().map(t->t.getSql(true, false)).collect(Collectors.joining("\n * "))); + + // Select HFJ_RES_VER + List selectResVerQueries = getSqlSelectQueriesWithString(" HFJ_RES_VER "); + if (myIncludePartitionIdsInPks) { + sql = "select rht1_0.PARTITION_ID,rht1_0.PID from HFJ_RES_VER rht1_0 fetch first '400' rows only"; + } else { + sql = "select rht1_0.PID from HFJ_RES_VER rht1_0 fetch first '800' rows only"; + } + assertThat(selectResVerQueries.get(0).getSql(true, false)).isEqualTo(sql); + assertThat(selectResVerQueries.get(1).getSql(true, false)).isEqualTo(sql); + assertEquals(2, selectResVerQueries.size()); + + // Select HFJ_RES_VER + List deleteResVerQueries = getSqlDeleteQueriesWithString(" HFJ_RES_VER "); + if (myIncludePartitionIdsInPks) { + assertThat(deleteResVerQueries.get(0).getSql(true, false)).startsWith("delete from HFJ_RES_VER where (PARTITION_ID,PID) in "); + } else { + assertThat(deleteResVerQueries.get(0).getSql(true, false)).startsWith("delete from HFJ_RES_VER where PID in "); + } + assertEquals(1, 
deleteResVerQueries.size()); + } + + @Test + public void testRead_DefaultPartition() { + // Setup + myPartitionSelectorInterceptor.addNonPartitionableResource("Organization"); + IIdType id = createOrganization(withId("O"), withName("PARENT")); + long pid = findId("Organization", "O").getId(); + + // Test + myCaptureQueriesListener.clear(); + myMemoryCache.invalidateAllCaches(); + Organization actual = myOrganizationDao.read(id, new SystemRequestDetails()); + + // Verify + assertEquals("PARENT", actual.getName()); + myCaptureQueriesListener.logSelectQueries(); + + if (myIncludePartitionIdsInSql) { + if (myPartitionSettings.getDefaultPartitionId() == null) { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.PARTITION_ID is null and (rt1_0.RES_TYPE='Organization' and rt1_0.FHIR_ID='O')"); + } else { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.PARTITION_ID='0' and (rt1_0.RES_TYPE='Organization' and rt1_0.FHIR_ID='O')"); + } + } else { + assertThat(getSelectSql(0)).endsWith(" where (rt1_0.RES_TYPE='Organization' and rt1_0.FHIR_ID='O')"); + } + + if (myIncludePartitionIdsInSql) { + if (myPartitionSettings.getDefaultPartitionId() == null) { + assertThat(getSelectSql(1)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.PARTITION_ID is null and rt1_0.RES_ID='" + pid + "'"); + } else { + assertThat(getSelectSql(1)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.PARTITION_ID='0' and rt1_0.RES_ID='" + pid + "'"); + } + } else { + assertThat(getSelectSql(1)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.RES_ID='" + pid + "'"); + } + + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(2)).endsWith(" from HFJ_RES_VER rht1_0 where (rht1_0.RES_ID,rht1_0.PARTITION_ID)=('" + pid + "','0') and rht1_0.RES_VER='1'"); + } else { + assertThat(getSelectSql(2)).endsWith(" from HFJ_RES_VER rht1_0 where rht1_0.RES_ID='" + pid + "' and rht1_0.RES_VER='1'"); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void 
testRead_ServerAssignedId() { + // Setup + myCaptureQueriesListener.clear(); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + long id = createPatient(withActiveTrue()).getIdPartAsLong(); + myParentTest.logAllResources(); + myCaptureQueriesListener.logInsertQueries(); + + // Test + myCaptureQueriesListener.clear(); + myPatientDao.read(new IdType("Patient/" + id), newRequest()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.PARTITION_ID='1' and rt1_0.RES_ID='" + id + "'"); + } else { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.RES_ID='" + id + "'"); + } + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(1)).endsWith("where (rht1_0.RES_ID,rht1_0.PARTITION_ID)=('" + id + "','1') and rht1_0.RES_VER='1'"); + } else { + assertThat(getSelectSql(1)).endsWith(" where rht1_0.RES_ID='" + id + "' and rht1_0.RES_VER='1'"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testRead_ClientAssignedId() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + createPatient(withId("A"), withActiveTrue()); + + long id = runInTransaction(() -> myResourceTableDao.findByTypeAndFhirId("Patient", "A").orElseThrow().getPersistentId().getId()); + + // Test + myCaptureQueriesListener.clear(); + myPatientDao.read(new IdType("Patient/A"), newRequest()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.PARTITION_ID='1' and rt1_0.RES_ID='" + id + "'"); + } else { + assertThat(getSelectSql(0)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.RES_ID='" + id + "'"); + } + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(1)).endsWith(" where (rht1_0.RES_ID,rht1_0.PARTITION_ID)=('" + id + 
"','1') and rht1_0.RES_VER='1'"); + } else { + assertThat(getSelectSql(1)).endsWith(" where rht1_0.RES_ID='" + id + "' and rht1_0.RES_VER='1'"); + } + } + + @Test + public void testSearch_Contained() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + myStorageSettings.setIndexOnContainedResources(true); + Patient p = new Patient(); + p.addName().setFamily("Smith"); + Observation obs = new Observation(); + obs.setSubject(new Reference(p)); + IIdType id = myObservationDao.create(obs, new SystemRequestDetails()).getId().toUnqualifiedVersionless(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap map = SearchParameterMap.newSynchronous(); + map.add("subject", new ReferenceParam("name", "Smith")); + IBundleProvider outcome = myObservationDao.search(map, new SystemRequestDetails()); + List results = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + Assertions.assertThat(results).containsExactlyInAnyOrder(id.getValue()); + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).startsWith("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.PARTITION_ID,t0.RES_ID) IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 "); + } else if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).startsWith("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (t0.RES_ID IN (SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 "); + } else { + assertThat(getSelectSql(0)).startsWith("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (t0.RES_ID IN (SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 "); + } + + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).contains("t0.PARTITION_ID = '1'"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + + } + + @Test + public void testSearch_Chained() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + IIdType patientId = 
createPatient(withFamily("NAME")).toUnqualifiedVersionless(); + IIdType observationId = createObservation(withSubject(patientId)).toUnqualifiedVersionless(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous(Observation.SP_PATIENT, new ReferenceParam("family", "NAME")); + IBundleProvider outcome = myObservationDao.search(params, new SystemRequestDetails()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactly(observationId.getValue()); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).contains("SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 "); + } else { + assertThat(getSelectSql(0)).contains("SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 "); + } + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains("INNER JOIN HFJ_SPIDX_STRING t1 ON ((t0.TARGET_RES_PARTITION_ID = t1.PARTITION_ID) AND (t0.TARGET_RESOURCE_ID = t1.RES_ID))"); + } else { + assertThat(getSelectSql(0)).contains("INNER JOIN HFJ_SPIDX_STRING t1 ON (t0.TARGET_RESOURCE_ID = t1.RES_ID)"); + } + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).contains("t1.PARTITION_ID = '1'"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + + } + + @Test + public void testSearch_Has() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + + IIdType patientId = createPatient(withActiveTrue()).toUnqualifiedVersionless(); + IIdType observationId = createObservation(withSubject(patientId)).toUnqualifiedVersionless(); + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous(); + params.add(PARAM_HAS, new HasParam("Observation", "patient", "_id", observationId.getValue())); + IBundleProvider outcome = 
myPatientDao.search(params, new SystemRequestDetails()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactly(patientId.getValue()); + } + + @ParameterizedTest + @ValueSource(booleans = {false}) // TODO: True will be added in the next PR + public void testSearch_IdParam(boolean theIncludeOtherParameter) { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + + IIdType id0 = createPatient(withActiveTrue()).toUnqualifiedVersionless(); + IIdType id1 = createPatient(withId("A"), withActiveTrue()).toUnqualifiedVersionless(); + + myMemoryCache.invalidateAllCaches(); + myParentTest.preFetchPartitionsIntoCache(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + if (theIncludeOtherParameter) { + params.add(Patient.SP_ACTIVE, new TokenParam("true")); + } + params.add(SP_RES_ID, new TokenOrListParam().add(id0.getValue()).add(id1.getValue())); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + assertThat(toUnqualifiedVersionlessIdValues(outcome)).asList().containsExactlyInAnyOrder(id0.getValue(), id1.getValue()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.PARTITION_ID='1' and (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='" + id0.getIdPart() + "' or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A')"); + } else { + assertThat(getSelectSql(0)).endsWith(" where (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='" + id0.getIdPart() + "' or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A')"); + } + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(1)).contains(" WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = '1') AND (t0.RES_ID IN "); + } else { + assertThat(getSelectSql(1)).contains(" 
WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.RES_ID IN "); + } + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + + } + + @Test + public void testSearch_ListParam() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + IIdType patId0 = createPatient(withActiveTrue()).toUnqualifiedVersionless(); + IIdType patId1 = createPatient(withActiveTrue()).toUnqualifiedVersionless(); + IIdType listId = createList(withListItem(patId0), withListItem(patId1)).toUnqualifiedVersionless(); + Long listIdLong = listId.getIdPartAsLong(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.add(Constants.PARAM_LIST, new TokenParam(listId.getValue())); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + assertThat(toUnqualifiedVersionlessIdValues(outcome)).asList().containsExactlyInAnyOrder(patId0.getValue(), patId1.getValue()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(getSelectSql(0)).contains(" FROM HFJ_RESOURCE t1 "); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_LINK t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.TARGET_RESOURCE_ID)) "); + assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.SRC_PATH = 'List.entry.item') AND (t0.TARGET_RESOURCE_TYPE = 'Patient') AND ((t0.PARTITION_ID,t0.SRC_RESOURCE_ID) IN (('1','" + listIdLong + "')) ))"); + } else { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_LINK t0 ON (t1.RES_ID = t0.TARGET_RESOURCE_ID) "); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.PARTITION_ID = '1') AND (t0.SRC_PATH = 'List.entry.item') AND (t0.TARGET_RESOURCE_TYPE = 'Patient') AND (t0.SRC_RESOURCE_ID = '" + listIdLong + "'))"); + } else { + assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.SRC_PATH = 'List.entry.item') AND 
(t0.TARGET_RESOURCE_TYPE = 'Patient') AND (t0.SRC_RESOURCE_ID = '" + listIdLong + "'))"); + } + } + + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + /** + * Perform a search where the request partition ID includes multiple partitions + */ + @Test + public void testSearch_MultiPartition() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + IIdType id0 = createPatient(withActiveTrue(), withFamily("A")).toUnqualifiedVersionless(); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_2); + IIdType id1 = createPatient(withActiveFalse(), withFamily("B")).toUnqualifiedVersionless(); + + // Test + myPartitionSelectorInterceptor.setNextPartition(RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2)); + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous() + .setSort(new SortSpec(Patient.SP_FAMILY)); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + assertThat(toUnqualifiedVersionlessIdValues(outcome)).asList().containsExactlyInAnyOrder(id0.getValue(), id1.getValue()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInPks) { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = '-9208284524139093953')) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID IN ('1','2') )) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", getSelectSql(0)); + assertThat(getSelectSql(1)).contains(" where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in (('" + id0.getIdPartAsLong() + "','1'),('" + id1.getIdPartAsLong() + "','2'),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL)) and mrt1_0.RES_VER=rht1_0.RES_VER"); + } else if (myIncludePartitionIdsInSql) { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM 
HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = '-9208284524139093953')) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID IN ('1','2') )) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", getSelectSql(0)); + assertThat(getSelectSql(1)).contains(" where rht1_0.RES_ID in ('" + id0.getIdPartAsLong() + "','" + id1.getIdPartAsLong() + "','-1','-1','-1','-1','-1','-1','-1','-1') and mrt1_0.RES_VER=rht1_0.RES_VER"); + } else { + assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = '-9208284524139093953')) WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", getSelectSql(0)); + assertThat(getSelectSql(1)).contains(" where rht1_0.RES_ID in ('" + id0.getIdPartAsLong() + "','" + id1.getIdPartAsLong() + "','-1','-1','-1','-1','-1','-1','-1','-1') and mrt1_0.RES_VER=rht1_0.RES_VER"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSearch_Source(boolean theAccessMetaSourceInformationFromProvenanceTable) { + // Setup + myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(theAccessMetaSourceInformationFromProvenanceTable); + myStorageSettings.setStoreMetaSourceInformation(JpaStorageSettings.StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + long idFoo = createPatient(withActiveTrue(), withSource("http://foo")).getIdPartAsLong(); + long idBar = createPatient(withActiveTrue(), withSource("http://bar")).getIdPartAsLong(); + + runInTransaction(()->{ + ResourceTable table = myResourceTableDao.getReferenceById(idFoo); + ResourceHistoryProvenanceEntity prov = new ResourceHistoryProvenanceEntity(); + prov.setResourceTable(table); + 
prov.setResourceHistoryTable(myResourceHistoryTableDao.findForIdAndVersion(table.getResourceId(), 1)); + prov.setSourceUri("http://foo"); + myResourceHistoryProvenanceTableDao.save(prov); + + table = myResourceTableDao.getReferenceById(idBar); + prov = new ResourceHistoryProvenanceEntity(); + prov.setResourceTable(table); + prov.setResourceHistoryTable(myResourceHistoryTableDao.findForIdAndVersion(table.getResourceId(), 1)); + prov.setSourceUri("http://bar"); + myResourceHistoryProvenanceTableDao.save(prov); + }); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous(); + params.add(PARAM_SOURCE, new TokenParam("http://foo")); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactly("Patient/" + idFoo); + + if (myIncludePartitionIdsInPks) { + if (theAccessMetaSourceInformationFromProvenanceTable) { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER_PROV t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_PID)) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); + } else { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID)) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); + } + } else if (myIncludePartitionIdsInSql) { + if (theAccessMetaSourceInformationFromProvenanceTable) { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER_PROV t1 ON (t0.RES_ID = t1.RES_PID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); 
+ } else { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER t1 ON (t0.RES_ID = t1.RES_ID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); + } + } else { + if (theAccessMetaSourceInformationFromProvenanceTable) { + assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER_PROV t1 ON (t0.RES_ID = t1.RES_PID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); + } else { + assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER t1 ON (t0.RES_ID = t1.RES_ID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); + } + } + + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSearch_Tags_Versioned(boolean theNegate) { + // Setup + myStorageSettings.setTagStorageMode(JpaStorageSettings.TagStorageModeEnum.VERSIONED); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + long idBar = createPatient(withActiveTrue(), withTag("http://foo", "bar")).getIdPartAsLong(); + long idBaz = createPatient(withActiveTrue(), withTag("http://foo", "baz")).getIdPartAsLong(); + long id = theNegate ? 
idBaz : idBar; + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + TokenParam bar = new TokenParam("http://foo", "bar"); + if (theNegate) { + bar.setModifier(TokenParamModifier.NOT); + } + params.add(PARAM_TAG, bar); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactly("Patient/" + id); + + if (theNegate) { + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains("((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RES_TAG t0"); + } else { + assertThat(getSelectSql(0)).contains("t0.RES_ID NOT IN (SELECT t0.RES_ID FROM HFJ_RES_TAG t0 "); + } + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_TAG_DEF t1 ON (t0.TAG_ID = t1.TAG_ID) "); + } else { + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_TAG t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID)) INNER"); + } else { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER"); + } + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) "); + } + + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).contains("PARTITION_ID = '1')"); + } + + // Query 1 is the HFJ_RES_VER fetch + assertThat(getSelectSql(1)).contains(" from HFJ_RES_VER "); + + assertThat(getSelectSql(2)).contains(" from HFJ_HISTORY_TAG rht1_0 "); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(2)).contains(" where (rht1_0.PARTITION_ID,rht1_0.RES_VER_PID) in (('1',"); + } else { + assertThat(getSelectSql(2)).contains(" where rht1_0.RES_VER_PID in ('"); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void 
testSearch_Tags_Unversioned() { + // Setup + myStorageSettings.setTagStorageMode(JpaStorageSettings.TagStorageModeEnum.NON_VERSIONED); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + long id = createPatient(withActiveTrue(), withTag("http://foo", "bar")).getIdPartAsLong(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.add(PARAM_TAG, new TokenParam("http://foo", "bar")); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactly("Patient/" + id); + + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_TAG t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID)) INNER"); + } else { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER"); + } + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) "); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).contains("(t1.PARTITION_ID = '1')"); + } + + // Query 1 is the HFJ_RES_VER fetch + assertThat(getSelectSql(1)).contains(" from HFJ_RES_VER "); + + assertThat(getSelectSql(2)).contains(" from HFJ_RES_TAG rt1_0 "); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(2)).contains(" where (rt1_0.RES_ID,rt1_0.PARTITION_ID) in (('" + id + "','1'))"); + } else { + assertThat(getSelectSql(2)).contains(" where rt1_0.RES_ID in ('" + id + "')"); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Token() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + long id = createPatient(withActiveTrue()).getIdPartAsLong(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap 
params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.add(Patient.SP_ACTIVE, new TokenParam().setValue("true")); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + assertThat(toUnqualifiedVersionlessIdValues(outcome)).asList().containsExactly("Patient/" + id); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.PARTITION_ID = '1') AND (t0.HASH_VALUE = '7943378963388545453'))"); + } else { + assertThat(getSelectSql(0)).endsWith(" WHERE (t0.HASH_VALUE = '7943378963388545453')"); + } + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(1)).endsWith(" where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in (('" + id + "','1')) and mrt1_0.RES_VER=rht1_0.RES_VER"); + } else { + assertThat(getSelectSql(1)).endsWith(" where rht1_0.RES_ID in ('" + id + "') and mrt1_0.RES_VER=rht1_0.RES_VER"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Token_Not() { + // Setup + + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + createObservation(withId("A"), withObservationCode("http://foo", "A")); + createObservation(withId("B"), withObservationCode("http://foo", "B")); + + // Test + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.add(Observation.SP_CODE, new TokenParam("http://foo", "B").setModifier(TokenParamModifier.NOT)); + IBundleProvider outcome = myObservationDao.search(params, newRequest()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(toUnqualifiedVersionlessIdValues(outcome)).asList().containsExactly("Observation/A"); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains("((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN"); + } else { + 
assertThat(getSelectSql(0)).contains("((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Includes_Forward_Star() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + CreatedResourceIds ids = createPatientWithOrganizationReferences(); + + // Test + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.addInclude(IBaseResource.INCLUDE_ALL.asRecursive()); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + assertThat(values).asList().containsExactlyInAnyOrder("Patient/" + ids.patientPid(), "Organization/" + ids.parentOrgId().getIdPart(), "Organization/" + ids.childOrgId().getIdPart()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false); + if (myIncludePartitionIdsInSql) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1'))"); + } else { + assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL))"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("where rl1_0.PARTITION_ID='1' and rl1_0.SRC_RESOURCE_ID in ('" + ids.patientPid() + "') fetch"); + } else { + assertThat(sql).contains("where rl1_0.SRC_RESOURCE_ID in ('" + ids.patientPid() + "') fetch "); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(2).getSql(true, false); + if (myIncludePartitionIdsInPks) { + 
assertThat(sql).contains("where rl1_0.PARTITION_ID='0' and rl1_0.SRC_RESOURCE_ID in ('" + ids.childOrgPid() + "') "); + } else { + assertThat(sql).contains("where rl1_0.SRC_RESOURCE_ID in ('" + ids.childOrgPid() + "') fetch "); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(3).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("where rl1_0.PARTITION_ID='0' and rl1_0.SRC_RESOURCE_ID in ('" + ids.parentOrgPid() + "') "); + } else { + assertThat(sql).contains("where rl1_0.SRC_RESOURCE_ID in ('" + ids.parentOrgPid() + "') fetch "); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(4).getSql(true, false); + assertThat(sql).contains("from HFJ_RES_VER rht1_0"); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID and mrt1_0.PARTITION_ID=rht1_0.PARTITION_ID where"); + assertThat(sql).contains("where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in"); + } else { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID where"); + assertThat(sql).contains("where rht1_0.RES_ID in"); + } + + assertEquals(5, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Includes_Forward_Star_UsingCanonicalUrl() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + CreatedQuestionnaireAndResponseIds ids = createQuestionnaireAndQuestionnaireResponseWithCanonicalUrlLink(); + + // Test + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + myParentTest.logAllUriIndexes(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.addInclude(IBaseResource.INCLUDE_ALL.asRecursive()); + IBundleProvider outcome = myQuestionnaireResponseDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + 
assertThat(values).asList().containsExactlyInAnyOrder(ids.qId().getValue(), ids.qrId().getValue()); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false); + if (myIncludePartitionIdsInSql) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'QuestionnaireResponse') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1'))"); + } else { + assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'QuestionnaireResponse') AND (t0.RES_DELETED_AT IS NULL))"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).isEqualTo("select rl1_0.TARGET_RESOURCE_ID,rl1_0.TARGET_RESOURCE_TYPE,rl1_0.TARGET_RESOURCE_URL,rl1_0.TARGET_RES_PARTITION_ID from HFJ_RES_LINK rl1_0 where rl1_0.PARTITION_ID='1' and rl1_0.SRC_RESOURCE_ID in ('" + ids.qrId.getIdPart() + "') fetch first '1000' rows only"); + } else { + assertThat(sql).isEqualTo("select rl1_0.TARGET_RESOURCE_ID,rl1_0.TARGET_RESOURCE_TYPE,rl1_0.TARGET_RESOURCE_URL from HFJ_RES_LINK rl1_0 where rl1_0.SRC_RESOURCE_ID in ('" + ids.qrId().getIdPart() + "') fetch first '1000' rows only"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(2).getSql(true, false); + if (myIncludePartitionIdsInSql) { + assertThat(sql).startsWith("select rispu1_0.PARTITION_ID,rispu1_0.RES_ID from HFJ_SPIDX_URI rispu1_0 where rispu1_0.HASH_IDENTITY in ("); + } else { + assertThat(sql).startsWith("select rispu1_0.RES_ID from HFJ_SPIDX_URI rispu1_0 where rispu1_0.HASH_IDENTITY in ("); + } + + assertEquals(5, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Includes_Forward_Specific() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + CreatedResourceIds ids = createPatientWithOrganizationReferences(); + + // Test + myParentTest.logAllResources(); + 
myParentTest.logAllResourceLinks(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.addInclude(Patient.INCLUDE_ORGANIZATION.asRecursive()); + params.addInclude(Organization.INCLUDE_PARTOF.asRecursive()); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + assertThat(values).asList().containsExactlyInAnyOrder("Patient/" + ids.patientPid(), "Organization/" + ids.parentOrgId.getIdPart(), "Organization/" + ids.childOrgId.getIdPart()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false); + if (myIncludePartitionIdsInSql) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1'))"); + } else { + assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL))"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("WHERE r.src_path = 'Organization.partOf' AND r.target_resource_id IS NOT NULL AND r.src_resource_id IN ('" + ids.patientPid + "') AND r.partition_id = '1' AND r.target_resource_type = 'Organization' UNION"); + } else { + assertThat(sql).contains("WHERE r.src_path = 'Organization.partOf' AND r.target_resource_id IS NOT NULL AND r.src_resource_id IN ('" + ids.patientPid + "') AND r.target_resource_type = 'Organization' UNION"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(2).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("WHERE r.src_path = 'Patient.managingOrganization' AND r.target_resource_id IS NOT NULL AND r.src_resource_id IN ('" + ids.patientPid + "') AND 
r.partition_id = '1' AND r.target_resource_type = 'Organization' UNION"); + } else { + assertThat(sql).contains("WHERE r.src_path = 'Patient.managingOrganization' AND r.target_resource_id IS NOT NULL AND r.src_resource_id IN ('" + ids.patientPid + "') AND r.target_resource_type = 'Organization' UNION"); + } + + // Index 3-6 are just more includes loading + assertThat(myCaptureQueriesListener.getSelectQueries().get(3).getSql(true, false)).contains(" FROM hfj_res_link r "); + assertThat(myCaptureQueriesListener.getSelectQueries().get(4).getSql(true, false)).contains(" FROM hfj_res_link r "); + assertThat(myCaptureQueriesListener.getSelectQueries().get(5).getSql(true, false)).contains(" FROM hfj_res_link r "); + assertThat(myCaptureQueriesListener.getSelectQueries().get(6).getSql(true, false)).contains(" FROM hfj_res_link r "); + + sql = myCaptureQueriesListener.getSelectQueries().get(7).getSql(true, false); + assertThat(sql).contains("from HFJ_RES_VER rht1_0"); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID and mrt1_0.PARTITION_ID=rht1_0.PARTITION_ID where"); + assertThat(sql).contains("where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in"); + } else { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID where"); + assertThat(sql).contains("where rht1_0.RES_ID in"); + } + + assertEquals(8, myCaptureQueriesListener.countSelectQueries()); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSearch_Includes_Forward_Specific_UsingCanonicalUrl(boolean theIncludePartitionInSearchHashes) { + // Setup + myPartitionSettings.setIncludePartitionInSearchHashes(theIncludePartitionInSearchHashes); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + CreatedQuestionnaireAndResponseIds result = createQuestionnaireAndQuestionnaireResponseWithCanonicalUrlLink(); + + // Test + myParentTest.logAllResources(); + 
myParentTest.logAllResourceLinks(); + myParentTest.logAllUriIndexes(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous(); + params.addInclude(QuestionnaireResponse.INCLUDE_QUESTIONNAIRE); + IBundleProvider outcome = myQuestionnaireResponseDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactlyInAnyOrder(result.qrId().getValue(), result.qId().getValue()); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + sql = sql.substring(sql.indexOf("UNION")); + long expectedHash; + if (theIncludePartitionInSearchHashes && myIncludePartitionIdsInSql && myPartitionSettings.getDefaultPartitionId() != null) { + expectedHash = -2559752747310040606L; + } else { + expectedHash = -600769180185160063L; + } + if (myIncludePartitionIdsInPks) { + assertEquals("UNION SELECT rUri.res_id, rUri.partition_id as partition_id FROM hfj_res_link r JOIN hfj_spidx_uri rUri ON (rUri.partition_id IN ('0') AND rUri.hash_identity = '" + expectedHash + "' AND r.target_resource_url = rUri.sp_uri) WHERE r.src_path = 'QuestionnaireResponse.questionnaire' AND r.target_resource_id IS NULL AND r.partition_id = '1' AND r.src_resource_id IN ('" + result.qrId.getIdPart() + "') fetch first '1000' rows only", sql); + } else { + assertEquals("UNION SELECT rUri.res_id FROM hfj_res_link r JOIN hfj_spidx_uri rUri ON (rUri.hash_identity = '" + expectedHash + "' AND r.target_resource_url = rUri.sp_uri) WHERE r.src_path = 'QuestionnaireResponse.questionnaire' AND r.target_resource_id IS NULL AND r.src_resource_id IN ('" + result.qrId().getIdPart() + "') fetch first '1000' rows only", sql); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Includes_Reverse_Specific_UsingCanonicalUrl() { + // Setup + 
myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + CreatedQuestionnaireAndResponseIds result = createQuestionnaireAndQuestionnaireResponseWithCanonicalUrlLink(); + + // Test + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + myParentTest.logAllUriIndexes(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous(); + params.addRevInclude(QuestionnaireResponse.INCLUDE_QUESTIONNAIRE); + IBundleProvider outcome = myQuestionnaireDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactlyInAnyOrder(result.qrId().getValue(), result.qId().getValue()); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + sql = sql.substring(sql.indexOf("UNION")); + if (myIncludePartitionIdsInPks) { + assertEquals("UNION SELECT r.src_resource_id, r.partition_id as partition_id FROM hfj_res_link r JOIN hfj_spidx_uri rUri ON (rUri.partition_id IN ('0') AND rUri.hash_identity = '-600769180185160063' AND r.target_resource_url = rUri.sp_uri) WHERE r.src_path = 'QuestionnaireResponse.questionnaire' AND r.target_resource_id IS NULL AND rUri.partition_id = '0' AND rUri.res_id IN ('" + result.qId.getIdPart() + "') fetch first '1000' rows only", sql); + } else { + assertEquals("UNION SELECT r.src_resource_id FROM hfj_res_link r JOIN hfj_spidx_uri rUri ON (rUri.hash_identity = '-600769180185160063' AND r.target_resource_url = rUri.sp_uri) WHERE r.src_path = 'QuestionnaireResponse.questionnaire' AND r.target_resource_id IS NULL AND rUri.res_id IN ('" + result.qId().getIdPart() + "') fetch first '1000' rows only", sql); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Nonnull + private CreatedQuestionnaireAndResponseIds createQuestionnaireAndQuestionnaireResponseWithCanonicalUrlLink() { + Questionnaire q = 
new Questionnaire(); + q.setUrl("http://foo"); + IIdType qId = myQuestionnaireDao.create(q, newRequest()).getId().toUnqualifiedVersionless(); + + QuestionnaireResponse qr = new QuestionnaireResponse(); + qr.setQuestionnaire("http://foo"); + IIdType qrId = myQuestionnaireResponseDao.create(qr, newRequest()).getId().toUnqualifiedVersionless(); + CreatedQuestionnaireAndResponseIds result = new CreatedQuestionnaireAndResponseIds(qId, qrId); + return result; + } + + private record CreatedQuestionnaireAndResponseIds(IIdType qId, IIdType qrId) { + } + + @Test + public void testSearch_Includes_Reverse_Star() { + // Setup + myPartitionSettings.setAllowReferencesAcrossPartitions(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + myPartitionSelectorInterceptor.addNonPartitionableResource("Organization"); + CreatedResourceIds ids = createPatientWithOrganizationReferences(); + + // Test + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.add(SP_RES_ID, new TokenParam("Organization/" + ids.parentOrgPid())); + params.setLoadSynchronous(true); + params.addRevInclude(IBaseResource.INCLUDE_ALL.asRecursive()); + IBundleProvider outcome = myOrganizationDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + assertThat(values).asList().containsExactlyInAnyOrder("Patient/" + ids.patientPid(), "Organization/" + ids.parentOrgId().getIdPart(), "Organization/" + ids.childOrgId().getIdPart()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertEquals(5, myCaptureQueriesListener.countSelectQueries()); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false); + if (myIncludePartitionIdsInSql && myPartitionSettings.getDefaultPartitionId() == null) { + assertThat(sql).isEqualTo("SELECT 
t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IS NULL) AND (t0.RES_ID = '" + ids.parentOrgPid() + "')))"); + } else if (myIncludePartitionIdsInSql) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = '0') AND (t0.RES_ID = '" + ids.parentOrgPid() + "')))"); + } else { + assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.RES_ID = '" + ids.parentOrgPid() + "'))"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("where rl1_0.TARGET_RES_PARTITION_ID='0' and rl1_0.TARGET_RESOURCE_ID in ('" + ids.parentOrgPid() + "') fetch"); + } else { + assertThat(sql).contains("where rl1_0.TARGET_RESOURCE_ID in ('" + ids.parentOrgPid() + "') fetch"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(2).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("where rl1_0.TARGET_RES_PARTITION_ID='0' and rl1_0.TARGET_RESOURCE_ID in ('" + ids.childOrgPid() + "') "); + } else { + assertThat(sql).contains("where rl1_0.TARGET_RESOURCE_ID in ('" + ids.childOrgPid() + "') "); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(3).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("where rl1_0.TARGET_RES_PARTITION_ID='1' and rl1_0.TARGET_RESOURCE_ID in ('" + ids.patientPid() + "') fetch"); + } else { + assertThat(sql).contains("where rl1_0.TARGET_RESOURCE_ID in ('" + ids.patientPid() + "') fetch"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(4).getSql(true, false); + assertThat(sql).contains("from HFJ_RES_VER rht1_0"); + if (myIncludePartitionIdsInPks) { + 
assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID and mrt1_0.PARTITION_ID=rht1_0.PARTITION_ID where"); + assertThat(sql).contains("where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in"); + } else { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID where"); + assertThat(sql).contains("where rht1_0.RES_ID in"); + } + } + + @Test + public void testSearch_Includes_Reverse_Specific() { + // Setup + myPartitionSettings.setAllowReferencesAcrossPartitions(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + myPartitionSelectorInterceptor.addNonPartitionableResource("Organization"); + CreatedResourceIds ids = createPatientWithOrganizationReferences(); + + // Test + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.add(SP_RES_ID, new TokenParam("Organization/" + ids.parentOrgPid())); + params.setLoadSynchronous(true); + params.addRevInclude(Patient.INCLUDE_ORGANIZATION.asRecursive()); + params.addRevInclude(Organization.INCLUDE_PARTOF.asRecursive()); + IBundleProvider outcome = myOrganizationDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactlyInAnyOrder("Patient/" + ids.patientPid(), "Organization/" + ids.parentOrgId.getIdPart(), "Organization/" + ids.childOrgId.getIdPart()); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false); + if (myIncludePartitionIdsInSql && myPartitionSettings.getDefaultPartitionId() == null) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IS NULL) AND (t0.RES_ID = '" + 
ids.parentOrgPid() + "')))"); + } else if (myIncludePartitionIdsInSql) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = '0') AND (t0.RES_ID = '" + ids.parentOrgPid() + "')))"); + } else { + assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.RES_ID = '" + ids.parentOrgPid() + "'))"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("WHERE r.src_path = 'Organization.partOf' AND r.target_resource_id IN ('" + ids.parentOrgPid() + "') AND r.target_res_partition_id = '0' AND r.target_resource_type = 'Organization' "); + } else { + assertThat(sql).contains("WHERE r.src_path = 'Organization.partOf' AND r.target_resource_id IN ('" + ids.parentOrgPid() + "') AND r.target_resource_type = 'Organization' "); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(2).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("WHERE r.src_path = 'Patient.managingOrganization' AND r.target_resource_id IN ('" + ids.parentOrgPid + "') AND r.target_res_partition_id = '0' AND r.target_resource_type = 'Organization' UNION"); + } else { + assertThat(sql).contains("WHERE r.src_path = 'Patient.managingOrganization' AND r.target_resource_id IN ('" + ids.parentOrgPid + "') AND r.target_resource_type = 'Organization' UNION"); + } + + // Index 3-6 are just more includes loading + assertThat(myCaptureQueriesListener.getSelectQueries().get(3).getSql(true, false)).contains(" FROM hfj_res_link r "); + assertThat(myCaptureQueriesListener.getSelectQueries().get(4).getSql(true, false)).contains(" FROM hfj_res_link r "); + assertThat(myCaptureQueriesListener.getSelectQueries().get(5).getSql(true, false)).contains(" FROM hfj_res_link r "); + 
assertThat(myCaptureQueriesListener.getSelectQueries().get(6).getSql(true, false)).contains(" FROM hfj_res_link r "); + + sql = myCaptureQueriesListener.getSelectQueries().get(7).getSql(true, false); + assertThat(sql).contains("from HFJ_RES_VER rht1_0"); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID and mrt1_0.PARTITION_ID=rht1_0.PARTITION_ID where"); + assertThat(sql).contains("where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in"); + } else { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID where"); + assertThat(sql).contains("where rht1_0.RES_ID in"); + } + + assertEquals(8, myCaptureQueriesListener.countSelectQueries()); + } + + + /** + * Searching for all partitions or multiple partitions + */ + @ParameterizedTest(name = "[{index}] - {0}") + @MethodSource("searchMultiPartitionTestCases") + public void testSearch_MultiplePartitions(SearchMultiPartitionTestCase theTestCase) { + myPartitionSelectorInterceptor.setNextPartition(theTestCase.requestPartitionId); + String sql = getSqlForRestQuery(theTestCase.restQuery); + + if (myIncludePartitionIdsInPks) { + assertEquals(theTestCase.expectedPartitionedPksSql, sql, theTestCase.comment); + } else if (myIncludePartitionIdsInSql) { + assertEquals(theTestCase.expectedPartitionedSql, sql, theTestCase.comment); + } else { + assertEquals(theTestCase.expectedSql, sql, theTestCase.comment); + } + } + + /** + * Make sure _sort incorporates the partition ID on joins + */ + @ParameterizedTest(name = "[{index}] - {0}") + @MethodSource("searchSortTestCases") + public void testSearch_Sort(SqlGenerationTestCase theTestCase) { + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + String sql = getSqlForRestQuery(theTestCase.restQuery); + + if (myIncludePartitionIdsInPks) { + assertEquals(theTestCase.expectedPartitionedPksSql, sql, theTestCase.comment); + } else if (myIncludePartitionIdsInSql) { + 
assertEquals(theTestCase.expectedPartitionedSql, sql, theTestCase.comment); + } else { + assertEquals(theTestCase.expectedSql, sql, theTestCase.comment); + } + } + + + @Test + public void testValuesetExpansion_IncludePreExpandedVsWithFilter() { + // Setup + myStorageSettings.setPreExpandValueSets(true); + + CodeSystem cs = new CodeSystem(); + cs.setUrl("http://cs"); + cs.setContent(Enumerations.CodeSystemContentMode.NOTPRESENT); + myCodeSystemDao.create(cs, newRequest()); + + CustomTerminologySet additions = new CustomTerminologySet(); + additions.addRootConcept("A", "HELLO"); + additions.addRootConcept("B", "HELLO"); + additions.addRootConcept("C", "GOODBYE"); + myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://cs", additions); + myTerminologyDeferredStorageSvc.saveAllDeferred(); + + ValueSet valueSet = new ValueSet(); + valueSet.setUrl("http://vs"); + valueSet + .getCompose() + .addInclude().setSystem("http://cs"); + myValueSetDao.create(valueSet, newRequest()); + + myCaptureQueriesListener.clear(); + myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + + myParentTest.logAllCodeSystemsAndVersionsCodeSystemsAndVersions(); + myParentTest.logAllConcepts(); + myParentTest.logAllValueSetConcepts(); + + // Test + ValueSet input = new ValueSet(); + input.getCompose() + .addInclude() + .addValueSet("http://vs"); + + ValueSetExpansionOptions expansionOptions = new ValueSetExpansionOptions(); + expansionOptions.setFilter("HELLO"); + myCaptureQueriesListener.clear(); + ValueSet outcome = (ValueSet) myTermSvc.expandValueSet(expansionOptions, valueSet); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(outcome.getExpansion().getContains().stream().map(ValueSet.ValueSetExpansionContainsComponent::getCode).toList()).asList().containsExactly("A", "B"); + } + + + @Test + public void testUpdateAsCreate() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + createPatient(withId("A"), withActiveTrue()); + + // 
Test + myCaptureQueriesListener.clear(); + + Observation obs = new Observation(); + obs.setId("Observation/O"); + obs.setSubject(new Reference("Patient/A")); + obs.setEffective(new DateTimeType("2022")); + myObservationDao.update(obs, newRequest()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.PARTITION_ID='1' and (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='O')"); + assertThat(getSelectSql(1)).endsWith(" where rt1_0.PARTITION_ID='1' and (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A')"); + } else { + assertThat(getSelectSql(0)).endsWith(" where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='O')"); + assertThat(getSelectSql(1)).endsWith(" where (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A')"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + private SystemRequestDetails newRequest() { + return new SystemRequestDetails(); + } + + private JpaPid findId(String theResourceType, String theIdPart) { + return myParentTest.runInTransaction(() -> myResourceTableDao + .findAll() + .stream() + .filter(t -> t.getResourceType().equals(theResourceType)) + .filter(t -> t.getFhirId().equals(theIdPart)) + .findFirst() + .orElseThrow() + .getPersistentId()); + } + + @Language("SQL") + private String getSelectSql(int theIndex) { + return myCaptureQueriesListener.getSelectQueries().get(theIndex).getSql(true, false); + } + + @Language("SQL") + private String getDeleteSql(int theIndex) { + return myCaptureQueriesListener.getDeleteQueries().get(theIndex).getSql(true, false); + } + + @Language("SQL") + private String getUpdateSql(int theIndex) { + return myCaptureQueriesListener.getUpdateQueries().get(theIndex).getSql(true, false); + } + + @Language("SQL") + private String getInsertSql(int theIndex) { + return myCaptureQueriesListener.getInsertQueries().get(theIndex).getSql(true, false); + } + + @SuppressWarnings({"rawtypes", "unchecked"}) + 
@Override + public IIdType doCreateResource(IBaseResource theResource) { + IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource.getClass()); + return dao.create(theResource, newRequest()).getId().toUnqualifiedVersionless(); + } + + @SuppressWarnings({"rawtypes", "unchecked"}) + @Override + public IIdType doUpdateResource(IBaseResource theResource) { + IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource.getClass()); + return dao.update(theResource, newRequest()).getId().toUnqualifiedVersionless(); + } + + @Override + public FhirContext getFhirContext() { + return myFhirCtx; + } + + public void runInTransaction(Runnable theRunnable) { + myParentTest.runInTransaction(theRunnable); + } + public T runInTransaction(Callable theRunnable) { + return myParentTest.runInTransaction(theRunnable); + } + + @Nonnull + private CreatedResourceIds createPatientWithOrganizationReferences() { + IIdType parentOrgId = createOrganization(withName("PARENT")).toUnqualifiedVersionless(); + IIdType childOrgId = createOrganization(withName("CHILD"), withReference("partOf", parentOrgId)).toUnqualifiedVersionless(); + IIdType patientId = createPatient(withActiveTrue(), withOrganization(childOrgId)).toUnqualifiedVersionless(); + long patientPid = patientId.getIdPartAsLong(); + long childPid = childOrgId.getIdPartAsLong(); + long parentPid = parentOrgId.getIdPartAsLong(); + CreatedResourceIds result = new CreatedResourceIds(parentOrgId, childOrgId, patientId, null, patientPid, childPid, parentPid, null); + return result; + } + + @Nonnull + private CreatedResourceIds createPatientWithOrganizationAndEncounterReferences() { + CreatedResourceIds createdResourceIds = createPatientWithOrganizationReferences(); + + Encounter encounter = new Encounter(); + encounter.setSubject(new Reference(createdResourceIds.patientId)); + IIdType encounterId = myEncounterDao.create(encounter).getId().toUnqualifiedVersionless(); + Long encounterPid = encounterId.getIdPartAsLong(); + + return new 
CreatedResourceIds( + createdResourceIds.parentOrgId, + createdResourceIds.childOrgId, + createdResourceIds.patientId, + encounterId, + createdResourceIds.patientPid, + createdResourceIds.childOrgPid, + createdResourceIds.parentOrgPid, + encounterPid + ); + } + + @Nonnull + private List getSqlSelectQueriesWithString(String tableName) { + List selectTokenQueries = myCaptureQueriesListener.getSelectQueries() + .stream() + .filter(t -> t.getSql(false, false).contains(tableName)) + .toList(); + return selectTokenQueries; + } + + @Nonnull + private List getSqlDeleteQueriesWithString(String tableName) { + List selectTokenQueries = myCaptureQueriesListener.getDeleteQueries() + .stream() + .filter(t -> t.getSql(false, false).contains(tableName)) + .toList(); + return selectTokenQueries; + } + + private static Map parseInsertStatementParams(String theInsertSql) throws JSQLParserException { + Insert parsedStatement = (Insert) CCJSqlParserUtil.parse(theInsertSql); + + Map retVal = new HashMap<>(); + + for (int i = 0; i < parsedStatement.getColumns().size(); i++) { + String columnName = parsedStatement.getColumns().get(i).getColumnName(); + String columnValue = parsedStatement.getValues().getExpressions().get(i).toString(); + retVal.put(columnName, columnValue); + } + + return retVal; + } + + private static String parseInsertStatementTableName(String theInsertSql) throws JSQLParserException { + Insert parsedStatement = (Insert) CCJSqlParserUtil.parse(theInsertSql); + return parsedStatement.getTable().getName(); + } + + private static List toUnqualifiedVersionlessIdValues(IBundleProvider theFound) { + int fromIndex = 0; + Integer toIndex = theFound.size(); + return toUnqualifiedVersionlessIdValues(theFound, fromIndex, toIndex, true); + } + + private static List toUnqualifiedIdValues(IBundleProvider theFound) { + return toIdValues(theFound, false); + } + + private static List toUnqualifiedVersionlessIdValues(IBundleProvider theFound, int theFromIndex, Integer theToIndex, boolean 
theFirstCall) { + return toIdValues(theFound, true); + } + + @Nonnull + private static List toIdValues(IBundleProvider theFound, boolean theVersionless) { + List retVal = new ArrayList<>(); + + IBundleProvider bundleProvider; + bundleProvider = theFound; + + List resources = bundleProvider.getResources(0, 99999); + for (IBaseResource next : resources) { + IIdType id = next.getIdElement(); + if (theVersionless) { + id = id.toUnqualifiedVersionless(); + } else { + id = id.toUnqualified(); + } + retVal.add(id.getValue()); + } + return retVal; + } + + private record CreatedResourceIds(IIdType parentOrgId, IIdType childOrgId, IIdType patientId, IIdType encounterId, + Long patientPid, Long childOrgPid, Long parentOrgPid, Long encounterPid) { + + public Set allIdValues() { + Set retVal = new HashSet<>(); + addIfNotNull(retVal, parentOrgId); + addIfNotNull(retVal, childOrgId); + addIfNotNull(retVal, patientId); + addIfNotNull(retVal, encounterId); + return retVal; + } + + private static void addIfNotNull(Set theList, IIdType theObject) { + if (theObject != null) { + theList.add(theObject.getValue()); + } + } + } + + public record SearchMultiPartitionTestCase(String comment, RequestPartitionId requestPartitionId, String restQuery, String expectedSql, String expectedPartitionedSql, String expectedPartitionedPksSql) { + @Override + public String toString() { + return comment; + } + + public static void add(List theTarget, RequestPartitionId theRequestPartitionId, String theName, String theRestQuery, String theExpectedSql, String theExpectedPartitionedSql, String theExpectedPartitionedPksSql) { + theTarget.add(new SearchMultiPartitionTestCase( + theName, + theRequestPartitionId, + theRestQuery, + theExpectedSql, + theExpectedPartitionedSql, + theExpectedPartitionedPksSql)); + } + } + + static List searchMultiPartitionTestCases() { + List retVal = new ArrayList<>(); + + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.allPartitions(), + "single string - no 
hfj_resource root - all partitions", + "Patient?name=FOO", + "SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))" + ); + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2), + "single string - no hfj_resource root - multiple partitions", + "Patient?name=FOO", + "SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)))" + ); + + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.allPartitions(), + "two regular params - should use hfj_resource as root - all partitions", + "Patient?name=smith&active=true", + "SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) 
AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))" + ); + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2), + "two regular params - should use hfj_resource as root - multiple partitions", + "Patient?name=smith&active=true", + "SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID IN (?,?) ) AND (t2.HASH_VALUE = ?)))", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID IN (?,?) ) AND (t2.HASH_VALUE = ?)))" + ); + + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.allPartitions(), + "token not as a NOT IN subselect - all partitions", + "Encounter?class:not=not-there", + "SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) 
AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))" + ); + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2), + "token not as a NOT IN subselect - multiple partitions", + "Encounter?class:not=not-there", + "SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))" + ); + + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.allPartitions(), + "token not on chain join - NOT IN from hfj_res_link target columns - all partitions", + "Observation?encounter.class:not=not-there", + "SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) 
AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))" + ); + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2), + "token not on chain join - NOT IN from hfj_res_link target columns - multiple partitions", + "Observation?encounter.class:not=not-there", + "SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))", + "SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID IN (?,?) 
) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))" + ); + + return retVal; + } + + public record SqlGenerationTestCase(String comment, String restQuery, String expectedSql, String expectedPartitionedSql, String expectedPartitionedPksSql) { + @Override + public String toString() { + return comment; + } + } + + static List searchSortTestCases() { + return List.of( + new SqlGenerationTestCase( + "bare sort", + "Patient?_sort=name", + "SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST" + ) + , new SqlGenerationTestCase( + "sort with predicate", + "Patient?active=true&_sort=name", + "SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) 
AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST" + ) + , new SqlGenerationTestCase( + "chained sort", + "Patient?_sort=Practitioner:general-practitioner.name", + "SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RES_PARTITION_ID = t2.PARTITION_ID) AND (t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) 
AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST" + ) + ); + } + + private String getSqlForRestQuery(String theFhirRestQuery) { + myCaptureQueriesListener.clear(); + myTestDaoSearch.searchForIds(theFhirRestQuery); + myCaptureQueriesListener.logSelectQueries(); + assertEquals(1, myCaptureQueriesListener.countSelectQueries()); + return myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false); + } + +} + + diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java index 10d830d66336..df6fe2233b2c 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java @@ -703,7 +703,7 @@ protected PlatformTransactionManager getTxManager() { protected void relocateResourceTextToCompressedColumn(Long theResourcePid, Long theVersion) { runInTransaction(()->{ - ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theResourcePid, theVersion); + ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersion(theResourcePid, theVersion); byte[] contents = GZipUtil.compress(historyEntity.getResourceTextVc()); myResourceHistoryTableDao.updateNonInlinedContents(contents, historyEntity.getId()); }); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java index 742279af8313..410ef62c6482 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java @@ -40,6 +40,7 @@ import 
ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport; import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboStringUniqueDao; import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboTokensNonUniqueDao; import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamCoordsDao; @@ -49,33 +50,42 @@ import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao; import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao; import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao; +import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao; +import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; +import ca.uhn.fhir.jpa.dao.mdm.MdmLinkDaoJpaImpl; import ca.uhn.fhir.jpa.entity.MdmLink; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptDesignation; +import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; import ca.uhn.fhir.jpa.entity.TermConceptProperty; import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetConcept; import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique; import 
ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamCoords; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber; +import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri; import ca.uhn.fhir.jpa.model.entity.ResourceLink; +import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.model.entity.ResourceTag; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc; @@ -220,6 +230,8 @@ public abstract class BaseJpaTest extends BaseTest { @Autowired protected ISearchResultCacheSvc mySearchResultCacheSvc; @Autowired + protected PartitionSettings myPartitionSettings; + @Autowired protected ITermCodeSystemDao myTermCodeSystemDao; @Autowired protected ITermCodeSystemVersionDao myTermCodeSystemVersionDao; @@ -256,6 +268,8 @@ public abstract class BaseJpaTest extends BaseTest { @Autowired protected ITermConceptDao myTermConceptDao; @Autowired + protected ITermConceptParentChildLinkDao myTermConceptParentChildLinkDao; + @Autowired protected ITermValueSetConceptDao myTermValueSetConceptDao; @Autowired protected ITermValueSetDao myTermValueSetDao; @@ -277,6 +291,8 @@ public abstract class BaseJpaTest extends BaseTest { @Autowired private IResourceTableDao myResourceTableDao; @Autowired + private IResourceSearchUrlDao myResourceSearchUrlDao; + @Autowired private IResourceTagDao myResourceTagDao; @Autowired private IResourceHistoryTableDao myResourceHistoryTableDao; @@ -285,6 +301,8 @@ public abstract class BaseJpaTest extends BaseTest { @Autowired protected ITermDeferredStorageSvc 
myTermDeferredStorageSvc; private final List myRegisteredInterceptors = new ArrayList<>(1); + @Autowired + private IResourceHistoryTagDao myResourceHistoryTagDao; @SuppressWarnings("BusyWait") public static void waitForSize(int theTarget, List theList) { @@ -402,15 +420,22 @@ public void afterPerformCleanup() { } JpaStorageSettings defaultConfig = new JpaStorageSettings(); + myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(defaultConfig.isAccessMetaSourceInformationFromProvenanceTable()); myStorageSettings.setAdvancedHSearchIndexing(defaultConfig.isAdvancedHSearchIndexing()); myStorageSettings.setAllowContainsSearches(defaultConfig.isAllowContainsSearches()); myStorageSettings.setDeleteEnabled(defaultConfig.isDeleteEnabled()); myStorageSettings.setIncludeHashIdentityForTokenSearches(defaultConfig.isIncludeHashIdentityForTokenSearches()); + myStorageSettings.setMarkResourcesForReindexingUponSearchParameterChange(defaultConfig.isMarkResourcesForReindexingUponSearchParameterChange()); myStorageSettings.setMaximumIncludesToLoadPerPage(defaultConfig.getMaximumIncludesToLoadPerPage()); + myStorageSettings.setPreExpandValueSets(defaultConfig.isPreExpandValueSets()); myStorageSettings.getTreatBaseUrlsAsLocal().clear(); ParserOptions defaultParserOptions = new ParserOptions(); myFhirContext.getParserOptions().setStripVersionsFromReferences(defaultParserOptions.isStripVersionsFromReferences()); + + PartitionSettings defaultPartConfig = new PartitionSettings(); + myPartitionSettings.setIncludePartitionInSearchHashes(defaultPartConfig.isIncludePartitionInSearchHashes()); + myPartitionSettings.setAllowReferencesAcrossPartitions(defaultPartConfig.getAllowReferencesAcrossPartitions()); } @AfterEach @@ -484,7 +509,7 @@ protected void purgeHibernateSearch(EntityManager theEntityManager) { protected abstract PlatformTransactionManager getTxManager(); - protected void logAllCodeSystemsAndVersionsCodeSystemsAndVersions() { + public void 
logAllCodeSystemsAndVersionsCodeSystemsAndVersions() { runInTransaction(() -> { ourLog.info("CodeSystems:\n * " + myTermCodeSystemDao.findAll() .stream() @@ -519,13 +544,13 @@ protected int logAllMdmLinks() { }); } - protected void logAllResourceLinks() { + public void logAllResourceLinks() { runInTransaction(() -> { ourLog.info("Resource Links:\n * {}", myResourceLinkDao.findAll().stream().map(ResourceLink::toString).collect(Collectors.joining("\n * "))); }); } - protected int logAllResources() { + public int logAllResources() { return runInTransaction(() -> { List resources = myResourceTableDao.findAll(); ourLog.info("Resources:\n * {}", resources.stream().map(ResourceTable::toString).collect(Collectors.joining("\n * "))); @@ -533,6 +558,14 @@ protected int logAllResources() { }); } + public int logAllResourceSearchUrls() { + return runInTransaction(() -> { + List resources = myResourceSearchUrlDao.findAll(); + ourLog.info("Search URLs:\n * {}", resources.stream().map(ResourceSearchUrlEntity::toString).collect(Collectors.joining("\n * "))); + return resources.size(); + }); + } + protected int logAllConceptDesignations() { return runInTransaction(() -> { List resources = myTermConceptDesignationDao.findAll(); @@ -544,12 +577,12 @@ protected int logAllConceptDesignations() { protected int logAllConceptProperties() { return runInTransaction(() -> { List resources = myTermConceptPropertyDao.findAll(); - ourLog.info("Concept Designations:\n * {}", resources.stream().map(TermConceptProperty::toString).collect(Collectors.joining("\n * "))); + ourLog.info("Concept Properties:\n * {}", resources.stream().map(TermConceptProperty::toString).collect(Collectors.joining("\n * "))); return resources.size(); }); } - protected int logAllConcepts() { + public int logAllConcepts() { return runInTransaction(() -> { List resources = myTermConceptDao.findAll(); ourLog.info("Concepts:\n * {}", resources.stream().map(TermConcept::toString).collect(Collectors.joining("\n * "))); @@ 
-557,10 +590,18 @@ protected int logAllConcepts() { }); } - protected int logAllValueSetConcepts() { + protected int logAllConceptParentChildLinks() { + return runInTransaction(() -> { + List resources = myTermConceptParentChildLinkDao.findAll(); + ourLog.info("Concept Parent/Child Links:\n * {}", resources.stream().map(TermConceptParentChildLink::toString).collect(Collectors.joining("\n * "))); + return resources.size(); + }); + } + + public int logAllValueSetConcepts() { return runInTransaction(() -> { List resources = myTermValueSetConceptDao.findAll(); - ourLog.info("Concepts:\n * {}", resources.stream().map(TermValueSetConcept::toString).collect(Collectors.joining("\n * "))); + ourLog.info("ValueSet Concepts:\n * {}", resources.stream().map(TermValueSetConcept::toString).collect(Collectors.joining("\n * "))); return resources.size(); }); } @@ -591,12 +632,26 @@ protected void logAllUniqueIndexes() { }); } - protected void logAllTokenIndexes() { + protected void logAllTokenIndexes(String... theParamNames) { + String messageSuffix = theParamNames.length > 0 ? " containing " + Arrays.asList(theParamNames) : ""; runInTransaction(() -> { - ourLog.info("Token indexes:\n * {}", myResourceIndexedSearchParamTokenDao.findAll().stream().map(ResourceIndexedSearchParamToken::toString).collect(Collectors.joining("\n * "))); + String message = getAllTokenIndexes(theParamNames) + .stream() + .map(ResourceIndexedSearchParamToken::toString) + .collect(Collectors.joining("\n * ")); + ourLog.info("Token indexes{}:\n * {}", messageSuffix, message); }); } + @Nonnull + protected List getAllTokenIndexes(String... 
theParamNames) { + return runInTransaction(()->myResourceIndexedSearchParamTokenDao + .findAll() + .stream() + .filter(t -> theParamNames.length == 0 || Arrays.asList(theParamNames).contains(t.getParamName())) + .toList()); + } + protected void logAllCoordsIndexes() { runInTransaction(() -> { ourLog.info("Coords indexes:\n * {}", myResourceIndexedSearchParamCoordsDao.findAll().stream().map(ResourceIndexedSearchParamCoords::toString).collect(Collectors.joining("\n * "))); @@ -609,7 +664,7 @@ protected void logAllNumberIndexes() { }); } - protected void logAllUriIndexes() { + public void logAllUriIndexes() { runInTransaction(() -> { ourLog.info("URI indexes:\n * {}", myResourceIndexedSearchParamUriDao.findAll().stream().map(ResourceIndexedSearchParamUri::toString).collect(Collectors.joining("\n * "))); }); @@ -618,19 +673,33 @@ protected void logAllUriIndexes() { protected void logAllStringIndexes(String... theParamNames) { String messageSuffix = theParamNames.length > 0 ? " containing " + Arrays.asList(theParamNames) : ""; runInTransaction(() -> { - String message = myResourceIndexedSearchParamStringDao - .findAll() + String message = getAllStringIndexes(theParamNames) .stream() - .filter(t -> theParamNames.length == 0 ? true : Arrays.asList(theParamNames).contains(t.getParamName())) - .map(t -> t.toString()) + .map(ResourceIndexedSearchParamString::toString) .collect(Collectors.joining("\n * ")); ourLog.info("String indexes{}:\n * {}", messageSuffix, message); }); } + @Nonnull + protected List getAllStringIndexes(String... 
theParamNames) { + return runInTransaction(()->myResourceIndexedSearchParamStringDao + .findAll() + .stream() + .filter(t -> theParamNames.length == 0 || Arrays.asList(theParamNames).contains(t.getParamName())) + .toList()); + } + + protected void logAllResourceTags() { runInTransaction(() -> { - ourLog.info("Token tags:\n * {}", myResourceTagDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * "))); + ourLog.info("Resource tags:\n * {}", myResourceTagDao.findAll().stream().map(ResourceTag::toString).collect(Collectors.joining("\n * "))); + }); + } + + protected void logAllResourceHistoryTags() { + runInTransaction(() -> { + ourLog.info("Resource history tags:\n * {}", myResourceHistoryTagDao.findAll().stream().map(ResourceHistoryTag::toString).collect(Collectors.joining("\n * "))); }); } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PreventDanglingInterceptorsExtension.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PreventDanglingInterceptorsExtension.java index 73a34c4b9325..b1626d5a1d23 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PreventDanglingInterceptorsExtension.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PreventDanglingInterceptorsExtension.java @@ -41,28 +41,42 @@ public class PreventDanglingInterceptorsExtension implements BeforeEachCallback, AfterEachCallback { private static final Logger ourLog = LoggerFactory.getLogger(PreventDanglingInterceptorsExtension.class); - private final Supplier myInterceptorServiceSuplier; + private final Supplier myIInterceptorServiceSupplier; private List myBeforeInterceptors; - public PreventDanglingInterceptorsExtension(Supplier theInterceptorServiceSuplier) { - myInterceptorServiceSuplier = theInterceptorServiceSuplier; + public PreventDanglingInterceptorsExtension(Supplier theIInterceptorServiceSupplier) { + myIInterceptorServiceSupplier = 
theIInterceptorServiceSupplier; } @Override public void beforeEach(ExtensionContext theExtensionContext) throws Exception { - myBeforeInterceptors = myInterceptorServiceSuplier.get().getAllRegisteredInterceptors(); + myBeforeInterceptors = myIInterceptorServiceSupplier.get().getAllRegisteredInterceptors(); - ourLog.info("Registered interceptors:\n * " + myBeforeInterceptors.stream().map(t -> t.toString()).collect(Collectors.joining("\n * "))); + ourLog.info("Registered interceptors:\n * {}", myBeforeInterceptors.stream().map(Object::toString).collect(Collectors.joining("\n * "))); } @Override public void afterEach(ExtensionContext theExtensionContext) throws Exception { - List afterInterceptors = myInterceptorServiceSuplier.get().getAllRegisteredInterceptors(); - Map delta = new IdentityHashMap<>(); - afterInterceptors.forEach(t -> delta.put(t, t)); - myBeforeInterceptors.forEach(t -> delta.remove(t)); - delta.keySet().forEach(t->myInterceptorServiceSuplier.get().unregisterInterceptor(t)); - assertThat(delta.isEmpty()).as(() -> "Test added interceptor(s) and did not clean them up:\n * " + delta.keySet().stream().map(t -> t.toString()).collect(Collectors.joining("\n * "))).isTrue(); + List afterInterceptors = myIInterceptorServiceSupplier.get().getAllRegisteredInterceptors(); + // Handle interceptors added by the test + { + Map delta = new IdentityHashMap<>(); + afterInterceptors.forEach(t -> delta.put(t, t)); + myBeforeInterceptors.forEach(delta::remove); + delta.keySet().forEach(t -> myIInterceptorServiceSupplier.get().unregisterInterceptor(t)); + assertThat(delta.isEmpty()).as(() -> "Test added interceptor(s) and did not clean them up:\n * " + delta.keySet().stream().map(Object::toString).collect(Collectors.joining("\n * "))).isTrue(); + } + + // Handle interceptors removed by the test + { + IdentityHashMap delta = new IdentityHashMap<>(); + myBeforeInterceptors.forEach(t -> delta.put(t, t)); + afterInterceptors.forEach(t -> delta.remove(t, t)); + for (Object t : 
delta.keySet()) { + ourLog.warn("Interceptor {} was removed by test, re-adding", t); + myIInterceptorServiceSupplier.get().registerInterceptor(t); + } + } } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/DatabaseSupportUtil.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/DatabaseSupportUtil.java index 91c41fba19ba..f46e4493b291 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/DatabaseSupportUtil.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/DatabaseSupportUtil.java @@ -19,9 +19,12 @@ */ package ca.uhn.fhir.jpa.util; +import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.SystemUtils; +import java.util.UUID; + public final class DatabaseSupportUtil { private DatabaseSupportUtil() {} @@ -50,4 +53,12 @@ private static boolean isColimaConfigured() { && StringUtils.isNotBlank(System.getenv("DOCKER_HOST")) && System.getenv("DOCKER_HOST").contains("colima"); } + + /** + * Create a new connection to a randomized H2 database for testing + */ + public static DriverTypeEnum.ConnectionProperties newConnection() { + String url = "jdbc:h2:mem:test_migration-" + UUID.randomUUID() + ";CASE_INSENSITIVE_IDENTIFIERS=TRUE;"; + return DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA"); + } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/TestPartitionSelectorInterceptor.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/TestPartitionSelectorInterceptor.java new file mode 100644 index 000000000000..36649816b846 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/TestPartitionSelectorInterceptor.java @@ -0,0 +1,87 @@ +/*- + * #%L + * HAPI FHIR JPA Server Test Utilities + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. 
+ * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.util; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.interceptor.api.Hook; +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.partition.BaseRequestPartitionHelperSvc; +import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc; +import jakarta.annotation.Nonnull; +import org.apache.commons.lang3.Validate; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.util.HashSet; +import java.util.Set; + +public class TestPartitionSelectorInterceptor { + private RequestPartitionId myNextPartition; + private final Set myNonPartitionableResources = new HashSet<>(); + private BaseRequestPartitionHelperSvc myHelperSvc = new RequestPartitionHelperSvc(); + + /** + * Constructor + */ + public TestPartitionSelectorInterceptor() { + super(); + } + + public TestPartitionSelectorInterceptor addNonPartitionableResource(@Nonnull String theResourceName) { + Validate.notBlank(theResourceName, "Must not be blank"); + myNonPartitionableResources.add(theResourceName); + return this; + } + + public void setNextPartitionId(Integer theNextPartitionId) { + myNextPartition = RequestPartitionId.fromPartitionId(theNextPartitionId); + } + + public void setNextPartition(RequestPartitionId theNextPartition) { + myNextPartition = 
theNextPartition; + } + + @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE) + public RequestPartitionId selectPartitionCreate(IBaseResource theResource) { + String resourceType = FhirContext.forR5Cached().getResourceType(theResource); + return selectPartition(resourceType); + } + + @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ) + public RequestPartitionId selectPartitionRead(ReadPartitionIdRequestDetails theDetails) { + return selectPartition(theDetails.getResourceType()); + } + + @Nonnull + private RequestPartitionId selectPartition(String theResourceType) { + if (theResourceType != null) { + if (!myHelperSvc.isResourcePartitionable(theResourceType)) { + return RequestPartitionId.defaultPartition(); + } + if (myNonPartitionableResources.contains(theResourceType)) { + return RequestPartitionId.defaultPartition(); + } + } + + assert myNextPartition != null; + return myNextPartition; + } +} diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java index 8f515b7ecbc8..184371ca7d2f 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java @@ -1,8 +1,8 @@ package ca.uhn.fhir.jpa.dao; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.entity.ResourceSearchView; import ca.uhn.fhir.jpa.model.entity.BaseTag; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.model.primitive.IdDt; import org.hl7.fhir.r4.hapi.ctx.FhirR4; import org.hl7.fhir.r4.model.Coding; @@ -27,15 +27,16 @@ public class JpaStorageResourceParserTest { @Mock private FhirContext myFhirContext; - @Mock - private ResourceSearchView patientSearchView; - @InjectMocks + @Mock + ResourceHistoryTable myEntity; + + @InjectMocks private final 
JpaStorageResourceParser jpaStorageResourceParser = new JpaStorageResourceParser(); @Test public void testPopulateResourceMeta_doesNotRemoveTags_whenTagListIsEmpty() { Mockito.when(myFhirContext.getVersion()).thenReturn(new FhirR4()); - Mockito.when(patientSearchView.getIdDt()).thenReturn(new IdDt("Patient/test-patient/_history/1")); + Mockito.when(myEntity.getIdDt()).thenReturn(new IdDt("Patient/test-patient/_history/1")); Coding coding = new Coding("system", "code", "display"); List tagList = Collections.emptyList(); @@ -44,8 +45,8 @@ public void testPopulateResourceMeta_doesNotRemoveTags_whenTagListIsEmpty() { Patient resourceTarget = new Patient(); resourceTarget.getMeta().addTag(coding); - Patient actualResult = jpaStorageResourceParser - .populateResourceMetadata(patientSearchView, forHistoryOperation, tagList, version, resourceTarget); + Patient actualResult = jpaStorageResourceParser + .populateResourceMetadata(myEntity, forHistoryOperation, tagList, version, resourceTarget); List actualTagList = actualResult.getMeta().getTag(); assertFalse(actualTagList.isEmpty()); diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilderTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilderTest.java index cac4107d9864..aadcfb8e4d0b 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilderTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilderTest.java @@ -5,6 +5,7 @@ import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.model.api.IQueryParameterType; import 
ca.uhn.fhir.model.primitive.IdDt; @@ -70,20 +71,20 @@ public void init() { @Test public void createEverythingPredicate_withListOfPids_returnsInPredicate() { when(myResourceLinkPredicateBuilder.generatePlaceholders(anyCollection())).thenReturn(List.of(PLACEHOLDER_BASE + "1", PLACEHOLDER_BASE + "2")); - Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), 1L, 2L); + Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), JpaPid.fromId(1L), JpaPid.fromId(2L)); assertEquals(InCondition.class, condition.getClass()); } @Test public void createEverythingPredicate_withSinglePid_returnsInCondition() { when(myResourceLinkPredicateBuilder.generatePlaceholders(anyCollection())).thenReturn(List.of(PLACEHOLDER_BASE + "1")); - Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), 1L); + Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), JpaPid.fromId(1L)); assertEquals(BinaryCondition.class, condition.getClass()); } @Test public void createEverythingPredicate_withNoPids_returnsBinaryCondition() { - Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), new Long[0]); + Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), new JpaPid[0]); assertEquals(BinaryCondition.class, condition.getClass()); } diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java index 473f88ec95d1..4ca1b57f0fbd 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java +++ 
b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java @@ -4,6 +4,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.dialect.HapiFhirMariaDBDialect; import ca.uhn.fhir.jpa.model.dialect.HapiFhirOracleDialect; import ca.uhn.fhir.jpa.model.entity.StorageSettings; @@ -55,7 +56,7 @@ public void testRangeSqlServer2005_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new SQLServerDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -81,7 +82,7 @@ public void testRangeSqlServer2005_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new SQLServerDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -109,7 +110,7 @@ public void testRangeSqlServer2012_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new 
SQLServer2012Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -135,7 +136,7 @@ public void testRangeSqlServer2012_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new SQLServer2012Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -162,7 +163,7 @@ public void testRangePostgreSQL95_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new PostgreSQLDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -188,7 +189,7 @@ public void testRangePostgreSQL95_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new PostgreSQLDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, 
myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -215,7 +216,7 @@ public void testRangeOracle12c_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new HapiFhirOracleDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -241,7 +242,7 @@ public void testRangeOracle12c_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new HapiFhirOracleDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -268,7 +269,7 @@ public void testRangeMySQL8_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new MySQL8Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, 
myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -294,7 +295,7 @@ public void testRangeMySQL8_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new MySQL8Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -325,7 +326,7 @@ public void testRangeMariaDB103_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new HapiFhirMariaDBDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -351,7 +352,7 @@ public void testRangeMariaDB103_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new HapiFhirMariaDBDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - 
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -382,7 +383,7 @@ public void testRangeDerbyTenSeven_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new DerbyDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -408,7 +409,7 @@ public void testRangeDerbyTenSeven_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new DerbyDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/util/MemoryCacheServiceTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/util/MemoryCacheServiceTest.java index 797cb126f2ff..4b14db933c08 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/util/MemoryCacheServiceTest.java +++ 
b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/util/MemoryCacheServiceTest.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.jpa.util; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.jpa.model.entity.TagTypeEnum; @@ -237,5 +238,11 @@ void assertNotDone() { } } + @Test + public void testToString() { + String actual = new MemoryCacheService.ForcedIdCacheKey("Patient", "12", RequestPartitionId.allPartitions()).toString(); + assertEquals("MemoryCacheService.ForcedIdCacheKey[resType=Patient,resId=12,partId=RequestPartitionId[allPartitions=true]]", actual); + } + } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java index 5e3b05a6ad38..bacb96416e3f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java @@ -368,6 +368,10 @@ public class JpaStorageSettings extends StorageSettings { * @since 7.2.0 */ private boolean myWriteToLegacyLobColumns = false; + /** + * @since 8.0.0 + */ + private boolean myAccessMetaSourceInformationFromProvenanceTable = false; /** * If this is enabled (default is {@literal false}), searches on token indexes will @@ -1764,6 +1768,37 @@ public void setStoreMetaSourceInformation(StoreMetaSourceInformationEnum theStor myStoreMetaSourceInformation = theStoreMetaSourceInformation; } + /** + * If set to true (default is false), the system will read + * Resource.meta.source values from the HFJ_RES_VER_PROV + * table. This table was replaced by dedicated columns in the HFJ_RES_VER + * table as of HAPI FHIR 6.8.0 (Smile CDR 2023.08.R01) and as of that version + * there is no need to read from the dedicated table. 
However, if old data still + * remains and has not been migrated (using a $reindex operation) then you can + * enable this setting in order to read from the old table. + * + * @since 8.0.0 + */ + public boolean isAccessMetaSourceInformationFromProvenanceTable() { + return myAccessMetaSourceInformationFromProvenanceTable; + } + + /** + * If set to true (default is false), the system will read + * Resource.meta.source values from the HFJ_RES_VER_PROV + * table. This table was replaced by dedicated columns in the HFJ_RES_VER + * table as of HAPI FHIR 6.8.0 (Smile CDR 2023.08.R01) and as of that version + * there is no need to read from the dedicated table. However, if old data still + * remains and has not been migrated (using a $reindex operation) then you can + * enable this setting in order to read from the old table. + * + * @since 8.0.0 + */ + public void setAccessMetaSourceInformationFromProvenanceTable( + boolean theAccessMetaSourceInformationFromProvenanceTable) { + myAccessMetaSourceInformationFromProvenanceTable = theAccessMetaSourceInformationFromProvenanceTable; + } + /** *

* If set to {@code true}, ValueSets and expansions are stored in terminology tables. This is to facilitate diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java index 71179781a6be..a2972490a5ec 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.dao; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import org.hl7.fhir.instance.model.api.IBaseResource; /** @@ -30,10 +31,10 @@ * Currently only DB->FHIR is enabled through this interface but the aim * eventually is to handle both directions */ -public interface IStorageResourceParser { +public interface IStorageResourceParser> { // TODO: JA2 - Remove theForHistoryOperation flag - It toggles adding a bit of extra // metadata but there's no reason to not always just add that, and this would // simplify this interface - IBaseResource toResource(IBasePersistedResource theEntity, boolean theForHistoryOperation); + IBaseResource toResource(IBasePersistedResource theEntity, boolean theForHistoryOperation); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java index 5d56284f450b..d64bbdbe60ca 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java @@ -30,11 +30,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Queue; import java.util.concurrent.atomic.AtomicInteger; -import 
java.util.stream.Collectors; import static org.apache.commons.lang3.StringUtils.trim; @@ -87,10 +87,16 @@ public void execute(ExecutionInfo theExecutionInfo, List theQueryInfo && next.getParametersList().get(0).size() > 0) { size = next.getParametersList().size(); List values = next.getParametersList().get(0); - params = values.stream() - .map(t -> t.getArgs()[1]) - .map(t -> t != null ? t.toString() : "NULL") - .collect(Collectors.toList()); + params = new ArrayList<>(); + for (ParameterSetOperation t : values) { + if (t.getMethod().getName().equals("setNull")) { + params.add(null); + } else { + Object arg = t.getArgs()[1]; + String s = arg != null ? arg.toString() : null; + params.add(s); + } + } } else { params = Collections.emptyList(); size = next.getParametersList().size(); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java index fd16ed349907..97d6754908a7 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java @@ -29,6 +29,8 @@ import jakarta.annotation.Nullable; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.instance.model.api.IIdType; import org.springframework.transaction.support.TransactionSynchronization; import org.springframework.transaction.support.TransactionSynchronizationManager; @@ -324,6 +326,15 @@ public static class ForcedIdCacheKey { private final RequestPartitionId myRequestPartitionId; private final int myHashCode; + @Override + public String toString() { + return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) + .append("resType", myResourceType) + .append("resId", myResourceId) + .append("partId", 
myRequestPartitionId) + .toString(); + } + public ForcedIdCacheKey( @Nullable String theResourceType, @Nonnull String theResourceId, diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java index abc653b1a0bb..c1e7979fe5e2 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java @@ -138,10 +138,15 @@ public String getSql(boolean theInlineParams, boolean theFormat, boolean theSani break; } String nextParamValue = nextParams.remove(0); - if (theSanitizeParams) { - nextParamValue = UrlUtil.sanitizeUrlPart(nextParamValue); + String nextSubstitution; + if (nextParamValue != null) { + if (theSanitizeParams) { + nextParamValue = UrlUtil.sanitizeUrlPart(nextParamValue); + } + nextSubstitution = "'" + nextParamValue + "'"; + } else { + nextSubstitution = "NULL"; } - String nextSubstitution = "'" + nextParamValue + "'"; retVal = retVal.substring(0, idx) + nextSubstitution + retVal.substring(idx + 1); idx += nextSubstitution.length(); } diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java index 5660322de55d..c125433d6a8c 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java @@ -21,6 +21,7 @@ import ca.uhn.fhir.context.BaseRuntimeChildDefinition; import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition; +import ca.uhn.fhir.context.BaseRuntimeElementDefinition; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.util.FhirTerser; @@ -87,6 +88,13 @@ default ICreationArgument withLanguage(String theLanguage) { return t -> 
__setPrimitiveChild(getFhirContext(), t, "language", "string", theLanguage); } + /** + * List.entry.item + */ + default ICreationArgument withListItem(IIdType theReference) { + return withElementAt("entry", withReference("item", theReference)); + } + /** * Set Patient.gender */ @@ -239,6 +247,10 @@ default IBaseResource buildPatient(ICreationArgument... theModifiers) { return buildResource("Patient", theModifiers); } + default IIdType createList(ICreationArgument... theModifiers) { + return createResource("List", theModifiers); + } + default IIdType createPatient(ICreationArgument... theModifiers) { return createResource("Patient", theModifiers); } @@ -321,7 +333,7 @@ default ICreationArgument withReference(String theReferenceName, @Nullable IIdTy IBaseReference reference = (IBaseReference) getFhirContext().getElementDefinition("Reference").newInstance(); reference.setReference(theReferenceValue.getValue()); - RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition((IBaseResource) t); + BaseRuntimeElementDefinition resourceDef = getFhirContext().getElementDefinition(t.getClass()); resourceDef.getChildByName(theReferenceName).getMutator().addValue(t, reference); } }; diff --git a/pom.xml b/pom.xml index bae31e5f3f5e..8d12a0c89b0e 100644 --- a/pom.xml +++ b/pom.xml @@ -1170,6 +1170,11 @@ caffeine ${caffeine_version} + + com.github.jsqlparser + jsqlparser + 5.0 + com.googlecode.owasp-java-html-sanitizer owasp-java-html-sanitizer @@ -1361,7 +1366,7 @@ org.jetbrains annotations - 23.0.0 + 24.0.1 commons-io