diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index 26d519252704..3e6ac234994a 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index 9e0e3e40cdc3..f883703bcd3f 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index 0f624a5a3813..5279f5078498 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java index f6319bcd3a4c..8eaa217078e7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java @@ -150,9 +150,15 @@ private void initializeResourceLists() { @Override public List fetchAllConformanceResources() { ArrayList retVal = new ArrayList<>(); - retVal.addAll(myCodeSystems.values()); - retVal.addAll(myStructureDefinitions.values()); - retVal.addAll(myValueSets.values()); + if (myCodeSystems != null) { + retVal.addAll(myCodeSystems.values()); + } + if (myStructureDefinitions != null) { + retVal.addAll(myStructureDefinitions.values()); + } + if (myValueSets != null) { + retVal.addAll(myValueSets.values()); + } return retVal; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValidationSupportContext.java 
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValidationSupportContext.java index e1e8381230c3..a4ed3bad451c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValidationSupportContext.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValidationSupportContext.java @@ -30,7 +30,7 @@ public class ValidationSupportContext { private final Set myCurrentlyGeneratingSnapshots = new HashSet<>(); public ValidationSupportContext(IValidationSupport theRootValidationSupport) { - Validate.notNull(theRootValidationSupport, "theRootValidationSupport musty not be null"); + Validate.notNull(theRootValidationSupport, "theRootValidationSupport must not be null"); myRootValidationSupport = theRootValidationSupport; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IFhirVersion.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IFhirVersion.java index bf5e285b7c5c..973eeb8a7229 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IFhirVersion.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IFhirVersion.java @@ -74,6 +74,17 @@ default IIdType newIdType(String theValue) { return retVal; } + /** + * Creates a new {@link IIdType} instance for the given version with the given value + * + * @since 8.0.0 + */ + default IIdType newIdType(String theResourceType, String theIdPart) { + IIdType retVal = newIdType(); + retVal.setParts(null, theResourceType, theIdPart, null); + return retVal; + } + /** * Returns an instance of IFhirVersionServer for this version. 
* Note that this method may only be called if the hapi-fhir-server diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java index 0703f080c7e1..3959d3d6a29b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java @@ -19,9 +19,22 @@ */ package ca.uhn.fhir.rest.param; +import java.util.Collections; import java.util.Map; public class HistorySearchDateRangeParam extends DateRangeParam { + /** + * Constructor + * + * @since 8.0.0 + */ + public HistorySearchDateRangeParam() { + this(Collections.emptyMap(), new DateRangeParam(), null); + } + + /** + * Constructor + */ public HistorySearchDateRangeParam( Map theParameters, DateRangeParam theDateRange, Integer theOffset) { super(theDateRange); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java index 2f3dac411e47..84ed3e2303b1 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java @@ -29,7 +29,7 @@ public static String formatFileSize(long theBytes) { if (theBytes <= 0) { return "0 " + UNITS[0]; } - int digitGroups = (int) (Math.log10(theBytes) / Math.log10(1024)); + int digitGroups = (int) (Math.log10((double) theBytes) / Math.log10(1024)); digitGroups = Math.min(digitGroups, UNITS.length - 1); return new DecimalFormat("###0.#").format(theBytes / Math.pow(1024, digitGroups)) + " " + UNITS[digitGroups]; } diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index 615928fa4229..09650eb318c9 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -4,7 +4,7 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT pom HAPI FHIR BOM @@ -12,7 +12,7 @@ ca.uhn.hapi.fhir 
hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml index db4657026c13..03c8c95000f1 100644 --- a/hapi-fhir-checkstyle/pom.xml +++ b/hapi-fhir-checkstyle/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index 33782915ddc0..0d9588d571b1 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index 301353ac697f..b35c0adeb379 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index 746014589d2c..b39413e22de3 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index f6dcd6b81a24..07962556b50c 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index f9b12517d257..77d53a2c2410 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index 28d3078c0876..99288856f0b2 100644 --- 
a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index 0749e3faaff4..990e45623903 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index a8d553276c60..c01781c6e11a 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-fix-timezone-issue-for-history-at-query.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-fix-timezone-issue-for-history-at-query.yaml new file mode 100644 index 000000000000..f6a1e9b750fd --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-fix-timezone-issue-for-history-at-query.yaml @@ -0,0 +1,7 @@ +--- +type: fix +issue: 6409 +title: "When performing a `_history` query using the `_at` parameter, the time value + is now converted to a zoned-date before being passed to the database. This should + avoid conflicts around date changes on some databases. 
+ " diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-optimize-versioned-tag-loading.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-optimize-versioned-tag-loading.yaml new file mode 100644 index 000000000000..37faa975eeb8 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-optimize-versioned-tag-loading.yaml @@ -0,0 +1,7 @@ +--- +type: perf +issue: 6409 +title: "When searching in versioned tag mode, the JPA server now avoids a redundant + lookup of the un-versioned tags, avoiding an extra unnecessary database query + in some cases. + " diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-stop-using-provenance-table.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-stop-using-provenance-table.yaml new file mode 100644 index 000000000000..2f845786de91 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-stop-using-provenance-table.yaml @@ -0,0 +1,11 @@ +--- +type: perf +issue: 6409 +title: "The JPA server will no longer use the HFJ_RES_VER_PROV table to store and index values from + the `Resource.meta.source` element. Beginning in HAPI FHIR 6.8.0 (and Smile CDR 2023.08.R01), a + new pair of columns have been used to store data for this element, so this change only affects + data which was stored in HAPI FHIR prior to version 6.8.0 (released August 2023). If you have + FHIR resources which were stored in a JPA server prior to this version, and you use the + Resource.meta.source element and/or the `_source` search parameter, you should perform a complete + reindex of your server to ensure that data is not lost. See the upgrade notes for more information. 
+ " diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6478-transaction-bundle-updates-with-multiple-conditional-urls.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6478-transaction-bundle-updates-with-multiple-conditional-urls.yaml new file mode 100644 index 000000000000..f431323b068e --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6478-transaction-bundle-updates-with-multiple-conditional-urls.yaml @@ -0,0 +1,8 @@ +--- +type: perf +issue: 6478 +jira: SMILE-8955 +title: "Transactions with multiple saved search urls will have the saved search urls + deleted in a batch, instead of 1 at a time. + This is a minor performance update. +" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6522-cache-cleanup.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6522-cache-cleanup.yaml new file mode 100644 index 000000000000..eb04499d8b6f --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6522-cache-cleanup.yaml @@ -0,0 +1,7 @@ +--- +type: perf +issue: 6522 +title: "Several memory caches in various parts of the JPA server have been removed in an + effort to consolidate caching in this system to two places: The MemoryCacheService, and + ValidationSupportChain. This should make management of the system easier." 
+ diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md index 58969977a1b5..b8fa6a5af5d7 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md @@ -1,4 +1,20 @@ +# Upgrade Notes + +The JPA server stores values for the field `Resource.meta.source` in dedicated columns in its database so that they can be indexed and searched for as needed, using the `_source` Search Parameter. + +Prior to HAPI FHIR 6.8.0 (and Smile CDR 2023.08.R01), these values were stored in a dedicated table called `HFJ_RES_VER_PROV`. Beginning in HAPI FHIR 6.8.0 (Smile CDR 2023.08.R01), two new columns were added to the `HFJ_RES_VER` +table which store the same data and make it available for searches. + +As of HAPI FHIR 8.0.0, the legacy table is no longer searched by default. If you do not have Resource.meta.source data stored in HAPI FHIR that was last created/updated prior to version 6.8.0, this change will not affect you and no action needs to be taken. + +If you do have such data, you should follow these steps: + +* Enable the JpaStorageSettings setting `setAccessMetaSourceInformationFromProvenanceTable(true)` to configure the server to continue using the legacy table. + +* Perform a server resource reindex by invoking the [$reindex Operation (server)](https://smilecdr.com/docs/fhir_repository/search_parameter_reindexing.html#reindex-server) with the `optimizeStorage` parameter set to `ALL_VERSIONS`. + +* When this reindex operation has successfully completed, the setting above can be disabled. Disabling this setting avoids an extra database round-trip when loading data, so this change will have a positive performance impact on your server. 
# Fulltext Search with _lastUpdated Filter -Fulltext searches have been updated to support `_lastUpdated` search parameter. A reindexing of Search Parameters -is required to migrate old data to support the `_lastUpdated` search parameter. +Fulltext searches have been updated to support `_lastUpdated` search parameter. If you are using Advanced Hibernate Search indexing and wish to use the `_lastUpdated` search parameter with this feature, a full reindex of your repository is required. diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index e6a3f64d67d2..cef258e3a670 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index f1ce7ec0ce88..ce4f586cd916 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml index e894f9877787..97b4947eafd3 100644 --- a/hapi-fhir-jpa/pom.xml +++ b/hapi-fhir-jpa/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 02f29b40e6ec..2ec6079eb2e8 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java index 6cae759b3dcb..8b34e38699da 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java @@ -23,7 +23,9 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; +import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; @@ -37,6 +39,7 @@ import java.util.Collection; import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Optional; import static org.slf4j.LoggerFactory.getLogger; @@ -90,20 +93,9 @@ public ResourceVersionMap getVersionMap( public ResourcePersistentIdMap getLatestVersionIdsForResourceIds( RequestPartitionId theRequestPartitionId, List theIds) { ResourcePersistentIdMap idToPID = new ResourcePersistentIdMap(); - HashMap> resourceTypeToIds = new HashMap<>(); - for (IIdType id : theIds) { - String resourceType = id.getResourceType(); - if (!resourceTypeToIds.containsKey(resourceType)) { - resourceTypeToIds.put(resourceType, new ArrayList<>()); - } - resourceTypeToIds.get(resourceType).add(id); - } - - for (List nextIds : resourceTypeToIds.values()) { - ResourcePersistentIdMap idAndPID = getIdsOfExistingResources(theRequestPartitionId, nextIds); - idToPID.putAll(idAndPID); - } + ResourcePersistentIdMap idAndPID = getIdsOfExistingResources(theRequestPartitionId, theIds); + idToPID.putAll(idAndPID); return idToPID; } @@ -124,14 +116,17 @@ private ResourcePersistentIdMap getIdsOfExistingResources( return retval; } - List jpaPids = - myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId, new ArrayList<>(theIds)); + Map> identities = myIdHelperService.resolveResourceIdentities( 
+ thePartitionId, + new ArrayList<>(theIds), + ResolveIdentityMode.includeDeleted().cacheOk()); // we'll use this map to fetch pids that require versions HashMap pidsToVersionToResourcePid = new HashMap<>(); // fill in our map - for (JpaPid pid : jpaPids) { + for (IResourceLookup identity : identities.values()) { + JpaPid pid = identity.getPersistentId(); if (pid.getVersion() == null) { pidsToVersionToResourcePid.put(pid.getId(), pid); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java index a17e3425f466..b889b27e10ff 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java @@ -121,11 +121,12 @@ import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryProvenancePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder; -import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder; @@ -699,8 +700,15 @@ public TokenPredicateBuilder newTokenPredicateBuilder(SearchQueryBuilder theSear @Bean @Scope("prototype") - public 
SourcePredicateBuilder newSourcePredicateBuilder(SearchQueryBuilder theSearchBuilder) { - return new SourcePredicateBuilder(theSearchBuilder); + public ResourceHistoryPredicateBuilder newResourceHistoryPredicateBuilder(SearchQueryBuilder theSearchBuilder) { + return new ResourceHistoryPredicateBuilder(theSearchBuilder); + } + + @Bean + @Scope("prototype") + public ResourceHistoryProvenancePredicateBuilder newResourceHistoryProvenancePredicateBuilder( + SearchQueryBuilder theSearchBuilder) { + return new ResourceHistoryProvenancePredicateBuilder(theSearchBuilder); } @Bean diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java index 80d8665362a7..8bac9ba0312c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java @@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; -import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao; import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.model.config.PartitionSettings; @@ -89,9 +88,6 @@ public class SearchConfig { @Autowired private DaoRegistry myDaoRegistry; - @Autowired - private IResourceSearchViewDao myResourceSearchViewDao; - @Autowired private FhirContext myContext; @@ -169,7 +165,6 @@ public ISearchBuilder newSearchBuilder(String theResourceName, Class extends B @Autowired private IJobCoordinator myJobCoordinator; + @Autowired + private IResourceHistoryProvenanceDao myResourceHistoryProvenanceDao; + private IInstanceValidatorModule myInstanceValidator; private String myResourceName; private Class myResourceType; @@ -562,7 +567,7 @@ private DaoMethodOutcome doCreateForPostOrPut( thePerformIndexing); // 
Store the resource forced ID if necessary - JpaPid jpaPid = JpaPid.fromId(updatedEntity.getResourceId()); + JpaPid jpaPid = updatedEntity.getPersistentId(); // Populate the resource with its actual final stored ID from the entity theResource.setId(entity.getIdDt()); @@ -570,10 +575,9 @@ private DaoMethodOutcome doCreateForPostOrPut( // Pre-cache the resource ID jpaPid.setAssociatedResourceId(entity.getIdType(myFhirContext)); String fhirId = entity.getFhirId(); - if (fhirId == null) { - fhirId = Long.toString(entity.getId()); - } - myIdHelperService.addResolvedPidToFhirId(jpaPid, theRequestPartitionId, getResourceName(), fhirId, null); + assert fhirId != null; + myIdHelperService.addResolvedPidToFhirIdAfterCommit( + jpaPid, theRequestPartitionId, getResourceName(), fhirId, null); theTransactionDetails.addResolvedResourceId(jpaPid.getAssociatedResourceId(), jpaPid); theTransactionDetails.addResolvedResource(jpaPid.getAssociatedResourceId(), theResource); @@ -723,9 +727,12 @@ public DaoMethodOutcome delete( validateIdPresentForDelete(theId); validateDeleteEnabled(); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead( + theRequestDetails, getResourceName(), theId); + final ResourceTable entity; try { - entity = readEntityLatestVersion(theId, theRequestDetails, theTransactionDetails); + entity = readEntityLatestVersion(theId, requestPartitionId, theTransactionDetails); } catch (ResourceNotFoundException ex) { // we don't want to throw 404s. // if not found, return an outcome anyways. 
@@ -803,6 +810,13 @@ public DaoMethodOutcome delete( .getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", w.getMillis()); outcome.setOperationOutcome(createInfoOperationOutcome(msg, StorageResponseCodeEnum.SUCCESSFUL_DELETE)); + myIdHelperService.addResolvedPidToFhirIdAfterCommit( + entity.getPersistentId(), + requestPartitionId, + entity.getResourceType(), + entity.getFhirId(), + entity.getDeleted()); + return outcome; } @@ -1005,7 +1019,7 @@ public void beforeCommit(boolean readOnly) { protected ResourceTable updateEntityForDelete( RequestDetails theRequest, TransactionDetails theTransactionDetails, ResourceTable theEntity) { - myResourceSearchUrlSvc.deleteByResId(theEntity.getId()); + myResourceSearchUrlSvc.deleteByResId(theEntity.getPersistentId()); Date updateTime = new Date(); return updateEntity(theRequest, null, theEntity, updateTime, true, true, theTransactionDetails, false, true); } @@ -1144,15 +1158,15 @@ private void doMetaDelete( } @Override - @Transactional(propagation = Propagation.NEVER) public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest) { + HapiTransactionService.noTransactionAllowed(); validateExpungeEnabled(); return forceExpungeInExistingTransaction(theId, theExpungeOptions, theRequest); } @Override - @Transactional(propagation = Propagation.NEVER) public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) { + HapiTransactionService.noTransactionAllowed(); ourLog.info("Beginning TYPE[{}] expunge operation", getResourceName()); validateExpungeEnabled(); return myExpungeService.expunge(getResourceName(), null, theExpungeOptions, theRequestDetails); @@ -1250,7 +1264,7 @@ public IBundleProvider history( return myPersistedJpaBundleProviderFactory.history( theRequest, myResourceName, - entity.getId(), + entity.getPersistentId(), theSince, theUntil, theOffset, @@ -1280,7 +1294,7 @@ public IBundleProvider history( return 
myPersistedJpaBundleProviderFactory.history( theRequest, myResourceName, - entity.getId(), + JpaPid.fromId(entity.getId()), theHistorySearchDateRangeParam.getLowerBoundAsInstant(), theHistorySearchDateRangeParam.getUpperBoundAsInstant(), theHistorySearchDateRangeParam.getOffset(), @@ -1380,8 +1394,8 @@ protected void doMetaAddOperation( doMetaAdd(theMetaAdd, latestVersion, theRequest, transactionDetails); // Also update history entry - ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( - entity.getId(), entity.getVersion()); + ResourceHistoryTable history = + myResourceHistoryTableDao.findForIdAndVersion(entity.getId(), entity.getVersion()); doMetaAdd(theMetaAdd, history, theRequest, transactionDetails); } @@ -1428,8 +1442,8 @@ public void doMetaDeleteOperation( } else { doMetaDelete(theMetaDel, latestVersion, theRequest, transactionDetails); // Also update history entry - ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( - entity.getId(), entity.getVersion()); + ResourceHistoryTable history = + myResourceHistoryTableDao.findForIdAndVersion(entity.getId(), entity.getVersion()); doMetaDelete(theMetaDel, history, theRequest, transactionDetails); } @@ -1694,7 +1708,7 @@ private void reindexOptimizeStorage( ResourceTable entity, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageMode) { ResourceHistoryTable historyEntity = entity.getCurrentVersionEntity(); if (historyEntity != null) { - reindexOptimizeStorageHistoryEntity(entity, historyEntity); + reindexOptimizeStorageHistoryEntityThenDetachIt(entity, historyEntity); if (theOptimizeStorageMode == ReindexParameters.OptimizeStorageModeEnum.ALL_VERSIONS) { int pageSize = 100; for (int page = 0; ((long) page * pageSize) < entity.getVersion(); page++) { @@ -1704,39 +1718,44 @@ private void reindexOptimizeStorage( // different pages as the underlying data gets updated. 
PageRequest pageRequest = PageRequest.of(page, pageSize, Sort.by("myId")); Slice historyEntities = - myResourceHistoryTableDao.findForResourceIdAndReturnEntitiesAndFetchProvenance( + myResourceHistoryTableDao.findAllVersionsExceptSpecificForResourcePid( pageRequest, entity.getId(), historyEntity.getVersion()); for (ResourceHistoryTable next : historyEntities) { - reindexOptimizeStorageHistoryEntity(entity, next); + reindexOptimizeStorageHistoryEntityThenDetachIt(entity, next); } } } } } - private void reindexOptimizeStorageHistoryEntity(ResourceTable entity, ResourceHistoryTable historyEntity) { - boolean changed = false; + /** + * Note that the entity will be detached after being saved if it has changed + * in order to avoid growing the number of resources in memory to be too big + */ + private void reindexOptimizeStorageHistoryEntityThenDetachIt( + ResourceTable entity, ResourceHistoryTable historyEntity) { if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC || historyEntity.getEncoding() == ResourceEncodingEnum.JSON) { byte[] resourceBytes = historyEntity.getResource(); if (resourceBytes != null) { String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding()); - if (myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText)) { - changed = true; - } + myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText); } } - if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) { - if (historyEntity.getProvenance() != null) { - historyEntity.setSourceUri(historyEntity.getProvenance().getSourceUri()); - historyEntity.setRequestId(historyEntity.getProvenance().getRequestId()); - changed = true; + if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) { + if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) { + Long id = historyEntity.getId(); + Optional provenanceEntityOpt = + 
myResourceHistoryProvenanceDao.findById(id); + if (provenanceEntityOpt.isPresent()) { + ResourceHistoryProvenanceEntity provenanceEntity = provenanceEntityOpt.get(); + historyEntity.setSourceUri(provenanceEntity.getSourceUri()); + historyEntity.setRequestId(provenanceEntity.getRequestId()); + myResourceHistoryProvenanceDao.delete(provenanceEntity); + } } } - if (changed) { - myResourceHistoryTableDao.save(historyEntity); - } } private BaseHasResource readEntity( @@ -1873,47 +1892,37 @@ private ResourceTable readEntityLatestVersion( IIdType theId, @Nonnull RequestPartitionId theRequestPartitionId, TransactionDetails theTransactionDetails) { - validateResourceTypeAndThrowInvalidRequestException(theId); + HapiTransactionService.requireTransaction(); + + IIdType id = theId; + validateResourceTypeAndThrowInvalidRequestException(id); + if (!id.hasResourceType()) { + id = id.withResourceType(getResourceName()); + } JpaPid persistentId = null; if (theTransactionDetails != null) { - if (theTransactionDetails.isResolvedResourceIdEmpty(theId.toUnqualifiedVersionless())) { - throw new ResourceNotFoundException(Msg.code(1997) + theId); + if (theTransactionDetails.isResolvedResourceIdEmpty(id.toUnqualifiedVersionless())) { + throw new ResourceNotFoundException(Msg.code(1997) + id); } if (theTransactionDetails.hasResolvedResourceIds()) { - persistentId = (JpaPid) theTransactionDetails.getResolvedResourceId(theId); + persistentId = (JpaPid) theTransactionDetails.getResolvedResourceId(id); } } if (persistentId == null) { - String resourceName = getResourceName(); - if (myStorageSettings.getResourceClientIdStrategy() - == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC) { - if (theId.isIdPartValidLong()) { - /* - * If it's a pure numeric ID and we are in ALPHANUMERIC mode, then the number - * corresponds to a DB PID. In this case we want to resolve it regardless of - * which type the client has supplied. 
This is because DB PIDs are unique across - * all resource types (unlike FHIR_IDs which are namespaced to the resource type). - * We want to load the resource with that PID regardless of type because if - * the user is trying to update it we want to fail if the type is wrong, as - * opposed to trying to create a new instance. - */ - resourceName = null; - } - } persistentId = myIdHelperService.resolveResourceIdentityPid( theRequestPartitionId, - resourceName, - theId.getIdPart(), + id.getResourceType(), + id.getIdPart(), ResolveIdentityMode.includeDeleted().cacheOk()); } ResourceTable entity = myEntityManager.find(ResourceTable.class, persistentId.getId()); if (entity == null) { - throw new ResourceNotFoundException(Msg.code(1998) + theId); + throw new ResourceNotFoundException(Msg.code(1998) + id); } - validateGivenIdIsAppropriateToRetrieveResource(theId, entity); + validateGivenIdIsAppropriateToRetrieveResource(id, entity); return entity; } @@ -2351,13 +2360,13 @@ private DaoMethodOutcome doUpdate( RequestDetails theRequest, TransactionDetails theTransactionDetails, RequestPartitionId theRequestPartitionId) { - + DaoMethodOutcome outcome = null; preProcessResourceForStorage(theResource); preProcessResourceForStorage(theResource, theRequest, theTransactionDetails, thePerformIndexing); ResourceTable entity = null; - IIdType resourceId; + IIdType resourceId = null; RestOperationTypeEnum update = RestOperationTypeEnum.UPDATE; if (isNotBlank(theMatchUrl)) { // Validate that the supplied resource matches the conditional. 
@@ -2399,7 +2408,7 @@ && getStorageSettings().getResourceServerIdStrategy() theResource.setId(UUID.randomUUID().toString()); theResource.setUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED, Boolean.TRUE); } - DaoMethodOutcome outcome = doCreateForPostOrPut( + outcome = doCreateForPostOrPut( theRequest, theResource, theMatchUrl, @@ -2414,8 +2423,6 @@ && getStorageSettings().getResourceServerIdStrategy() myMatchResourceUrlService.matchUrlResolved( theTransactionDetails, getResourceName(), theMatchUrl, (JpaPid) outcome.getPersistentId()); } - - return outcome; } } else { /* @@ -2427,6 +2434,10 @@ && getStorageSettings().getResourceServerIdStrategy() assert resourceId != null; assert resourceId.hasIdPart(); + if (!resourceId.hasResourceType()) { + resourceId = resourceId.withResourceType(getResourceName()); + } + boolean create = false; if (theRequest != null) { @@ -2445,7 +2456,7 @@ && getStorageSettings().getResourceServerIdStrategy() } if (create) { - return doCreateForPostOrPut( + outcome = doCreateForPostOrPut( theRequest, theResource, null, @@ -2458,16 +2469,35 @@ && getStorageSettings().getResourceServerIdStrategy() } // Start - return doUpdateForUpdateOrPatch( - theRequest, - resourceId, - theMatchUrl, - thePerformIndexing, - theForceUpdateVersion, - theResource, - entity, - update, - theTransactionDetails); + if (outcome == null) { + outcome = doUpdateForUpdateOrPatch( + theRequest, + resourceId, + theMatchUrl, + thePerformIndexing, + theForceUpdateVersion, + theResource, + entity, + update, + theTransactionDetails); + } + + postUpdateTransaction(theTransactionDetails); + + return outcome; + } + + @SuppressWarnings("rawtypes") + protected void postUpdateTransaction(TransactionDetails theTransactionDetails) { + // Transactions will delete these at the end of the entire transaction + if (!theTransactionDetails.isFhirTransaction()) { + Set resourceIds = theTransactionDetails.getUpdatedResourceIds(); + if (resourceIds != null && !resourceIds.isEmpty()) { + List 
ids = resourceIds.stream().map(r -> (Long) r.getId()).collect(Collectors.toList()); + + myResourceSearchUrlSvc.deleteByResIds(ids); + } + } } @Override @@ -2489,9 +2519,22 @@ protected DaoMethodOutcome doUpdateForUpdateOrPatch( */ ResourceTable entity = (ResourceTable) theEntity; if (entity.isSearchUrlPresent()) { - myResourceSearchUrlSvc.deleteByResId( - (Long) theEntity.getPersistentId().getId()); - entity.setSearchUrlPresent(false); + JpaPid persistentId = JpaPid.fromId(entity.getResourceId()); + theTransactionDetails.addUpdatedResourceId(persistentId); + + entity.setSearchUrlPresent(false); // it will be removed at the end + } + + if (entity.isDeleted()) { + // We're un-deleting this entity so let's inform the memory cache service + myIdHelperService.addResolvedPidToFhirIdAfterCommit( + entity.getPersistentId(), + entity.getPartitionId() == null + ? RequestPartitionId.defaultPartition() + : entity.getPartitionId().toPartitionId(), + entity.getResourceType(), + entity.getFhirId(), + null); } return super.doUpdateForUpdateOrPatch( @@ -2571,7 +2614,6 @@ private DaoMethodOutcome doUpdateWithHistoryRewrite( } @Override - @Transactional(propagation = Propagation.SUPPORTS) public MethodOutcome validate( T theResource, IIdType theId, @@ -2587,19 +2629,30 @@ public MethodOutcome validate( throw new InvalidRequestException( Msg.code(991) + "No ID supplied. 
ID is required when validating with mode=DELETE"); } - final ResourceTable entity = readEntityLatestVersion(theId, theRequest, transactionDetails); - // Validate that there are no resources pointing to the candidate that - // would prevent deletion - DeleteConflictList deleteConflicts = new DeleteConflictList(); - if (getStorageSettings().isEnforceReferentialIntegrityOnDelete()) { - myDeleteConflictService.validateOkToDelete( - deleteConflicts, entity, true, theRequest, new TransactionDetails()); - } - DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperService.determineReadPartitionForRequestForRead( + theRequest, getResourceName(), theId); + + return myTransactionService + .withRequest(theRequest) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + final ResourceTable entity = + readEntityLatestVersion(theId, requestPartitionId, transactionDetails); + + // Validate that there are no resources pointing to the candidate that + // would prevent deletion + DeleteConflictList deleteConflicts = new DeleteConflictList(); + if (getStorageSettings().isEnforceReferentialIntegrityOnDelete()) { + myDeleteConflictService.validateOkToDelete( + deleteConflicts, entity, true, theRequest, new TransactionDetails()); + } + DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts); - IBaseOperationOutcome oo = createInfoOperationOutcome("Ok to delete"); - return new MethodOutcome(new IdDt(theId.getValue()), oo); + IBaseOperationOutcome oo = createInfoOperationOutcome("Ok to delete"); + return new MethodOutcome(new IdDt(theId.getValue()), oo); + }); } FhirValidator validator = getContext().newValidator(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java index d21dff545d32..0aad2196720e 
100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java @@ -206,7 +206,7 @@ public

void preFetchResources( * However, for realistic average workloads, this should reduce the number of round trips. */ if (!idChunk.isEmpty()) { - List entityChunk = prefetchResourceTableHistoryAndProvenance(idChunk); + List entityChunk = prefetchResourceTableAndHistory(idChunk); if (thePreFetchIndexes) { @@ -244,14 +244,13 @@ public

void preFetchResources( } @Nonnull - private List prefetchResourceTableHistoryAndProvenance(List idChunk) { + private List prefetchResourceTableAndHistory(List idChunk) { assert idChunk.size() < SearchConstants.MAX_PAGE_SIZE : "assume pre-chunked"; Query query = myEntityManager.createQuery("select r, h " + " FROM ResourceTable r " + " LEFT JOIN fetch ResourceHistoryTable h " + " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId " - + " left join fetch h.myProvenance " + " WHERE r.myId IN ( :IDS ) "); query.setParameter("IDS", idChunk); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java index c5a5dba6d94b..2493e97e4eca 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java @@ -219,7 +219,7 @@ private ISearchQueryExecutor doSearch( // indicate param was already processed, otherwise queries DB to process it theParams.setOffset(null); - return SearchQueryExecutors.from(longs); + return SearchQueryExecutors.from(JpaPid.fromLongList(longs)); } private int getMaxFetchSize(SearchParameterMap theParams, Integer theMax) { @@ -386,7 +386,6 @@ public List search( @SuppressWarnings("rawtypes") private List toList(ISearchQueryExecutor theSearchResultStream, long theMaxSize) { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(theSearchResultStream, 0), false) - .map(JpaPid::fromId) .limit(theMaxSize) .collect(Collectors.toList()); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java index dcf578ee5c9d..4d679594475a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java @@ -40,7 +40,6 @@ import jakarta.persistence.criteria.CriteriaBuilder; import jakarta.persistence.criteria.CriteriaQuery; import jakarta.persistence.criteria.Expression; -import jakarta.persistence.criteria.JoinType; import jakarta.persistence.criteria.Predicate; import jakarta.persistence.criteria.Root; import jakarta.persistence.criteria.Subquery; @@ -125,8 +124,6 @@ public List fetchEntities( addPredicatesToQuery(cb, thePartitionId, criteriaQuery, from, theHistorySearchStyle); - from.fetch("myProvenance", JoinType.LEFT); - /* * The sort on myUpdated is the important one for _history operations, but there are * cases where multiple pages of results all have the exact same myUpdated value (e.g. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java index fe3ae4824df2..3f7334572913 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java @@ -19,15 +19,15 @@ */ package ca.uhn.fhir.jpa.dao; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.BaseTag; import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; -import ca.uhn.fhir.jpa.model.entity.ResourceTag; import jakarta.annotation.Nullable; import org.hl7.fhir.instance.model.api.IBaseResource; import java.util.Collection; -public interface IJpaStorageResourceParser extends IStorageResourceParser { +public interface IJpaStorageResourceParser extends IStorageResourceParser { /** * Convert a storage entity into a FHIR resource model instance. 
This method may return null if the entity is not @@ -36,7 +36,7 @@ public interface IJpaStorageResourceParser extends IStorageResourceParser { R toResource( Class theResourceType, IBaseResourceEntity theEntity, - Collection theTagList, + Collection theTagList, boolean theForHistoryOperation); /** diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java index 48a04cf38fe2..b2a9f45ffa49 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java @@ -26,8 +26,6 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.term.TermReadSvcUtil; -import ca.uhn.fhir.jpa.term.api.ITermReadSvc; -import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.SortOrderEnum; import ca.uhn.fhir.rest.api.SortSpec; import ca.uhn.fhir.rest.api.server.IBundleProvider; @@ -35,76 +33,57 @@ import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.UriParam; -import ca.uhn.fhir.sl.cache.Cache; -import ca.uhn.fhir.sl.cache.CacheFactory; import jakarta.annotation.Nullable; import jakarta.annotation.PostConstruct; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.ImplementationGuide; import org.hl7.fhir.r4.model.Questionnaire; import org.hl7.fhir.r4.model.StructureDefinition; -import org.hl7.fhir.r4.model.UriType; import org.hl7.fhir.r4.model.ValueSet; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; import java.util.Arrays; import java.util.List; +import java.util.Objects; import java.util.Optional; -import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW; +import static org.hl7.fhir.instance.model.api.IAnyResource.SP_RES_LAST_UPDATED; /** * This class is a {@link IValidationSupport Validation support} module that loads * validation resources (StructureDefinition, ValueSet, CodeSystem, etc.) from the resources * persisted in the JPA server. */ -@Transactional(propagation = Propagation.REQUIRED) public class JpaPersistedResourceValidationSupport implements IValidationSupport { private static final Logger ourLog = LoggerFactory.getLogger(JpaPersistedResourceValidationSupport.class); private final FhirContext myFhirContext; - private final IBaseResource myNoMatch; @Autowired private DaoRegistry myDaoRegistry; - @Autowired - private ITermReadSvc myTermReadSvc; - private Class myCodeSystemType; private Class myStructureDefinitionType; private Class myValueSetType; - // TODO: JA2 We shouldn't need to cache here, but we probably still should since the - // TermReadSvcImpl calls these methods as a part of its "isCodeSystemSupported" calls. - // We should modify CachingValidationSupport to cache the results of "isXXXSupported" - // at which point we could do away with this cache - // TODO: LD: This cache seems to supersede the cache in CachingValidationSupport, as that cache is set to - // 10 minutes, but this 1 minute cache now determines the expiry. 
- // This new behaviour was introduced between the 7.0.0 release and the current master (7.2.0) - private Cache myLoadCache = CacheFactory.build(TimeUnit.MINUTES.toMillis(1), 1000); - /** * Constructor */ public JpaPersistedResourceValidationSupport(FhirContext theFhirContext) { super(); - Validate.notNull(theFhirContext); + Validate.notNull(theFhirContext, "theFhirContext must not be null"); myFhirContext = theFhirContext; - - myNoMatch = myFhirContext.getResourceDefinition("Basic").newInstance(); } @Override @@ -115,51 +94,44 @@ public String getName() { @Override public IBaseResource fetchCodeSystem(String theSystem) { if (TermReadSvcUtil.isLoincUnversionedCodeSystem(theSystem)) { - Optional currentCSOpt = getCodeSystemCurrentVersion(new UriType(theSystem)); - if (!currentCSOpt.isPresent()) { - ourLog.info("Couldn't find current version of CodeSystem: " + theSystem); - } - return currentCSOpt.orElse(null); + IIdType id = myFhirContext.getVersion().newIdType("CodeSystem", LOINC_LOW); + return findResourceByIdWithNoException(id, myCodeSystemType); } return fetchResource(myCodeSystemType, theSystem); } - /** - * Obtains the current version of a CodeSystem using the fact that the current - * version is always pointed by the ForcedId for the no-versioned CS - */ - private Optional getCodeSystemCurrentVersion(UriType theUrl) { - if (!theUrl.getValueAsString().contains(LOINC_LOW)) { - return Optional.empty(); - } - - return myTermReadSvc.readCodeSystemByForcedId(LOINC_LOW); - } - @Override public IBaseResource fetchValueSet(String theSystem) { if (TermReadSvcUtil.isLoincUnversionedValueSet(theSystem)) { - Optional currentVSOpt = getValueSetCurrentVersion(new UriType(theSystem)); - return currentVSOpt.orElse(null); + Optional vsIdOpt = TermReadSvcUtil.getValueSetId(theSystem); + if (vsIdOpt.isEmpty()) { + return null; + } + IIdType id = myFhirContext.getVersion().newIdType("ValueSet", vsIdOpt.get()); + return findResourceByIdWithNoException(id, myValueSetType); } return 
fetchResource(myValueSetType, theSystem); } /** - * Obtains the current version of a ValueSet using the fact that the current - * version is always pointed by the ForcedId for the no-versioned VS + * Performs a lookup by ID, with no exception thrown (since that can mark the active + * transaction as rollback). */ - private Optional getValueSetCurrentVersion(UriType theUrl) { - Optional vsIdOpt = TermReadSvcUtil.getValueSetId(theUrl.getValueAsString()); - if (!vsIdOpt.isPresent()) { - return Optional.empty(); + @Nullable + private IBaseResource findResourceByIdWithNoException(IIdType id, Class type) { + SearchParameterMap map = SearchParameterMap.newSynchronous() + .setLoadSynchronousUpTo(1) + .add(IAnyResource.SP_RES_ID, new TokenParam(id.getValue())); + IFhirResourceDao dao = myDaoRegistry.getResourceDao(type); + IBundleProvider outcome = dao.search(map, new SystemRequestDetails()); + List resources = outcome.getResources(0, 1); + if (resources.isEmpty()) { + return null; + } else { + return resources.get(0); } - - IFhirResourceDao valueSetResourceDao = myDaoRegistry.getResourceDao(myValueSetType); - IBaseResource valueSet = valueSetResourceDao.read(new IdDt("ValueSet", vsIdOpt.get())); - return Optional.ofNullable(valueSet); } @Override @@ -188,17 +160,7 @@ public T fetchResource(@Nullable Class theClass, St return null; } - String key = theClass + " " + theUri; - IBaseResource fetched = myLoadCache.get(key, t -> doFetchResource(theClass, theUri)); - - if (fetched == myNoMatch) { - ourLog.debug( - "Invalidating cache entry for URI: {} since the result of the underlying query is empty", theUri); - myLoadCache.invalidate(key); - return null; - } - - return (T) fetched; + return (T) doFetchResource(theClass, theUri); } private IBaseResource doFetchResource(@Nullable Class theClass, String theUri) { @@ -209,17 +171,14 @@ private IBaseResource doFetchResource(@Nullable Class< () -> doFetchResource(StructureDefinition.class, theUri) }; return Arrays.stream(fetchers) - 
.map(t -> t.get()) - .filter(t -> t != myNoMatch) + .map(Supplier::get) + .filter(Objects::nonNull) .findFirst() - .orElse(myNoMatch); + .orElse(null); } IdType id = new IdType(theUri); - boolean localReference = false; - if (id.hasBaseUrl() == false && id.hasIdPart() == true) { - localReference = true; - } + boolean localReference = id.hasBaseUrl() == false && id.hasIdPart() == true; String resourceName = myFhirContext.getResourceType(theClass); IBundleProvider search; @@ -230,7 +189,7 @@ private IBaseResource doFetchResource(@Nullable Class< params.setLoadSynchronousUpTo(1); params.add(IAnyResource.SP_RES_ID, new StringParam(theUri)); search = myDaoRegistry.getResourceDao(resourceName).search(params); - if (search.size() == 0) { + if (search.isEmpty()) { params = new SearchParameterMap(); params.setLoadSynchronousUpTo(1); params.add(ValueSet.SP_URL, new UriParam(theUri)); @@ -246,7 +205,7 @@ private IBaseResource doFetchResource(@Nullable Class< } else { params.add(ValueSet.SP_URL, new UriParam(theUri)); } - params.setSort(new SortSpec("_lastUpdated").setOrder(SortOrderEnum.DESC)); + params.setSort(new SortSpec(SP_RES_LAST_UPDATED).setOrder(SortOrderEnum.DESC)); search = myDaoRegistry.getResourceDao(resourceName).search(params); if (search.isEmpty() @@ -255,11 +214,13 @@ private IBaseResource doFetchResource(@Nullable Class< params.setLoadSynchronousUpTo(1); if (versionSeparator != -1) { params.add(ValueSet.SP_VERSION, new TokenParam(theUri.substring(versionSeparator + 1))); - params.add("system", new UriParam(theUri.substring(0, versionSeparator))); + params.add( + ca.uhn.fhir.model.dstu2.resource.ValueSet.SP_SYSTEM, + new UriParam(theUri.substring(0, versionSeparator))); } else { - params.add("system", new UriParam(theUri)); + params.add(ca.uhn.fhir.model.dstu2.resource.ValueSet.SP_SYSTEM, new UriParam(theUri)); } - params.setSort(new SortSpec("_lastUpdated").setOrder(SortOrderEnum.DESC)); + params.setSort(new 
SortSpec(SP_RES_LAST_UPDATED).setOrder(SortOrderEnum.DESC)); search = myDaoRegistry.getResourceDao(resourceName).search(params); } } @@ -269,7 +230,7 @@ private IBaseResource doFetchResource(@Nullable Class< if (theUri.startsWith("http://hl7.org/fhir/StructureDefinition/")) { String typeName = theUri.substring("http://hl7.org/fhir/StructureDefinition/".length()); if (myFhirContext.getElementDefinition(typeName) != null) { - return myNoMatch; + return null; } } SearchParameterMap params = new SearchParameterMap(); @@ -299,7 +260,7 @@ private IBaseResource doFetchResource(@Nullable Class< } else { params.add(CodeSystem.SP_URL, new UriParam(theUri)); } - params.setSort(new SortSpec("_lastUpdated").setOrder(SortOrderEnum.DESC)); + params.setSort(new SortSpec(SP_RES_LAST_UPDATED).setOrder(SortOrderEnum.DESC)); search = myDaoRegistry.getResourceDao(resourceName).search(params); break; } @@ -322,7 +283,7 @@ private IBaseResource doFetchResource(@Nullable Class< Integer size = search.size(); if (size == null || size == 0) { - return myNoMatch; + return null; } if (size > 1) { @@ -349,8 +310,4 @@ public void start() { myCodeSystemType = myFhirContext.getResourceDefinition("ValueSet").getImplementingClass(); } } - - public void clearCaches() { - myLoadCache.invalidateAll(); - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java index 09d46a557e43..4069b91b7c50 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java @@ -26,9 +26,9 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.IDao; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao; import 
ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; import ca.uhn.fhir.jpa.entity.PartitionEntity; -import ca.uhn.fhir.jpa.entity.ResourceSearchView; import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry; import ca.uhn.fhir.jpa.esr.IExternallyStoredResourceService; import ca.uhn.fhir.jpa.model.config.PartitionSettings; @@ -37,9 +37,9 @@ import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable; -import ca.uhn.fhir.jpa.model.entity.ResourceTag; import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.jpa.model.entity.TagTypeEnum; import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc; @@ -71,12 +71,13 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.Date; import java.util.List; +import java.util.Optional; import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.decodeResource; import static java.util.Objects.nonNull; +import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class JpaStorageResourceParser implements IJpaStorageResourceParser { @@ -92,6 +93,9 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { @Autowired private IResourceHistoryTableDao myResourceHistoryTableDao; + @Autowired + private IResourceHistoryProvenanceDao myResourceHistoryProvenanceDao; + @Autowired private PartitionSettings myPartitionSettings; @@ -115,14 +119,14 @@ public IBaseResource toResource(IBasePersistedResource theEntity, boolean theFor public R toResource( Class theResourceType, IBaseResourceEntity theEntity, - Collection theTagList, + Collection theTagList, boolean theForHistoryOperation) { // 1. 
get resource, it's encoding and the tags if any byte[] resourceBytes; String resourceText; ResourceEncodingEnum resourceEncoding; - @Nullable Collection tagList = Collections.emptyList(); + @Nullable Collection tagList; long version; String provenanceSourceUri = null; String provenanceRequestId = null; @@ -132,25 +136,42 @@ public R toResource( resourceBytes = history.getResource(); resourceText = history.getResourceTextVc(); resourceEncoding = history.getEncoding(); - switch (myStorageSettings.getTagStorageMode()) { - case VERSIONED: - default: - if (history.isHasTags()) { - tagList = history.getTags(); - } - break; - case NON_VERSIONED: - if (history.getResourceTable().isHasTags()) { - tagList = history.getResourceTable().getTags(); - } - break; - case INLINE: - tagList = null; + + // For search results we get the list of tags passed in because we load it + // in bulk for all resources we're going to return, but for read results + // we don't get the list passed in so we need to load it here. 
+ tagList = theTagList; + if (tagList == null) { + switch (myStorageSettings.getTagStorageMode()) { + case VERSIONED: + default: + if (history.isHasTags()) { + tagList = history.getTags(); + } + break; + case NON_VERSIONED: + if (history.getResourceTable().isHasTags()) { + tagList = history.getResourceTable().getTags(); + } + break; + case INLINE: + tagList = null; + } } + version = history.getVersion(); - if (history.getProvenance() != null) { - provenanceRequestId = history.getProvenance().getRequestId(); - provenanceSourceUri = history.getProvenance().getSourceUri(); + provenanceSourceUri = history.getSourceUri(); + provenanceRequestId = history.getRequestId(); + if (isBlank(provenanceSourceUri) && isBlank(provenanceRequestId)) { + if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) { + Optional provenanceOpt = + myResourceHistoryProvenanceDao.findById(history.getId()); + if (provenanceOpt.isPresent()) { + ResourceHistoryProvenanceEntity provenance = provenanceOpt.get(); + provenanceRequestId = provenance.getRequestId(); + provenanceSourceUri = provenance.getSourceUri(); + } + } } } else if (theEntity instanceof ResourceTable) { ResourceTable resource = (ResourceTable) theEntity; @@ -159,14 +180,13 @@ public R toResource( history = resource.getCurrentVersionEntity(); } else { version = theEntity.getVersion(); - history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version); + history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), version); ((ResourceTable) theEntity).setCurrentVersionEntity(history); while (history == null) { if (version > 1L) { version--; - history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( - theEntity.getId(), version); + history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), version); } else { return null; } @@ -181,36 +201,28 @@ public R toResource( case NON_VERSIONED: if (resource.isHasTags()) { 
tagList = resource.getTags(); + } else { + tagList = List.of(); } break; case INLINE: + default: tagList = null; break; } version = history.getVersion(); - if (history.getProvenance() != null) { - provenanceRequestId = history.getProvenance().getRequestId(); - provenanceSourceUri = history.getProvenance().getSourceUri(); - } - } else if (theEntity instanceof ResourceSearchView) { - // This is the search View - ResourceSearchView view = (ResourceSearchView) theEntity; - resourceBytes = view.getResource(); - resourceText = view.getResourceTextVc(); - resourceEncoding = view.getEncoding(); - version = view.getVersion(); - provenanceRequestId = view.getProvenanceRequestId(); - provenanceSourceUri = view.getProvenanceSourceUri(); - switch (myStorageSettings.getTagStorageMode()) { - case VERSIONED: - case NON_VERSIONED: - if (theTagList != null) { - tagList = theTagList; + provenanceSourceUri = history.getSourceUri(); + provenanceRequestId = history.getRequestId(); + if (isBlank(provenanceSourceUri) && isBlank(provenanceRequestId)) { + if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) { + Optional provenanceOpt = + myResourceHistoryProvenanceDao.findById(history.getId()); + if (provenanceOpt.isPresent()) { + ResourceHistoryProvenanceEntity provenance = provenanceOpt.get(); + provenanceRequestId = provenance.getRequestId(); + provenanceSourceUri = provenance.getSourceUri(); } - break; - case INLINE: - tagList = null; - break; + } } } else { // something wrong @@ -277,7 +289,7 @@ private R parseResource( } else if (theResourceEncoding != ResourceEncodingEnum.DEL) { IParser parser = new TolerantJsonParser( - getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getId()); + getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getResourceId()); try { retVal = parser.parseResource(theResourceType, theDecodedResourceText); @@ -519,8 +531,8 @@ public void updateResourceMetadata(IBaseResourceEntity theEntitySource, 
IBaseRes theResourceTarget.setId(id); if (theResourceTarget instanceof IResource) { - ResourceMetadataKeyEnum.VERSION.put((IResource) theResourceTarget, id.getVersionIdPart()); - ResourceMetadataKeyEnum.UPDATED.put((IResource) theResourceTarget, theEntitySource.getUpdated()); + ResourceMetadataKeyEnum.VERSION.put(theResourceTarget, id.getVersionIdPart()); + ResourceMetadataKeyEnum.UPDATED.put(theResourceTarget, theEntitySource.getUpdated()); } else { IBaseMetaType meta = theResourceTarget.getMeta(); meta.setVersionId(id.getVersionIdPart()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java index 6e0280143bb0..c6d7ebdb7cf4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java @@ -33,10 +33,12 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken; import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; +import ca.uhn.fhir.jpa.search.ResourceSearchUrlSvc; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.util.FhirTerser; @@ -114,6 +116,9 @@ public class TransactionProcessor extends BaseTransactionProcessor { @Autowired private MatchUrlService myMatchUrlService; + @Autowired + private ResourceSearchUrlSvc myResourceSearchUrlSvc; + @Autowired private IRequestPartitionHelperSvc myRequestPartitionSvc; @@ -224,6 +229,16 @@ private void preFetch( 
systemDao.preFetchResources(JpaPid.fromLongList(idsToPreFetch), true); } + @SuppressWarnings("rawtypes") + protected void postTransactionProcess(TransactionDetails theTransactionDetails) { + Set resourceIds = theTransactionDetails.getUpdatedResourceIds(); + if (resourceIds != null && !resourceIds.isEmpty()) { + List ids = resourceIds.stream().map(r -> (Long) r.getId()).collect(Collectors.toList()); + + myResourceSearchUrlSvc.deleteByResIds(ids); + } + } + private void preFetchResourcesById( TransactionDetails theTransactionDetails, List theEntries, diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java index 765dc33f09b5..f189daf38475 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java @@ -38,10 +38,8 @@ public interface IResourceHistoryTableDao extends JpaRepository findAllVersionsForResourceIdInOrder(@Param("resId") Long theId); - @Query( - "SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :id AND t.myResourceVersion = :version") - ResourceHistoryTable findForIdAndVersionAndFetchProvenance( - @Param("id") long theId, @Param("version") long theVersion); + @Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :id AND t.myResourceVersion = :version") + ResourceHistoryTable findForIdAndVersion(@Param("id") long theId, @Param("version") long theVersion); @Query( "SELECT t.myId FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion") @@ -49,8 +47,8 @@ Slice findForResourceId( Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion); @Query( - "SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE 
t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion") - Slice findForResourceIdAndReturnEntitiesAndFetchProvenance( + "SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion") + Slice findAllVersionsExceptSpecificForResourcePid( Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion); @Query("" + "SELECT v.myId FROM ResourceHistoryTable v " @@ -91,4 +89,10 @@ void updateVersion( @Query( "UPDATE ResourceHistoryTable r SET r.myResourceTextVc = null, r.myResource = :text, r.myEncoding = 'JSONC' WHERE r.myId = :pid") void updateNonInlinedContents(@Param("text") byte[] theText, @Param("pid") long thePid); + + @Query("SELECT v FROM ResourceHistoryTable v " + "JOIN FETCH v.myResourceTable t " + + "WHERE v.myResourceId IN (:pids) " + + "AND t.myVersion = v.myResourceVersion") + List findCurrentVersionsByResourcePidsAndFetchResourceTable( + @Param("pids") List theVersionlessPids); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java index 01f2bb3e8f31..4b72b9730d86 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java @@ -25,9 +25,15 @@ import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; +import java.util.Collection; + public interface IResourceHistoryTagDao extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("DELETE FROM ResourceHistoryTag t WHERE t.myResourceHistoryPid = :historyPid") void deleteByPid(@Param("historyPid") Long theResourceHistoryTablePid); + + @Query( + "SELECT t FROM ResourceHistoryTag t INNER JOIN FETCH t.myTag WHERE t.myResourceHistory.myId IN (:historyPids)") + Collection 
findByVersionIds(@Param("historyPids") Collection theIdList); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchUrlDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchUrlDao.java index e73db69698fc..3ea3431cf445 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchUrlDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchUrlDao.java @@ -26,6 +26,7 @@ import org.springframework.data.repository.query.Param; import java.util.Date; +import java.util.List; public interface IResourceSearchUrlDao extends JpaRepository, IHapiFhirJpaRepository { @@ -36,4 +37,8 @@ public interface IResourceSearchUrlDao extends JpaRepository theIds); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java index 98e9471a18ca..e82ca24df13e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.dao.data; import ca.uhn.fhir.jpa.entity.SearchResult; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import com.google.errorprone.annotations.CanIgnoreReturnValue; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; @@ -28,6 +29,7 @@ import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; +import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -55,4 +57,16 @@ int deleteBySearchIdInRange( @Query("SELECT count(r) FROM SearchResult r WHERE r.mySearchPid = :search") int countForSearch(@Param("search") Long theSearchPid); + + /** + * Converts a response from {@link #findWithSearchPid(Long, Pageable)} to + * a List of JpaPid 
objects + */ + static List toJpaPidList(List theArrays) { + List retVal = new ArrayList<>(theArrays.size()); + for (Long next : theArrays) { + retVal.add(JpaPid.fromId(next)); + } + return retVal; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java index f8d63002793a..81d394319fcb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java @@ -321,10 +321,12 @@ protected int expungeEverythingByTypeWithoutPurging( nativeQuery.setMaxResults(800); List pids = nativeQuery.getResultList(); - nativeQuery = myEntityManager.createQuery("DELETE FROM " + theEntityType.getSimpleName() - + " WHERE " + idProperty + " IN (:pids)"); - nativeQuery.setParameter("pids", pids); - nativeQuery.executeUpdate(); + if (!pids.isEmpty()) { + nativeQuery = myEntityManager.createQuery("DELETE FROM " + theEntityType.getSimpleName() + + " WHERE " + idProperty + " IN (:pids)"); + nativeQuery.setParameter("pids", pids); + nativeQuery.executeUpdate(); + } return pids.size(); }); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java index 860113cec067..ab47125ca870 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java @@ -45,6 +45,7 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao; import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity; import 
ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.util.MemoryCacheService; @@ -71,6 +72,7 @@ import java.util.Collections; import java.util.List; +import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; @Service @@ -159,8 +161,7 @@ public List findHistoricalVersionsOfNonDeletedResources( Slice ids; if (theJpaPid != null && theJpaPid.getId() != null) { if (theJpaPid.getVersion() != null) { - ids = toSlice(myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( - theJpaPid.getId(), theJpaPid.getVersion())); + ids = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theJpaPid.getId(), theJpaPid.getVersion())); } else { ids = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResourceId(page, theJpaPid.getId()); } @@ -239,9 +240,10 @@ private void expungeHistoricalVersion( callHooks(theRequestDetails, theRemainingCount, version, id); - if (version.getProvenance() != null) { - myResourceHistoryProvenanceTableDao.deleteByPid( - version.getProvenance().getId()); + if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) { + Optional provenanceOpt = + myResourceHistoryProvenanceTableDao.findById(theNextVersionId); + provenanceOpt.ifPresent(entity -> myResourceHistoryProvenanceTableDao.deleteByPid(entity.getId())); } myResourceHistoryTagDao.deleteByPid(version.getId()); @@ -302,8 +304,8 @@ protected void expungeCurrentVersionOfResource( RequestDetails theRequestDetails, Long theResourceId, AtomicInteger theRemainingCount) { ResourceTable resource = myResourceTableDao.findById(theResourceId).orElseThrow(IllegalStateException::new); - ResourceHistoryTable currentVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( - resource.getId(), resource.getVersion()); + ResourceHistoryTable currentVersion = + myResourceHistoryTableDao.findForIdAndVersion(resource.getId(), resource.getVersion()); if (currentVersion != null) { 
expungeHistoricalVersion(theRequestDetails, currentVersion.getId(), theRemainingCount); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java index c4542ea8aaca..9ad2c34aa3ef 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java @@ -61,7 +61,6 @@ import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.r4.model.IdType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -70,13 +69,12 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.Date; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -87,7 +85,7 @@ /** * This class is used to convert between PIDs (the internal primary key for a particular resource as - * stored in the {@link ca.uhn.fhir.jpa.model.entity.ResourceTable HFJ_RESOURCE} table), and the + * stored in the {@link ResourceTable HFJ_RESOURCE} table), and the * public ID that a resource has. *

* These IDs are sometimes one and the same (by default, a resource that the server assigns the ID of @@ -150,20 +148,44 @@ public IResourceLookup resolveResourceIdentity( throws ResourceNotFoundException { IIdType id; + boolean untyped; if (theResourceType != null) { + untyped = false; id = newIdType(theResourceType + "/" + theResourceId); } else { + /* + * This shouldn't be common, but we need to be able to handle it. + * The only real known use case currently is when handing references + * in searches where the client didn't qualify the ID. E.g. + * /Provenance?target=A,B,C + * We emit a warning in this case that they should be qualfying the + * IDs, but we do stil allow it. + */ + untyped = true; id = newIdType(theResourceId); } List ids = List.of(id); Map> outcome = resolveResourceIdentities(theRequestPartitionId, ids, theMode); // We only pass 1 input in so only 0..1 will come back - if (!outcome.containsKey(id)) { + Validate.isTrue(outcome.size() <= 1, "Unexpected output size %s for ID: %s", outcome.size(), ids); + + IResourceLookup retVal; + if (untyped) { + if (outcome.isEmpty()) { + retVal = null; + } else { + retVal = outcome.values().iterator().next(); + } + } else { + retVal = outcome.get(id); + } + + if (retVal == null) { throw new ResourceNotFoundException(Msg.code(2001) + "Resource " + id + " is not known"); } - return outcome.get(id); + return retVal; } @Nonnull @@ -180,7 +202,11 @@ public Map> resolveResourceIdentities( } Collection ids = new ArrayList<>(theIds); - ids.forEach(id -> Validate.isTrue(id.hasIdPart())); + for (IIdType id : theIds) { + if (!id.hasIdPart()) { + throw new InvalidRequestException(Msg.code(1101) + "Parameter value missing in request"); + } + } RequestPartitionId requestPartitionId = replaceDefault(theRequestPartitionId); ListMultimap> idToLookup = @@ -197,16 +223,16 @@ public Map> resolveResourceIdentities( } // Convert the multimap into a simple map - Map> retVal = new HashMap<>(); + Map> retVal = new 
HashMap<>(idToLookup.size()); for (Map.Entry> next : idToLookup.entries()) { - if (next.getValue().getDeleted() != null) { + IResourceLookup nextLookup = next.getValue(); + + IIdType resourceId = myFhirCtx.getVersion().newIdType(nextLookup.getResourceType(), nextLookup.getFhirId()); + if (nextLookup.getDeleted() != null) { if (theMode.isFailOnDeleted()) { String msg = myFhirCtx .getLocalizer() - .getMessageSanitized( - IdHelperService.class, - "deletedId", - next.getKey().getValue()); + .getMessageSanitized(IdHelperService.class, "deletedId", resourceId.getValue()); throw new ResourceGoneException(Msg.code(2572) + msg); } if (!theMode.isIncludeDeleted()) { @@ -214,7 +240,9 @@ public Map> resolveResourceIdentities( } } - IResourceLookup previousValue = retVal.put(next.getKey(), next.getValue()); + nextLookup.getPersistentId().setAssociatedResourceId(resourceId); + + IResourceLookup previousValue = retVal.put(resourceId, nextLookup); if (previousValue != null) { /* * This means that either: @@ -224,11 +252,7 @@ public Map> resolveResourceIdentities( * with the same ID. * 2. The unique constraint on the FHIR_ID column has been dropped */ - ourLog.warn( - "Resource ID[{}] corresponds to lookups: {} and {}", - next.getKey(), - previousValue, - next.getValue()); + ourLog.warn("Resource ID[{}] corresponds to lookups: {} and {}", resourceId, previousValue, nextLookup); String msg = myFhirCtx.getLocalizer().getMessage(IdHelperService.class, "nonUniqueForcedId"); throw new PreconditionFailedException(Msg.code(1099) + msg); } @@ -320,26 +344,15 @@ private void resolveResourceIdentitiesForFhirIdsUsingDatabase( // one create one clause per id. 
List innerIdPredicates = new ArrayList<>(theIdsToResolve.size()); - boolean haveUntypedIds = false; for (IIdType next : theIdsToResolve) { - if (!next.hasResourceType()) { - haveUntypedIds = true; - } - List idPredicates = new ArrayList<>(2); - if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC - && next.isIdPartValidLong()) { - Predicate typeCriteria = cb.equal(from.get("myId"), next.getIdPartAsLong()); + if (isNotBlank(next.getResourceType())) { + Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType()); idPredicates.add(typeCriteria); - } else { - if (isNotBlank(next.getResourceType())) { - Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType()); - idPredicates.add(typeCriteria); - } - Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart()); - idPredicates.add(idCriteria); } + Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart()); + idPredicates.add(idCriteria); innerIdPredicates.add(cb.and(idPredicates.toArray(EMPTY_PREDICATE_ARRAY))); } @@ -357,18 +370,13 @@ private void resolveResourceIdentitiesForFhirIdsUsingDatabase( Integer partitionId = nextId.get(4, Integer.class); if (resourcePid != null) { JpaResourceLookup lookup = new JpaResourceLookup( - resourceType, resourcePid, deletedAd, PartitionablePartitionId.with(partitionId, null)); + resourceType, fhirId, resourcePid, deletedAd, PartitionablePartitionId.with(partitionId, null)); MemoryCacheService.ForcedIdCacheKey nextKey = new MemoryCacheService.ForcedIdCacheKey(resourceType, fhirId, theRequestPartitionId); IIdType id = nextKey.toIdType(myFhirCtx); theMapToPopulate.put(id, lookup); - if (haveUntypedIds) { - id = nextKey.toIdTypeWithoutResourceType(myFhirCtx); - theMapToPopulate.put(id, lookup); - } - List> valueToCache = theMapToPopulate.get(id); myMemoryCacheService.putAfterCommit( MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, nextKey, 
valueToCache); @@ -376,78 +384,6 @@ private void resolveResourceIdentitiesForFhirIdsUsingDatabase( } } - /** - * Returns a mapping of Id -> IResourcePersistentId. - * If any resource is not found, it will throw ResourceNotFound exception (and no map will be returned) - * Optionally filters out deleted resources. - */ - @Override - @Nonnull - public Map resolveResourcePersistentIds( - @Nonnull RequestPartitionId theRequestPartitionId, - String theResourceType, - List theIds, - ResolveIdentityMode theMode) { - assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive(); - Validate.notNull(theIds, "theIds cannot be null"); - Validate.isTrue(!theIds.isEmpty(), "theIds must not be empty"); - - Map retVals = new HashMap<>(); - for (String id : theIds) { - JpaPid retVal; - if (!idRequiresForcedId(id)) { - // is already a PID - retVal = JpaPid.fromId(Long.parseLong(id)); - retVals.put(id, retVal); - } else { - // is a forced id - // we must resolve! - if (myStorageSettings.isDeleteEnabled()) { - retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theMode) - .getPersistentId(); - retVals.put(id, retVal); - } else { - // fetch from cache... adding to cache if not available - String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, id); - retVal = myMemoryCacheService.getThenPutAfterCommit( - MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, t -> { - List ids = Collections.singletonList(new IdType(theResourceType, id)); - // fetches from cache using a function that checks cache first... - List resolvedIds = - resolveResourcePersistentIdsWithCache(theRequestPartitionId, ids); - if (resolvedIds.isEmpty()) { - throw new ResourceNotFoundException(Msg.code(1100) + ids.get(0)); - } - return resolvedIds.get(0); - }); - retVals.put(id, retVal); - } - } - } - - return retVals; - } - - /** - * Given a resource type and ID, determines the internal persistent ID for the resource. 
- * Optionally filters out deleted resources. - * - * @throws ResourceNotFoundException If the ID can not be found - */ - @Nonnull - @Override - public JpaPid resolveResourcePersistentIds( - @Nonnull RequestPartitionId theRequestPartitionId, - String theResourceType, - String theId, - ResolveIdentityMode theMode) { - Validate.notNull(theId, "theId must not be null"); - - Map retVal = resolveResourcePersistentIds( - theRequestPartitionId, theResourceType, Collections.singletonList(theId), theMode); - return retVal.get(theId); // should be only one - } - /** * Returns true if the given resource ID should be stored in a forced ID. Under default config * (meaning client ID strategy is {@link JpaStorageSettings.ClientIdStrategyEnum#ALPHANUMERIC}) @@ -461,132 +397,6 @@ public boolean idRequiresForcedId(String theId) { || !isValidPid(theId); } - @Nonnull - private String toForcedIdToPidKey( - @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) { - return RequestPartitionId.stringifyForKey(theRequestPartitionId) + "/" + theResourceType + "/" + theId; - } - - /** - * Given a collection of resource IDs (resource type + id), resolves the internal persistent IDs. - *

- * This implementation will always try to use a cache for performance, meaning that it can resolve resources that - * are deleted (but note that forced IDs can't change, so the cache can't return incorrect results) - */ - @Override - @Nonnull - public List resolveResourcePersistentIdsWithCache( - RequestPartitionId theRequestPartitionId, List theIds) { - boolean onlyForcedIds = false; - return resolveResourcePersistentIdsWithCache(theRequestPartitionId, theIds, onlyForcedIds); - } - - /** - * Given a collection of resource IDs (resource type + id), resolves the internal persistent IDs. - *

- * This implementation will always try to use a cache for performance, meaning that it can resolve resources that - * are deleted (but note that forced IDs can't change, so the cache can't return incorrect results) - * - * @param theOnlyForcedIds If true, resources which are not existing forced IDs will not be resolved - */ - @Override - @Nonnull - public List resolveResourcePersistentIdsWithCache( - @Nonnull RequestPartitionId theRequestPartitionId, List theIds, boolean theOnlyForcedIds) { - assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive(); - - List retVal = new ArrayList<>(theIds.size()); - - for (IIdType id : theIds) { - if (!id.hasIdPart()) { - throw new InvalidRequestException(Msg.code(1101) + "Parameter value missing in request"); - } - } - - if (!theIds.isEmpty()) { - Set idsToCheck = new HashSet<>(theIds.size()); - for (IIdType nextId : theIds) { - if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) { - if (nextId.isIdPartValidLong()) { - if (!theOnlyForcedIds) { - JpaPid jpaPid = JpaPid.fromId(nextId.getIdPartAsLong()); - jpaPid.setAssociatedResourceId(nextId); - retVal.add(jpaPid); - } - continue; - } - } - - String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getIdPart()); - JpaPid cachedId = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key); - if (cachedId != null) { - retVal.add(cachedId); - continue; - } - - idsToCheck.add(nextId); - } - new QueryChunker(); - TaskChunker.chunk( - idsToCheck, - SearchBuilder.getMaximumPageSize() / 2, - ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal)); - } - - return retVal; - } - - private void doResolvePersistentIds( - RequestPartitionId theRequestPartitionId, List theIds, List theOutputListToPopulate) { - CriteriaBuilder cb = myEntityManager.getCriteriaBuilder(); - CriteriaQuery criteriaQuery = cb.createTupleQuery(); - 
Root from = criteriaQuery.from(ResourceTable.class); - - /* - * IDX_RES_FHIR_ID covers these columns, but RES_ID is only INCLUDEd. - * Only PG, and MSSql support INCLUDE COLUMNS. - * @see AddIndexTask.generateSql - */ - criteriaQuery.multiselect(from.get("myId"), from.get("myResourceType"), from.get("myFhirId")); - - // one create one clause per id. - List predicates = new ArrayList<>(theIds.size()); - for (IIdType next : theIds) { - - List andPredicates = new ArrayList<>(3); - - if (isNotBlank(next.getResourceType())) { - Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType()); - andPredicates.add(typeCriteria); - } - - Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart()); - andPredicates.add(idCriteria); - getOptionalPartitionPredicate(theRequestPartitionId, cb, from).ifPresent(andPredicates::add); - predicates.add(cb.and(andPredicates.toArray(EMPTY_PREDICATE_ARRAY))); - } - - // join all the clauses as OR - criteriaQuery.where(cb.or(predicates.toArray(EMPTY_PREDICATE_ARRAY))); - - TypedQuery query = myEntityManager.createQuery(criteriaQuery); - List results = query.getResultList(); - for (Tuple nextId : results) { - // Check if the nextId has a resource ID. It may have a null resource ID if a commit is still pending. - Long resourceId = nextId.get(0, Long.class); - String resourceType = nextId.get(1, String.class); - String forcedId = nextId.get(2, String.class); - if (resourceId != null) { - JpaPid jpaPid = JpaPid.fromId(resourceId); - populateAssociatedResourceId(resourceType, forcedId, jpaPid); - theOutputListToPopulate.add(jpaPid); - - String key = toForcedIdToPidKey(theRequestPartitionId, resourceType, forcedId); - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, jpaPid); - } - } - } - /** * Return optional predicate for searching on forcedId * 1. 
If the partition mode is ALLOWED_UNQUALIFIED, the return optional predicate will be empty, so search is across all partitions. @@ -609,7 +419,7 @@ private Optional getOptionalPartitionPredicate( return Optional.of(partitionIdNullCriteria); } else { Predicate partitionIdCriteria = from.get("myPartitionIdValue") - .in(partitionIds.stream().filter(t -> t != null).collect(Collectors.toList())); + .in(partitionIds.stream().filter(Objects::nonNull).collect(Collectors.toList())); return Optional.of(cb.or(partitionIdCriteria, partitionIdNullCriteria)); } } else { @@ -719,10 +529,14 @@ public PersistentIdToForcedIdMap translatePidsToForcedIds(Set th } /** - * Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods + * This method can be called to pre-emptively add entries to the ID cache. It should + * be called by DAO methods if they are creating or changing the deleted status + * of a resource. This method returns immediately, but the data is not + * added to the internal caches until the current DB transaction is successfully + * committed, and nothing is added if the transaction rolls back. 
*/ @Override - public void addResolvedPidToFhirId( + public void addResolvedPidToFhirIdAfterCommit( @Nonnull JpaPid theJpaPid, @Nonnull RequestPartitionId theRequestPartitionId, @Nonnull String theResourceType, @@ -736,11 +550,9 @@ public void addResolvedPidToFhirId( MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theJpaPid.getId(), Optional.of(theResourceType + "/" + theFhirId)); - String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theFhirId); - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theJpaPid); JpaResourceLookup lookup = new JpaResourceLookup( - theResourceType, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId()); + theResourceType, theFhirId, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId()); MemoryCacheService.ForcedIdCacheKey fhirIdKey = new MemoryCacheService.ForcedIdCacheKey(theResourceType, theFhirId, theRequestPartitionId); @@ -763,13 +575,6 @@ public void setPartitionSettingsForUnitTest(PartitionSettings thePartitionSettin myPartitionSettings = thePartitionSettings; } - @Override - @Nonnull - public List getPidsOrThrowException( - @Nonnull RequestPartitionId theRequestPartitionId, List theIds) { - return resolveResourcePersistentIdsWithCache(theRequestPartitionId, theIds); - } - @Override @Nullable public JpaPid getPidOrNull(@Nonnull RequestPartitionId theRequestPartitionId, IBaseResource theResource) { @@ -792,17 +597,6 @@ public JpaPid getPidOrNull(@Nonnull RequestPartitionId theRequestPartitionId, IB return retVal; } - @Override - @Nonnull - public JpaPid getPidOrThrowException(@Nonnull RequestPartitionId theRequestPartitionId, IIdType theId) { - List ids = Collections.singletonList(theId); - List resourcePersistentIds = resolveResourcePersistentIdsWithCache(theRequestPartitionId, ids); - if (resourcePersistentIds.isEmpty()) { - throw new InvalidRequestException(Msg.code(2295) + "Invalid ID was provided: [" + theId.getIdPart() + "]"); - } 
- return resourcePersistentIds.get(0); - } - @Override @Nonnull public JpaPid getPidOrThrowException(@Nonnull IAnyResource theResource) { @@ -861,15 +655,6 @@ private IIdType newIdType(String theValue) { return retVal; } - public static boolean isValidPid(IIdType theId) { - if (theId == null) { - return false; - } - - String idPart = theId.getIdPart(); - return isValidPid(idPart); - } - public static boolean isValidPid(String theIdPart) { return StringUtils.isNumeric(theIdPart); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java index a6efdc8fc3ee..dd97f814907e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java @@ -22,6 +22,7 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.data.IMdmLinkJpaRepository; import ca.uhn.fhir.jpa.entity.HapiFhirEnversRevision; import ca.uhn.fhir.jpa.entity.MdmLink; @@ -455,9 +456,13 @@ public List> getHistoryForIds( @Nonnull private List convertToLongIds(List theMdmHistorySearchParameters) { return myIdHelperService - .getPidsOrThrowException(RequestPartitionId.allPartitions(), theMdmHistorySearchParameters) + .resolveResourceIdentities( + RequestPartitionId.allPartitions(), + theMdmHistorySearchParameters, + ResolveIdentityMode.includeDeleted().cacheOk()) + .values() .stream() - .map(JpaPid::getId) + .map(t -> t.getPersistentId().getId()) .collect(Collectors.toUnmodifiableList()); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java index 8b0b17b7f9fd..42285f6f071c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java @@ -19,6 +19,7 @@ */ package ca.uhn.fhir.jpa.dao.search; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor; import org.hibernate.search.engine.search.query.SearchScroll; import org.hibernate.search.engine.search.query.SearchScrollResult; @@ -57,12 +58,12 @@ public boolean hasNext() { } @Override - public Long next() { + public JpaPid next() { Long result = myCurrentIterator.next(); // was this the last in the current scroll page? if (!myCurrentIterator.hasNext()) { advanceNextScrollPage(); } - return result; + return JpaPid.fromId(result); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java deleted file mode 100644 index e53fa57ecad3..000000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2024 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ -package ca.uhn.fhir.jpa.entity; - -import ca.uhn.fhir.context.FhirVersionEnum; -import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; -import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; -import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum; -import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; -import ca.uhn.fhir.jpa.model.entity.ResourceTable; -import ca.uhn.fhir.model.primitive.IdDt; -import ca.uhn.fhir.model.primitive.InstantDt; -import ca.uhn.fhir.rest.api.Constants; -import jakarta.annotation.Nullable; -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.EnumType; -import jakarta.persistence.Enumerated; -import jakarta.persistence.Id; -import jakarta.persistence.Lob; -import jakarta.persistence.Temporal; -import jakarta.persistence.TemporalType; -import org.hibernate.annotations.Immutable; -import org.hibernate.annotations.Subselect; - -import java.io.Serializable; -import java.util.Date; - -@SuppressWarnings("SqlDialectInspection") -@Entity -@Immutable -// Ideally, all tables and columns should be in UPPERCASE if we ever choose to use a case-sensitive collation for MSSQL -// and there's a risk that queries on lowercase database objects fail. 
-@Subselect("SELECT h.PID as PID, " - + " r.RES_ID as RES_ID, " - + " h.RES_TYPE as RES_TYPE, " - + " h.RES_VERSION as RES_VERSION, " - // FHIR version - + " h.RES_VER as RES_VER, " - // resource version - + " h.HAS_TAGS as HAS_TAGS, " - + " h.RES_DELETED_AT as RES_DELETED_AT, " - + " h.RES_PUBLISHED as RES_PUBLISHED, " - + " h.RES_UPDATED as RES_UPDATED, " - + " h.RES_TEXT as RES_TEXT, " - + " h.RES_TEXT_VC as RES_TEXT_VC, " - + " h.RES_ENCODING as RES_ENCODING, " - + " h.PARTITION_ID as PARTITION_ID, " - + " p.SOURCE_URI as PROV_SOURCE_URI," - + " p.REQUEST_ID as PROV_REQUEST_ID," - + " r.FHIR_ID as FHIR_ID " - + "FROM HFJ_RESOURCE r " - + " INNER JOIN HFJ_RES_VER h ON r.RES_ID = h.RES_ID and r.RES_VER = h.RES_VER" - + " LEFT OUTER JOIN HFJ_RES_VER_PROV p ON p.RES_VER_PID = h.PID ") -public class ResourceSearchView implements IBaseResourceEntity, Serializable { - - private static final long serialVersionUID = 1L; - - @Id - @Column(name = "PID") - private Long myId; - - @Column(name = "RES_ID") - private Long myResourceId; - - @Column(name = "RES_TYPE", length = Constants.MAX_RESOURCE_NAME_LENGTH) - private String myResourceType; - - @Column(name = "RES_VERSION") - @Enumerated(EnumType.STRING) - private FhirVersionEnum myFhirVersion; - - @Column(name = "RES_VER") - private Long myResourceVersion; - - @Column(name = "PROV_REQUEST_ID", length = Constants.REQUEST_ID_LENGTH) - private String myProvenanceRequestId; - - @Column(name = "PROV_SOURCE_URI", length = ResourceHistoryTable.SOURCE_URI_LENGTH) - private String myProvenanceSourceUri; - - @Column(name = "HAS_TAGS") - private boolean myHasTags; - - @Column(name = "RES_DELETED_AT") - @Temporal(TemporalType.TIMESTAMP) - private Date myDeleted; - - @Temporal(TemporalType.TIMESTAMP) - @Column(name = "RES_PUBLISHED") - private Date myPublished; - - @Temporal(TemporalType.TIMESTAMP) - @Column(name = "RES_UPDATED") - private Date myUpdated; - - @Column(name = "RES_TEXT") - @Lob() - private byte[] myResource; - - 
@Column(name = "RES_TEXT_VC") - private String myResourceTextVc; - - @Column(name = "RES_ENCODING") - @Enumerated(EnumType.STRING) - private ResourceEncodingEnum myEncoding; - - @Column(name = "FHIR_ID", length = ResourceTable.MAX_FORCED_ID_LENGTH) - private String myFhirId; - - @Column(name = "PARTITION_ID") - private Integer myPartitionId; - - public ResourceSearchView() { - // public constructor for Hibernate - } - - public String getResourceTextVc() { - return myResourceTextVc; - } - - public String getProvenanceRequestId() { - return myProvenanceRequestId; - } - - public String getProvenanceSourceUri() { - return myProvenanceSourceUri; - } - - @Override - public Date getDeleted() { - return myDeleted; - } - - public void setDeleted(Date theDate) { - myDeleted = theDate; - } - - @Override - public FhirVersionEnum getFhirVersion() { - return myFhirVersion; - } - - public void setFhirVersion(FhirVersionEnum theFhirVersion) { - myFhirVersion = theFhirVersion; - } - - public String getFhirId() { - return myFhirId; - } - - @Override - public Long getId() { - return myResourceId; - } - - @Override - public IdDt getIdDt() { - if (myFhirId == null) { - Long id = myResourceId; - return new IdDt(myResourceType + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + getVersion()); - } else { - return new IdDt(getResourceType() + '/' + getFhirId() + '/' + Constants.PARAM_HISTORY + '/' + getVersion()); - } - } - - @Override - public InstantDt getPublished() { - if (myPublished != null) { - return new InstantDt(myPublished); - } else { - return null; - } - } - - public void setPublished(Date thePublished) { - myPublished = thePublished; - } - - @Override - public Long getResourceId() { - return myResourceId; - } - - @Override - public String getResourceType() { - return myResourceType; - } - - @Override - public InstantDt getUpdated() { - return new InstantDt(myUpdated); - } - - @Override - public Date getUpdatedDate() { - return myUpdated; - } - - @Override - public long 
getVersion() { - return myResourceVersion; - } - - @Override - public boolean isHasTags() { - return myHasTags; - } - - @Override - @Nullable - public PartitionablePartitionId getPartitionId() { - if (myPartitionId != null) { - return new PartitionablePartitionId(myPartitionId, null); - } else { - return null; - } - } - - public byte[] getResource() { - return myResource; - } - - public ResourceEncodingEnum getEncoding() { - return myEncoding; - } -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java index d20f61a7c6ba..5691eb52902c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java @@ -29,16 +29,34 @@ public class JpaResourceLookup implements IResourceLookup { private final String myResourceType; - private final Long myResourcePid; + private final JpaPid myResourcePid; private final Date myDeletedAt; private final PartitionablePartitionId myPartitionablePartitionId; + private final String myFhirId; public JpaResourceLookup( String theResourceType, + String theFhirId, Long theResourcePid, Date theDeletedAt, PartitionablePartitionId thePartitionablePartitionId) { myResourceType = theResourceType; + myFhirId = theFhirId; + myDeletedAt = theDeletedAt; + myPartitionablePartitionId = thePartitionablePartitionId; + + myResourcePid = JpaPid.fromId(theResourcePid); + myResourcePid.setPartitionablePartitionId(myPartitionablePartitionId); + } + + public JpaResourceLookup( + String theResourceType, + String theFhirId, + JpaPid theResourcePid, + Date theDeletedAt, + PartitionablePartitionId thePartitionablePartitionId) { + myResourceType = theResourceType; + myFhirId = theFhirId; myResourcePid = theResourcePid; myDeletedAt = theDeletedAt; myPartitionablePartitionId = 
thePartitionablePartitionId; @@ -49,6 +67,11 @@ public String getResourceType() { return myResourceType; } + @Override + public String getFhirId() { + return myFhirId; + } + @Override public Date getDeleted() { return myDeletedAt; @@ -56,10 +79,7 @@ public Date getDeleted() { @Override public JpaPid getPersistentId() { - JpaPid jpaPid = JpaPid.fromId(myResourcePid); - jpaPid.setPartitionablePartitionId(myPartitionablePartitionId); - - return jpaPid; + return myResourcePid; } @Override diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java index 18f36ca0fb72..46fcec9ee224 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java @@ -559,7 +559,7 @@ public NpmPackage installPackage(PackageInstallationSpec theInstallationSpec) th } @Override - @Transactional + @Transactional(readOnly = true) public IBaseResource loadPackageAssetByUrl(FhirVersionEnum theFhirVersion, String theCanonicalUrl) { String canonicalUrl = theCanonicalUrl; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index 77f4f7e3b1ce..bc5eaf628783 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -22,6 +22,7 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.entity.PartitionEntity; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import 
org.apache.commons.lang3.Validate; @@ -36,17 +37,27 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc { @Autowired IPartitionLookupSvc myPartitionConfigSvc; + @Autowired + PartitionSettings myPartitionSettings; + public RequestPartitionHelperSvc() {} @Override public RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId) { List names = null; + List partitionIds = null; for (int i = 0; i < theRequestPartitionId.getPartitionIds().size(); i++) { PartitionEntity partition; Integer id = theRequestPartitionId.getPartitionIds().get(i); if (id == null) { partition = null; + if (myPartitionSettings.getDefaultPartitionId() != null) { + if (partitionIds == null) { + partitionIds = new ArrayList<>(theRequestPartitionId.getPartitionIds()); + } + partitionIds.set(i, myPartitionSettings.getDefaultPartitionId()); + } } else { try { partition = myPartitionConfigSvc.getPartitionById(id); @@ -88,8 +99,12 @@ public RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId th } if (names != null) { + List partitionIdsToUse = theRequestPartitionId.getPartitionIds(); + if (partitionIds != null) { + partitionIdsToUse = partitionIds; + } return RequestPartitionId.forPartitionIdsAndNames( - names, theRequestPartitionId.getPartitionIds(), theRequestPartitionId.getPartitionDate()); + names, partitionIdsToUse, theRequestPartitionId.getPartitionDate()); } return theRequestPartitionId; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java index a03d51791dd1..24498b0fd26e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java @@ -24,11 +24,13 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder; 
import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.entity.SearchTypeEnum; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.search.SearchStatusEnum; import ca.uhn.fhir.jpa.search.builder.tasks.SearchTask; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.param.HistorySearchStyleEnum; +import jakarta.annotation.Nullable; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; @@ -56,7 +58,7 @@ public PersistedJpaBundleProvider newInstance(RequestDetails theRequest, Search public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage( RequestDetails theRequestDetails, SearchTask theTask, - ISearchBuilder theSearchBuilder, + ISearchBuilder theSearchBuilder, RequestPartitionId theRequestPartitionId) { return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean( JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, @@ -69,7 +71,7 @@ public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage( public IBundleProvider history( RequestDetails theRequest, String theResourceType, - Long theResourcePid, + @Nullable JpaPid theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, @@ -88,7 +90,7 @@ public IBundleProvider history( public IBundleProvider history( RequestDetails theRequest, String theResourceType, - Long theResourcePid, + @Nullable JpaPid theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, @@ -103,7 +105,9 @@ public IBundleProvider history( search.setLastUpdated(theRangeStartInclusive, theRangeEndInclusive); search.setUuid(UUID.randomUUID().toString()); search.setResourceType(resourceName); - search.setResourceId(theResourcePid); + if (theResourcePid != null) { + search.setResourceId(theResourcePid.getId()); + } search.setSearchType(SearchTypeEnum.HISTORY); 
search.setStatus(SearchStatusEnum.FINISHED); search.setHistorySearchStyle(searchParameterType); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java index d4e336ab8705..3498e961a6f3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java @@ -23,6 +23,7 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; @@ -34,6 +35,7 @@ import org.springframework.transaction.annotation.Transactional; import java.util.Date; +import java.util.List; /** * This service ensures uniqueness of resources during create or create-on-update @@ -80,8 +82,12 @@ public void deleteEntriesOlderThan(Date theCutoffDate) { * Once a resource is updated or deleted, we can trust that future match checks will find the committed resource in the db. * The use of the constraint table is done, and we can delete it to keep the table small. 
*/ - public void deleteByResId(long theResId) { - myResourceSearchUrlDao.deleteByResId(theResId); + public void deleteByResId(JpaPid theResId) { + myResourceSearchUrlDao.deleteByResId(theResId.getId()); + } + + public void deleteByResIds(List theResIds) { + myResourceSearchUrlDao.deleteByResIds(theResIds); } /** diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java index 5ea034496cb4..452a7a11c2f3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java @@ -19,10 +19,12 @@ */ package ca.uhn.fhir.jpa.search.builder; +import ca.uhn.fhir.jpa.model.dao.JpaPid; + import java.io.Closeable; import java.util.Iterator; -public interface ISearchQueryExecutor extends Iterator, Closeable { +public interface ISearchQueryExecutor extends Iterator, Closeable { /** * Narrow the signature - no IOException allowed. */ diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/JpaPidRowMapper.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/JpaPidRowMapper.java new file mode 100644 index 000000000000..b36c02bfeefe --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/JpaPidRowMapper.java @@ -0,0 +1,47 @@ +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.search.builder; + +import ca.uhn.fhir.jpa.model.dao.JpaPid; +import org.springframework.jdbc.core.RowMapper; + +import java.sql.ResultSet; +import java.sql.SQLException; + +public class JpaPidRowMapper implements RowMapper { + + private final boolean mySelectPartitionId; + + public JpaPidRowMapper(boolean theSelectPartitionId) { + mySelectPartitionId = theSelectPartitionId; + } + + @Override + public JpaPid mapRow(ResultSet theResultSet, int theRowNum) throws SQLException { + if (mySelectPartitionId) { + Integer partitionId = theResultSet.getObject(1, Integer.class); + Long resourceId = theResultSet.getLong(2); + return JpaPid.fromId(resourceId, partitionId); + } else { + Long resourceId = theResultSet.getLong(1); + return JpaPid.fromId(resourceId); + } + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java index e7d688624d22..7f7c7dbbd29c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java @@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.dao.BaseStorageDao; import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel; import ca.uhn.fhir.jpa.model.entity.TagTypeEnum; import 
ca.uhn.fhir.jpa.model.util.UcumServiceUtil; @@ -44,13 +45,13 @@ import ca.uhn.fhir.jpa.search.builder.predicate.CoordsPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ICanMakeMissingParamPredicate; +import ca.uhn.fhir.jpa.search.builder.predicate.ISourcePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ParsedLocationParam; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder; -import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder; @@ -151,6 +152,7 @@ public class QueryStack { private final PartitionSettings myPartitionSettings; private final JpaStorageSettings myStorageSettings; private final EnumSet myReusePredicateBuilderTypes; + private final RequestDetails myRequestDetails; private Map myJoinMap; private Map myParamNameToPredicateBuilderMap; // used for _offset queries with sort, should be removed once the fix is applied to the async path too. 
@@ -161,6 +163,7 @@ public class QueryStack { * Constructor */ public QueryStack( + RequestDetails theRequestDetails, SearchParameterMap theSearchParameters, JpaStorageSettings theStorageSettings, FhirContext theFhirContext, @@ -168,6 +171,7 @@ public QueryStack( ISearchParamRegistry theSearchParamRegistry, PartitionSettings thePartitionSettings) { this( + theRequestDetails, theSearchParameters, theStorageSettings, theFhirContext, @@ -181,6 +185,7 @@ public QueryStack( * Constructor */ private QueryStack( + RequestDetails theRequestDetails, SearchParameterMap theSearchParameters, JpaStorageSettings theStorageSettings, FhirContext theFhirContext, @@ -188,6 +193,7 @@ private QueryStack( ISearchParamRegistry theSearchParamRegistry, PartitionSettings thePartitionSettings, EnumSet theReusePredicateBuilderTypes) { + myRequestDetails = theRequestDetails; myPartitionSettings = thePartitionSettings; assert theSearchParameters != null; assert theStorageSettings != null; @@ -1035,7 +1041,6 @@ private Condition createPredicateFilter( searchParam, Collections.singletonList(new UriParam(theFilter.getValue())), theFilter.getOperation(), - theRequest, theRequestPartitionId); } else if (typeEnum == RestSearchParameterTypeEnum.STRING) { return theQueryStack3.createPredicateString( @@ -1220,7 +1225,6 @@ private Condition createPredicateHas( ResourceLinkPredicateBuilder resourceLinkTableJoin = mySqlBuilder.addReferencePredicateBuilderReversed(this, theSourceJoinColumn); - Condition partitionPredicate = resourceLinkTableJoin.createPartitionIdPredicate(theRequestPartitionId); List paths = resourceLinkTableJoin.createResourceLinkPaths( targetResourceType, paramReference, new ArrayList<>()); @@ -1242,7 +1246,12 @@ private Condition createPredicateHas( .setRequest(theRequest) .setRequestPartitionId(theRequestPartitionId)); - andPredicates.add(toAndPredicate(partitionPredicate, pathPredicate, typePredicate, linkedPredicate)); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + 
andPredicates.add(toAndPredicate(pathPredicate, typePredicate, linkedPredicate)); + } else { + Condition partitionPredicate = resourceLinkTableJoin.createPartitionIdPredicate(theRequestPartitionId); + andPredicates.add(toAndPredicate(partitionPredicate, pathPredicate, typePredicate, linkedPredicate)); + } } return toAndPredicate(andPredicates); @@ -1889,7 +1898,6 @@ private Condition createIndexPredicate( theParamDefinition, theOrValues, theOperation, - theRequest, theRequestPartitionId, theSqlBuilder); break; @@ -1954,13 +1962,13 @@ private Condition createPredicateSource( .findFirst(); if (isMissingSourceOptional.isPresent()) { - SourcePredicateBuilder join = + ISourcePredicateBuilder join = getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.LEFT_OUTER); orPredicates.add(join.createPredicateMissingSourceUri()); return toOrPredicate(orPredicates); } // for all other cases we use "INNER JOIN" to match search parameters - SourcePredicateBuilder join = getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.INNER); + ISourcePredicateBuilder join = getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.INNER); for (IQueryParameterType nextParameter : theList) { SourceParam sourceParameter = new SourceParam(nextParameter.getValueAsQueryToken(myFhirContext)); @@ -1980,13 +1988,22 @@ private Condition createPredicateSource( return toOrPredicate(orPredicates); } - private SourcePredicateBuilder getSourcePredicateBuilder( + private ISourcePredicateBuilder getSourcePredicateBuilder( @Nullable DbColumn[] theSourceJoinColumn, SelectQuery.JoinType theJoinType) { + if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) { + return createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.SOURCE, + theSourceJoinColumn, + Constants.PARAM_SOURCE, + () -> mySqlBuilder.addResourceHistoryProvenancePredicateBuilder( + theSourceJoinColumn, theJoinType)) + .getResult(); + } return createOrReusePredicateBuilder( 
PredicateBuilderTypeEnum.SOURCE, theSourceJoinColumn, Constants.PARAM_SOURCE, - () -> mySqlBuilder.addSourcePredicateBuilder(theSourceJoinColumn, theJoinType)) + () -> mySqlBuilder.addResourceHistoryPredicateBuilder(theSourceJoinColumn, theJoinType)) .getResult(); } @@ -2321,7 +2338,6 @@ public Condition createPredicateUri( RuntimeSearchParam theSearchParam, List theList, SearchFilterParser.CompareOperation theOperation, - RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { return createPredicateUri( theSourceJoinColumn, @@ -2330,7 +2346,6 @@ public Condition createPredicateUri( theSearchParam, theList, theOperation, - theRequestDetails, theRequestPartitionId, mySqlBuilder); } @@ -2342,7 +2357,6 @@ public Condition createPredicateUri( RuntimeSearchParam theSearchParam, List theList, SearchFilterParser.CompareOperation theOperation, - RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId, SearchQueryBuilder theSqlBuilder) { @@ -2361,13 +2375,14 @@ public Condition createPredicateUri( } else { UriPredicateBuilder join = theSqlBuilder.addUriPredicateBuilder(theSourceJoinColumn); - Condition predicate = join.addPredicate(theList, paramName, theOperation, theRequestDetails); + Condition predicate = join.addPredicate(theList, paramName, theOperation, myRequestDetails); return join.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); } } public QueryStack newChildQueryFactoryWithFullBuilderReuse() { return new QueryStack( + myRequestDetails, mySearchParameters, myStorageSettings, myFhirContext, @@ -2452,7 +2467,6 @@ public Condition searchForIdsWithAndOr(SearchForIdsParams theSearchForIdsParams) */ private Condition createPredicateResourcePID( DbColumn[] theSourceJoinColumn, List> theAndOrParams) { - DbColumn pidColumn = getResourceIdColumn(theSourceJoinColumn); if (pidColumn == null) { @@ -2662,7 +2676,6 @@ private Condition createPredicateSearchParameter( nextParamDef, nextAnd, 
SearchFilterParser.CompareOperation.eq, - theRequest, theRequestPartitionId)); } break; @@ -2871,12 +2884,13 @@ public void addPredicateCompositeNonUnique(List theIndexStrings, Request // expand out the pids public void addPredicateEverythingOperation( - String theResourceName, List theTypeSourceResourceNames, Long... theTargetPids) { + String theResourceName, List theTypeSourceResourceNames, JpaPid... theTargetPids) { ResourceLinkPredicateBuilder table = mySqlBuilder.addReferencePredicateBuilder(this, null); Condition predicate = table.createEverythingPredicate(theResourceName, theTypeSourceResourceNames, theTargetPids); mySqlBuilder.addPredicate(predicate); mySqlBuilder.getSelect().setIsDistinct(true); + addGrouping(); } public IQueryParameterType newParameterInstance( diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java index 088703dbb828..790a8d5ddc4b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java @@ -33,7 +33,6 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; @@ -43,20 +42,23 @@ import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser; import ca.uhn.fhir.jpa.dao.IResultIterator; import ca.uhn.fhir.jpa.dao.ISearchBuilder; -import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import 
ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException; -import ca.uhn.fhir.jpa.entity.ResourceSearchView; import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; -import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; +import ca.uhn.fhir.jpa.model.entity.BaseTag; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag; import ca.uhn.fhir.jpa.model.entity.ResourceTag; import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.search.SearchConstants; import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor; import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql; @@ -98,11 +100,14 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; +import ca.uhn.fhir.system.HapiSystemProperties; import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.StringUtil; import ca.uhn.fhir.util.UrlUtil; import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; +import com.google.common.collect.MultimapBuilder; import com.healthmarketscience.sqlbuilder.Condition; import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; @@ -113,6 +118,7 @@ import jakarta.persistence.Tuple; import jakarta.persistence.TypedQuery; import jakarta.persistence.criteria.CriteriaBuilder; +import org.apache.commons.collections4.ListUtils; import 
org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.math.NumberUtils; @@ -124,11 +130,8 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.RowMapper; import org.springframework.transaction.support.TransactionSynchronizationManager; -import java.sql.ResultSet; -import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -168,14 +171,19 @@ public class SearchBuilder implements ISearchBuilder { public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE; public static final String RESOURCE_ID_ALIAS = "resource_id"; + public static final String PARTITION_ID_ALIAS = "partition_id"; public static final String RESOURCE_VERSION_ALIAS = "resource_version"; private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class); private static final JpaPid NO_MORE = JpaPid.fromId(-1L); - private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid"; - private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; + private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue"; private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType"; + private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; + private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId"; + private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion"; + public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0]; + public static boolean myUseMaxPageSize50ForTest = false; public static Integer myMaxPageSizeForTests = null; protected final 
IInterceptorBroadcaster myInterceptorBroadcaster; protected final IResourceTagDao myResourceTagDao; @@ -187,7 +195,6 @@ public class SearchBuilder implements ISearchBuilder { private final ISearchParamRegistry mySearchParamRegistry; private final PartitionSettings myPartitionSettings; private final DaoRegistry myDaoRegistry; - private final IResourceSearchViewDao myResourceSearchViewDao; private final FhirContext myContext; private final IIdHelperService myIdHelperService; private final JpaStorageSettings myStorageSettings; @@ -213,6 +220,15 @@ public class SearchBuilder implements ISearchBuilder { @Autowired private IJpaStorageResourceParser myJpaStorageResourceParser; + @Autowired + private IResourceHistoryTableDao myResourceHistoryTableDao; + + @Autowired + private IResourceHistoryTagDao myResourceHistoryTagDao; + + @Autowired + private IRequestPartitionHelperSvc myPartitionHelperSvc; + /** * Constructor */ @@ -228,7 +244,6 @@ public SearchBuilder( IInterceptorBroadcaster theInterceptorBroadcaster, IResourceTagDao theResourceTagDao, DaoRegistry theDaoRegistry, - IResourceSearchViewDao theResourceSearchViewDao, FhirContext theContext, IIdHelperService theIdHelperService, Class theResourceType) { @@ -244,7 +259,6 @@ public SearchBuilder( myInterceptorBroadcaster = theInterceptorBroadcaster; myResourceTagDao = theResourceTagDao; myDaoRegistry = theDaoRegistry; - myResourceSearchViewDao = theResourceSearchViewDao; myContext = theContext; myIdHelperService = theIdHelperService; } @@ -339,7 +353,8 @@ public Long createCountQuery( if (queries.isEmpty()) { return 0L; } else { - return queries.get(0).next(); + JpaPid jpaPid = queries.get(0).next(); + return jpaPid.getId(); } } @@ -462,7 +477,7 @@ private List createQuery( ourLog.trace("Query needs db after HSearch. Chunking."); // Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc. // We break the pids into chunks that fit in the 1k limit for jdbc bind params. 
- new QueryChunker() + new QueryChunker() .chunk( fulltextExecutor, SearchBuilder.getMaximumPageSize(), @@ -568,7 +583,7 @@ private List queryHibernateSearchForEverythingPids(RequestDetails theReq private void doCreateChunkedQueries( SearchParameterMap theParams, - List thePids, + List thePids, Integer theOffset, SortSpec sort, boolean theCount, @@ -584,7 +599,7 @@ private void doCreateChunkedQueries( /** * Combs through the params for any _id parameters and extracts the PIDs for them */ - private void extractTargetPidsFromIdParams(Set theTargetPids) { + private void extractTargetPidsFromIdParams(Set theTargetPids) { // get all the IQueryParameterType objects // for _id -> these should all be StringParam values HashSet ids = new HashSet<>(); @@ -621,8 +636,8 @@ private void extractTargetPidsFromIdParams(Set theTargetPids) { ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled()); // add the pids to targetPids - for (IResourceLookup pid : idToIdentity.values()) { - theTargetPids.add((Long) pid.getPersistentId().getId()); + for (IResourceLookup pid : idToIdentity.values()) { + theTargetPids.add(pid.getPersistentId()); } } @@ -633,11 +648,17 @@ private void createChunkedQuery( Integer theMaximumResults, boolean theCountOnlyFlag, RequestDetails theRequest, - List thePidList, + List thePidList, List theSearchQueryExecutors) { if (myParams.getEverythingMode() != null) { createChunkedQueryForEverythingSearch( - theParams, theOffset, theMaximumResults, theCountOnlyFlag, thePidList, theSearchQueryExecutors); + theRequest, + theParams, + theOffset, + theMaximumResults, + theCountOnlyFlag, + thePidList, + theSearchQueryExecutors); } else { createChunkedQueryNormalSearch( theParams, sort, theOffset, theCountOnlyFlag, theRequest, thePidList, theSearchQueryExecutors); @@ -650,7 +671,7 @@ private void createChunkedQueryNormalSearch( Integer theOffset, boolean theCountOnlyFlag, RequestDetails theRequest, - List thePidList, + List thePidList, List 
theSearchQueryExecutors) { SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( myContext, @@ -662,7 +683,13 @@ private void createChunkedQueryNormalSearch( myDialectProvider, theCountOnlyFlag); QueryStack queryStack3 = new QueryStack( - theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings); + theRequest, + theParams, + myStorageSettings, + myContext, + sqlBuilder, + mySearchParamRegistry, + myPartitionSettings); if (theParams.keySet().size() > 1 || theParams.getSort() != null @@ -768,11 +795,12 @@ private void executeSearch( } private void createChunkedQueryForEverythingSearch( + RequestDetails theRequest, SearchParameterMap theParams, Integer theOffset, Integer theMaximumResults, boolean theCountOnlyFlag, - List thePidList, + List thePidList, List theSearchQueryExecutors) { SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( @@ -786,11 +814,17 @@ private void createChunkedQueryForEverythingSearch( theCountOnlyFlag); QueryStack queryStack3 = new QueryStack( - theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings); + theRequest, + theParams, + myStorageSettings, + myContext, + sqlBuilder, + mySearchParamRegistry, + myPartitionSettings); JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theMaximumResults); - Set targetPids = new HashSet<>(); + Set targetPids = new HashSet<>(); if (myParams.get(IAnyResource.SP_RES_ID) != null) { extractTargetPidsFromIdParams(targetPids); @@ -816,16 +850,8 @@ private void createChunkedQueryForEverythingSearch( String sql = allTargetsSql.getSql(); Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]); - List output = jdbcTemplate.query(sql, args, new RowMapper() { - @Override - public Long mapRow(ResultSet rs, int rowNum) throws SQLException { - if (myPartitionSettings.isPartitioningEnabled()) { - return rs.getLong(2); - } else { - return rs.getLong(1); - } - } - }); + List output = + jdbcTemplate.query(sql, args, new 
JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled())); // we add a search executor to fetch unlinked patients first theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output)); @@ -837,7 +863,7 @@ public Long mapRow(ResultSet rs, int rowNum) throws SQLException { } queryStack3.addPredicateEverythingOperation( - myResourceName, typeSourceResources, targetPids.toArray(new Long[0])); + myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY)); // Add PID list predicate for full text search and/or lastn operation addPidListPredicate(thePidList, sqlBuilder); @@ -858,7 +884,7 @@ public Long mapRow(ResultSet rs, int rowNum) throws SQLException { executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder); } - private void addPidListPredicate(List thePidList, SearchQueryBuilder theSqlBuilder) { + private void addPidListPredicate(List thePidList, SearchQueryBuilder theSqlBuilder) { if (thePidList != null && !thePidList.isEmpty()) { theSqlBuilder.addResourceIdsPredicate(thePidList); } @@ -1142,31 +1168,55 @@ private void doLoadPids( Collection theIncludedPids, List theResourceListToPopulate, boolean theForHistoryOperation, - Map thePosition) { + Map thePosition) { - Map resourcePidToVersion = null; + Map resourcePidToVersion = null; for (JpaPid next : thePids) { if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) { if (resourcePidToVersion == null) { resourcePidToVersion = new HashMap<>(); } - resourcePidToVersion.put((next).getId(), next.getVersion()); + resourcePidToVersion.put(next, next.getVersion()); } } - List versionlessPids = JpaPid.toLongList(thePids); + List versionlessPids = new ArrayList<>(thePids); if (versionlessPids.size() < getMaximumPageSize()) { versionlessPids = normalizeIdListForInClause(versionlessPids); } - // -- get the resource from the searchView - Collection resourceSearchViewList = - myResourceSearchViewDao.findByResourceIds(versionlessPids); + // Load the resource bodies + 
List resourceSearchViewList = + myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable( + JpaPid.toLongList(versionlessPids)); + + /* + * If we have specific versions to load, replace the history entries with the + * correct ones + * + * TODO: this could definitely be made more efficient, probably by not loading the wrong + * version entity first, and by batching the fetches. But this is a fairly infrequently + * used feature, and loading history entities by PK is a very efficient query so it's + * not the end of the world + */ + if (resourcePidToVersion != null) { + for (int i = 0; i < resourceSearchViewList.size(); i++) { + ResourceHistoryTable next = resourceSearchViewList.get(i); + JpaPid resourceId = next.getPersistentId(); + Long version = resourcePidToVersion.get(resourceId); + resourceId.setVersion(version); + if (version != null && !version.equals(next.getVersion())) { + ResourceHistoryTable replacement = + myResourceHistoryTableDao.findForIdAndVersion(next.getResourceId(), version); + resourceSearchViewList.set(i, replacement); + } + } + } // -- preload all tags with tag definition if any - Map> tagMap = getResourceTagMap(resourceSearchViewList); + Map> tagMap = getResourceTagMap(resourceSearchViewList); - for (IBaseResourceEntity next : resourceSearchViewList) { + for (ResourceHistoryTable next : resourceSearchViewList) { if (next.getDeleted() != null) { continue; } @@ -1174,29 +1224,17 @@ private void doLoadPids( Class resourceType = myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); - JpaPid resourceId = JpaPid.fromId(next.getResourceId()); + JpaPid resourceId = next.getPersistentId(); - /* - * If a specific version is requested via an include, we'll replace the current version - * with the specific desired version. This is not the most efficient thing, given that - * we're loading the current version and then turning around and throwing it away again. 
- * This could be optimized and probably should be, but it's not critical given that - * this only applies to includes, which don't tend to be massive in numbers. - */ if (resourcePidToVersion != null) { - Long version = resourcePidToVersion.get(next.getResourceId()); + Long version = resourcePidToVersion.get(resourceId); resourceId.setVersion(version); - if (version != null && !version.equals(next.getVersion())) { - IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceType); - next = (IBaseResourceEntity) - dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null); - } } IBaseResource resource = null; if (next != null) { resource = myJpaStorageResourceParser.toResource( - resourceType, next, tagMap.get(next.getId()), theForHistoryOperation); + resourceType, next, tagMap.get(JpaPid.fromId(next.getResourceId())), theForHistoryOperation); } if (resource == null) { if (next != null) { @@ -1211,7 +1249,7 @@ private void doLoadPids( continue; } - Integer index = thePosition.get(resourceId); + Integer index = thePosition.get(resourceId.getId()); if (index == null) { ourLog.warn("Got back unexpected resource PID {}", resourceId); continue; @@ -1227,40 +1265,93 @@ private void doLoadPids( } } - private Map> getResourceTagMap( - Collection theResourceSearchViewList) { + private Map> getResourceTagMap(Collection theHistoryTables) { - List idList = new ArrayList<>(theResourceSearchViewList.size()); + switch (myStorageSettings.getTagStorageMode()) { + case VERSIONED: + return getPidToTagMapVersioned(theHistoryTables); + case NON_VERSIONED: + return getPidToTagMapUnversioned(theHistoryTables); + case INLINE: + default: + return Map.of(); + } + } + + @Nonnull + private Map> getPidToTagMapVersioned( + Collection theHistoryTables) { + List idList = new ArrayList<>(theHistoryTables.size()); // -- find all resource has tags - for (IBaseResourceEntity resource : theResourceSearchViewList) { - if (resource.isHasTags()) idList.add(resource.getId()); + for 
(ResourceHistoryTable resource : theHistoryTables) { + if (resource.isHasTags()) { + idList.add(resource.getId()); + } + } + + Map> tagMap = new HashMap<>(); + + // -- no tags + if (idList.isEmpty()) { + return tagMap; + } + + // -- get all tags for the idList + Collection tagList = myResourceHistoryTagDao.findByVersionIds(idList); + + // -- build the map, key = resourceId, value = list of ResourceTag + JpaPid resourceId; + Collection tagCol; + for (ResourceHistoryTag tag : tagList) { + + resourceId = JpaPid.fromId(tag.getResourceId()); + tagCol = tagMap.get(resourceId); + if (tagCol == null) { + tagCol = new ArrayList<>(); + tagCol.add(tag); + tagMap.put(resourceId, tagCol); + } else { + tagCol.add(tag); + } } - return getPidToTagMap(idList); + return tagMap; } @Nonnull - private Map> getPidToTagMap(List thePidList) { - Map> tagMap = new HashMap<>(); + private Map> getPidToTagMapUnversioned( + Collection theHistoryTables) { + List idList = new ArrayList<>(theHistoryTables.size()); + + // -- find all resource has tags + for (ResourceHistoryTable resource : theHistoryTables) { + if (resource.isHasTags()) { + idList.add(JpaPid.fromId(resource.getResourceId())); + } + } + + Map> tagMap = new HashMap<>(); // -- no tags - if (thePidList.isEmpty()) return tagMap; + if (idList.isEmpty()) { + return tagMap; + } // -- get all tags for the idList - Collection tagList = myResourceTagDao.findByResourceIds(thePidList); + Collection tagList = myResourceTagDao.findByResourceIds(JpaPid.toLongList(idList)); // -- build the map, key = resourceId, value = list of ResourceTag JpaPid resourceId; - Collection tagCol; + Collection tagCol; for (ResourceTag tag : tagList) { resourceId = JpaPid.fromId(tag.getResourceId()); - tagCol = tagMap.get(resourceId.getId()); + tagCol = tagMap.get(resourceId); if (tagCol == null) { tagCol = new ArrayList<>(); tagCol.add(tag); - tagMap.put(resourceId.getId(), tagCol); + tagMap.put(resourceId, tagCol); } else { tagCol.add(tag); } @@ -1284,9 +1375,9 @@ 
public void loadResourcesByPid( // when running asserts assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids; - Map position = new HashMap<>(); + Map position = new HashMap<>(); for (JpaPid next : thePids) { - position.put(next, theResourceListToPopulate.size()); + position.put(next.getId(), theResourceListToPopulate.size()); theResourceListToPopulate.add(null); } @@ -1402,7 +1493,11 @@ public Set loadIncludes(SearchBuilderLoadIncludesParameters theP return new HashSet<>(); } String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; + String searchPartitionIdFieldName = + reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; + String findPartitionIdFieldName = + reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; String findResourceTypeFieldName = reverseMode ? MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; String findVersionFieldName = null; if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { @@ -1444,9 +1539,11 @@ public Set loadIncludes(SearchBuilderLoadIncludesParameters theP if (matchAll) { loadIncludesMatchAll( findPidFieldName, + findPartitionIdFieldName, findResourceTypeFieldName, findVersionFieldName, searchPidFieldName, + searchPartitionIdFieldName, wantResourceType, reverseMode, hasDesiredResourceTypes, @@ -1461,13 +1558,16 @@ public Set loadIncludes(SearchBuilderLoadIncludesParameters theP nextInclude, fhirContext, findPidFieldName, + findPartitionIdFieldName, findVersionFieldName, searchPidFieldName, + searchPartitionIdFieldName, reverseMode, nextRoundMatches, entityManager, maxCount, - pidsToInclude); + pidsToInclude, + request); } } @@ -1533,13 +1633,16 @@ private void loadIncludesMatchSpecific( Include nextInclude, FhirContext fhirContext, String findPidFieldName, + String findPartitionFieldName, 
String findVersionFieldName, String searchPidFieldName, + String searchPartitionFieldName, boolean reverseMode, List nextRoundMatches, EntityManager entityManager, Integer maxCount, - HashSet pidsToInclude) { + HashSet pidsToInclude, + RequestDetails theRequest) { List paths; // Start replace @@ -1578,6 +1681,13 @@ private void loadIncludesMatchSpecific( if (findVersionFieldName != null) { fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; } + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + fieldsToLoad += ", r."; + fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) + ? "partition_id" + : "target_res_partition_id"; + fieldsToLoad += " as " + PARTITION_ID_ALIAS; + } // Query for includes lookup has 2 cases // Case 1: Where target_resource_id is available in hfj_res_link table for local references @@ -1589,30 +1699,45 @@ private void loadIncludesMatchSpecific( String searchPidFieldSqlColumn = searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; - StringBuilder localReferenceQuery = new StringBuilder("SELECT " + fieldsToLoad + " FROM hfj_res_link r " - + " WHERE r.src_path = :src_path AND " - + " r.target_resource_id IS NOT NULL AND " - + " r." - + searchPidFieldSqlColumn + " IN (:target_pids) "); + StringBuilder localReferenceQuery = new StringBuilder(); + localReferenceQuery.append("SELECT ").append(fieldsToLoad); + localReferenceQuery.append(" FROM hfj_res_link r "); + localReferenceQuery.append("WHERE r.src_path = :src_path"); + if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { + localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); + } + localReferenceQuery + .append(" AND r.") + .append(searchPidFieldSqlColumn) + .append(" IN (:target_pids) "); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) + ? 
"target_res_partition_id" + : "partition_id"; + localReferenceQuery + .append("AND r.") + .append(partitionFieldToSearch) + .append(" = :search_partition_id "); + } localReferenceQueryParams.put("src_path", nextPath); // we loop over target_pids later. if (targetResourceTypes != null) { if (targetResourceTypes.size() == 1) { - localReferenceQuery.append(" AND r.target_resource_type = :target_resource_type "); + localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); localReferenceQueryParams.put( "target_resource_type", targetResourceTypes.iterator().next()); } else { - localReferenceQuery.append(" AND r.target_resource_type in (:target_resource_types) "); + localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); localReferenceQueryParams.put("target_resource_types", targetResourceTypes); } } // Case 2: Pair> canonicalQuery = - buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode); + buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest); - String sql = localReferenceQuery + " UNION " + canonicalQuery.getLeft(); + String sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); Map limitParams = new HashMap<>(); if (maxCount != null) { @@ -1637,10 +1762,15 @@ private void loadIncludesMatchSpecific( sql = sb.toString(); } - List> partitions = partition(nextRoundMatches, getMaximumPageSize()); + List> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); for (Collection nextPartition : partitions) { Query q = entityManager.createNativeQuery(sql, Tuple.class); q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + q.setParameter( + "search_partition_id", + nextPartition.iterator().next().getPartitionId()); + } localReferenceQueryParams.forEach(q::setParameter); canonicalQuery.getRight().forEach(q::setParameter); 
limitParams.forEach(q::setParameter); @@ -1655,7 +1785,14 @@ private void loadIncludesMatchSpecific( resourceVersion = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); } - pidsToInclude.add(JpaPid.fromIdAndVersion(resourceId, resourceVersion)); + Integer partitionId = null; + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); + } + + JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); + pid.setPartitionId(partitionId); + pidsToInclude.add(pid); } } } @@ -1664,9 +1801,11 @@ private void loadIncludesMatchSpecific( private void loadIncludesMatchAll( String findPidFieldName, + String findPartitionFieldName, String findResourceTypeFieldName, String findVersionFieldName, String searchPidFieldName, + String searchPartitionFieldName, String wantResourceType, boolean reverseMode, boolean hasDesiredResourceTypes, @@ -1683,10 +1822,17 @@ private void loadIncludesMatchAll( if (findVersionFieldName != null) { sqlBuilder.append(", r.").append(findVersionFieldName); } + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + sqlBuilder.append(", r.").append(findPartitionFieldName); + } sqlBuilder.append(" FROM ResourceLink r WHERE "); - sqlBuilder.append("r."); - sqlBuilder.append(searchPidFieldName); // (rev mode) target_resource_id | source_resource_id + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + sqlBuilder.append("r.").append(searchPartitionFieldName); + sqlBuilder.append(" = :target_partition_id AND "); + } + + sqlBuilder.append("r.").append(searchPidFieldName); sqlBuilder.append(" IN (:target_pids)"); /* @@ -1726,10 +1872,14 @@ private void loadIncludesMatchAll( } String sql = sqlBuilder.toString(); - List> partitions = partition(nextRoundMatches, getMaximumPageSize()); + List> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); for (Collection nextPartition : partitions) { TypedQuery q = 
entityManager.createQuery(sql, Object[].class); q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + q.setParameter( + "target_partition_id", nextPartition.iterator().next().getPartitionId()); + } if (wantResourceType != null) { q.setParameter("want_resource_type", wantResourceType); } @@ -1752,12 +1902,19 @@ private void loadIncludesMatchAll( Long resourceId = (Long) ((Object[]) nextRow)[0]; String resourceType = (String) ((Object[]) nextRow)[1]; String resourceCanonicalUrl = (String) ((Object[]) nextRow)[2]; + Integer partitionId = null; + int offset = 0; if (findVersionFieldName != null) { version = (Long) ((Object[]) nextRow)[3]; + offset++; + } + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + partitionId = ((Integer) ((Object[]) nextRow)[3 + offset]); } if (resourceId != null) { JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); + pid.setPartitionId(partitionId); pidsToInclude.add(pid); } else if (resourceCanonicalUrl != null) { if (canonicalUrls == null) { @@ -1771,23 +1928,30 @@ private void loadIncludesMatchAll( String message = "Search with _include=* can be inefficient when references using canonical URLs are detected. 
Use more specific _include values instead."; firePerformanceWarning(request, message); - loadCanonicalUrls(canonicalUrls, entityManager, pidsToInclude, reverseMode); + loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode); } } } private void loadCanonicalUrls( + RequestDetails theRequestDetails, Set theCanonicalUrls, EntityManager theEntityManager, HashSet thePidsToInclude, boolean theReverse) { StringBuilder sqlBuilder; - Set identityHashesForTypes = calculateIndexUriIdentityHashesForResourceTypes(null, theReverse); - List> canonicalUrlPartitions = - partition(theCanonicalUrls, getMaximumPageSize() - identityHashesForTypes.size()); + CanonicalUrlTargets canonicalUrlTargets = + calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse); + List> canonicalUrlPartitions = ListUtils.partition( + List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.myHashIdentityValues.size()); sqlBuilder = new StringBuilder(); - sqlBuilder.append("SELECT i.myResourcePid "); + sqlBuilder.append("SELECT "); + if (myPartitionSettings.isPartitioningEnabled()) { + sqlBuilder.append("i.myPartitionIdValue, "); + } + sqlBuilder.append("i.myResourcePid "); + sqlBuilder.append("FROM ResourceIndexedSearchParamUri i "); sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) "); sqlBuilder.append("AND i.myUri IN (:uris)"); @@ -1795,13 +1959,23 @@ private void loadCanonicalUrls( String canonicalResSql = sqlBuilder.toString(); for (Collection nextCanonicalUrlList : canonicalUrlPartitions) { - TypedQuery canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Long.class); - canonicalResIdQuery.setParameter("hash_identity", identityHashesForTypes); + TypedQuery canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class); + canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.myHashIdentityValues); canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList); - List 
resIds = canonicalResIdQuery.getResultList(); - for (var next : resIds) { + List results = canonicalResIdQuery.getResultList(); + for (var next : results) { if (next != null) { - thePidsToInclude.add(JpaPid.fromId(next)); + Integer partitionId = null; + Long pid; + if (next.length == 1) { + pid = (Long) next[0]; + } else { + partitionId = (Integer) ((Object[]) next)[0]; + pid = (Long) ((Object[]) next)[1]; + } + if (pid != null) { + thePidsToInclude.add(JpaPid.fromId(pid, partitionId)); + } } } } @@ -1841,46 +2015,80 @@ private static Set computeTargetResourceTypes(Include nextInclude, Runti @Nonnull private Pair> buildCanonicalUrlQuery( - String theVersionFieldName, Set theTargetResourceTypes, boolean theReverse) { + String theVersionFieldName, + Set theTargetResourceTypes, + boolean theReverse, + RequestDetails theRequest) { String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; if (theVersionFieldName != null) { // canonical-uri references aren't versioned, but we need to match the column count for the UNION fieldsToLoadFromSpidxUriTable += ", NULL"; } + + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + if (theReverse) { + fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; + } else { + fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; + } + } + // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. // But sp_name isn't indexed, so we use hash_identity instead. 
- Set identityHashesForTypes = - calculateIndexUriIdentityHashesForResourceTypes(theTargetResourceTypes, theReverse); + CanonicalUrlTargets canonicalUrlTargets = + calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); Map canonicalUriQueryParams = new HashMap<>(); - StringBuilder canonicalUrlQuery = new StringBuilder( - "SELECT " + fieldsToLoadFromSpidxUriTable + " FROM hfj_res_link r " + " JOIN hfj_spidx_uri rUri ON ( "); + StringBuilder canonicalUrlQuery = new StringBuilder(); + canonicalUrlQuery + .append("SELECT ") + .append(fieldsToLoadFromSpidxUriTable) + .append(' '); + canonicalUrlQuery.append("FROM hfj_res_link r "); + // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 - if (theTargetResourceTypes != null && theTargetResourceTypes.size() == 1) { - canonicalUrlQuery.append(" rUri.hash_identity = :uri_identity_hash "); + canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); + canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.myPartitionIds); + } + if (canonicalUrlTargets.myHashIdentityValues.size() == 1) { + canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); canonicalUriQueryParams.put( - "uri_identity_hash", identityHashesForTypes.iterator().next()); + "uri_identity_hash", + canonicalUrlTargets.myHashIdentityValues.iterator().next()); } else { - canonicalUrlQuery.append(" rUri.hash_identity in (:uri_identity_hashes) "); - canonicalUriQueryParams.put("uri_identity_hashes", identityHashesForTypes); + canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); + canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.myHashIdentityValues); } + canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); + canonicalUrlQuery.append(")"); - canonicalUrlQuery.append(" AND 
r.target_resource_url = rUri.sp_uri )"); - canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND "); - canonicalUrlQuery.append(" r.target_resource_id IS NULL "); - canonicalUrlQuery.append(" AND "); + canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); + canonicalUrlQuery.append(" r.target_resource_id IS NULL"); + canonicalUrlQuery.append(" AND"); + if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) { + if (theReverse) { + canonicalUrlQuery.append(" rUri.partition_id"); + } else { + canonicalUrlQuery.append(" r.partition_id"); + } + canonicalUrlQuery.append(" = :search_partition_id"); + canonicalUrlQuery.append(" AND"); + } if (theReverse) { - canonicalUrlQuery.append("rUri.res_id"); + canonicalUrlQuery.append(" rUri.res_id"); } else { - canonicalUrlQuery.append("r.src_resource_id"); + canonicalUrlQuery.append(" r.src_resource_id"); } - canonicalUrlQuery.append(" IN (:target_pids) "); + canonicalUrlQuery.append(" IN (:target_pids)"); return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); } @Nonnull - Set calculateIndexUriIdentityHashesForResourceTypes(Set theTargetResourceTypes, boolean theReverse) { + CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( + RequestDetails theRequestDetails, Set theTargetResourceTypes, boolean theReverse) { Set targetResourceTypes = theTargetResourceTypes; if (targetResourceTypes == null) { /* @@ -1949,34 +2157,92 @@ Set calculateIndexUriIdentityHashesForResourceTypes(Set theTargetR } assert !targetResourceTypes.isEmpty(); - return targetResourceTypes.stream() - .map(type -> BaseResourceIndexedSearchParam.calculateHashIdentity( - myPartitionSettings, myRequestPartitionId, type, "url")) - .collect(Collectors.toSet()); + Set hashIdentityValues = new HashSet<>(); + Set partitionIds = new HashSet<>(); + for (String type : targetResourceTypes) { + + RequestPartitionId readPartition; + if (myPartitionSettings.isPartitioningEnabled()) { + readPartition = + 
myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type); + } else { + readPartition = RequestPartitionId.defaultPartition(); + } + if (readPartition.hasPartitionIds()) { + partitionIds.addAll(readPartition.getPartitionIds()); + } + + Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( + myPartitionSettings, readPartition, type, "url"); + hashIdentityValues.add(hashIdentity); + } + + return new CanonicalUrlTargets(hashIdentityValues, partitionIds); } - private List> partition(Collection theNextRoundMatches, int theMaxLoad) { - if (theNextRoundMatches.size() <= theMaxLoad) { - return Collections.singletonList(theNextRoundMatches); - } else { + static class CanonicalUrlTargets { - List> retVal = new ArrayList<>(); - Collection current = null; - for (T next : theNextRoundMatches) { - if (current == null) { - current = new ArrayList<>(theMaxLoad); - retVal.add(current); - } + @Nonnull + final Set myHashIdentityValues; + + @Nonnull + final Set myPartitionIds; + + public CanonicalUrlTargets(@Nonnull Set theHashIdentityValues, @Nonnull Set thePartitionIds) { + myHashIdentityValues = theHashIdentityValues; + myPartitionIds = thePartitionIds; + } + } - current.add(next); + /** + * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing + * those pids where: + *

    + *
  • No single list is more than {@literal theMaxLoad} entries
  • + *
  • Each list only contains JpaPids with the same partition ID
  • + *
+ */ + static List> partitionBySizeAndPartitionId(List theNextRoundMatches, int theMaxLoad) { - if (current.size() >= theMaxLoad) { - current = null; + if (theNextRoundMatches.size() <= theMaxLoad) { + boolean allSamePartition = true; + for (int i = 1; i < theNextRoundMatches.size(); i++) { + if (!Objects.equals( + theNextRoundMatches.get(i - 1).getPartitionId(), + theNextRoundMatches.get(i).getPartitionId())) { + allSamePartition = false; + break; } } + if (allSamePartition) { + return Collections.singletonList(theNextRoundMatches); + } + } - return retVal; + // Break into partitioned sublists + ListMultimap lists = + MultimapBuilder.hashKeys().arrayListValues().build(); + for (JpaPid nextRoundMatch : theNextRoundMatches) { + String partitionId = nextRoundMatch.getPartitionId() != null + ? nextRoundMatch.getPartitionId().toString() + : ""; + lists.put(partitionId, nextRoundMatch); } + + List> retVal = new ArrayList<>(); + for (String key : lists.keySet()) { + List> nextPartition = Lists.partition(lists.get(key), theMaxLoad); + retVal.addAll(nextPartition); + } + + // In unit test mode, we sort the results just for unit test predictability + if (HapiSystemProperties.isUnitTestModeEnabled()) { + retVal = retVal.stream() + .map(t -> t.stream().sorted().collect(Collectors.toList())) + .collect(Collectors.toList()); + } + + return retVal; } private void attemptComboUniqueSpProcessing( @@ -2422,15 +2688,14 @@ private void fetchNext() { break; } - Long nextLong = myResultsIterator.next(); + JpaPid nextPid = myResultsIterator.next(); if (myHavePerfTraceFoundIdHook) { - callPerformanceTracingHook(nextLong); + callPerformanceTracingHook(nextPid); } - if (nextLong != null) { - JpaPid next = JpaPid.fromId(nextLong); - if (myPidSet.add(next) && doNotSkipNextPidForEverything()) { - myNext = next; + if (nextPid != null) { + if (myPidSet.add(nextPid) && doNotSkipNextPidForEverything()) { + myNext = nextPid; myNonSkipCount++; break; } else { @@ -2518,10 +2783,10 @@ private boolean 
doNotSkipNextPidForEverything() { return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); } - private void callPerformanceTracingHook(Long theNextLong) { + private void callPerformanceTracingHook(JpaPid theNextPid) { HookParams params = new HookParams() .add(Integer.class, System.identityHashCode(this)) - .add(Object.class, theNextLong); + .add(Object.class, theNextPid); myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java index 74420709fef3..f9920f1d0237 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java @@ -24,7 +24,6 @@ import jakarta.annotation.Nonnull; import org.apache.commons.lang3.Validate; -import java.util.Iterator; import java.util.List; public class SearchQueryExecutors { @@ -46,7 +45,7 @@ public boolean hasNext() { } @Override - public Long next() { + public JpaPid next() { myCount += 1; return theExecutor.next(); } @@ -54,37 +53,7 @@ public Long next() { } @Nonnull - public static ISearchQueryExecutor from(List rawPids) { + public static ISearchQueryExecutor from(List rawPids) { return new ResolvedSearchQueryExecutor(rawPids); } - - public static ISearchQueryExecutor from(Iterator theIterator) { - return new JpaPidQueryAdaptor(theIterator); - } - - public static ISearchQueryExecutor from(Iterable theIterable) { - return new JpaPidQueryAdaptor(theIterable.iterator()); - } - - static class JpaPidQueryAdaptor implements ISearchQueryExecutor { - final Iterator myIterator; - - JpaPidQueryAdaptor(Iterator theIterator) { - myIterator = theIterator; - } - - @Override - public void close() {} - - @Override - public boolean 
hasNext() { - return myIterator.hasNext(); - } - - @Override - public Long next() { - JpaPid next = myIterator.next(); - return next == null ? null : next.getId(); - } - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/ResolvedSearchQueryExecutor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/ResolvedSearchQueryExecutor.java index 6a7fd75b2c19..d41a1579587d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/ResolvedSearchQueryExecutor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/ResolvedSearchQueryExecutor.java @@ -19,6 +19,7 @@ */ package ca.uhn.fhir.jpa.search.builder.models; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor; import jakarta.annotation.Nonnull; @@ -26,18 +27,18 @@ import java.util.List; public class ResolvedSearchQueryExecutor implements ISearchQueryExecutor { - private final Iterator myIterator; + private final Iterator myIterator; - public ResolvedSearchQueryExecutor(Iterable theIterable) { + public ResolvedSearchQueryExecutor(Iterable theIterable) { this(theIterable.iterator()); } - public ResolvedSearchQueryExecutor(Iterator theIterator) { + public ResolvedSearchQueryExecutor(Iterator theIterator) { myIterator = theIterator; } @Nonnull - public static ResolvedSearchQueryExecutor from(List rawPids) { + public static ResolvedSearchQueryExecutor from(List rawPids) { return new ResolvedSearchQueryExecutor(rawPids); } @@ -47,7 +48,7 @@ public boolean hasNext() { } @Override - public Long next() { + public JpaPid next() { return myIterator.next(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java index 997ea3296b66..efbe336ce13f 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java @@ -21,6 +21,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.util.QueryParameterUtils; import com.healthmarketscience.sqlbuilder.Condition; @@ -31,6 +32,7 @@ import jakarta.annotation.Nullable; import org.apache.commons.lang3.Validate; +import java.util.Collection; import java.util.List; import java.util.stream.Collectors; @@ -96,15 +98,16 @@ public Condition createPartitionIdPredicate(RequestPartitionId theRequestPartiti } } - public Condition createPredicateResourceIds(boolean theInverse, List theResourceIds) { + public Condition createPredicateResourceIds(boolean theInverse, Collection theResourceIds) { Validate.notNull(theResourceIds, "theResourceIds must not be null"); - // Handle the _id parameter by adding it to the tail - Condition inResourceIds = - QueryParameterUtils.toEqualToOrInPredicate(getResourceIdColumn(), generatePlaceholders(theResourceIds)); + Condition inResourceIds = QueryParameterUtils.toEqualToOrInPredicate( + getResourceIdColumn(), generatePlaceholders(JpaPid.toLongList(theResourceIds))); if (theInverse) { inResourceIds = new NotCondition(inResourceIds); } + + // Handle the _id parameter by adding it to the tail return inResourceIds; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchViewDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ISourcePredicateBuilder.java similarity index 53% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchViewDao.java rename to 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ISourcePredicateBuilder.java index 4fe985a5a27b..db284b9989ac 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchViewDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ISourcePredicateBuilder.java @@ -17,17 +17,20 @@ * limitations under the License. * #L% */ -package ca.uhn.fhir.jpa.dao.data; +package ca.uhn.fhir.jpa.search.builder.predicate; -import ca.uhn.fhir.jpa.entity.ResourceSearchView; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.data.jpa.repository.Query; -import org.springframework.data.repository.query.Param; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.model.api.IQueryParameterType; +import com.healthmarketscience.sqlbuilder.Condition; -import java.util.Collection; +public interface ISourcePredicateBuilder { -public interface IResourceSearchViewDao extends JpaRepository, IHapiFhirJpaRepository { + Condition createPredicateMissingSourceUri(); - @Query("SELECT v FROM ResourceSearchView v WHERE v.myResourceId in (:pids)") - Collection findByResourceIds(@Param("pids") Collection pids); + Condition createPredicateSourceUri(String theSourceUri); + + Condition createPredicateRequestId(String theRequestId); + + Condition createPredicateSourceUriWithModifiers( + IQueryParameterType theQueryParameter, JpaStorageSettings theStorageSetting, String theSourceUri); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SourcePredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryPredicateBuilder.java similarity index 93% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SourcePredicateBuilder.java rename to 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryPredicateBuilder.java index 42998c2316bb..b918dc401ccf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SourcePredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryPredicateBuilder.java @@ -40,7 +40,7 @@ import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftAndRightMatchLikeExpression; import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftMatchLikeExpression; -public class SourcePredicateBuilder extends BaseJoiningPredicateBuilder { +public class ResourceHistoryPredicateBuilder extends BaseJoiningPredicateBuilder implements ISourcePredicateBuilder { private final DbColumn myColumnSourceUri; private final DbColumn myColumnRequestId; @@ -49,10 +49,10 @@ public class SourcePredicateBuilder extends BaseJoiningPredicateBuilder { /** * Constructor */ - public SourcePredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { - super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_RES_VER_PROV")); + public ResourceHistoryPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { + super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_RES_VER")); - myResourceIdColumn = getTable().addColumn("RES_PID"); + myResourceIdColumn = getTable().addColumn("RES_ID"); myColumnSourceUri = getTable().addColumn("SOURCE_URI"); myColumnRequestId = getTable().addColumn("REQUEST_ID"); } @@ -62,14 +62,17 @@ public DbColumn getResourceIdColumn() { return myResourceIdColumn; } + @Override public Condition createPredicateSourceUri(String theSourceUri) { return BinaryCondition.equalTo(myColumnSourceUri, generatePlaceholder(theSourceUri)); } + @Override public Condition createPredicateMissingSourceUri() { return UnaryCondition.isNull(myColumnSourceUri); } + @Override public Condition 
createPredicateSourceUriWithModifiers( IQueryParameterType theQueryParameter, JpaStorageSettings theStorageSetting, String theSourceUri) { if (theQueryParameter.getMissing() != null && !theQueryParameter.getMissing()) { @@ -117,6 +120,7 @@ private Condition createPredicateSourceContains(JpaStorageSettings theStorageSet } } + @Override public Condition createPredicateRequestId(String theRequestId) { return BinaryCondition.equalTo(myColumnRequestId, generatePlaceholder(theRequestId)); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryProvenancePredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryProvenancePredicateBuilder.java new file mode 100644 index 000000000000..cc77e7969ba9 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceHistoryProvenancePredicateBuilder.java @@ -0,0 +1,128 @@ +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.search.builder.predicate; + +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; +import ca.uhn.fhir.jpa.util.QueryParameterUtils; +import ca.uhn.fhir.model.api.IQueryParameterType; +import ca.uhn.fhir.rest.param.UriParam; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; +import ca.uhn.fhir.util.StringUtil; +import ca.uhn.fhir.util.UrlUtil; +import com.healthmarketscience.sqlbuilder.BinaryCondition; +import com.healthmarketscience.sqlbuilder.Condition; +import com.healthmarketscience.sqlbuilder.FunctionCall; +import com.healthmarketscience.sqlbuilder.UnaryCondition; +import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn; + +import java.util.List; + +import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftAndRightMatchLikeExpression; +import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftMatchLikeExpression; + +public class ResourceHistoryProvenancePredicateBuilder extends BaseJoiningPredicateBuilder + implements ISourcePredicateBuilder { + + private final DbColumn myColumnSourceUri; + private final DbColumn myColumnRequestId; + private final DbColumn myResourceIdColumn; + + /** + * Constructor + */ + public ResourceHistoryProvenancePredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { + super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_RES_VER_PROV")); + + myResourceIdColumn = getTable().addColumn("RES_PID"); + myColumnSourceUri = getTable().addColumn("SOURCE_URI"); + myColumnRequestId = getTable().addColumn("REQUEST_ID"); + } + + @Override + public DbColumn getResourceIdColumn() { + return myResourceIdColumn; + } + + @Override + public Condition createPredicateSourceUri(String theSourceUri) { + return BinaryCondition.equalTo(myColumnSourceUri, 
generatePlaceholder(theSourceUri)); + } + + @Override + public Condition createPredicateMissingSourceUri() { + return UnaryCondition.isNull(myColumnSourceUri); + } + + @Override + public Condition createPredicateSourceUriWithModifiers( + IQueryParameterType theQueryParameter, JpaStorageSettings theStorageSetting, String theSourceUri) { + if (theQueryParameter.getMissing() != null && !theQueryParameter.getMissing()) { + return UnaryCondition.isNotNull(myColumnSourceUri); + } else if (theQueryParameter instanceof UriParam && theQueryParameter.getQueryParameterQualifier() != null) { + UriParam uriParam = (UriParam) theQueryParameter; + switch (uriParam.getQualifier()) { + case ABOVE: + return createPredicateSourceAbove(theSourceUri); + case BELOW: + return createPredicateSourceBelow(theSourceUri); + case CONTAINS: + return createPredicateSourceContains(theStorageSetting, theSourceUri); + default: + throw new InvalidRequestException(Msg.code(2569) + + String.format( + "Unsupported qualifier specified, qualifier=%s", + theQueryParameter.getQueryParameterQualifier())); + } + } else { + return createPredicateSourceUri(theSourceUri); + } + } + + private Condition createPredicateSourceAbove(String theSourceUri) { + List aboveUriCandidates = UrlUtil.getAboveUriCandidates(theSourceUri); + List aboveUriPlaceholders = generatePlaceholders(aboveUriCandidates); + return QueryParameterUtils.toEqualToOrInPredicate(myColumnSourceUri, aboveUriPlaceholders); + } + + private Condition createPredicateSourceBelow(String theSourceUri) { + String belowLikeExpression = createLeftMatchLikeExpression(theSourceUri); + return BinaryCondition.like(myColumnSourceUri, generatePlaceholder(belowLikeExpression)); + } + + private Condition createPredicateSourceContains(JpaStorageSettings theStorageSetting, String theSourceUri) { + if (theStorageSetting.isAllowContainsSearches()) { + FunctionCall upperFunction = new FunctionCall("UPPER"); + upperFunction.addCustomParams(myColumnSourceUri); + String 
normalizedString = StringUtil.normalizeStringForSearchIndexing(theSourceUri); + String containsLikeExpression = createLeftAndRightMatchLikeExpression(normalizedString); + return BinaryCondition.like(upperFunction, generatePlaceholder(containsLikeExpression)); + } else { + throw new MethodNotAllowedException(Msg.code(2570) + ":contains modifier is disabled on this server"); + } + } + + @Override + public Condition createPredicateRequestId(String theRequestId) { + return BinaryCondition.equalTo(myColumnRequestId, generatePlaceholder(theRequestId)); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java index b58b0acf2ab2..dd2b7ccae0dc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java @@ -25,6 +25,8 @@ import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser; import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.search.builder.sql.ColumnTupleObject; +import ca.uhn.fhir.jpa.search.builder.sql.JpaPidValueTuples; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.model.api.IQueryParameterType; @@ -129,27 +131,34 @@ public Condition createPredicateResourceId( assert operation == SearchFilterParser.CompareOperation.eq || operation == SearchFilterParser.CompareOperation.ne; - List resourceIds = JpaPid.toLongList(allOrPids); if (theSourceJoinColumn == null) { BaseJoiningPredicateBuilder queryRootTable = super.getOrCreateQueryRootTable(true); Condition predicate; switch (operation) { default: case eq: - predicate = 
queryRootTable.createPredicateResourceIds(false, resourceIds); + predicate = queryRootTable.createPredicateResourceIds(false, allOrPids); break; case ne: - predicate = queryRootTable.createPredicateResourceIds(true, resourceIds); + predicate = queryRootTable.createPredicateResourceIds(true, allOrPids); break; } predicate = queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); return predicate; } else { - DbColumn resIdColumn = getResourceIdColumn(theSourceJoinColumn); - return QueryParameterUtils.toEqualToOrInPredicate( - resIdColumn, - generatePlaceholders(resourceIds), - operation == SearchFilterParser.CompareOperation.ne); + if (getSearchQueryBuilder().isIncludePartitionIdInJoins()) { + ColumnTupleObject left = new ColumnTupleObject(theSourceJoinColumn); + JpaPidValueTuples right = JpaPidValueTuples.from(getSearchQueryBuilder(), allOrPids); + return QueryParameterUtils.toInPredicate( + left, right, operation == SearchFilterParser.CompareOperation.ne); + } else { + DbColumn resIdColumn = getResourceIdColumn(theSourceJoinColumn); + List resourceIds = JpaPid.toLongList(allOrPids); + return QueryParameterUtils.toEqualToOrInPredicate( + resIdColumn, + generatePlaceholders(resourceIds), + operation == SearchFilterParser.CompareOperation.ne); + } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java index a5e7f2038740..4dc5aac3fa7a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java @@ -35,6 +35,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IDao; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import 
ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.BaseStorageDao; import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser; import ca.uhn.fhir.jpa.model.dao.JpaPid; @@ -42,6 +43,8 @@ import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl; import ca.uhn.fhir.jpa.search.builder.QueryStack; import ca.uhn.fhir.jpa.search.builder.models.MissingQueryParameterPredicateParams; +import ca.uhn.fhir.jpa.search.builder.sql.ColumnTupleObject; +import ca.uhn.fhir.jpa.search.builder.sql.JpaPidValueTuples; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams; @@ -64,6 +67,7 @@ import com.healthmarketscience.sqlbuilder.BinaryCondition; import com.healthmarketscience.sqlbuilder.ComboCondition; import com.healthmarketscience.sqlbuilder.Condition; +import com.healthmarketscience.sqlbuilder.InCondition; import com.healthmarketscience.sqlbuilder.NotCondition; import com.healthmarketscience.sqlbuilder.SelectQuery; import com.healthmarketscience.sqlbuilder.UnaryCondition; @@ -78,7 +82,6 @@ import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; @@ -121,7 +124,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im private ISearchParamRegistry mySearchParamRegistry; @Autowired - private IIdHelperService myIdHelperService; + private IIdHelperService myIdHelperService; @Autowired private DaoRegistry myDaoRegistry; @@ -282,9 +285,11 @@ public Condition createPredicate( inverse = true; } - List targetPids = - myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, targetIds); - List targetPidList = JpaPid.toLongList(targetPids); + List pids = myIdHelperService.resolveResourcePids( + theRequestPartitionId, + targetIds, + 
ResolveIdentityMode.includeDeleted().cacheOk()); + List targetPidList = pids.stream().map(JpaPid::getId).collect(Collectors.toList()); if (targetPidList.isEmpty() && targetQualifiedUrls.isEmpty()) { setMatchNothing(); @@ -811,14 +816,20 @@ private InvalidRequestException newInvalidResourceTypeException(String theResour @Nonnull public Condition createEverythingPredicate( - String theResourceName, List theSourceResourceNames, Long... theTargetPids) { + String theResourceName, List theSourceResourceNames, JpaPid... theTargetPids) { Condition condition; if (theTargetPids != null && theTargetPids.length >= 1) { // if resource ids are provided, we'll create the predicate // with ids in or equal to this value - condition = QueryParameterUtils.toEqualToOrInPredicate( - myColumnTargetResourceId, generatePlaceholders(Arrays.asList(theTargetPids))); + if (getSearchQueryBuilder().isIncludePartitionIdInJoins()) { + Object left = ColumnTupleObject.from(getJoinColumnsForTarget()); + JpaPidValueTuples right = JpaPidValueTuples.from(getSearchQueryBuilder(), theTargetPids); + condition = new InCondition(left, right); + } else { + condition = QueryParameterUtils.toEqualToOrInPredicate( + myColumnTargetResourceId, generatePlaceholders(JpaPid.toLongList(theTargetPids))); + } } else { // ... otherwise we look for resource types condition = BinaryCondition.equalTo(myColumnTargetResourceType, generatePlaceholder(theResourceName)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/JpaPidValueTuples.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/JpaPidValueTuples.java new file mode 100644 index 000000000000..4ca7ef8814f4 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/JpaPidValueTuples.java @@ -0,0 +1,87 @@ +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. 
+ * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.search.builder.sql; + +import ca.uhn.fhir.jpa.model.dao.JpaPid; +import com.healthmarketscience.common.util.AppendableExt; +import com.healthmarketscience.sqlbuilder.Expression; +import com.healthmarketscience.sqlbuilder.ValidationContext; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; + +/** + * Outputs an SQL tuple for a collection of JpaPids, consisting of + * ((resId,partitionId),(resId,partitionId),(resId,partitionId),...) 
+ */ +public class JpaPidValueTuples extends Expression { + + private final Collection myValues; + + public JpaPidValueTuples(Collection theValues) { + myValues = theValues; + } + + @Override + protected void collectSchemaObjects(ValidationContext vContext) { + // nothing + } + + @Override + public void appendTo(AppendableExt app) throws IOException { + app.append('('); + + String value; + for (Iterator iter = myValues.iterator(); iter.hasNext(); ) { + if (hasParens()) { + app.append("('"); + } + value = iter.next(); + app.append(value); + app.append("','"); + value = iter.next(); + app.append(value); + app.append("')"); + if (iter.hasNext()) { + app.append(','); + } + } + if (hasParens()) { + app.append(')'); + } + } + + public static JpaPidValueTuples from(SearchQueryBuilder theSearchQueryBuilder, JpaPid[] thePids) { + return from(theSearchQueryBuilder, Arrays.asList(thePids)); + } + + public static JpaPidValueTuples from(SearchQueryBuilder theSearchQueryBuilder, Collection thePids) { + List placeholders = new ArrayList<>(thePids.size() * 2); + for (JpaPid next : thePids) { + placeholders.add(theSearchQueryBuilder.generatePlaceholder(next.getPartitionId())); + placeholders.add(theSearchQueryBuilder.generatePlaceholder(next.getId())); + } + return new JpaPidValueTuples(placeholders); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java index 188c6f970a7d..c6637b3b3b66 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java @@ -35,11 +35,12 @@ import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder; import 
ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryProvenancePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder; -import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder; @@ -282,9 +283,20 @@ public QuantityNormalizedPredicateBuilder addQuantityNormalizedPredicateBuilder( /** * Add and return a predicate builder (or a root query if no root query exists yet) for selecting on a _source search parameter */ - public SourcePredicateBuilder addSourcePredicateBuilder( + public ResourceHistoryProvenancePredicateBuilder addResourceHistoryProvenancePredicateBuilder( @Nullable DbColumn[] theSourceJoinColumn, SelectQuery.JoinType theJoinType) { - SourcePredicateBuilder retVal = mySqlBuilderFactory.newSourcePredicateBuilder(this); + ResourceHistoryProvenancePredicateBuilder retVal = + mySqlBuilderFactory.newResourceHistoryProvenancePredicateBuilder(this); + addTable(retVal, theSourceJoinColumn, theJoinType); + return retVal; + } + + /** + * Add and return a predicate builder (or a root query if no root query exists yet) for selecting on a _source search parameter + */ + public ResourceHistoryPredicateBuilder addResourceHistoryPredicateBuilder( + @Nullable DbColumn[] theSourceJoinColumn, SelectQuery.JoinType theJoinType) { + ResourceHistoryPredicateBuilder retVal = 
mySqlBuilderFactory.newResourceHistoryPredicateBuilder(this); addTable(retVal, theSourceJoinColumn, theJoinType); return retVal; } @@ -823,9 +835,11 @@ private boolean isNotEqualsComparator(DateRangeParam theDateRange) { return false; } - public void addResourceIdsPredicate(List thePidList) { + public void addResourceIdsPredicate(List thePidList) { + List pidList = thePidList.stream().map(JpaPid::getId).collect(Collectors.toList()); + DbColumn resourceIdColumn = getOrCreateFirstPredicateBuilder().getResourceIdColumn(); - InCondition predicate = new InCondition(resourceIdColumn, generatePlaceholders(thePidList)); + InCondition predicate = new InCondition(resourceIdColumn, generatePlaceholders(pidList)); addPredicate(predicate); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java index f636ab7eb4cb..bea9f964a544 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java @@ -21,6 +21,7 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor; import ca.uhn.fhir.jpa.util.ScrollableResultsIterator; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -42,7 +43,7 @@ public class SearchQueryExecutor implements ISearchQueryExecutor { - private static final Long NO_MORE = -1L; + private static final JpaPid NO_MORE = JpaPid.fromId(-1L); private static final SearchQueryExecutor NO_VALUE_EXECUTOR = new SearchQueryExecutor(); private static final Object[] EMPTY_OBJECT_ARRAY = new Object[0]; private static final Logger ourLog = LoggerFactory.getLogger(SearchQueryExecutor.class); @@ -53,7 +54,7 @@ public class 
SearchQueryExecutor implements ISearchQueryExecutor { private boolean myQueryInitialized; private ScrollableResultsIterator myResultSet; - private Long myNext; + private JpaPid myNext; /** * Constructor @@ -86,10 +87,10 @@ public boolean hasNext() { } @Override - public Long next() { + public JpaPid next() { fetchNext(); Validate.isTrue(hasNext(), "Can not call next() right now, no data remains"); - Long next = myNext; + JpaPid next = myNext; myNext = null; return next; } @@ -155,17 +156,17 @@ private void fetchNext() { } } - private long getNextPid(ScrollableResultsIterator theResultSet) { + private JpaPid getNextPid(ScrollableResultsIterator theResultSet) { Object nextRow = Objects.requireNonNull(theResultSet.next()); // We should typically get two columns back, the first is the partition ID and the second // is the resource ID. But if we're doing a count query, we'll get a single column in an array // or maybe even just a single non array value depending on how the platform handles it. if (nextRow instanceof Number) { - return ((Number) nextRow).longValue(); + return JpaPid.fromId(((Number) nextRow).longValue()); } else { Object[] nextRowAsArray = (Object[]) nextRow; if (nextRowAsArray.length == 1) { - return (Long) nextRowAsArray[0]; + return JpaPid.fromId((Long) nextRowAsArray[0]); } else { int i; // TODO MB add a strategy object to GeneratedSql to describe the result set. 
@@ -181,9 +182,11 @@ private long getNextPid(ScrollableResultsIterator theResultSet) { // - partition_id, res_id, coord-dist // Assume res_id is first Long in row, and is in first two columns if (nextRowAsArray[0] instanceof Long) { - return (long) nextRowAsArray[0]; + return JpaPid.fromId((Long) nextRowAsArray[0]); } else { - return (long) nextRowAsArray[1]; + Integer partitionId = (Integer) nextRowAsArray[0]; + Long pid = (Long) nextRowAsArray[1]; + return JpaPid.fromId(pid, partitionId); } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java index 23b13b1e82e9..76f8aa9232c7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java @@ -27,11 +27,12 @@ import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryProvenancePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder; -import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder; @@ -109,8 +110,13 @@ 
public TagPredicateBuilder newTagPredicateBuilder(SearchQueryBuilder theSearchSq return myApplicationContext.getBean(TagPredicateBuilder.class, theSearchSqlBuilder); } - public SourcePredicateBuilder newSourcePredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { - return myApplicationContext.getBean(SourcePredicateBuilder.class, theSearchSqlBuilder); + public ResourceHistoryPredicateBuilder newResourceHistoryPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { + return myApplicationContext.getBean(ResourceHistoryPredicateBuilder.class, theSearchSqlBuilder); + } + + public ResourceHistoryProvenancePredicateBuilder newResourceHistoryProvenancePredicateBuilder( + SearchQueryBuilder theSearchSqlBuilder) { + return myApplicationContext.getBean(ResourceHistoryProvenancePredicateBuilder.class, theSearchSqlBuilder); } public SearchQueryExecutor newSearchQueryExecutor(GeneratedSql theGeneratedSql, Integer theMaxResultsToFetch) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java index d280d5501736..99001ba9e5c7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java @@ -68,7 +68,7 @@ public List fetchResultPids( ourLog.debug("fetchResultPids for range {}-{} returned {} pids", theFrom, theTo, retVal.size()); - return JpaPid.fromLongList(retVal); + return ISearchResultDao.toJpaPidList(retVal); }); } @@ -81,7 +81,7 @@ public List fetchAllResultPids( .execute(() -> { List retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId()); ourLog.trace("fetchAllResultPids returned {} pids", retVal.size()); - return JpaPid.fromLongList(retVal); + return ISearchResultDao.toJpaPidList(retVal); }); } diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java index 5061b260fa45..c1732aa6c6fe 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java @@ -177,7 +177,7 @@ private Parameters reindexInTransaction(RequestDetails theRequestDetails, IIdTyp List messages = new ArrayList<>(); - JpaPid pid = JpaPid.fromId(entity.getId()); + JpaPid pid = entity.getPersistentId(); ReindexOutcome outcome = dao.reindex(pid, new ReindexParameters(), theRequestDetails, new TransactionDetails()); messages.add("Reindex completed in " + sw); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java index d3507b3a7d1f..38ec8e0812a8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java @@ -27,7 +27,6 @@ import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; -import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -61,16 +60,10 @@ public ResourceReindexer(FhirContext theFhirContext) { myFhirContext = theFhirContext; } - public void readAndReindexResourceByPid(Long theResourcePid) { - ResourceTable resourceTable = - myResourceTableDao.findById(theResourcePid).orElseThrow(IllegalStateException::new); - 
reindexResourceEntity(resourceTable); - } - public void reindexResourceEntity(ResourceTable theResourceTable) { IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResourceTable.getResourceType()); long expectedVersion = theResourceTable.getVersion(); - IBaseResource resource = dao.readByPid(JpaPid.fromId(theResourceTable.getId()), true); + IBaseResource resource = dao.readByPid(theResourceTable.getPersistentId(), true); if (resource == null) { throw new InternalErrorException(Msg.code(1171) + "Could not find resource version " diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java index 7658dc4f0c8e..6bf97cbcfd8a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java @@ -23,11 +23,11 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; -import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.sched.HapiJob; import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs; @@ -79,9 +79,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; /** - * @see ca.uhn.fhir.jpa.reindex.job.ReindexJobConfig * @deprecated Use the Batch2 {@link ca.uhn.fhir.batch2.api.IJobCoordinator#startInstance(JobInstanceStartRequest)} instead. 
*/ +@SuppressWarnings({"removal", "DeprecatedIsStillUsed"}) @Deprecated public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasScheduledJobs { @@ -107,9 +107,6 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc @Autowired private IResourceTableDao myResourceTableDao; - @Autowired - private DaoRegistry myDaoRegistry; - @Autowired private FhirContext myContext; @@ -261,10 +258,10 @@ public void cancelAndPurgeAllJobs() { private int runReindexJobs() { Collection jobs = getResourceReindexJobEntities(); - if (jobs.size() > 0) { + if (!jobs.isEmpty()) { ourLog.info("Running {} reindex jobs: {}", jobs.size(), jobs); } else { - ourLog.debug("Running {} reindex jobs: {}", jobs.size(), jobs); + ourLog.debug("Running 0 reindex jobs"); return 0; } @@ -356,7 +353,7 @@ private int runReindexJob(ResourceReindexJobEntity theJob) { // Submit each resource requiring reindexing List> futures = range.stream() - .map(t -> myTaskExecutor.submit(new ResourceReindexingTask(t, counter))) + .map(t -> myTaskExecutor.submit(new ResourceReindexingTask(JpaPid.fromId(t), counter))) .collect(Collectors.toList()); Date latestDate = null; @@ -429,62 +426,64 @@ private void expungeJobsMarkedAsDeleted() { }); } - private void markResourceAsIndexingFailed(final long theId) { + private void markResourceAsIndexingFailed(final JpaPid theId) { TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); txTemplate.execute((TransactionCallback) theStatus -> { ourLog.info("Marking resource with PID {} as indexing_failed", theId); - myResourceTableDao.updateIndexStatus(theId, BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED); + myResourceTableDao.updateIndexStatus(theId.getId(), BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED); - Query q = myEntityManager.createQuery("DELETE FROM ResourceTag t WHERE t.myResourceId = :id"); - q.setParameter("id", theId); + Query q = 
myEntityManager.createQuery("DELETE FROM ResourceTag t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( - "DELETE FROM ResourceIndexedSearchParamCoords t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamCoords t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamDate t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + q = myEntityManager.createQuery( + "DELETE FROM ResourceIndexedSearchParamDate t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( - "DELETE FROM ResourceIndexedSearchParamNumber t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamNumber t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( - "DELETE FROM ResourceIndexedSearchParamQuantity t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamQuantity t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( - "DELETE FROM ResourceIndexedSearchParamQuantityNormalized t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamQuantityNormalized t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( - "DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); q = myEntityManager.createQuery( - "DELETE FROM ResourceIndexedSearchParamToken 
t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + "DELETE FROM ResourceIndexedSearchParamToken t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamUri t WHERE t.myResourcePid = :id"); - q.setParameter("id", theId); + q = myEntityManager.createQuery( + "DELETE FROM ResourceIndexedSearchParamUri t WHERE t.myResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.mySourceResourcePid = :id"); - q.setParameter("id", theId); + q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.mySourceResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.myTargetResourcePid = :id"); - q.setParameter("id", theId); + q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.myTargetResource.myId = :id"); + q.setParameter("id", theId.getId()); q.executeUpdate(); return null; @@ -492,11 +491,11 @@ private void markResourceAsIndexingFailed(final long theId) { } private class ResourceReindexingTask implements Callable { - private final Long myNextId; + private final JpaPid myNextId; private final AtomicInteger myCounter; private Date myUpdated; - ResourceReindexingTask(Long theNextId, AtomicInteger theCounter) { + ResourceReindexingTask(JpaPid theNextId, AtomicInteger theCounter) { myNextId = theNextId; myCounter = theCounter; } @@ -534,7 +533,7 @@ private Throwable readResourceAndReindex() { Throwable reindexFailure; reindexFailure = myTxTemplate.execute(t -> { ResourceTable resourceTable = - myResourceTableDao.findById(myNextId).orElseThrow(IllegalStateException::new); + myResourceTableDao.findById(myNextId.getId()).orElseThrow(IllegalStateException::new); myUpdated = resourceTable.getUpdatedDate(); try { diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptClientMappingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptClientMappingSvcImpl.java index d17ced8b4cbc..8b1d4373cec3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptClientMappingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptClientMappingSvcImpl.java @@ -34,9 +34,9 @@ import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.term.api.ITermConceptClientMappingSvc; -import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.jpa.util.ScrollableResultsIterator; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import jakarta.annotation.Nonnull; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; import jakarta.persistence.PersistenceContextType; @@ -73,18 +73,12 @@ public class TermConceptClientMappingSvcImpl implements ITermConceptClientMappin private final int myFetchSize = TermReadSvcImpl.DEFAULT_FETCH_SIZE; - protected static boolean ourLastResultsFromTranslationCache; // For testing. - protected static boolean ourLastResultsFromTranslationWithReverseCache; // For testing. 
- @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; @Autowired protected FhirContext myContext; - @Autowired - protected MemoryCacheService myMemoryCacheService; - @Autowired protected IIdHelperService myIdHelperService; @@ -107,7 +101,6 @@ public TranslateConceptResults translate(TranslationRequest theTranslationReques Join conceptMapJoin = groupJoin.join("myConceptMap"); List translationQueries = theTranslationRequest.getTranslationQueries(); - List cachedTargets; ArrayList predicates; Coding coding; @@ -117,110 +110,99 @@ public TranslateConceptResults translate(TranslationRequest theTranslationReques latestConceptMapVersion = getLatestConceptMapVersion(theTranslationRequest); for (TranslationQuery translationQuery : translationQueries) { - cachedTargets = myMemoryCacheService.getIfPresent( - MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION, translationQuery); - if (cachedTargets == null) { - final List targets = new ArrayList<>(); + final List targets = new ArrayList<>(); - predicates = new ArrayList<>(); + predicates = new ArrayList<>(); - coding = translationQuery.getCoding(); - if (coding.hasCode()) { - predicates.add(criteriaBuilder.equal(elementJoin.get("myCode"), coding.getCode())); - } else { - throw new InvalidRequestException( - Msg.code(842) + "A code must be provided for translation to occur."); - } + coding = translationQuery.getCoding(); + if (coding.hasCode()) { + predicates.add(criteriaBuilder.equal(elementJoin.get("myCode"), coding.getCode())); + } else { + throw new InvalidRequestException(Msg.code(842) + "A code must be provided for translation to occur."); + } - if (coding.hasSystem()) { - predicates.add(criteriaBuilder.equal(groupJoin.get("mySource"), coding.getSystem())); - } + if (coding.hasSystem()) { + predicates.add(criteriaBuilder.equal(groupJoin.get("mySource"), coding.getSystem())); + } - if (coding.hasVersion()) { - 
predicates.add(criteriaBuilder.equal(groupJoin.get("mySourceVersion"), coding.getVersion())); - } + if (coding.hasVersion()) { + predicates.add(criteriaBuilder.equal(groupJoin.get("mySourceVersion"), coding.getVersion())); + } - if (translationQuery.hasTargetSystem()) { - predicates.add( - criteriaBuilder.equal(groupJoin.get("myTarget"), translationQuery.getTargetSystem())); - } + if (translationQuery.hasTargetSystem()) { + predicates.add(criteriaBuilder.equal(groupJoin.get("myTarget"), translationQuery.getTargetSystem())); + } - if (translationQuery.hasUrl()) { - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl())); - if (translationQuery.hasConceptMapVersion()) { - // both url and conceptMapVersion - predicates.add(criteriaBuilder.equal( - conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion())); + if (translationQuery.hasUrl()) { + predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl())); + if (translationQuery.hasConceptMapVersion()) { + // both url and conceptMapVersion + predicates.add(criteriaBuilder.equal( + conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion())); + } else { + if (StringUtils.isNotBlank(latestConceptMapVersion)) { + // only url and use latestConceptMapVersion + predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion)); } else { - if (StringUtils.isNotBlank(latestConceptMapVersion)) { - // only url and use latestConceptMapVersion - predicates.add( - criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion)); - } else { - predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion"))); - } + predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion"))); } } + } - if (translationQuery.hasSource()) { - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getSource())); - } + if (translationQuery.hasSource()) { + 
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getSource())); + } - if (translationQuery.hasTarget()) { - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getTarget())); - } + if (translationQuery.hasTarget()) { + predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getTarget())); + } - if (translationQuery.hasResourceId()) { - IIdType resourceId = translationQuery.getResourceId(); - JpaPid resourcePid = - myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId); - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), resourcePid.getId())); - } + if (translationQuery.hasResourceId()) { + IIdType resourceId = translationQuery.getResourceId(); + JpaPid resourcePid = + myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId); + predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), resourcePid.getId())); + } - Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0])); - query.where(outerPredicate); - - // Use scrollable results. 
- final TypedQuery typedQuery = - myEntityManager.createQuery(query.select(root)); - org.hibernate.query.Query hibernateQuery = - (org.hibernate.query.Query) typedQuery; - hibernateQuery.setFetchSize(myFetchSize); - ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY); - try (ScrollableResultsIterator scrollableResultsIterator = - new ScrollableResultsIterator<>(scrollableResults)) { - - Set matches = new HashSet<>(); - while (scrollableResultsIterator.hasNext()) { - TermConceptMapGroupElementTarget next = scrollableResultsIterator.next(); - if (matches.add(next)) { - - TranslateConceptResult translationMatch = new TranslateConceptResult(); - if (next.getEquivalence() != null) { - translationMatch.setEquivalence( - next.getEquivalence().toCode()); - } + Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0])); + query.where(outerPredicate); + + // Use scrollable results. + final TypedQuery typedQuery = + myEntityManager.createQuery(query.select(root)); + org.hibernate.query.Query hibernateQuery = + (org.hibernate.query.Query) typedQuery; + hibernateQuery.setFetchSize(myFetchSize); + ScrollableResults scrollableResults = + hibernateQuery.scroll(ScrollMode.FORWARD_ONLY); + try (ScrollableResultsIterator scrollableResultsIterator = + new ScrollableResultsIterator<>(scrollableResults)) { + + Set matches = new HashSet<>(); + while (scrollableResultsIterator.hasNext()) { + TermConceptMapGroupElementTarget next = scrollableResultsIterator.next(); + if (matches.add(next)) { + + TranslateConceptResult translationMatch = new TranslateConceptResult(); + if (next.getEquivalence() != null) { + translationMatch.setEquivalence( + next.getEquivalence().toCode()); + } - translationMatch.setCode(next.getCode()); - translationMatch.setSystem(next.getSystem()); - translationMatch.setSystemVersion(next.getSystemVersion()); - translationMatch.setDisplay(next.getDisplay()); - translationMatch.setValueSet(next.getValueSet()); - 
translationMatch.setSystemVersion(next.getSystemVersion()); - translationMatch.setConceptMapUrl(next.getConceptMapUrl()); + translationMatch.setCode(next.getCode()); + translationMatch.setSystem(next.getSystem()); + translationMatch.setSystemVersion(next.getSystemVersion()); + translationMatch.setDisplay(next.getDisplay()); + translationMatch.setValueSet(next.getValueSet()); + translationMatch.setSystemVersion(next.getSystemVersion()); + translationMatch.setConceptMapUrl(next.getConceptMapUrl()); - targets.add(translationMatch); - } + targets.add(translationMatch); } } - - ourLastResultsFromTranslationCache = false; // For testing. - myMemoryCacheService.put(MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION, translationQuery, targets); - retVal.getResults().addAll(targets); - } else { - ourLastResultsFromTranslationCache = true; // For testing. - retVal.getResults().addAll(cachedTargets); } + + retVal.getResults().addAll(targets); } buildTranslationResult(retVal); @@ -242,7 +224,6 @@ public TranslateConceptResults translateWithReverse(TranslationRequest theTransl Join conceptMapJoin = groupJoin.join("myConceptMap"); List translationQueries = theTranslationRequest.getTranslationQueries(); - List cachedElements; ArrayList predicates; Coding coding; @@ -252,145 +233,137 @@ public TranslateConceptResults translateWithReverse(TranslationRequest theTransl latestConceptMapVersion = getLatestConceptMapVersion(theTranslationRequest); for (TranslationQuery translationQuery : translationQueries) { - cachedElements = myMemoryCacheService.getIfPresent( - MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION_REVERSE, translationQuery); - if (cachedElements == null) { - final List elements = new ArrayList<>(); - - predicates = new ArrayList<>(); - - coding = translationQuery.getCoding(); - String targetCode; - String targetCodeSystem = null; - if (coding.hasCode()) { - predicates.add(criteriaBuilder.equal(targetJoin.get("myCode"), coding.getCode())); - targetCode = coding.getCode(); - 
} else { - throw new InvalidRequestException( - Msg.code(843) + "A code must be provided for translation to occur."); - } + final List elements = new ArrayList<>(); - if (coding.hasSystem()) { - predicates.add(criteriaBuilder.equal(groupJoin.get("myTarget"), coding.getSystem())); - targetCodeSystem = coding.getSystem(); - } + predicates = new ArrayList<>(); - if (coding.hasVersion()) { - predicates.add(criteriaBuilder.equal(groupJoin.get("myTargetVersion"), coding.getVersion())); - } + coding = translationQuery.getCoding(); + String targetCode; + String targetCodeSystem = null; + if (coding.hasCode()) { + predicates.add(criteriaBuilder.equal(targetJoin.get("myCode"), coding.getCode())); + targetCode = coding.getCode(); + } else { + throw new InvalidRequestException(Msg.code(843) + "A code must be provided for translation to occur."); + } + + if (coding.hasSystem()) { + predicates.add(criteriaBuilder.equal(groupJoin.get("myTarget"), coding.getSystem())); + targetCodeSystem = coding.getSystem(); + } + + if (coding.hasVersion()) { + predicates.add(criteriaBuilder.equal(groupJoin.get("myTargetVersion"), coding.getVersion())); + } - if (translationQuery.hasUrl()) { - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl())); - if (translationQuery.hasConceptMapVersion()) { - // both url and conceptMapVersion - predicates.add(criteriaBuilder.equal( - conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion())); + if (translationQuery.hasUrl()) { + predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl())); + if (translationQuery.hasConceptMapVersion()) { + // both url and conceptMapVersion + predicates.add(criteriaBuilder.equal( + conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion())); + } else { + if (StringUtils.isNotBlank(latestConceptMapVersion)) { + // only url and use latestConceptMapVersion + predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), 
latestConceptMapVersion)); } else { - if (StringUtils.isNotBlank(latestConceptMapVersion)) { - // only url and use latestConceptMapVersion - predicates.add( - criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion)); - } else { - predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion"))); - } + predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion"))); } } + } - if (translationQuery.hasTargetSystem()) { - predicates.add( - criteriaBuilder.equal(groupJoin.get("mySource"), translationQuery.getTargetSystem())); - } + if (translationQuery.hasTargetSystem()) { + predicates.add(criteriaBuilder.equal(groupJoin.get("mySource"), translationQuery.getTargetSystem())); + } - if (translationQuery.hasSource()) { - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getSource())); - } + if (translationQuery.hasSource()) { + predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getSource())); + } - if (translationQuery.hasTarget()) { - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getTarget())); - } + if (translationQuery.hasTarget()) { + predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getTarget())); + } - if (translationQuery.hasResourceId()) { - IIdType resourceId = translationQuery.getResourceId(); - JpaPid resourcePid = - myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId); - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), resourcePid.getId())); - } + if (translationQuery.hasResourceId()) { + IIdType resourceId = translationQuery.getResourceId(); + JpaPid resourcePid = + myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId); + predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), resourcePid.getId())); + } - Predicate outerPredicate = 
criteriaBuilder.and(predicates.toArray(new Predicate[0])); - query.where(outerPredicate); - - // Use scrollable results. - final TypedQuery typedQuery = - myEntityManager.createQuery(query.select(root)); - org.hibernate.query.Query hibernateQuery = - (org.hibernate.query.Query) typedQuery; - hibernateQuery.setFetchSize(myFetchSize); - ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY); - try (ScrollableResultsIterator scrollableResultsIterator = - new ScrollableResultsIterator<>(scrollableResults)) { - - Set matches = new HashSet<>(); - while (scrollableResultsIterator.hasNext()) { - TermConceptMapGroupElement nextElement = scrollableResultsIterator.next(); - - /* TODO: The invocation of the size() below does not seem to be necessary but for some reason, - * but removing it causes tests in TerminologySvcImplR4Test to fail. We use the outcome - * in a trace log to avoid ErrorProne flagging an unused return value. - */ - int size = - nextElement.getConceptMapGroupElementTargets().size(); - ourLog.trace("Have {} targets", size); - - myEntityManager.detach(nextElement); - - if (isNotBlank(targetCode)) { - for (TermConceptMapGroupElementTarget next : - nextElement.getConceptMapGroupElementTargets()) { - if (matches.add(next)) { - if (isBlank(targetCodeSystem) - || StringUtils.equals(targetCodeSystem, next.getSystem())) { - if (StringUtils.equals(targetCode, next.getCode())) { - TranslateConceptResult translationMatch = new TranslateConceptResult(); - translationMatch.setCode(nextElement.getCode()); - translationMatch.setSystem(nextElement.getSystem()); - translationMatch.setSystemVersion(nextElement.getSystemVersion()); - translationMatch.setDisplay(nextElement.getDisplay()); - translationMatch.setValueSet(nextElement.getValueSet()); - translationMatch.setSystemVersion(nextElement.getSystemVersion()); - translationMatch.setConceptMapUrl(nextElement.getConceptMapUrl()); - if (next.getEquivalence() != null) { - 
translationMatch.setEquivalence( - next.getEquivalence().toCode()); - } - - if (alreadyContainsMapping(elements, translationMatch) - || alreadyContainsMapping(retVal.getResults(), translationMatch)) { - continue; - } - - elements.add(translationMatch); + Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0])); + query.where(outerPredicate); + + // Use scrollable results. + final TypedQuery typedQuery = myEntityManager.createQuery(query.select(root)); + org.hibernate.query.Query hibernateQuery = + (org.hibernate.query.Query) typedQuery; + hibernateQuery.setFetchSize(myFetchSize); + ScrollableResults scrollableResults = + hibernateQuery.scroll(ScrollMode.FORWARD_ONLY); + try (ScrollableResultsIterator scrollableResultsIterator = + new ScrollableResultsIterator<>(scrollableResults)) { + + Set matches = new HashSet<>(); + while (scrollableResultsIterator.hasNext()) { + TermConceptMapGroupElement nextElement = scrollableResultsIterator.next(); + + /* TODO: The invocation of the size() below does not seem to be necessary but for some reason, + * but removing it causes tests in TerminologySvcImplR4Test to fail. We use the outcome + * in a trace log to avoid ErrorProne flagging an unused return value. 
+ */ + int size = nextElement.getConceptMapGroupElementTargets().size(); + ourLog.trace("Have {} targets", size); + + myEntityManager.detach(nextElement); + + if (isNotBlank(targetCode)) { + for (TermConceptMapGroupElementTarget next : nextElement.getConceptMapGroupElementTargets()) { + if (matches.add(next)) { + if (isBlank(targetCodeSystem) + || StringUtils.equals(targetCodeSystem, next.getSystem())) { + if (StringUtils.equals(targetCode, next.getCode())) { + TranslateConceptResult translationMatch = + newTranslateConceptResult(nextElement, next); + + if (alreadyContainsMapping(elements, translationMatch) + || alreadyContainsMapping(retVal.getResults(), translationMatch)) { + continue; } + + elements.add(translationMatch); } } } } } } - - ourLastResultsFromTranslationWithReverseCache = false; // For testing. - myMemoryCacheService.put( - MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION_REVERSE, translationQuery, elements); - retVal.getResults().addAll(elements); - } else { - ourLastResultsFromTranslationWithReverseCache = true; // For testing. 
- retVal.getResults().addAll(cachedElements); } + + retVal.getResults().addAll(elements); } buildTranslationResult(retVal); return retVal; } + @Nonnull + private static TranslateConceptResult newTranslateConceptResult( + TermConceptMapGroupElement theGroup, TermConceptMapGroupElementTarget theTarget) { + TranslateConceptResult translationMatch = new TranslateConceptResult(); + translationMatch.setCode(theGroup.getCode()); + translationMatch.setSystem(theGroup.getSystem()); + translationMatch.setSystemVersion(theGroup.getSystemVersion()); + translationMatch.setDisplay(theGroup.getDisplay()); + translationMatch.setValueSet(theGroup.getValueSet()); + translationMatch.setSystemVersion(theGroup.getSystemVersion()); + translationMatch.setConceptMapUrl(theGroup.getConceptMapUrl()); + if (theTarget.getEquivalence() != null) { + translationMatch.setEquivalence(theTarget.getEquivalence().toCode()); + } + return translationMatch; + } + @Override public FhirContext getFhirContext() { return myContext; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImpl.java index bb4e00383dc9..e8156a91853a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImpl.java @@ -35,7 +35,6 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.ValidateUtil; -import com.google.common.annotations.VisibleForTesting; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.r4.model.BooleanType; import org.hl7.fhir.r4.model.CodeType; @@ -315,38 +314,6 @@ public void deleteConceptMap(ResourceTable theResourceTable) { } } - /** - * This method is present only for unit tests, do not call from client code - */ - 
@VisibleForTesting - public static void clearOurLastResultsFromTranslationCache() { - ourLastResultsFromTranslationCache = false; - } - - /** - * This method is present only for unit tests, do not call from client code - */ - @VisibleForTesting - public static void clearOurLastResultsFromTranslationWithReverseCache() { - ourLastResultsFromTranslationWithReverseCache = false; - } - - /** - * This method is present only for unit tests, do not call from client code - */ - @VisibleForTesting - static boolean isOurLastResultsFromTranslationCache() { - return ourLastResultsFromTranslationCache; - } - - /** - * This method is present only for unit tests, do not call from client code - */ - @VisibleForTesting - static boolean isOurLastResultsFromTranslationWithReverseCache() { - return ourLastResultsFromTranslationWithReverseCache; - } - public static Parameters toParameters(TranslateConceptResults theTranslationResult) { Parameters retVal = new Parameters(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java index 5dadd5a06ada..d76e39f896db 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java @@ -158,6 +158,7 @@ import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_ALL_VALUESET_ID; +import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_GENERIC_VALUESET_URL; public class TermLoaderSvcImpl implements ITermLoaderSvc { public static final String CUSTOM_CONCEPTS_FILE = "concepts.csv"; @@ -1026,7 +1027,7 @@ private ValueSet getValueSetLoincAll(Properties theUploadProperties, String theC valueSetId = LOINC_ALL_VALUESET_ID; } 
retVal.setId(valueSetId); - retVal.setUrl("http://loinc.org/vs"); + retVal.setUrl(LOINC_GENERIC_VALUESET_URL); retVal.setVersion(codeSystemVersionId); retVal.setName("All LOINC codes"); retVal.setStatus(Enumerations.PublicationStatus.ACTIVE); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java index d52f493632bc..457fe4ff3e5b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java @@ -50,6 +50,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptViewDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptViewOracleDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; +import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.entity.ITermValueSetConceptView; import ca.uhn.fhir.jpa.entity.TermCodeSystem; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; @@ -80,8 +81,6 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; -import ca.uhn.fhir.sl.cache.Cache; -import ca.uhn.fhir.sl.cache.CacheFactory; import ca.uhn.fhir.util.CoverageIgnore; import ca.uhn.fhir.util.FhirVersionIndependentConcept; import ca.uhn.fhir.util.HapiExtensions; @@ -205,10 +204,10 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { private static final String OUR_PIPE_CHARACTER = "|"; private static final int SECONDS_IN_MINUTE = 60; private static final int INDEXED_ROOTS_LOGGING_COUNT = 50_000; + private static final String CS_USERDATA_CURRENT_VERSION = TermReadSvcImpl.class.getName() + "_CS_CURRENT_VERSION"; + private static final String VS_USERDATA_CURRENT_VERSION = TermReadSvcImpl.class.getName() + "_VS_CURRENT_VERSION"; private static Runnable 
myInvokeOnNextCallForUnitTest; private static boolean ourForceDisableHibernateSearchForUnitTest; - private final Cache myCodeSystemCurrentVersionCache = - CacheFactory.build(TimeUnit.MINUTES.toMillis(1)); @Autowired protected DaoRegistry myDaoRegistry; @@ -300,7 +299,7 @@ public boolean isCodeSystemSupported(ValidationSupportContext theValidationSuppo if (isBlank(theSystem)) { return false; } - TermCodeSystemVersionDetails cs = getCurrentCodeSystemVersion(theSystem); + TermCodeSystemVersionDetails cs = getCurrentCodeSystemVersion(theValidationSupportContext, theSystem); return cs != null; } @@ -459,14 +458,6 @@ private boolean addToSet(Set theSetToPopulate, TermConcept theConce return retVal; } - /** - * This method is present only for unit tests, do not call from client code - */ - @VisibleForTesting - public void clearCaches() { - myCodeSystemCurrentVersionCache.invalidateAll(); - } - public Optional deleteValueSetForResource(ResourceTable theResourceTable) { // Get existing entity so it can be deleted. 
Optional optionalExistingTermValueSetById = @@ -678,10 +669,10 @@ private void expandValueSetIntoAccumulator( private String toHumanReadableExpansionTimestamp(TermValueSet termValueSet) { String expansionTimestamp = "(unknown)"; if (termValueSet.getExpansionTimestamp() != null) { - String timeElapsed = StopWatch.formatMillis(System.currentTimeMillis() - - termValueSet.getExpansionTimestamp().getTime()); - expansionTimestamp = new InstantType(termValueSet.getExpansionTimestamp()).getValueAsString() + " (" - + timeElapsed + " ago)"; + // Note: We used to append "123ms ago" to the timestamp, but we cache the + // results here, so it's just kind of weird to do that since the duration will + // be out of date when the entry comes back from cache + expansionTimestamp = new InstantType(termValueSet.getExpansionTimestamp()).getValueAsString(); } return expansionTimestamp; } @@ -2059,7 +2050,7 @@ public String invalidatePreCalculatedExpansion(IIdType theValueSetId, RequestDet } @Override - @Transactional + @Transactional(readOnly = true) public boolean isValueSetPreExpandedForCodeValidation(ValueSet theValueSet) { Optional optionalTermValueSet = fetchValueSetEntity(theValueSet); @@ -2084,9 +2075,18 @@ public boolean isValueSetPreExpandedForCodeValidation(ValueSet theValueSet) { return true; } + @SuppressWarnings({"OptionalAssignedToNull", "unchecked"}) private Optional fetchValueSetEntity(ValueSet theValueSet) { - JpaPid valueSetResourcePid = getValueSetResourcePersistentId(theValueSet); - return myTermValueSetDao.findByResourcePid(valueSetResourcePid.getId()); + Optional retVal = (Optional) theValueSet.getUserData(VS_USERDATA_CURRENT_VERSION); + if (retVal == null) { + synchronized (theValueSet) { + JpaPid valueSetResourcePid = getValueSetResourcePersistentId(theValueSet); + retVal = myTermValueSetDao.findByResourcePid(valueSetResourcePid.getId()); + theValueSet.setUserData(VS_USERDATA_CURRENT_VERSION, retVal); + } + } + + return retVal; } private JpaPid 
getValueSetResourcePersistentId(ValueSet theValueSet) { @@ -2098,6 +2098,7 @@ private JpaPid getValueSetResourcePersistentId(ValueSet theValueSet) { } protected IValidationSupport.CodeValidationResult validateCodeIsInPreExpandedValueSet( + ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theValidationOptions, ValueSet theValueSet, String theSystem, @@ -2137,9 +2138,7 @@ protected IValidationSupport.CodeValidationResult validateCodeIsInPreExpandedVal return null; } - TermValueSet valueSetEntity = myTermValueSetDao - .findByResourcePid(valueSetResourcePid.getId()) - .orElseThrow(IllegalStateException::new); + TermValueSet valueSetEntity = fetchValueSetEntity(theValueSet).orElseThrow(IllegalStateException::new); String timingDescription = toHumanReadableExpansionTimestamp(valueSetEntity); String preExpansionMessage = myContext .getLocalizer() @@ -2255,26 +2254,24 @@ private void fetchParents(TermConcept theConcept, Set theSetToPopul public Optional findCode(String theCodeSystem, String theCode) { /* * Loading concepts without a transaction causes issues later on some - * platforms (e.g. PSQL) so this transactiontemplate is here to make - * sure that we always call this with an open transaction + * platforms (e.g. 
PSQL) so make sure that we always call this with an open transaction */ - TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager); - txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_MANDATORY); - txTemplate.setReadOnly(true); + HapiTransactionService.requireTransaction(); - return txTemplate.execute(t -> { - TermCodeSystemVersionDetails csv = getCurrentCodeSystemVersion(theCodeSystem); - if (csv == null) { - return Optional.empty(); - } - return myConceptDao.findByCodeSystemAndCode(csv.myPid, theCode); - }); + TermCodeSystemVersionDetails csv = + getCurrentCodeSystemVersion(new ValidationSupportContext(provideValidationSupport()), theCodeSystem); + if (csv == null) { + return Optional.empty(); + } + return myConceptDao.findByCodeSystemAndCode(csv.myPid, theCode); } @Override - @Transactional(propagation = Propagation.MANDATORY) public List findCodes(String theCodeSystem, List theCodeList) { - TermCodeSystemVersionDetails csv = getCurrentCodeSystemVersion(theCodeSystem); + HapiTransactionService.requireTransaction(); + + TermCodeSystemVersionDetails csv = + getCurrentCodeSystemVersion(new ValidationSupportContext(provideValidationSupport()), theCodeSystem); if (csv == null) { return Collections.emptyList(); } @@ -2283,30 +2280,51 @@ public List findCodes(String theCodeSystem, List theCodeLis } @Nullable - private TermCodeSystemVersionDetails getCurrentCodeSystemVersion(String theCodeSystemIdentifier) { + private TermCodeSystemVersionDetails getCurrentCodeSystemVersion( + ValidationSupportContext theValidationSupportContext, String theCodeSystemIdentifier) { String version = getVersionFromIdentifier(theCodeSystemIdentifier); - TermCodeSystemVersionDetails retVal = myCodeSystemCurrentVersionCache.get( - theCodeSystemIdentifier, - t -> myTxTemplate.execute(tx -> { - TermCodeSystemVersion csv = null; - TermCodeSystem cs = - myCodeSystemDao.findByCodeSystemUri(getUrlFromIdentifier(theCodeSystemIdentifier)); - if (cs != null) { - 
if (version != null) { - csv = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(cs.getPid(), version); - } else if (cs.getCurrentVersion() != null) { - csv = cs.getCurrentVersion(); - } - } - if (csv != null) { - return new TermCodeSystemVersionDetails(csv.getPid(), csv.getCodeSystemVersionId()); - } else { - return NO_CURRENT_VERSION; - } - })); - if (retVal == NO_CURRENT_VERSION) { - return null; + + // Fetch the CodeSystem from ValidationSupport, which should return a cached copy. We + // keep a copy of the current version entity in userData in that cached copy + // to avoid repeated lookups + TermCodeSystemVersionDetails retVal; + IBaseResource codeSystem = + theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(theCodeSystemIdentifier); + if (codeSystem != null) { + + synchronized (codeSystem) { + retVal = (TermCodeSystemVersionDetails) codeSystem.getUserData(CS_USERDATA_CURRENT_VERSION); + if (retVal == null) { + retVal = getCurrentCodeSystemVersion(theCodeSystemIdentifier, version); + codeSystem.setUserData(CS_USERDATA_CURRENT_VERSION, retVal); + } + } + } else { + retVal = getCurrentCodeSystemVersion(theCodeSystemIdentifier, version); } + + return retVal; + } + + @Nullable + private TermCodeSystemVersionDetails getCurrentCodeSystemVersion(String theCodeSystemIdentifier, String version) { + TermCodeSystemVersionDetails retVal; + retVal = myTxTemplate.execute(tx -> { + TermCodeSystemVersion csv = null; + TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(getUrlFromIdentifier(theCodeSystemIdentifier)); + if (cs != null) { + if (version != null) { + csv = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(cs.getPid(), version); + } else if (cs.getCurrentVersion() != null) { + csv = cs.getCurrentVersion(); + } + } + if (csv != null) { + return new TermCodeSystemVersionDetails(csv.getPid(), csv.getCodeSystemVersionId()); + } else { + return null; + } + }); return retVal; } @@ -2332,7 +2350,7 @@ private String 
getUrlFromIdentifier(String theUri) { return retVal; } - @Transactional(propagation = Propagation.REQUIRED) + @Transactional(propagation = Propagation.REQUIRED, readOnly = true) @Override public Set findCodesAbove( Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) { @@ -2352,7 +2370,7 @@ public Set findCodesAbove( return retVal; } - @Transactional + @Transactional(readOnly = true) @Override public List findCodesAbove(String theSystem, String theCode) { TermCodeSystem cs = getCodeSystem(theSystem); @@ -2365,7 +2383,7 @@ public List findCodesAbove(String theSystem, Stri return toVersionIndependentConcepts(theSystem, codes); } - @Transactional(propagation = Propagation.REQUIRED) + @Transactional(propagation = Propagation.REQUIRED, readOnly = true) @Override public Set findCodesBelow( Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) { @@ -2389,7 +2407,7 @@ public Set findCodesBelow( return retVal; } - @Transactional + @Transactional(readOnly = true) @Override public List findCodesBelow(String theSystem, String theCode) { TermCodeSystem cs = getCodeSystem(theSystem); @@ -2508,6 +2526,29 @@ private void afterValueSetExpansionStatusChange() { provideValidationSupport().invalidateCaches(); } + @SuppressWarnings("SynchronizationOnLocalVariableOrMethodParameter") + @Override + public void invalidateCaches() { + /* + * Clear out anything left in the userdata caches. We do this mostly because it messes + * up unit tests to have these things stick around between test runs, since many of + * these resources come from DefaultProfileValidationSupport and therefore live beyond + * any single test execution. 
+ */ + for (IBaseResource next : provideValidationSupport().fetchAllConformanceResources()) { + if (next != null) { + synchronized (next) { + if (next.getUserData(CS_USERDATA_CURRENT_VERSION) != null) { + next.setUserData(CS_USERDATA_CURRENT_VERSION, null); + } + if (next.getUserData(VS_USERDATA_CURRENT_VERSION) != null) { + next.setUserData(VS_USERDATA_CURRENT_VERSION, null); + } + } + } + } + } + private synchronized boolean isPreExpandingValueSets() { return myPreExpandingValueSets; } @@ -2823,7 +2864,9 @@ public IValidationSupport.CodeValidationResult validateCode( txTemplate.setReadOnly(true); Optional codeOpt = txTemplate.execute(tx -> findCode(theCodeSystemUrl, theCode).map(c -> { - String codeSystemVersionId = getCurrentCodeSystemVersion(theCodeSystemUrl).myCodeSystemVersionId; + String codeSystemVersionId = getCurrentCodeSystemVersion( + theValidationSupportContext, theCodeSystemUrl) + .myCodeSystemVersionId; return new FhirVersionIndependentConcept( theCodeSystemUrl, c.getCode(), c.getDisplay(), codeSystemVersionId); })); @@ -2865,13 +2908,20 @@ IValidationSupport.CodeValidationResult validateCodeInValueSet( // If we don't have a PID, this came from some source other than the JPA // database, so we don't need to check if it's pre-expanded or not if (valueSet instanceof IAnyResource) { - Long pid = IDao.RESOURCE_PID.get((IAnyResource) valueSet); + Long pid = IDao.RESOURCE_PID.get(valueSet); if (pid != null) { TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); retVal = txTemplate.execute(tx -> { if (isValueSetPreExpandedForCodeValidation(valueSet)) { return validateCodeIsInPreExpandedValueSet( - theValidationOptions, valueSet, theCodeSystem, theCode, theDisplay, null, null); + theValidationSupportContext, + theValidationOptions, + valueSet, + theCodeSystem, + theCode, + theDisplay, + null, + null); } else { return null; } @@ -3212,6 +3262,7 @@ private org.hl7.fhir.r4.model.ValueSet getValueSetFromResourceTable(ResourceTabl @Override 
public CodeValidationResult validateCodeIsInPreExpandedValueSet( + ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, IBaseResource theValueSet, String theSystem, @@ -3226,7 +3277,14 @@ public CodeValidationResult validateCodeIsInPreExpandedValueSet( myVersionCanonicalizer.codeableConceptToCanonical(theCodeableConcept); return validateCodeIsInPreExpandedValueSet( - theOptions, valueSetR4, theSystem, theCode, theDisplay, codingR4, codeableConcept); + theValidationSupportContext, + theOptions, + valueSetR4, + theSystem, + theCode, + theDisplay, + codingR4, + codeableConcept); } @Override @@ -3261,7 +3319,7 @@ public void execute(JobExecutionContext theContext) { * Properties returned from method buildSearchScroll */ private static final class SearchProperties { - private List>> mySearchScroll = new ArrayList<>(); + private final List>> mySearchScroll = new ArrayList<>(); private List myIncludeOrExcludeCodes; public List>> getSearchScroll() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java index c00ce3b74af1..cc00bf3d04a1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java @@ -21,6 +21,7 @@ import ca.uhn.fhir.context.support.ConceptValidationOptions; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.context.support.ValidationSupportContext; import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.entity.TermConcept; @@ -119,6 +120,7 @@ IFhirResourceDaoCodeSystem.SubsumesResult subsumes( */ @Transactional() CodeValidationResult validateCodeIsInPreExpandedValueSet( + ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, 
IBaseResource theValueSet, String theSystem, diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/InClauseNormalizer.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/InClauseNormalizer.java index 8d8227ac4f91..5825f09987d1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/InClauseNormalizer.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/InClauseNormalizer.java @@ -19,6 +19,8 @@ */ package ca.uhn.fhir.jpa.util; +import ca.uhn.fhir.jpa.model.dao.JpaPid; + import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -33,9 +35,9 @@ */ public class InClauseNormalizer { - public static List normalizeIdListForInClause(List theResourceIds) { + public static List normalizeIdListForInClause(List theResourceIds) { - List retVal = theResourceIds; + List retVal = theResourceIds; int listSize = theResourceIds.size(); @@ -56,8 +58,8 @@ public static List normalizeIdListForInClause(List theResourceIds) { return retVal; } - private static List padIdListWithPlaceholders(List theIdList, int preferredListSize) { - List retVal = theIdList; + private static List padIdListWithPlaceholders(List theIdList, int preferredListSize) { + List retVal = theIdList; if (isUnmodifiableList(theIdList)) { retVal = new ArrayList<>(preferredListSize); @@ -65,13 +67,13 @@ private static List padIdListWithPlaceholders(List theIdList, int pr } while (retVal.size() < preferredListSize) { - retVal.add(-1L); + retVal.add(JpaPid.fromId(-1L, null)); } return retVal; } - private static boolean isUnmodifiableList(List theList) { + private static boolean isUnmodifiableList(List theList) { try { theList.addAll(Collections.emptyList()); } catch (Exception e) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java index 36b0b43a265d..e3fb62fc185c 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java @@ -26,6 +26,8 @@ import ca.uhn.fhir.jpa.entity.SearchTypeEnum; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.search.SearchStatusEnum; +import ca.uhn.fhir.jpa.search.builder.sql.ColumnTupleObject; +import ca.uhn.fhir.jpa.search.builder.sql.JpaPidValueTuples; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.model.primitive.InstantDt; @@ -118,6 +120,12 @@ public static Condition toAndPredicate(Condition... theAndPredicates) { return toAndPredicate(Arrays.asList(theAndPredicates)); } + @Nonnull + public static Condition toInPredicate( + ColumnTupleObject theColumns, JpaPidValueTuples theValues, boolean theInverse) { + return new InCondition(theColumns, theValues).setNegate(theInverse); + } + @Nonnull public static Condition toEqualToOrInPredicate( DbColumn theColumn, List theValuePlaceholders, boolean theInverse) { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/index/IdHelperServiceTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/index/IdHelperServiceTest.java index 9036b3514533..1db10169ab92 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/index/IdHelperServiceTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/index/IdHelperServiceTest.java @@ -79,57 +79,6 @@ void setUp() { lenient().doReturn(true).when(myStorageSettings).isDeleteEnabled(); } - @Test - public void testResolveResourcePersistentIds() { - lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy(); - - //prepare params - RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A"); - String resourceType = "Patient"; - Long id = 123L; - List ids = 
List.of(String.valueOf(id)); - ResolveIdentityMode mode = ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled(); - - //prepare results - Patient expectedPatient = new Patient(); - expectedPatient.setId(ids.get(0)); - - // configure mock behaviour - when(myStorageSettings.isDeleteEnabled()).thenReturn(true); - - final ResourceNotFoundException resourceNotFoundException = assertThrows(ResourceNotFoundException.class, () -> myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, mode)); - assertEquals("HAPI-2001: Resource Patient/123 is not known", resourceNotFoundException.getMessage()); - } - - @Test - public void testResolveResourcePersistentIdsDeleteFalse() { - lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy(); - - //prepare Params - RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A"); - Long id = 123L; - String resourceType = "Patient"; - List ids = List.of(String.valueOf(id)); - String forcedId = "(all)/" + resourceType + "/" + id; - ResolveIdentityMode mode = ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled(); - - //prepare results - Patient expectedPatient = new Patient(); - expectedPatient.setId(ids.get(0)); - - // configure mock behaviour - when(myStorageSettings.isDeleteEnabled()).thenReturn(false); - - Map actualIds = myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, mode); - - //verifyResult - assertFalse(actualIds.isEmpty()); - assertNull(actualIds.get(ids.get(0))); - } - - - - @Test public void testResolveResourceIdentity_defaultFunctionality(){ diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java index f6564c8c53ef..ced03f9570c2 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java @@ -3,6 +3,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.server.util.FhirContextSearchParamRegistry; import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import org.junit.jupiter.api.BeforeEach; @@ -45,7 +46,7 @@ public void beforeEach() { @Test void testCalculateIndexUriIdentityHashesForResourceTypes_Include_Null() { - Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(null, false); + Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(new SystemRequestDetails(), null, false).myHashIdentityValues; // There are only 12 resource types that actually can be linked to by the QuestionnaireResponse // resource via canonical references in any parameters assertThat(types).hasSize(1); @@ -54,14 +55,14 @@ void testCalculateIndexUriIdentityHashesForResourceTypes_Include_Null() { @Test void testCalculateIndexUriIdentityHashesForResourceTypes_Include_Nonnull() { Set inputTypes = Set.of("Questionnaire"); - Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(inputTypes, false); + Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(new SystemRequestDetails(), inputTypes, false).myHashIdentityValues; // Just the one that we actually specified assertThat(types).hasSize(1); } @Test void testCalculateIndexUriIdentityHashesForResourceTypes_RevInclude_Null() { - Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(null, true); + Set types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(new SystemRequestDetails(), null, true).myHashIdentityValues; // Revincludes are really hard to 
figure out the potential resource types for, so we just need to // use all active resource types assertThat(types).hasSize(146); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutorsTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutorsTest.java index ef6397e8189c..d0b7771fc241 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutorsTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutorsTest.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.jpa.search.builder; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import org.junit.jupiter.api.Test; import java.util.Arrays; @@ -14,7 +15,7 @@ class SearchQueryExecutorsTest { @Test public void adaptFromLongArrayYieldsAllValues() { - List listWithValues = Arrays.asList(1L,2L,3L,4L,5L); + List listWithValues = JpaPid.fromLongList(Arrays.asList(1L,2L,3L,4L,5L)); ISearchQueryExecutor queryExecutor = SearchQueryExecutors.from(listWithValues); @@ -24,7 +25,7 @@ public void adaptFromLongArrayYieldsAllValues() { @Test public void limitedCountDropsTrailingTest() { // given - List vals = Arrays.asList(1L,2L,3L,4L,5L); + List vals = JpaPid.fromLongList(Arrays.asList(1L,2L,3L,4L,5L)); ISearchQueryExecutor target = SearchQueryExecutors.from(vals); ISearchQueryExecutor queryExecutor = SearchQueryExecutors.limited(target, 3); @@ -35,7 +36,7 @@ public void limitedCountDropsTrailingTest() { @Test public void limitedCountExhaustsBeforeLimitOkTest() { // given - List vals = Arrays.asList(1L,2L,3L); + List vals = JpaPid.fromLongList(Arrays.asList(1L,2L,3L)); ISearchQueryExecutor target = SearchQueryExecutors.from(vals); ISearchQueryExecutor queryExecutor = SearchQueryExecutors.limited(target, 5); @@ -46,6 +47,7 @@ public void limitedCountExhaustsBeforeLimitOkTest() { private List drain(ISearchQueryExecutor theQueryExecutor) { return 
StreamSupport.stream(Spliterators.spliteratorUnknownSize(theQueryExecutor, 0), false) + .map(JpaPid::getId) .collect(Collectors.toList()); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/util/InClauseNormalizerTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/util/InClauseNormalizerTest.java index bd2ed0603ee9..8d9ff3bea522 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/util/InClauseNormalizerTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/util/InClauseNormalizerTest.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.util; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.util.InClauseNormalizer; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; @@ -14,16 +15,16 @@ import static org.assertj.core.api.Assertions.assertThat; public class InClauseNormalizerTest { - private static final Long ourResourceId = 1L; - private static final Long ourPaddingValue = -1L; + private static final JpaPid ourResourceId = JpaPid.fromId(1L); + private static final JpaPid ourPaddingValue = JpaPid.fromId(-1L); @ParameterizedTest @MethodSource("arguments") public void testNormalizeUnmodifiableList_willCreateNewListAndPadToSize(int theInitialListSize, int theExpectedNormalizedListSize) { - List initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId)); + List initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId)); initialList = unmodifiableList(initialList); - List normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList); + List normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList); assertNormalizedList(initialList, normalizedList, theInitialListSize, theExpectedNormalizedListSize); } @@ -31,23 +32,23 @@ public void testNormalizeUnmodifiableList_willCreateNewListAndPadToSize(int theI @ParameterizedTest @MethodSource("arguments") public void testNormalizeListToSizeAndPad(int theInitialListSize, 
int theExpectedNormalizedListSize) { - List initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId)); + List initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId)); - List normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList); + List normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList); assertNormalizedList(initialList, normalizedList, theInitialListSize, theExpectedNormalizedListSize); } - private void assertNormalizedList(List theInitialList, List theNormalizedList, int theInitialListSize, int theExpectedNormalizedListSize) { - List expectedPaddedSubList = new ArrayList<>(nCopies(theExpectedNormalizedListSize - theInitialListSize, ourPaddingValue)); + private void assertNormalizedList(List theInitialList, List theNormalizedList, int theInitialListSize, int theExpectedNormalizedListSize) { + List expectedPaddedSubList = new ArrayList<>(nCopies(theExpectedNormalizedListSize - theInitialListSize, ourPaddingValue)); assertThat(theNormalizedList).startsWith(listToArray(theInitialList)); assertThat(theNormalizedList).hasSize(theExpectedNormalizedListSize); assertThat(theNormalizedList).endsWith(listToArray(expectedPaddedSubList)); } - static Long[] listToArray(List theList) { - return theList.toArray(new Long[0]); + static JpaPid[] listToArray(List theList) { + return theList.toArray(new JpaPid[0]); } private static Stream arguments(){ diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml index e945e1951d90..8e1080783642 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-hfql/pom.xml b/hapi-fhir-jpaserver-hfql/pom.xml index 6ebfca9d2915..285e0f2ac0e6 100644 --- a/hapi-fhir-jpaserver-hfql/pom.xml +++ 
b/hapi-fhir-jpaserver-hfql/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml index bccea89af6fa..667bac703c23 100644 --- a/hapi-fhir-jpaserver-ips/pom.xml +++ b/hapi-fhir-jpaserver-ips/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index 6d73b51dc281..2155dcf8eccc 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java index 7362adc18f06..0569062c8be7 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java @@ -44,7 +44,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Example; import org.springframework.data.domain.Page; -import org.springframework.data.history.Revisions; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; @@ -410,12 +409,6 @@ public void deleteLinksWithAnyReferenceToPids(List

theGoldenResourcePids) { myMdmLinkDao.deleteLinksWithAnyReferenceToPids(theGoldenResourcePids); } - // TODO: LD: delete for good on the next bump - @Deprecated(since = "6.5.7", forRemoval = true) - public Revisions findMdmLinkHistory(M mdmLink) { - return myMdmLinkDao.findHistory(mdmLink.getId()); - } - @Transactional public List> findMdmLinkHistory(MdmHistorySearchParameters theMdmHistorySearchParameters) { return myMdmLinkDao.getHistoryForIds(theMdmHistorySearchParameters); diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java index df985ae93c5e..16677f9fccb7 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java @@ -37,7 +37,7 @@ import java.util.Collection; import java.util.Collections; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -77,7 +77,16 @@ public MdmCandidateSearchSvc() {} @Transactional public Collection findCandidates( String theResourceType, IAnyResource theResource, RequestPartitionId theRequestPartitionId) { - Map matchedPidsToResources = new HashMap<>(); + + /* + * This is a LinkedHashMap only because a number of Smile MDM unit tests depend on + * the order of candidates being returned in an order consistent with the order they + * were created. Before we added the partition ID to the hashCode() of JpaPid this + * seemed to happen naturally by complete coincidence, but after that change it + * stopped happening. So now a linked hashmap is used instead. 
+ */ + Map matchedPidsToResources = new LinkedHashMap<>(); + List filterSearchParams = myMdmSettings.getMdmRules().getCandidateFilterSearchParams(); List filterCriteria = buildFilterQuery(filterSearchParams, theResourceType); diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java index e37946f7cbcc..dad426cfe565 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java @@ -25,7 +25,6 @@ import ca.uhn.fhir.mdm.util.MdmPartitionHelper; import ca.uhn.fhir.mdm.util.MdmResourceUtil; import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Patient; @@ -52,9 +51,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; @@ -156,7 +153,7 @@ public void rebuildGoldenResourceCurrentLinksUsingSurvivorshipRules_withManyLink link.setSourceId(patient.getId()); link.setGoldenResourceId(goldenPatient.getId()); links.add(link); - sourceIdToPid.put(patient.getIdElement(), new JpaResourceLookup("Patient", (Long) link.getSourcePid().getId(), null, new PartitionablePartitionId())); + sourceIdToPid.put(patient.getIdElement(), new JpaResourceLookup("Patient", patient.getIdPart(), (Long) link.getSourcePid().getId(), null, 
new PartitionablePartitionId())); } IFhirResourceDao resourceDao = mock(IFhirResourceDao.class); diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index 71d5378a536c..42b6c7b68470 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java index 5ea186ed4699..be795a9ed407 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java @@ -26,6 +26,8 @@ public interface IResourceLookup> { String getResourceType(); + String getFhirId(); + /** * If the resource is deleted, returns the date/time that the resource was deleted at. 
Otherwise, returns null */ diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dao/JpaPid.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dao/JpaPid.java index c19566ae44aa..a09745812fdf 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dao/JpaPid.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dao/JpaPid.java @@ -21,23 +21,37 @@ import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.rest.api.server.storage.BaseResourcePersistentId; +import jakarta.annotation.Nonnull; +import org.apache.commons.collections4.ComparatorUtils; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Set; +import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; + /** * JPA implementation of IResourcePersistentId. JPA uses a Long as the primary key. This class should be used in any * context where the pid is known to be a Long. 
*/ -public class JpaPid extends BaseResourcePersistentId { +public class JpaPid extends BaseResourcePersistentId implements Comparable { private final Long myId; private PartitionablePartitionId myPartitionablePartitionId; + private static final Comparator COMPARATOR; + + static { + Comparator partitionComparator = + Comparator.comparing(t -> defaultIfNull(t.getPartitionId(), Integer.MIN_VALUE)); + Comparator idComparator = Comparator.comparing(t -> t.myId); + COMPARATOR = ComparatorUtils.chainedComparator(List.of(partitionComparator, idComparator)); + } + private JpaPid(Long theId) { super(null); myId = theId; @@ -67,6 +81,13 @@ public JpaPid setPartitionablePartitionId(PartitionablePartitionId thePartitiona return this; } + public void setPartitionId(Integer thePartitionId) { + if (myPartitionablePartitionId == null) { + myPartitionablePartitionId = new PartitionablePartitionId(); + } + myPartitionablePartitionId.setPartitionId(thePartitionId); + } + public static List toLongList(JpaPid[] thePids) { return toLongList(Arrays.asList(thePids)); } @@ -99,6 +120,12 @@ public static JpaPid fromId(Long theId) { return new JpaPid(theId); } + public static JpaPid fromId(Long theId, Integer thePartitionId) { + JpaPid retVal = new JpaPid(theId); + retVal.setPartitionablePartitionId(PartitionablePartitionId.with(thePartitionId, null)); + return retVal; + } + public static JpaPid fromIdAndVersion(Long theId, Long theVersion) { return new JpaPid(theId, theVersion); } @@ -115,14 +142,13 @@ public static JpaPid fromIdAndVersionAndResourceType(Long theId, Long theVersion public boolean equals(Object theO) { if (this == theO) return true; if (theO == null || getClass() != theO.getClass()) return false; - if (!super.equals(theO)) return false; JpaPid jpaPid = (JpaPid) theO; return myId.equals(jpaPid.myId); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), myId); + return Objects.hash(myId); } @Override @@ -135,9 +161,15 @@ public String toString() { return 
myId.toString(); } + @Override + public int compareTo(@Nonnull JpaPid theOther) { + return COMPARATOR.compare(this, theOther); + } + public Integer getPartitionId() { - // wipmb should we return null instead? - assert getPartitionablePartitionId() != null; + if (getPartitionablePartitionId() == null) { + return null; + } return getPartitionablePartitionId().getPartitionId(); } } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java index 60ffac66cbeb..b22e6e168dc9 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java @@ -36,6 +36,16 @@ import static ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable.SOURCE_URI_LENGTH; +/** + * This entity is deprecated - It stores the source URI and Request ID + * fields so that they can be indexed and searched discretely. In + * HAPI FHIR 6.8.0 we added equivalent columns to {@link ResourceHistoryTable} + * and started populating both those columns and the ones in this table. + * As of HAPI FHIR 8.0.0 we are no longer using this table unless + * the "AccessMetaSourceInformationFromProvenanceTable" on JpaStorageSettings + * is enabled (it's disabled by default). In the future we will remove + * this table entirely. 
+ */ @Table( name = "HFJ_RES_VER_PROV", indexes = { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java index fb3534f299e9..d5fa5b0e4eb0 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java @@ -37,7 +37,6 @@ import jakarta.persistence.Lob; import jakarta.persistence.ManyToOne; import jakarta.persistence.OneToMany; -import jakarta.persistence.OneToOne; import jakarta.persistence.Table; import jakarta.persistence.Transient; import jakarta.persistence.UniqueConstraint; @@ -119,10 +118,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl @OptimisticLock(excluded = true) private ResourceEncodingEnum myEncoding; - @OneToOne( - mappedBy = "myResourceHistoryTable", - cascade = {CascadeType.REMOVE}) - private ResourceHistoryProvenanceEntity myProvenance; // TODO: This was added in 6.8.0 - In the future we should drop ResourceHistoryProvenanceEntity @Column(name = "SOURCE_URI", length = SOURCE_URI_LENGTH, nullable = true) private String mySourceUri; @@ -180,10 +175,6 @@ public void setResourceTextVc(String theResourceTextVc) { myResourceTextVc = theResourceTextVc; } - public ResourceHistoryProvenanceEntity getProvenance() { - return myProvenance; - } - public void addTag(ResourceTag theTag) { ResourceHistoryTag tag = new ResourceHistoryTag(this, theTag.getTag(), getPartitionId()); tag.setResourceType(theTag.getResourceType()); @@ -251,6 +242,11 @@ public String getResourceType() { return myResourceType; } + @Override + public String getFhirId() { + return getIdDt().getIdPart(); + } + public void setResourceType(String theResourceType) { myResourceType = theResourceType; } diff --git 
a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java index c78e68a4a902..ea6951b49817 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java @@ -31,6 +31,8 @@ import jakarta.persistence.ManyToOne; import jakarta.persistence.Table; import jakarta.persistence.UniqueConstraint; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; import org.hibernate.annotations.GenericGenerator; import java.io.Serializable; @@ -121,4 +123,17 @@ public void setResource(ResourceHistoryTable theResourceHistory) { public Long getId() { return myId; } + + @Override + public String toString() { + ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE); + b.append("id", getId()); + if (getPartitionId() != null) { + b.append("partId", getPartitionId().getPartitionId()); + } + b.append("versionId", myResourceHistoryPid); + b.append("resId", getResourceId()); + b.append("tag", getTag().getId()); + return b.build(); + } } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java index 7f1779c3a8fb..41258153ee1b 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java @@ -988,6 +988,7 @@ public Collection getSearchParamPresents() { * @return the resource id, or null if the resource doesn't have a client-assigned id, * and hasn't been saved to the db to get a server-assigned id yet. 
*/ + @Override public String getFhirId() { return myFhirId; } diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 17961eca9d58..afcdda7de717 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index 01b2efc26436..c9dce2fd3381 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml index 9b3c42420e3a..4f0c15f388e7 100644 --- a/hapi-fhir-jpaserver-test-dstu2/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2UpdateTest.java b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2UpdateTest.java index ad5fafacbfe1..4dced847c2f1 100644 --- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2UpdateTest.java +++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2UpdateTest.java @@ -209,7 +209,7 @@ public void testUpdateRejectsInvalidTypes() { p2.setId(new IdDt("Organization/" + p1id.getIdPart())); myOrganizationDao.update(p2, mySrd); fail(""); - } catch (UnprocessableEntityException e) { + } catch (InvalidRequestException e) { // good } diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoValueSetDstu2Test.java 
b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoValueSetDstu2Test.java index 43242b0114f9..7ae14f6a4a05 100644 --- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoValueSetDstu2Test.java +++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoValueSetDstu2Test.java @@ -102,7 +102,8 @@ public void testValidateCodeOperationByIdentifierAndCodeAndSystemAndBadDisplay() CodeableConceptDt codeableConcept = null; IValidationSupport.CodeValidationResult result = myValueSetDao.validateCode(valueSetIdentifier, id, code, system, display, coding, codeableConcept, mySrd); assertTrue(result.isOk()); - assertEquals("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7' for in-memory expansion of ValueSet: http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", result.getMessage()); + assertThat(result.getMessage()).contains("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7'"); + assertThat(result.getMessage()).contains("for in-memory expansion"); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); assertEquals(IValidationSupport.IssueSeverity.WARNING, result.getSeverity()); } diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java index 060e1430c5e9..69d51f118bb8 100644 --- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java +++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java @@ -2571,32 +2571,6 @@ public void testUpdateInvalidUrl() 
throws Exception { } } - @Test - public void testUpdateRejectsInvalidTypes() { - - Patient patient = new Patient(); - patient.addIdentifier().setSystem("urn:system").setValue("testUpdateRejectsInvalidTypes"); - patient.addName().addFamily("Tester").addGiven("testUpdateRejectsInvalidTypes"); - IdDt p1id = (IdDt) myClient.create().resource(patient).execute().getId(); - - Organization org = new Organization(); - org.getNameElement().setValue("testUpdateRejectsInvalidTypes"); - try { - myClient.update().resource(org).withId("Organization/" + p1id.getIdPart()).execute(); - fail(""); - } catch (UnprocessableEntityException e) { - assertThat(e.getMessage()).contains("HAPI-0930: Existing resource ID[Patient/" + p1id.getIdPart() + "] is of type[Patient] - Cannot update with [Organization]"); - } - - try { - myClient.update().resource(org).withId("Patient/" + p1id.getIdPart()).execute(); - fail(""); - } catch (UnprocessableEntityException e) { - assertThat(e.getMessage()).contains("HAPI-0930: Existing resource ID[Patient/" + p1id.getIdPart() + "] is of type[Patient] - Cannot update with [Organization]"); - } - - } - @Test public void testUpdateResourceConditional() throws IOException { String methodName = "testUpdateResourceConditional"; diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderTransactionSearchDstu2Test.java b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderTransactionSearchDstu2Test.java index be41667846ec..0baebe987551 100644 --- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderTransactionSearchDstu2Test.java +++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderTransactionSearchDstu2Test.java @@ -28,6 +28,7 @@ import org.junit.jupiter.api.Test; import java.util.ArrayList; +import java.util.Comparator; import java.util.List; import static org.assertj.core.api.Assertions.assertThat; @@ -116,6 +117,7 @@ private 
List create20Patients() { @Test public void testBatchWithGetHardLimitLargeSynchronous() { List ids = create20Patients(); + ids.sort(Comparator.naturalOrder()); Bundle input = new Bundle(); input.setType(BundleTypeEnum.BATCH); diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml index 575740e07528..afaa398665d3 100644 --- a/hapi-fhir-jpaserver-test-dstu3/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchNoFtTest.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchNoFtTest.java index dda67d5cee54..7d6865fb4658 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchNoFtTest.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchNoFtTest.java @@ -3557,6 +3557,7 @@ public void testReplaceLinkSearchIndex() { myCaptureQueriesListener.clear(); myObservationDao.update(obs); + myCaptureQueriesListener.logSelectQueries(); assertEquals(10, myCaptureQueriesListener.countSelectQueries()); assertEquals(5, myCaptureQueriesListener.countUpdateQueries()); assertEquals(1, myCaptureQueriesListener.countInsertQueries()); diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java index 065afeeb6395..10961f819217 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java @@ -17,7 +17,6 @@ import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum; import 
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; import ca.uhn.fhir.jpa.dao.DaoTestUtils; -import ca.uhn.fhir.jpa.entity.ResourceSearchView; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; @@ -54,6 +53,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.ClasspathUtil; import com.google.common.collect.Lists; +import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.RandomStringUtils; import org.hl7.fhir.dstu3.model.Age; import org.hl7.fhir.dstu3.model.Attachment; @@ -110,6 +110,7 @@ import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallbackWithoutResult; import org.springframework.transaction.support.TransactionTemplate; +import org.testcontainers.shaded.org.bouncycastle.util.Arrays; import java.util.ArrayList; import java.util.Collections; @@ -613,15 +614,6 @@ public void testCreateResource_populatesResourceTableFhirIdField( .getSingleResult(); assertThat(historyCount).as("only create one history version").isEqualTo(1); - // make sure the search view works too - ResourceSearchView readBackView = myEntityManager - .createQuery("select v from ResourceSearchView v where v.myResourceId = :resId", ResourceSearchView.class) - .setParameter("resId", myMethodOutcome.getPersistentId().getId()) - .getSingleResult(); - assertThat(readBackView).as("found search view").isNotNull(); - - assertEquals(myExpectedId, readBackView.getFhirId(), - "fhir_id populated"); } } diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UpdateTest.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UpdateTest.java index c1e46ff78aa8..0ba2a2554f5f 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UpdateTest.java 
+++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UpdateTest.java @@ -1,6 +1,5 @@ package ca.uhn.fhir.jpa.dao.dstu3; -import static org.junit.jupiter.api.Assertions.assertEquals; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.model.dao.JpaPid; @@ -33,7 +32,7 @@ import java.util.UUID; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; @@ -41,7 +40,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu3UpdateTest.class); @AfterEach - public void afterEach(){ + public void afterEach() { myStorageSettings.setResourceServerIdStrategy(JpaStorageSettings.IdStrategyEnum.SEQUENTIAL_NUMERIC); } @@ -135,7 +134,7 @@ public void testDuplicateProfilesIgnored() { public void afterResetDao() { myStorageSettings.setResourceMetaCountHardLimit(new JpaStorageSettings().getResourceMetaCountHardLimit()); } - + @Test public void testHardMetaCapIsEnforcedOnCreate() { myStorageSettings.setResourceMetaCountHardLimit(3); @@ -156,7 +155,7 @@ public void testHardMetaCapIsEnforcedOnCreate() { } } } - + @Test public void testHardMetaCapIsEnforcedOnMetaAdd() { myStorageSettings.setResourceMetaCountHardLimit(3); @@ -167,7 +166,7 @@ public void testHardMetaCapIsEnforcedOnMetaAdd() { patient.setActive(true); id = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless(); } - + { Meta meta = new Meta(); meta.addTag().setSystem("http://foo").setCode("1"); @@ -183,7 +182,7 @@ public void testHardMetaCapIsEnforcedOnMetaAdd() { } } - + @Test public void testDuplicateTagsOnAddTagsIgnored() { IIdType id; @@ -198,7 +197,7 @@ public void testDuplicateTagsOnAddTagsIgnored() { 
meta.addTag().setSystem("http://foo").setCode("bar").setDisplay("Val2"); meta.addTag().setSystem("http://foo").setCode("bar").setDisplay("Val3"); myPatientDao.metaAddOperation(id, meta, null); - + // Do a read { Patient patient = myPatientDao.read(id, mySrd); @@ -228,7 +227,7 @@ public void testDuplicateTagsOnUpdateIgnored() { patient.getMeta().addTag().setSystem("http://foo").setCode("bar").setDisplay("Val3"); myPatientDao.update(patient, mySrd).getId().toUnqualifiedVersionless(); } - + // Do a read on second version { Patient patient = myPatientDao.read(id, mySrd); @@ -314,7 +313,6 @@ public void testMultipleUpdatesWithNoChangesDoesNotResultInAnUpdateForDiscreteUp } - @Test public void testUpdateByUrl() { String methodName = "testUpdateByUrl"; @@ -602,8 +600,8 @@ public void testUpdateRejectsInvalidTypes() { p2.setId(new IdType("Organization/" + p1id.getIdPart())); myOrganizationDao.update(p2, mySrd); fail(""); - } catch (UnprocessableEntityException e) { - assertEquals(Msg.code(930) + "Existing resource ID[Patient/" + p1id.getIdPartAsLong() + "] is of type[Patient] - Cannot update with [Organization]", e.getMessage()); + } catch (InvalidRequestException e) { + assertThat(e.getMessage()).contains(Msg.code(960) + "Can not create resource with"); } try { @@ -611,9 +609,8 @@ public void testUpdateRejectsInvalidTypes() { myOrganizationDao.update(p2, mySrd); fail(""); } catch (InvalidRequestException e) { - assertEquals(Msg.code(996) + "Incorrect resource type (Patient) for this DAO, wanted: Organization", e.getMessage()); + assertThat(e.getMessage()).contains(Msg.code(996) + "Incorrect resource type"); } - } @Test @@ -844,7 +841,7 @@ void testCreateWithConditionalUpdate_withUuidAsServerResourceStrategyAndNoIdProv // verify try { UUID.fromString(result); - } catch (IllegalArgumentException exception){ + } catch (IllegalArgumentException exception) { fail("Result id is not a UUID. 
Instead, it was: " + result); } } diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ValidateTest.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ValidateTest.java index 47ac1ba2f36a..95fb846d8636 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ValidateTest.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ValidateTest.java @@ -2,7 +2,6 @@ import ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.jpa.config.JpaConfig; -import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport; import ca.uhn.fhir.jpa.test.BaseJpaDstu3Test; import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.api.MethodOutcome; @@ -10,7 +9,6 @@ import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; -import ca.uhn.fhir.test.utilities.ProxyUtil; import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.validation.IValidatorModule; import org.apache.commons.io.IOUtils; @@ -95,8 +93,6 @@ public void testValidateChangedQuestionnaire() { ourLog.info("Clearing cache"); myValidationSupport.invalidateCaches(); - myFhirInstanceValidator.invalidateCaches(); - ProxyUtil.getSingletonTarget(myPersistedResourceValidationSupport, JpaPersistedResourceValidationSupport.class).clearCaches(); MethodOutcome result = myQuestionnaireResponseDao.validate(qr, null, null, null, null, null, null); OperationOutcome oo = (OperationOutcome) result.getOperationOutcome(); diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java index a4bc55eef464..fdcc5f5dc8ed 
100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java @@ -4277,14 +4277,14 @@ public void testUpdateRejectsInvalidTypes() { try { myClient.update().resource(p2).withId("Organization/" + p1id.getIdPart()).execute(); fail(""); - } catch (UnprocessableEntityException e) { + } catch (InvalidRequestException e) { // good } try { myClient.update().resource(p2).withId("Patient/" + p1id.getIdPart()).execute(); fail(""); - } catch (UnprocessableEntityException e) { + } catch (InvalidRequestException e) { // good } diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java index c1dc0776535e..565a9c1cb95a 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java @@ -1,8 +1,5 @@ package ca.uhn.fhir.jpa.provider.dstu3; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.rp.dstu3.ObservationResourceProvider; @@ -15,12 +12,9 @@ import ca.uhn.fhir.rest.server.RestfulServer; import ca.uhn.fhir.test.utilities.JettyUtil; import com.google.common.base.Charsets; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; -import 
org.eclipse.jetty.server.Server; import org.eclipse.jetty.ee10.servlet.ServletContextHandler; import org.eclipse.jetty.ee10.servlet.ServletHolder; +import org.eclipse.jetty.server.Server; import org.hl7.fhir.dstu3.model.Binary; import org.hl7.fhir.dstu3.model.Bundle; import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent; @@ -35,17 +29,20 @@ import org.junit.jupiter.api.Test; import java.util.ArrayList; +import java.util.Comparator; import java.util.List; -import java.util.concurrent.TimeUnit; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test { private static RestfulServer myRestServer; private static IGenericClient ourClient; private static FhirContext ourCtx; - private static CloseableHttpClient ourHttpClient; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SystemProviderTransactionSearchDstu3Test.class); private static Server ourServer; private static String ourServerBase; @@ -97,14 +94,8 @@ public void beforeStartServer() throws Exception { int myPort = JettyUtil.getPortForStartedServer(ourServer); ourServerBase = "http://localhost:" + myPort + "/fhir/context"; - PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); - HttpClientBuilder builder = HttpClientBuilder.create(); - builder.setConnectionManager(connectionManager); - ourHttpClient = builder.build(); - ourCtx.getRestfulClientFactory().setSocketTimeout(600 * 1000); ourClient = ourCtx.newRestfulGenericClient(ourServerBase); - ourClient.setLogRequestAndResponse(true); myRestServer = restServer; } @@ -117,13 +108,13 @@ public void beforeStartServer() throws Exception { 
private List create20Patients() { - List ids = new ArrayList(); + List ids = new ArrayList<>(); for (int i = 0; i < 20; i++) { Patient patient = new Patient(); patient.setGender(AdministrativeGender.MALE); patient.addIdentifier().setSystem("urn:foo").setValue("A"); patient.addName().setFamily("abcdefghijklmnopqrstuvwxyz".substring(i, i+1)); - String id = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getValue(); + String id = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getValue(); ids.add(id); } return ids; @@ -132,6 +123,7 @@ private List create20Patients() { @Test public void testBatchWithGetHardLimitLargeSynchronous() { List ids = create20Patients(); + ids.sort(Comparator.naturalOrder()); Bundle input = new Bundle(); input.setType(BundleType.BATCH); @@ -227,7 +219,7 @@ public void testPatchUsingJsonPatch_Transaction() { Patient newPt = ourClient.read().resource(Patient.class).withId(pid1.getIdPart()).execute(); assertEquals("2", newPt.getIdElement().getVersionIdPart()); - assertEquals(false, newPt.getActive()); + assertFalse(newPt.getActive()); } @@ -336,7 +328,7 @@ public void testTransactionGetStartsWithSlash() { } private List toIds(Bundle theRespBundle) { - ArrayList retVal = new ArrayList(); + ArrayList retVal = new ArrayList<>(); for (BundleEntryComponent next : theRespBundle.getEntry()) { retVal.add(next.getResource().getIdElement().toUnqualifiedVersionless().getValue()); } diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java index 4a920125af43..32be6c035a87 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java @@ -242,7 +242,7 @@ public void 
testValidateCodeNotFound() throws Exception { IValidationSupport.CodeValidationResult result = myValueSetDao.validateCode(new UriType("http://loinc.org/vs"), null, new StringType("10013-1-9999999999"), new StringType(ITermLoaderSvc.LOINC_URI), null, null, null, mySrd); assertFalse(result.isOk()); - assertEquals("Unknown code 'http://loinc.org#10013-1-9999999999' for in-memory expansion of ValueSet 'http://loinc.org/vs'", result.getMessage()); + assertThat(result.getMessage()).contains("Unknown code 'http://loinc.org#10013-1-9999999999' for in-memory expansion"); } private Set toExpandedCodes(ValueSet theExpanded) { diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml index fc415fbd3c16..b0dd0ec3061a 100644 --- a/hapi-fhir-jpaserver-test-r4/pom.xml +++ b/hapi-fhir-jpaserver-test-r4/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java index 75aa29e41d3e..282d62feb5d4 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java @@ -191,8 +191,8 @@ public void testNotAlreadyExisting() { String sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false); assertThat(sql).satisfiesAnyOf( - s -> assertThat(s).contains("rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B' and rt1_0.PARTITION_ID is null or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A' and rt1_0.PARTITION_ID is null"), - s -> assertThat(s).contains("rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A' and rt1_0.PARTITION_ID is null or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B' and rt1_0.PARTITION_ID is null") + s -> 
assertThat(s).contains("where rt1_0.PARTITION_ID is null and (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B' or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A')"), + s -> assertThat(s).contains("where rt1_0.PARTITION_ID is null and (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A' or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B')") ); assertEquals(50, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoTest.java index 6a6cada087af..8deff0d665b9 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoTest.java @@ -57,6 +57,7 @@ import org.springframework.context.ApplicationContext; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionSynchronizationManager; import java.util.Collections; import java.util.List; @@ -256,7 +257,13 @@ public void delete_nonExistentEntity_doesNotThrow404() { // but for now, Mockito will complain, so we'll leave it out // test - DaoMethodOutcome outcome = mySvc.delete(id, deleteConflicts, requestDetails, transactionDetails); + DaoMethodOutcome outcome; + try { + TransactionSynchronizationManager.setActualTransactionActive(true); + outcome = mySvc.delete(id, deleteConflicts, requestDetails, transactionDetails); + } finally { + TransactionSynchronizationManager.setActualTransactionActive(false); + } // verify assertNotNull(outcome); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupportTest.java 
b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupportTest.java deleted file mode 100644 index f18a5008b3ac..000000000000 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupportTest.java +++ /dev/null @@ -1,119 +0,0 @@ -package ca.uhn.fhir.jpa.dao; - -import static org.junit.jupiter.api.Assertions.assertNull; -/*- - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2024 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.context.support.IValidationSupport; -import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; -import ca.uhn.fhir.jpa.term.api.ITermReadSvc; -import ca.uhn.fhir.sl.cache.Cache; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.r4.model.CodeSystem; -import org.hl7.fhir.r4.model.ValueSet; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import org.springframework.test.util.ReflectionTestUtils; - -import java.util.function.Function; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.isA; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -@ExtendWith(MockitoExtension.class) -class JpaPersistedResourceValidationSupportTest { - - private FhirContext theFhirContext = FhirContext.forR4(); - - @Mock private ITermReadSvc myTermReadSvc; - @Mock private DaoRegistry myDaoRegistry; - @Mock private Cache myLoadCache; - @Mock private IFhirResourceDao myValueSetResourceDao; - - @InjectMocks - private IValidationSupport testedClass = - new JpaPersistedResourceValidationSupport(theFhirContext); - - private Class myCodeSystemType = CodeSystem.class; - private Class myValueSetType = ValueSet.class; - - - @BeforeEach - public void setup() { - ReflectionTestUtils.setField(testedClass, "myValueSetType", myValueSetType); - } - - - @Nested - public class FetchCodeSystemTests { - - @Test - void fetchCodeSystemMustUseForcedId() { - 
testedClass.fetchCodeSystem("string-containing-loinc"); - - verify(myTermReadSvc, times(1)).readCodeSystemByForcedId(LOINC_LOW); - verify(myLoadCache, never()).get(anyString(), isA(Function.class)); - } - - - @Test - void fetchCodeSystemMustNotUseForcedId() { - testedClass.fetchCodeSystem("string-not-containing-l-o-i-n-c"); - - verify(myTermReadSvc, never()).readCodeSystemByForcedId(LOINC_LOW); - verify(myLoadCache, times(1)).get(anyString(), isA(Function.class)); - } - - } - - - @Nested - public class FetchValueSetTests { - - @Test - void fetchValueSetMustUseForcedId() { - final String valueSetId = "string-containing-loinc"; - assertNull(testedClass.fetchValueSet(valueSetId)); - } - - - @Test - void fetchValueSetMustNotUseForcedId() { - testedClass.fetchValueSet("string-not-containing-l-o-i-n-c"); - - verify(myLoadCache, times(1)).get(anyString(), isA(Function.class)); - } - - } - - -} diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java index b088feb5cfcb..188c4a20a31a 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java @@ -1,6 +1,5 @@ package ca.uhn.fhir.jpa.dao; -import static org.junit.jupiter.api.Assertions.assertEquals; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.executor.InterceptorService; @@ -15,10 +14,13 @@ import ca.uhn.fhir.jpa.dao.tx.NonTransactionalHapiTransactionService; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; +import ca.uhn.fhir.jpa.search.ResourceSearchUrlSvc; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher; import 
ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import jakarta.persistence.EntityManager; +import jakarta.persistence.EntityManagerFactory; import org.hibernate.Session; import org.hibernate.internal.SessionImpl; import org.hl7.fhir.r4.model.Bundle; @@ -39,10 +41,7 @@ import org.springframework.test.context.junit.jupiter.SpringExtension; import org.springframework.transaction.PlatformTransactionManager; -import jakarta.persistence.EntityManager; -import jakarta.persistence.EntityManagerFactory; - -import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; @@ -82,6 +81,8 @@ public class TransactionProcessorTest { private SessionImpl mySession; @MockBean private IFhirSystemDao mySystemDao; + @MockBean + private ResourceSearchUrlSvc myResourceSearchUrlSvc; @BeforeEach public void before() { @@ -152,6 +153,5 @@ public ITransactionProcessorVersionAdapter public IHapiTransactionService hapiTransactionService() { return new NonTransactionalHapiTransactionService(); } - } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboNonUniqueParamTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboNonUniqueParamTest.java index 0b7dcd5878a1..087e2a5c173e 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboNonUniqueParamTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboNonUniqueParamTest.java @@ -423,8 +423,7 @@ public void testStringAndReference_SearchByUnqualifiedReference() { myCaptureQueriesListener.logSelectQueries(); String expected; - expected = "select rt1_0.RES_ID,rt1_0.RES_TYPE,rt1_0.FHIR_ID from HFJ_RESOURCE 
rt1_0 where rt1_0.FHIR_ID='my-org'"; - assertEquals(expected, myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false)); + assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false)).contains("where (rt1_0.FHIR_ID='my-org')"); String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false); assertThat(sql).contains("SP_VALUE_NORMALIZED LIKE 'FAMILY1%'"); assertThat(sql).contains("t1.TARGET_RESOURCE_ID"); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java index 04c3797f2d4d..c3d4e0ee79f4 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java @@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao; import ca.uhn.fhir.jpa.interceptor.UserRequestRetryVersionConflictsInterceptor; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.search.ResourceSearchUrlSvc; @@ -171,8 +172,8 @@ public void testMethodDeleteByResId_withEntries_willDeleteTheEntryIfExists(){ myResourceSearchUrlDao.saveAll(asList(entry1, entry2)); // when - myResourceSearchUrlSvc.deleteByResId(entry1.getResourcePid()); - myResourceSearchUrlSvc.deleteByResId(nonExistentResourceId); + myResourceSearchUrlSvc.deleteByResId(JpaPid.fromId(entry1.getResourcePid())); + myResourceSearchUrlSvc.deleteByResId(JpaPid.fromId(nonExistentResourceId)); // then List resourcesPids = getStoredResourceSearchUrlEntitiesPids(); diff --git 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java index ef1b27147438..a6fbc97f2686 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java @@ -73,12 +73,12 @@ public void testDeleteMarksResourceAndVersionAsDeleted() { // Current version should be marked as deleted runInTransaction(() -> { - ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1); + ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1); assertNull(resourceTable.getDeleted()); assertNotNull(resourceTable.getPersistentId()); }); runInTransaction(() -> { - ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2); + ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 2); assertNotNull(resourceTable.getDeleted()); }); @@ -215,7 +215,7 @@ public void testResourceIsConsideredDeletedIfOnlyResourceTableEntryIsDeleted() { // Mark the current history version as not-deleted even though the actual resource // table entry is marked deleted runInTransaction(() -> { - ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2); + ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 2); resourceTable.setDeleted(null); myResourceHistoryTableDao.save(resourceTable); }); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java 
b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java index 351635cbb02f..09338888d278 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java @@ -30,7 +30,7 @@ public void testRetrieveNonInlinedResource() { relocateResourceTextToCompressedColumn(pid, 1L); runInTransaction(()->{ - ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(pid, 1); + ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersion(pid, 1); assertNotNull(historyEntity.getResource()); assertNull(historyEntity.getResourceTextVc()); assertEquals(ResourceEncodingEnum.JSONC, historyEntity.getEncoding()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java index aa5fd85af49f..93795cea439b 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java @@ -8,15 +8,19 @@ import ca.uhn.fhir.batch2.jobs.chunk.TypedPidJson; import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeStep; import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; +import ca.uhn.fhir.batch2.jobs.reindex.models.ReindexResults; import ca.uhn.fhir.batch2.jobs.reindex.v2.ReindexStepV2; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.WorkChunk; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.support.ConceptValidationOptions; +import ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.context.support.ValidationSupportContext; import 
ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.ReindexParameters; +import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum; @@ -35,8 +39,11 @@ import ca.uhn.fhir.jpa.term.TermReadSvcImpl; import ca.uhn.fhir.jpa.test.util.SubscriptionTestUtil; import ca.uhn.fhir.jpa.util.SqlQuery; +import ca.uhn.fhir.rest.api.EncodingEnum; +import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.SortSpec; +import ca.uhn.fhir.rest.api.ValidationModeEnum; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.param.ReferenceParam; @@ -65,12 +72,14 @@ import org.hl7.fhir.r4.model.Coverage; import org.hl7.fhir.r4.model.DateTimeType; import org.hl7.fhir.r4.model.Encounter; +import org.hl7.fhir.r4.model.Enumerations; import org.hl7.fhir.r4.model.ExplanationOfBenefit; import org.hl7.fhir.r4.model.Group; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.Location; import org.hl7.fhir.r4.model.Narrative; import org.hl7.fhir.r4.model.Observation; +import org.hl7.fhir.r4.model.OperationOutcome; import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.Patient; import org.hl7.fhir.r4.model.Practitioner; @@ -81,6 +90,7 @@ import org.hl7.fhir.r4.model.Reference; import org.hl7.fhir.r4.model.ServiceRequest; import org.hl7.fhir.r4.model.StringType; +import org.hl7.fhir.r4.model.StructureDefinition; import org.hl7.fhir.r4.model.Subscription; import org.hl7.fhir.r4.model.ValueSet; import org.junit.jupiter.api.AfterEach; @@ -92,6 +102,8 @@ import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; 
import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.Mock; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Slice; @@ -148,6 +160,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test public static final HashMapResourceProviderExtension ourPatientProvider = new HashMapResourceProviderExtension<>(ourServer, Patient.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4QueryCountTest.class); @Autowired + protected SubscriptionTestUtil mySubscriptionTestUtil; + @Autowired private ISearchParamPresentDao mySearchParamPresentDao; @Autowired private ISubscriptionTriggeringSvc mySubscriptionTriggeringSvc; @@ -155,9 +169,13 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test private ReindexStepV2 myReindexStep; @Autowired private DeleteExpungeStep myDeleteExpungeStep; - @Autowired - protected SubscriptionTestUtil mySubscriptionTestUtil; private ReindexTestHelper myReindexTestHelper; + @Mock + private IJobDataSink myMockJobDataSinkVoid; + @Mock + private WorkChunk myMockWorkChunk; + @Mock + private IJobDataSink myMockJobDataSinkReindexResults; @AfterEach public void afterResetDao() { @@ -314,7 +332,6 @@ public void testTransactionWithManyResourceLinks() { assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); } - /* * See the class javadoc before changing the counts in this test! */ @@ -345,7 +362,6 @@ public void testUpdateWithNoChanges() { assertThat(myCaptureQueriesListener.getDeleteQueriesForCurrentThread()).isEmpty(); } - /** * See the class javadoc before changing the counts in this test! 
*/ @@ -556,7 +572,6 @@ public void testUpdate_DeletesSearchUrlOnlyWhenPresent() { assertEquals(1L, id.getVersionIdPartAsLong()); // Update 1 - Should delete search URL - p.setActive(true); myCaptureQueriesListener.clear(); id = myPatientDao.update(p, "Patient?identifier=http://foo|123", mySrd).getId(); @@ -564,13 +579,11 @@ public void testUpdate_DeletesSearchUrlOnlyWhenPresent() { assertEquals(2L, id.getVersionIdPartAsLong()); // Update 2 - Should not try to delete search URL - p.setActive(false); myCaptureQueriesListener.clear(); id = myPatientDao.update(p, "Patient?identifier=http://foo|123", mySrd).getId(); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); assertEquals(3L, id.getVersionIdPartAsLong()); - } /** @@ -642,7 +655,7 @@ public void testValidate() { cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE); cs.addConcept().setCode("bar-1").setDisplay("Bar 1"); cs.addConcept().setCode("bar-2").setDisplay("Bar 2"); - myCodeSystemDao.create(cs); + myCodeSystemDao.create(cs, mySrd); ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(cs)); Observation obs = new Observation(); @@ -665,11 +678,11 @@ public void testValidate() { fail(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome())); } myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(10, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()); + assertEquals(12, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()); assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size()); assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size()); assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size()); - assertEquals(8, myCaptureQueriesListener.getCommitCount()); + assertEquals(9, myCaptureQueriesListener.countCommits()); // Validate again (should rely only on caches) 
myCaptureQueriesListener.clear(); @@ -682,7 +695,7 @@ public void testValidate() { assertThat(myCaptureQueriesListener.getInsertQueriesForCurrentThread()).isEmpty(); myCaptureQueriesListener.logDeleteQueriesForCurrentThread(); assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size()); - assertEquals(0, myCaptureQueriesListener.getCommitCount()); + assertEquals(0, myCaptureQueriesListener.countCommits()); } /** @@ -693,12 +706,12 @@ public void testVRead() { IIdType id = runInTransaction(() -> { Patient p = new Patient(); p.addIdentifier().setSystem("urn:system").setValue("2"); - return myPatientDao.create(p).getId().toUnqualified(); + return myPatientDao.create(p, mySrd).getId().toUnqualified(); }); myCaptureQueriesListener.clear(); runInTransaction(() -> { - myPatientDao.read(id.withVersion("1")); + myPatientDao.read(id.withVersion("1"), mySrd); }); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread()).hasSize(2); @@ -720,7 +733,7 @@ public void testCreateWithClientAssignedId() { runInTransaction(() -> { Patient p = new Patient(); p.getMaritalStatus().setText("123"); - return myPatientDao.create(p).getId().toUnqualified(); + return myPatientDao.create(p, mySrd).getId().toUnqualified(); }); myCaptureQueriesListener.clear(); @@ -729,7 +742,7 @@ public void testCreateWithClientAssignedId() { Patient p = new Patient(); p.setId("AAA"); p.getMaritalStatus().setText("123"); - return myPatientDao.update(p).getId().toUnqualified(); + return myPatientDao.update(p, mySrd).getId().toUnqualified(); }); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); @@ -764,7 +777,7 @@ public void testCreateWithServerAssignedId_AnyClientAssignedIdStrategy() { Patient p = new Patient(); p.setUserData("ABAB", "ABAB"); p.getMaritalStatus().setText("123"); - return myPatientDao.create(p).getId().toUnqualifiedVersionless(); + return myPatientDao.create(p, 
mySrd).getId().toUnqualifiedVersionless(); }); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); @@ -808,14 +821,14 @@ public void testCreateWithClientAssignedId_AnyClientAssignedIdStrategy() { Patient p = new Patient(); p.setUserData("ABAB", "ABAB"); p.getMaritalStatus().setText("123"); - return myPatientDao.create(p).getId().toUnqualified(); + return myPatientDao.create(p, mySrd).getId().toUnqualified(); }); runInTransaction(() -> { Patient p = new Patient(); p.setId("BBB"); p.getMaritalStatus().setText("123"); - myPatientDao.update(p); + myPatientDao.update(p, mySrd); }); myCaptureQueriesListener.clear(); @@ -824,7 +837,7 @@ public void testCreateWithClientAssignedId_AnyClientAssignedIdStrategy() { Patient p = new Patient(); p.setId("AAA"); p.getMaritalStatus().setText("123"); - myPatientDao.update(p); + myPatientDao.update(p, mySrd); }); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); @@ -919,13 +932,11 @@ public void testDeleteExpungeStep() { .map(t -> new TypedPidJson(t.getResourceType(), Long.toString(t.getResourceId()))) .collect(Collectors.toList())); - runInTransaction(()-> assertEquals(10, myResourceTableDao.count())); - - IJobDataSink sink = mock(IJobDataSink.class); + runInTransaction(() -> assertEquals(10, myResourceTableDao.count())); // Test myCaptureQueriesListener.clear(); - RunOutcome outcome = myDeleteExpungeStep.doDeleteExpunge(new ResourceIdListWorkChunkJson(pids, null), sink, "instance-id", "chunk-id", false, null); + RunOutcome outcome = myDeleteExpungeStep.doDeleteExpunge(new ResourceIdListWorkChunkJson(pids, null), myMockJobDataSinkVoid, "instance-id", "chunk-id", false, null); // Verify assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); @@ -933,7 +944,7 @@ public void testDeleteExpungeStep() { assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(29, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); assertEquals(10, 
outcome.getRecordsProcessed()); - runInTransaction(()-> assertEquals(0, myResourceTableDao.count())); + runInTransaction(() -> assertEquals(0, myResourceTableDao.count())); } @@ -1128,9 +1139,9 @@ public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexPar params, data, instance, - mock(WorkChunk.class) + myMockWorkChunk ); - RunOutcome outcome = myReindexStep.run(stepExecutionDetails, mock(IJobDataSink.class)); + RunOutcome outcome = myReindexStep.run(stepExecutionDetails, myMockJobDataSinkReindexResults); // validate assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread()).hasSize(theExpectedSelectCount); @@ -1142,7 +1153,7 @@ public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexPar @Test public void testReindexJob_ComboParamIndexesInUse() { - myStorageSettings.setUniqueIndexesEnabled(true); + myStorageSettings.setUniqueIndexesEnabled(true); myReindexTestHelper.createUniqueCodeSearchParameter(); myReindexTestHelper.createNonUniqueStatusAndCodeSearchParameter(); @@ -1152,38 +1163,38 @@ public void testReindexJob_ComboParamIndexesInUse() { transactionResonse .getEntry() .stream() - .map(t->new IdType(t.getResponse().getLocation())) - .forEach(t->data.addTypedPid("Observation", t.getIdPartAsLong())); - - runInTransaction(() -> { - assertEquals(24L, myResourceTableDao.count()); - assertEquals(20L, myResourceIndexedComboStringUniqueDao.count()); - assertEquals(20L, myResourceIndexedComboTokensNonUniqueDao.count()); - }); - - ReindexJobParameters params = new ReindexJobParameters() - .setOptimizeStorage(ReindexParameters.OptimizeStorageModeEnum.NONE) - .setReindexSearchParameters(ReindexParameters.ReindexSearchParametersEnum.ALL) - .setOptimisticLock(false); - - // execute - myCaptureQueriesListener.clear(); + .map(t -> new IdType(t.getResponse().getLocation())) + .forEach(t -> data.addTypedPid("Observation", t.getIdPartAsLong())); + + runInTransaction(() -> { + assertEquals(24L, myResourceTableDao.count()); + 
assertEquals(20L, myResourceIndexedComboStringUniqueDao.count()); + assertEquals(20L, myResourceIndexedComboTokensNonUniqueDao.count()); + }); + + ReindexJobParameters params = new ReindexJobParameters() + .setOptimizeStorage(ReindexParameters.OptimizeStorageModeEnum.NONE) + .setReindexSearchParameters(ReindexParameters.ReindexSearchParametersEnum.ALL) + .setOptimisticLock(false); + + // execute + myCaptureQueriesListener.clear(); JobInstance instance = new JobInstance(); StepExecutionDetails stepExecutionDetails = new StepExecutionDetails<>( params, data, instance, - mock(WorkChunk.class) + myMockWorkChunk ); - RunOutcome outcome = myReindexStep.run(stepExecutionDetails, mock(IJobDataSink.class)); + RunOutcome outcome = myReindexStep.run(stepExecutionDetails, myMockJobDataSinkReindexResults); assertEquals(20, outcome.getRecordsProcessed()); - // validate - assertEquals(4, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()); - assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size()); - assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size()); - assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size()); - } + // validate + assertEquals(4, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()); + assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size()); + assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size()); + assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size()); + } public void assertNoPartitionSelectors() { List selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread(); @@ -1460,7 +1471,6 @@ public void testSearchByMultipleIds() { } - /** * See the class javadoc before changing the counts in this test! 
*/ @@ -2500,7 +2510,7 @@ public void testTransactionWithMultipleConditionalUpdates() { assertEquals(4, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); assertEquals(8, myCaptureQueriesListener.countUpdateQueries()); - assertEquals(4, myCaptureQueriesListener.countDeleteQueries()); + assertEquals(1, myCaptureQueriesListener.countDeleteQueries()); /* * Third time with mass ingestion mode enabled @@ -3405,7 +3415,7 @@ public void testTransaction_ComboParamIndexesInUse_NoPreCheck() { @Test public void testTriggerSubscription_Sync() throws Exception { // Setup - IntStream.range(0, 200).forEach(i->createAPatient()); + IntStream.range(0, 200).forEach(i -> createAPatient()); mySubscriptionTestUtil.registerRestHookInterceptor(); ForceOffsetSearchModeInterceptor interceptor = new ForceOffsetSearchModeInterceptor(); @@ -3417,7 +3427,7 @@ public void testTriggerSubscription_Sync() throws Exception { waitForActivatedSubscriptionCount(1); - mySubscriptionTriggeringSvc.triggerSubscription(null, List.of(new StringType("Patient?")), subscriptionId, mySrd); + mySubscriptionTriggeringSvc.triggerSubscription(null, List.of(new StringType("Patient?")), subscriptionId, mySrd); // Test myCaptureQueriesListener.clear(); @@ -3443,7 +3453,7 @@ public void testTriggerSubscription_Sync() throws Exception { @Test public void testTriggerSubscription_Async() throws Exception { // Setup - IntStream.range(0, 200).forEach(i->createAPatient()); + IntStream.range(0, 200).forEach(i -> createAPatient()); mySubscriptionTestUtil.registerRestHookInterceptor(); @@ -3699,7 +3709,7 @@ public void testMassIngestionMode_TransactionWithChanges() { myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(7, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); - assertEquals(2, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); + assertEquals(1, 
myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); IdType patientId = new IdType(outcome.getEntry().get(1).getResponse().getLocation()); @@ -3782,7 +3792,7 @@ public void testMassIngestionMode_TransactionWithChanges_NonVersionedTags() thro assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); - assertEquals(2, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); } @@ -3804,7 +3814,7 @@ public void testDeleteResource_WithOutgoingReference() { assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(3, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); - runInTransaction(()->{ + runInTransaction(() -> { ResourceTable version = myResourceTableDao.findById(patientId.getIdPartAsLong()).orElseThrow(); assertFalse(version.isParamsTokenPopulated()); assertFalse(version.isHasLinks()); @@ -3825,7 +3835,7 @@ public void testDeleteResource_WithMassIngestionMode_enabled() { Observation observation = new Observation().setStatus(Observation.ObservationStatus.FINAL).addCategory(new CodeableConcept().addCoding(new Coding("http://category-type", "12345", null))).setCode(new CodeableConcept().addCoding(new Coding("http://coverage-type", "12345", null))); IIdType idDt = myObservationDao.create(observation, mySrd).getEntity().getIdDt(); - runInTransaction(()->{ + runInTransaction(() -> { assertEquals(4, myResourceIndexedSearchParamTokenDao.count()); ResourceTable version = myResourceTableDao.findById(idDt.getIdPartAsLong()).orElseThrow(); 
assertTrue(version.isParamsTokenPopulated()); @@ -3837,13 +3847,217 @@ public void testDeleteResource_WithMassIngestionMode_enabled() { // then assertQueryCount(3, 1, 1, 2); - runInTransaction(()->{ + runInTransaction(() -> { assertEquals(0, myResourceIndexedSearchParamTokenDao.count()); ResourceTable version = myResourceTableDao.findById(idDt.getIdPartAsLong()).orElseThrow(); assertFalse(version.isParamsTokenPopulated()); }); } + @Test + public void testFetchStructureDefinition_BuiltIn() { + + // First pass with an empty cache + myValidationSupport.invalidateCaches(); + myCaptureQueriesListener.clear(); + assertNotNull(myValidationSupport.fetchStructureDefinition("http://hl7.org/fhir/StructureDefinition/Patient")); + + assertEquals(0, myCaptureQueriesListener.countGetConnections()); + assertEquals(0, myCaptureQueriesListener.countSelectQueries()); + + // Again (should use cache) + myCaptureQueriesListener.clear(); + assertNotNull(myValidationSupport.fetchStructureDefinition("http://hl7.org/fhir/StructureDefinition/Patient")); + + assertEquals(0, myCaptureQueriesListener.countGetConnections()); + assertEquals(0, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testFetchStructureDefinition_StoredInRepository() { + + StructureDefinition sd = new StructureDefinition(); + sd.setUrl("http://foo"); + myStructureDefinitionDao.create(sd, mySrd); + + // First pass with an empty cache + myValidationSupport.invalidateCaches(); + myCaptureQueriesListener.clear(); + assertNotNull(myValidationSupport.fetchStructureDefinition("http://foo")); + + assertEquals(1, myCaptureQueriesListener.countGetConnections()); + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + + // Again (should use cache) + myCaptureQueriesListener.clear(); + assertNotNull(myValidationSupport.fetchStructureDefinition("http://foo")); + + assertEquals(0, myCaptureQueriesListener.countGetConnections()); + assertEquals(0, myCaptureQueriesListener.countSelectQueries()); + } 
+ + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testValidateResource(boolean theStoredInRepository) { + Patient resource = new Patient(); + resource.setGender(Enumerations.AdministrativeGender.MALE); + resource.getText().setStatus(Narrative.NarrativeStatus.GENERATED).setDivAsString("

hello
"); + String encoded; + + IIdType id = null; + int initialAdditionalSelects = 0; + if (theStoredInRepository) { + id = myPatientDao.create(resource, mySrd).getId(); + resource = null; + encoded = null; + initialAdditionalSelects = 1; + } else { + resource.setId("A"); + encoded = myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(resource); + } + + myCaptureQueriesListener.clear(); + ValidationModeEnum mode = ValidationModeEnum.UPDATE; + MethodOutcome outcome = myPatientDao.validate(resource, id, encoded, EncodingEnum.JSON, mode, null, mySrd); + assertThat(((OperationOutcome)outcome.getOperationOutcome()).getIssueFirstRep().getDiagnostics()).contains("No issues detected"); + myCaptureQueriesListener.logSelectQueries(); + if (theStoredInRepository) { + assertEquals(7, myCaptureQueriesListener.countGetConnections()); + assertEquals(8, myCaptureQueriesListener.countSelectQueries()); + } else { + assertEquals(6, myCaptureQueriesListener.countGetConnections()); + assertEquals(6, myCaptureQueriesListener.countSelectQueries()); + } + + // Again (should use caches) + myCaptureQueriesListener.clear(); + outcome = myPatientDao.validate(resource, id, encoded, EncodingEnum.JSON, mode, null, mySrd); + assertThat(((OperationOutcome)outcome.getOperationOutcome()).getIssueFirstRep().getDiagnostics()).contains("No issues detected"); + if (theStoredInRepository) { + assertEquals(1, myCaptureQueriesListener.countGetConnections()); + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } else { + assertEquals(0, myCaptureQueriesListener.countGetConnections()); + assertEquals(0, myCaptureQueriesListener.countSelectQueries()); + } + } + + + + @Test + public void testValidateCode_BuiltIn() { + + // First pass with an empty cache + myValidationSupport.invalidateCaches(); + myCaptureQueriesListener.clear(); + ValidationSupportContext ctx = new ValidationSupportContext(myValidationSupport); + ConceptValidationOptions options = new 
ConceptValidationOptions(); + String vsUrl = "http://hl7.org/fhir/ValueSet/marital-status"; + String csUrl = "http://terminology.hl7.org/CodeSystem/v3-MaritalStatus"; + String code = "I"; + String code2 = "A"; + assertTrue(myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl).isOk()); + + assertEquals(1, myCaptureQueriesListener.countGetConnections()); + assertEquals(1, myCaptureQueriesListener.countSelectQueries()); + + // Again (should use cache) + myCaptureQueriesListener.clear(); + assertTrue(myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl).isOk()); + assertEquals(0, myCaptureQueriesListener.countGetConnections()); + assertEquals(0, myCaptureQueriesListener.countSelectQueries()); + + // Different code (should use cache) + myCaptureQueriesListener.clear(); + assertTrue(myValidationSupport.validateCode(ctx, options, csUrl, code2, null, vsUrl).isOk()); + assertEquals(0, myCaptureQueriesListener.countGetConnections()); + assertEquals(0, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testValidateCode_StoredInRepository() { + String vsUrl = "http://vs"; + String csUrl = "http://cs"; + String code = "A"; + String code2 = "B"; + + CodeSystem cs = new CodeSystem(); + cs.setUrl(csUrl); + cs.setStatus(Enumerations.PublicationStatus.ACTIVE); + cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE); + cs.addConcept().setCode(code); + cs.addConcept().setCode(code2); + myCodeSystemDao.create(cs, mySrd); + + ValueSet vs = new ValueSet(); + vs.setUrl(vsUrl); + vs.setStatus(Enumerations.PublicationStatus.ACTIVE); + vs.getCompose().addInclude().setSystem(csUrl); + myValueSetDao.create(vs, mySrd); + IValidationSupport.CodeValidationResult result; + + // First pass with an empty cache + myValidationSupport.invalidateCaches(); + myCaptureQueriesListener.clear(); + ValidationSupportContext ctx = new ValidationSupportContext(myValidationSupport); + ConceptValidationOptions options = new 
ConceptValidationOptions(); + result = myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl); + assertNotNull(result); + assertTrue(result.isOk()); + assertThat(result.getMessage()).isNull(); + + assertEquals(4, myCaptureQueriesListener.countGetConnections()); + assertEquals(8, myCaptureQueriesListener.countSelectQueries()); + myCaptureQueriesListener.logSelectQueries(); + + // Again (should use cache) + myCaptureQueriesListener.clear(); + result = myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl); + assertNotNull(result); + assertTrue(result.isOk()); + assertThat(result.getMessage()).isNull(); + assertEquals(0, myCaptureQueriesListener.countGetConnections()); + assertEquals(0, myCaptureQueriesListener.countSelectQueries()); + + // Different code (should use cache) + myCaptureQueriesListener.clear(); + result = myValidationSupport.validateCode(ctx, options, csUrl, code2, null, vsUrl); + assertNotNull(result); + assertTrue(result.isOk()); + assertEquals(1, myCaptureQueriesListener.countGetConnections()); + assertEquals(0, myCaptureQueriesListener.countSelectQueries()); + + // Now pre-expand the VS and try again (should use disk because we're fetching from pre-expansion) + myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + myCaptureQueriesListener.clear(); + result = myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl); + assertNotNull(result); + assertTrue(result.isOk()); + assertThat(result.getMessage()).contains("expansion that was pre-calculated"); + assertEquals(3, myCaptureQueriesListener.countGetConnections()); + assertEquals(7, myCaptureQueriesListener.countSelectQueries()); + + // Same code (should use cache) + myCaptureQueriesListener.clear(); + result = myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl); + assertNotNull(result); + assertTrue(result.isOk()); + assertThat(result.getMessage()).contains("expansion that was pre-calculated"); + assertEquals(0, 
myCaptureQueriesListener.countGetConnections()); + assertEquals(0, myCaptureQueriesListener.countSelectQueries()); + + // Different code (should use cache) + myCaptureQueriesListener.clear(); + result = myValidationSupport.validateCode(ctx, options, csUrl, code2, null, vsUrl); + assertNotNull(result); + assertTrue(result.isOk()); + assertThat(result.getMessage()).contains("expansion that was pre-calculated"); + assertEquals(1, myCaptureQueriesListener.countGetConnections()); + assertEquals(1, myCaptureQueriesListener.countSelectQueries()); + + } + + private void assertQueryCount(int theExpectedSelectCount, int theExpectedUpdateCount, int theExpectedInsertCount, int theExpectedDeleteCount) { assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread()).hasSize(theExpectedSelectCount); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java index 460d755f7e95..e0a45c27060c 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java @@ -828,7 +828,7 @@ public void testReferenceOrLinksUseInList() { String resultingQueryNotFormatted = queries.get(0); assertThat(StringUtils.countMatches(resultingQueryNotFormatted, "Patient.managingOrganization")).as(resultingQueryNotFormatted).isEqualTo(1); - assertThat(resultingQueryNotFormatted).contains("TARGET_RESOURCE_ID IN ('" + ids.get(0) + "','" + ids.get(1) + "','" + ids.get(2) + "','" + ids.get(3) + "','" + ids.get(4) + "')"); + assertThat(resultingQueryNotFormatted).matches("^SELECT .* WHERE .*TARGET_RESOURCE_ID IN \\(.*\\)$"); // Ensure that the search actually worked assertEquals(5, search.size().intValue()); @@ -891,7 +891,7 @@ public void 
testSearchOnIdAndReference_SearchById() { myCaptureQueriesListener.logSelectQueriesForCurrentThread(); String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false); - assertThat(selectQuery).contains("where rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B'"); + assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B')"); selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false); assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A')"); assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); @@ -910,7 +910,10 @@ public void testSearchOnIdAndReference_SearchById() { assertThat(outcome.getResources(0, 999)).hasSize(2); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false); - assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A' or rt1_0.RES_ID='" + obs2id + "')"); + assertThat(selectQuery).containsAnyOf( + "where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A' or rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='" + obs2id + "')", + "where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='" + obs2id + "' or rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A')" + ); assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchSqlTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchSqlTest.java index cd9b1955cf82..08e754b84730 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchSqlTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchSqlTest.java @@ -18,6 +18,7 @@ import 
org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; import org.slf4j.Logger; @@ -25,6 +26,7 @@ import java.util.List; import java.util.UUID; +import java.util.stream.Collectors; import static ca.uhn.fhir.interceptor.api.Pointcut.STORAGE_PARTITION_IDENTIFY_ANY; import static org.assertj.core.api.Assertions.assertThat; @@ -175,8 +177,10 @@ public void testTwoRegularSearchParams() { } - @Test - public void testSearchByProfile_VersionedMode() { + @ParameterizedTest + @EnumSource(value = JpaStorageSettings.TagStorageModeEnum.class, names = {"NON_VERSIONED", "VERSIONED"}) + public void testSearchByProfile_VersionedAndNonVersionedMode(JpaStorageSettings.TagStorageModeEnum theTagStorageModeEnum) { + myStorageSettings.setTagStorageMode(theTagStorageModeEnum); // Put a tag in so we can search for it String code = "http://" + UUID.randomUUID(); @@ -185,24 +189,33 @@ public void testSearchByProfile_VersionedMode() { IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless(); myMemoryCacheService.invalidateAllCaches(); + logAllResourceTags(); + logAllResourceHistoryTags(); + // Search myCaptureQueriesListener.clear(); SearchParameterMap map = SearchParameterMap.newSynchronous() .add(Constants.PARAM_PROFILE, new TokenParam(code)); IBundleProvider outcome = myPatientDao.search(map, mySrd); - assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + assertEquals(3, myCaptureQueriesListener.logSelectQueries().size()); // Query 1 - Find resources: Make sure we search for tag type+system+code always String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false); assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON 
(t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?)))", sql); - // Query 2 - Load resourece contents + // Query 2 - Load resource contents sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false); - assertThat(sql).contains("where rsv1_0.RES_ID in (?)"); - // Query 3 - Load tags and defintions + assertThat(sql).contains("where rht1_0.RES_ID in (?)"); + // Query 3 - Load tags and definitions sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(false, false); - assertThat(sql).contains("from HFJ_RES_TAG rt1_0 join HFJ_TAG_DEF"); + if (theTagStorageModeEnum == JpaStorageSettings.TagStorageModeEnum.VERSIONED) { + assertThat(sql).contains("from HFJ_HISTORY_TAG rht1_0 join HFJ_TAG_DEF"); + } else { + assertThat(sql).contains("from HFJ_RES_TAG rt1_0 join HFJ_TAG_DEF"); + } assertThat(toUnqualifiedVersionlessIds(outcome)).containsExactly(id); + List profileDeclarations = outcome.getResources(0, 1).get(0).getMeta().getProfile().stream().map(t -> t.getValueAsString()).collect(Collectors.toList()); + assertThat(profileDeclarations).containsExactly(code); } @Test @@ -234,7 +247,7 @@ public void testSearchByProfile_InlineMode() { assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?)", sql); // Query 2 - Load resourece contents sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false); - assertThat(sql).contains("where rsv1_0.RES_ID in (?)"); + assertThat(sql).contains("where rht1_0.RES_ID in (?)"); assertThat(toUnqualifiedVersionlessIds(outcome)).containsExactly(id); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java index 82c8ae17cb7a..475238805bf4 100644 --- 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java @@ -302,7 +302,7 @@ public void testDeletedResourcesAreReindexed() { table.setDeleted(new Date()); table = myResourceTableDao.saveAndFlush(table); ResourceHistoryTable newHistory = table.toHistory(true); - ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(table.getId(), 1L); + ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersion(table.getId(), 1L); newHistory.setEncoding(currentHistory.getEncoding()); newHistory.setResourceTextVc(currentHistory.getResourceTextVc()); myResourceHistoryTableDao.save(newHistory); @@ -2934,7 +2934,7 @@ public void testResourceInDatabaseContainsInvalidVocabulary() { tx.execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus theStatus) { - ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L); + ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1L); String newContent = myFhirContext.newJsonParser().encodeResourceToString(p); newContent = newContent.replace("male", "foo"); table.setResourceTextVc(newContent); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java index 097d35941422..070bcefb8d4c 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java @@ -996,7 +996,7 @@ public void testUpdateRejectsInvalidTypes() { p2.setId(new IdType("Organization/" + p1id.getIdPart())); 
myOrganizationDao.update(p2, mySrd); fail(); - } catch (UnprocessableEntityException e) { + } catch (InvalidRequestException e) { // good } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValueSetTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValueSetTest.java index ac5906177587..a9d55519dd83 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValueSetTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValueSetTest.java @@ -35,6 +35,7 @@ import org.springframework.transaction.annotation.Transactional; import java.io.IOException; +import java.util.stream.Collectors; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.fail; @@ -150,7 +151,7 @@ public void testValidateCodeInValueSet_HierarchicalAndEnumeratedValueset() { outcome = myValidationSupport.validateCode(ctx, options, "http://cs", "childX", null, "http://vs"); assertNotNull(outcome); assertFalse(outcome.isOk()); - assertEquals("Unknown code 'http://cs#childX' for in-memory expansion of ValueSet 'http://vs'", outcome.getMessage()); + assertThat(outcome.getMessage()).contains("Unknown code 'http://cs#childX' for in-memory expansion of ValueSet 'http://vs'"); // In memory - Enumerated in non-present CS @@ -162,7 +163,7 @@ public void testValidateCodeInValueSet_HierarchicalAndEnumeratedValueset() { outcome = myValidationSupport.validateCode(ctx, options, "http://cs-np", "codeX", null, "http://vs"); assertNotNull(outcome); assertFalse(outcome.isOk()); - assertEquals("Unknown code 'http://cs-np#codeX' for in-memory expansion of ValueSet 'http://vs'", outcome.getMessage()); + assertThat(outcome.getMessage()).contains("Unknown code 'http://cs-np#codeX' for in-memory expansion of ValueSet 'http://vs'"); // Precalculated @@ -253,7 +254,7 @@ public void 
testValidateCodeInValueSet_HierarchicalAndEnumeratedValueset_Hiberna outcome = myValidationSupport.validateCode(ctx, options, "http://cs", "childX", null, "http://vs"); assertNotNull(outcome); assertFalse(outcome.isOk()); - assertEquals("Unknown code 'http://cs#childX' for in-memory expansion of ValueSet 'http://vs'", outcome.getMessage()); + assertThat(outcome.getMessage()).contains("Unknown code 'http://cs#childX' for in-memory expansion of ValueSet 'http://vs'"); // In memory - Enumerated in non-present CS @@ -265,7 +266,7 @@ public void testValidateCodeInValueSet_HierarchicalAndEnumeratedValueset_Hiberna outcome = myValidationSupport.validateCode(ctx, options, "http://cs-np", "codeX", null, "http://vs"); assertNotNull(outcome); assertFalse(outcome.isOk()); - assertEquals("Unknown code 'http://cs-np#codeX' for in-memory expansion of ValueSet 'http://vs'", outcome.getMessage()); + assertThat(outcome.getMessage()).contains("Unknown code 'http://cs-np#codeX' for in-memory expansion of ValueSet 'http://vs'"); // Precalculated @@ -343,7 +344,7 @@ public void testValidateCodeOperationByIdentifierAndCodeAndSystemAndBadDisplay() IValidationSupport.CodeValidationResult result = myValueSetDao.validateCode(valueSetIdentifier, id, code, system, display, coding, codeableConcept, mySrd); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); - assertEquals("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7' for in-memory expansion of ValueSet: http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", result.getMessage()); + assertThat(result.getMessage()).contains("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7' for in-memory expansion of ValueSet 
'http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2'"); } @Test @@ -551,9 +552,16 @@ public void testExpandValueSet_VsUsesVersionedSystem_CsIsFragmentWithoutCode() { vsInclude.addConcept().setCode("28571000087109").setDisplay("MODERNA COVID-19 mRNA-1273"); myValueSetDao.update(vs); + TermReadSvcImpl.setForceDisableHibernateSearchForUnitTest(true); + myTerminologyDeferredStorageSvc.saveAllDeferred(); + myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + myCaptureQueriesListener.clear();; IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(null, new IdType("ValueSet/vaccinecode"), new CodeType("28571000087109"), new CodeType("http://snomed.info/sct"), null, null, null, mySrd); - assertTrue(outcome.isOk()); + myCaptureQueriesListener.logSelectQueries(); + assertEquals(9, myCaptureQueriesListener.countSelectQueries(), ()->myCaptureQueriesListener.getSelectQueries().stream().map(t->t.getSql(true, false)).collect(Collectors.joining("\n"))); + assertThat(outcome.getMessage()).contains("Code validation occurred using a ValueSet expansion that was pre-calculated"); + assertTrue(outcome.isOk(), outcome.getMessage()); outcome = myTermSvc.validateCodeInValueSet( new ValidationSupportContext(myValidationSupport), new ConceptValidationOptions(), @@ -563,10 +571,6 @@ public void testExpandValueSet_VsUsesVersionedSystem_CsIsFragmentWithoutCode() { vs ); assertTrue(outcome.isOk()); - - ValueSet expansion = myValueSetDao.expand(new IdType("ValueSet/vaccinecode"), new ValueSetExpansionOptions(), mySrd); - ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion)); - } /** See #4449 */ diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java index f835c300c199..a4b9be0bcc4c 100644 --- 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4VersionedReferenceTest.java @@ -1,9 +1,5 @@ package ca.uhn.fhir.jpa.dao.r4; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.assertNull; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -15,6 +11,8 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.util.BundleBuilder; +import jakarta.annotation.Nonnull; +import org.hl7.fhir.instance.model.api.IBaseCoding; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.BooleanType; @@ -31,7 +29,6 @@ import org.hl7.fhir.r4.model.Reference; import org.hl7.fhir.r4.model.StringType; import org.hl7.fhir.r4.model.Task; -import jakarta.annotation.Nonnull; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; @@ -40,7 +37,6 @@ import org.junit.platform.commons.annotation.Testable; import java.io.IOException; -import java.io.InputStreamReader; import java.util.Arrays; import java.util.Collections; import java.util.Date; @@ -52,7 +48,10 @@ import static ca.uhn.fhir.util.HapiExtensions.EXTENSION_AUTO_VERSION_REFERENCES_AT_PATH; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static 
org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; @@ -70,8 +69,10 @@ public void afterEach() { @Nested public class AutoVersionReferencesWithSettingAndExtension extends AutoVersionReferencesWithExtension { + @Override @BeforeEach public void before() { + super.before(); beforeAutoVersionReferencesWithSetting(); } } @@ -219,7 +220,7 @@ public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferen @Test public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferenceToVersionedReferenceToUpsertWithChange() { - AtomicInteger counter = new AtomicInteger(); + final AtomicInteger counter = new AtomicInteger(); Supplier supplier = () -> { BundleBuilder bb = new BundleBuilder(myFhirContext); @@ -229,12 +230,12 @@ public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferen organization.setActive(true); bb.addTransactionUpdateEntry(organization); - Patient patient = new Patient(); - patient.getMeta().setExtension(patientAutoVersionExtension); - patient.setId("Patient/A"); - patient.setManagingOrganization(new Reference("Organization/O")); - patient.setActive(true); - bb.addTransactionUpdateEntry(patient); + Patient patient1 = new Patient(); + patient1.getMeta().setExtension(patientAutoVersionExtension); + patient1.setId("Patient/A"); + patient1.setManagingOrganization(new Reference("Organization/O")); + patient1.setActive(true); + bb.addTransactionUpdateEntry(patient1); ExplanationOfBenefit eob = new ExplanationOfBenefit(); eob.getMeta().setExtension(explanationOfBenefitAutoVersionExtension); @@ -274,7 +275,7 @@ public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferen public void testInsertVersionedReferenceAtPath() { Patient p = new Patient(); p.setActive(true); - IIdType patientId = myPatientDao.create(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.create(p, 
mySrd).getId().toUnqualified(); assertEquals("1", patientId.getVersionIdPart()); assertNull(patientId.getBaseUrl()); String patientIdString = patientId.getValue(); @@ -283,10 +284,10 @@ public void testInsertVersionedReferenceAtPath() { Observation observation = new Observation(); observation.getMeta().setExtension(observationAutoVersionExtension); observation.getSubject().setReference(patientId.toVersionless().getValue()); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientIdString, observation.getSubject().getReference()); myCaptureQueriesListener.clear(); @@ -297,13 +298,13 @@ public void testInsertVersionedReferenceAtPath() { observation.setId(observationId); observation.addIdentifier().setSystem("http://foo").setValue("bar"); observation.getSubject().setReference(patientId.toVersionless().getValue()); - myObservationDao.update(observation); + myObservationDao.update(observation, mySrd); // Make sure we're not introducing any extra DB operations assertThat(myCaptureQueriesListener.logSelectQueries()).hasSize(5); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientIdString, observation.getSubject().getReference()); } @@ -338,7 +339,7 @@ public void testInsertVersionedReferenceAtPath_InTransaction_SourceAndTargetBoth assertTrue(observationId.hasVersionIdPart()); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.getValue(), observation.getSubject().getReference()); 
assertEquals(encounterId.toVersionless().getValue(), observation.getEncounter().getReference()); } @@ -354,11 +355,11 @@ public void testInsertVersionedReferenceAtPath_InTransaction_TargetConditionalCr Encounter encounter = new Encounter(); encounter.setId(IdType.newRandomUuid()); encounter.addIdentifier().setSystem("http://baz").setValue("baz"); - myEncounterDao.create(encounter); + myEncounterDao.create(encounter, mySrd); } // Verify Patient Version - assertThat(myPatientDao.search(SearchParameterMap.newSynchronous("active", new TokenParam("false"))) + assertThat(myPatientDao.search(SearchParameterMap.newSynchronous("active", new TokenParam("false")), mySrd) .getResources(0, 1).get(0).getIdElement().getVersionIdPart()).isEqualTo("2"); BundleBuilder builder = new BundleBuilder(myFhirContext); @@ -386,7 +387,7 @@ public void testInsertVersionedReferenceAtPath_InTransaction_TargetConditionalCr IdType observationId = new IdType(outcome.getEntry().get(2).getResponse().getLocation()); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.getValue(), observation.getSubject().getReference()); assertEquals("2", observation.getSubject().getReferenceElement().getVersionIdPart()); assertEquals(encounterId.toVersionless().getValue(), observation.getEncounter().getReference()); @@ -402,11 +403,11 @@ public void testInsertVersionedReferenceAtPath_InTransaction_TargetUpdate() { Patient patient = new Patient(); patient.setId("PATIENT"); patient.setActive(true); - myPatientDao.update(patient).getId(); + myPatientDao.update(patient, mySrd); // Update patient to make a second version patient.setActive(false); - myPatientDao.update(patient); + myPatientDao.update(patient, mySrd); } BundleBuilder builder = new BundleBuilder(myFhirContext); @@ -431,7 +432,7 @@ public void testInsertVersionedReferenceAtPath_InTransaction_TargetUpdate() { 
assertThat(myCaptureQueriesListener.logSelectQueries()).hasSize(2); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.getValue(), observation.getSubject().getReference()); } @@ -466,7 +467,7 @@ public void testInsertVersionedReferenceAtPath_InTransaction_TargetUpdateConditi assertThat(myCaptureQueriesListener.logSelectQueries()).hasSize(3); // Read back and verify that reference is now versioned - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.getValue(), observation.getSubject().getReference()); } @@ -479,16 +480,16 @@ public void bundleTransaction_autoVersionReferenceAtPathWithPreexistingPatientRe // create patient ahead of time Patient patient = new Patient(); patient.setId(patientId); - DaoMethodOutcome outcome = myPatientDao.update(patient); + DaoMethodOutcome outcome = myPatientDao.update(patient, mySrd); assertEquals(patientId + "/_history/1", outcome.getResource().getIdElement().getValue()); - Patient returned = myPatientDao.read(idType); + Patient returned = myPatientDao.read(idType, mySrd); assertNotNull(returned); assertEquals(patientId + "/_history/1", returned.getId()); // update to change version patient.setActive(true); - myPatientDao.update(patient); + myPatientDao.update(patient, mySrd); Observation obs = new Observation(); obs.getMeta().setExtension(observationAutoVersionExtension); @@ -505,7 +506,7 @@ public void bundleTransaction_autoVersionReferenceAtPathWithPreexistingPatientRe assertNotNull(returnedTr); // some verification - Observation obRet = myObservationDao.read(obs.getIdElement()); + Observation obRet = myObservationDao.read(obs.getIdElement(), mySrd); assertNotNull(obRet); } @@ -529,9 +530,9 @@ public void testNoNpeMinimal() { assertNotNull(returnedTr); // some verification - Observation obRet = 
myObservationDao.read(obs.getIdElement()); + Observation obRet = myObservationDao.read(obs.getIdElement(), mySrd); assertNotNull(obRet); - Patient returned = myPatientDao.read(patientRef.getReferenceElement()); + Patient returned = myPatientDao.read(patientRef.getReferenceElement(), mySrd); assertNotNull(returned); } @@ -554,7 +555,7 @@ public void testInsertVersionedReferencesByPath_resourceReferenceNotInTransactio assertEquals("2", patient.getIdElement().getVersionIdPart()); // read back and verify that reference is versioned - messageHeader = myMessageHeaderDao.read(messageHeaderId); + messageHeader = myMessageHeaderDao.read(messageHeaderId, mySrd); assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference()); } @@ -599,8 +600,8 @@ public void testInsertVersionedReferencesByPath_conditionalUpdateNoOpInTransacti IdType messageHeaderId = new IdType(outcome.getEntry().get(1).getResponse().getLocation()); // read back and verify that reference is versioned and correct - Patient patient = myPatientDao.read(patientId); - MessageHeader messageHeader = myMessageHeaderDao.read(messageHeaderId); + Patient patient = myPatientDao.read(patientId, mySrd); + MessageHeader messageHeader = myMessageHeaderDao.read(messageHeaderId, mySrd); assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference()); // create bundle second time @@ -609,8 +610,8 @@ public void testInsertVersionedReferencesByPath_conditionalUpdateNoOpInTransacti messageHeaderId = new IdType(outcome.getEntry().get(1).getResponse().getLocation()); // read back and verify that reference is versioned and correct - patient = myPatientDao.read(patientId); - messageHeader = myMessageHeaderDao.read(messageHeaderId); + patient = myPatientDao.read(patientId, mySrd); + messageHeader = myMessageHeaderDao.read(messageHeaderId, mySrd); assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference()); } @@ -637,11 +638,11 @@ 
private Patient createAndUpdatePatient(String thePatientId) { Patient patient = new Patient(); patient.setId(thePatientId); patient.setActive(true); - myPatientDao.create(patient).getId(); + myPatientDao.create(patient, mySrd); // update patient to make a second version patient.setActive(false); - myPatientDao.update(patient); + myPatientDao.update(patient, mySrd); return patient; } } @@ -652,17 +653,17 @@ public void testStoreAndRetrieveVersionedReference() { Patient p = new Patient(); p.setActive(true); - IIdType patientId = myPatientDao.create(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.create(p, mySrd).getId().toUnqualified(); assertEquals("1", patientId.getVersionIdPart()); assertNull(patientId.getBaseUrl()); String patientIdString = patientId.getValue(); Observation observation = new Observation(); observation.getSubject().setReference(patientIdString); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); // Read back - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientIdString, observation.getSubject().getReference()); } @@ -672,21 +673,21 @@ public void testDontOverwriteExistingVersion() { Patient p = new Patient(); p.setActive(true); - myPatientDao.create(p); + myPatientDao.create(p, mySrd); // Update the patient p.setActive(false); - IIdType patientId = myPatientDao.update(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified(); assertEquals("2", patientId.getVersionIdPart()); assertNull(patientId.getBaseUrl()); Observation observation = new Observation(); observation.getSubject().setReference(patientId.withVersion("1").getValue()); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, 
mySrd).getId().toUnqualified(); // Read back - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.withVersion("1").getValue(), observation.getSubject().getReference()); } @@ -698,20 +699,22 @@ public void testSearchAndIncludeVersionedReference_Asynchronous() { // Create the patient Patient p = new Patient(); p.addIdentifier().setSystem("http://foo").setValue("1"); - myPatientDao.create(p); + myPatientDao.create(p, mySrd); // Update the patient p.getIdentifier().get(0).setValue("2"); - IIdType patientId = myPatientDao.update(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified(); assertEquals("2", patientId.getVersionIdPart()); Observation observation = new Observation(); observation.getSubject().setReference(patientId.withVersion("1").getValue()); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); + + logAllResourceLinks(); // Search - Non Synchronous for * { - IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL)); + IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL), mySrd); assertEquals(1, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 1); assertThat(resources).hasSize(2); @@ -721,7 +724,7 @@ public void testSearchAndIncludeVersionedReference_Asynchronous() { // Search - Non Synchronous for named include { - IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT)); + IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT), mySrd); assertEquals(1, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 1); 
assertThat(resources).hasSize(2); @@ -735,46 +738,63 @@ public void testSearchAndIncludeVersionedReference_Asynchronous() { public void testSearchAndIncludeVersionedReference_Synchronous() { myFhirContext.getParserOptions().setStripVersionsFromReferences(false); myStorageSettings.setRespectVersionsForSearchIncludes(true); + myStorageSettings.setTagStorageMode(JpaStorageSettings.TagStorageModeEnum.VERSIONED); // Create the patient Patient p = new Patient(); + p.getMeta().addTag("http://tag", "1", null); p.addIdentifier().setSystem("http://foo").setValue("1"); - myPatientDao.create(p); + myPatientDao.create(p, mySrd); - // Update the patient + // Update the patient - Add a second tag p.getIdentifier().get(0).setValue("2"); - IIdType patientId = myPatientDao.update(p).getId().toUnqualified(); + p.getMeta().addTag("http://tag", "2", null); + IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified(); assertEquals("2", patientId.getVersionIdPart()); Observation observation = new Observation(); observation.getSubject().setReference(patientId.withVersion("1").getValue()); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); - // Search - Non Synchronous for * + logAllResourceVersions(); + logAllResourceHistoryTags(); + + // Search - Non-Synchronous for * { - IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL)); + myCaptureQueriesListener.clear(); + IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL), mySrd); assertEquals(2, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 2); + assertEquals(5, myCaptureQueriesListener.logSelectQueries().size()); assertThat(resources).hasSize(2); assertEquals(observationId.getValue(), resources.get(0).getIdElement().getValue()); - 
assertEquals(patientId.withVersion("1").getValue(), resources.get(1).getIdElement().getValue()); + IBaseResource patient = resources.get(1); + assertEquals(patientId.withVersion("1").getValue(), patient.getIdElement().getValue()); + assertThat(getTagCodes(patient)).asList().containsExactly("1"); + ourLog.info("Patient: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient)); } - // Search - Non Synchronous for named include + // Search - Non-Synchronous for named include { - IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT)); + IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT), mySrd); assertEquals(2, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 2); assertThat(resources).hasSize(2); assertEquals(observationId.getValue(), resources.get(0).getIdElement().getValue()); assertEquals(patientId.withVersion("1").getValue(), resources.get(1).getIdElement().getValue()); + assertThat(getTagCodes(resources.get(1))).asList().containsExactly("1"); } } + @Nonnull + private static List getTagCodes(IBaseResource patient) { + return patient.getMeta().getTag().stream().map(IBaseCoding::getCode).collect(Collectors.toList()); + } + @Test public void testSearchAndIncludeVersionedReference_WhenOnlyOneVersionExists() { - HashSet refPaths = new HashSet(); + HashSet refPaths = new HashSet<>(); refPaths.add("Task.basedOn"); myFhirContext.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths); myStorageSettings.setRespectVersionsForSearchIncludes(true); @@ -782,15 +802,15 @@ public void testSearchAndIncludeVersionedReference_WhenOnlyOneVersionExists() { // Create a Condition Condition condition = new Condition(); - IIdType conditionId = myConditionDao.create(condition).getId().toUnqualified(); + IIdType conditionId = myConditionDao.create(condition, 
mySrd).getId().toUnqualified(); // Create a Task which is basedOn that Condition Task task = new Task(); - task.setBasedOn(Arrays.asList(new Reference(conditionId))); - IIdType taskId = myTaskDao.create(task).getId().toUnqualified(); + task.setBasedOn(List.of(new Reference(conditionId))); + IIdType taskId = myTaskDao.create(task, mySrd).getId().toUnqualified(); // Search for the Task using an _include=Task.basedOn and make sure we get the Condition resource in the Response - IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd); assertEquals(2, outcome.size()); List resources = outcome.getResources(0, 2); assertThat(resources.size()).as(resources.stream().map(t -> t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))).isEqualTo(2); @@ -800,10 +820,10 @@ public void testSearchAndIncludeVersionedReference_WhenOnlyOneVersionExists() { // Now, update the Condition to generate another version of it condition.setRecordedDate(new Date(System.currentTimeMillis())); - String conditionIdString = myConditionDao.update(condition).getId().getValue(); + myConditionDao.update(condition, mySrd.getId().getValue(), mySrd); // Search for the Task again and make sure that we get the original version of the Condition resource in the Response - outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd); assertEquals(2, outcome.size()); resources = outcome.getResources(0, 2); assertThat(resources).hasSize(2); @@ -814,7 +834,7 @@ public void testSearchAndIncludeVersionedReference_WhenOnlyOneVersionExists() { @Test public void testSearchAndIncludeVersionedReference_WhenMultipleVersionsExist() { - HashSet refPaths = new HashSet(); + HashSet 
refPaths = new HashSet<>(); refPaths.add("Task.basedOn"); myFhirContext.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths); myStorageSettings.setRespectVersionsForSearchIncludes(true); @@ -822,23 +842,24 @@ public void testSearchAndIncludeVersionedReference_WhenMultipleVersionsExist() { // Create a Condition Condition condition = new Condition(); - IIdType conditionId = myConditionDao.create(condition).getId().toUnqualified(); + IIdType conditionId = myConditionDao.create(condition, mySrd).getId().toUnqualified(); + ourLog.info("conditionId: {}", conditionId); // Now, update the Condition 3 times to generate a 4th version of it condition.setRecordedDate(new Date(System.currentTimeMillis())); - conditionId = myConditionDao.update(condition).getId(); + myConditionDao.update(condition, mySrd); condition.setRecordedDate(new Date(System.currentTimeMillis() + 1000000)); - conditionId = myConditionDao.update(condition).getId(); + myConditionDao.update(condition, mySrd); condition.setRecordedDate(new Date(System.currentTimeMillis() + 2000000)); - conditionId = myConditionDao.update(condition).getId(); + conditionId = myConditionDao.update(condition, mySrd).getId().toUnqualified(); // Create a Task which is basedOn that Condition Task task = new Task(); - task.setBasedOn(Arrays.asList(new Reference(conditionId))); - IIdType taskId = myTaskDao.create(task).getId().toUnqualified(); + task.setBasedOn(List.of(new Reference(conditionId))); + IIdType taskId = myTaskDao.create(task, mySrd).getId().toUnqualified(); // Search for the Task using an _include=Task.basedOn and make sure we get the Condition resource in the Response - IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd); assertEquals(2, outcome.size()); List resources = outcome.getResources(0, 2); 
assertThat(resources.size()).as(resources.stream().map(t -> t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))).isEqualTo(2); @@ -849,7 +870,7 @@ public void testSearchAndIncludeVersionedReference_WhenMultipleVersionsExist() { @Test public void testSearchAndIncludeVersionedReference_WhenPreviouslyReferencedVersionOne() { - HashSet refPaths = new HashSet(); + HashSet refPaths = new HashSet<>(); refPaths.add("Task.basedOn"); myFhirContext.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths); myStorageSettings.setRespectVersionsForSearchIncludes(true); @@ -857,32 +878,32 @@ public void testSearchAndIncludeVersionedReference_WhenPreviouslyReferencedVersi // Create a Condition Condition condition = new Condition(); - IIdType conditionId = myConditionDao.create(condition).getId().toUnqualified(); + IIdType conditionId = myConditionDao.create(condition, mySrd).getId().toUnqualified(); ourLog.info("conditionId: \n{}", conditionId); // Create a Task which is basedOn that Condition Task task = new Task(); - task.setBasedOn(Arrays.asList(new Reference(conditionId))); - IIdType taskId = myTaskDao.create(task).getId().toUnqualified(); + task.setBasedOn(List.of(new Reference(conditionId))); + myTaskDao.create(task, mySrd).getId().toUnqualified(); // Now, update the Condition 3 times to generate a 4th version of it condition.setRecordedDate(new Date(System.currentTimeMillis())); - conditionId = myConditionDao.update(condition).getId(); + conditionId = myConditionDao.update(condition, mySrd).getId(); ourLog.info("UPDATED conditionId: \n{}", conditionId); condition.setRecordedDate(new Date(System.currentTimeMillis() + 1000000)); - conditionId = myConditionDao.update(condition).getId(); + conditionId = myConditionDao.update(condition, mySrd).getId(); ourLog.info("UPDATED conditionId: \n{}", conditionId); condition.setRecordedDate(new Date(System.currentTimeMillis() + 2000000)); - conditionId = myConditionDao.update(condition).getId(); + 
conditionId = myConditionDao.update(condition, mySrd).getId(); ourLog.info("UPDATED conditionId: \n{}", conditionId); // Now, update the Task to refer to the latest version 4 of the Condition - task.setBasedOn(Arrays.asList(new Reference(conditionId))); - taskId = myTaskDao.update(task).getId(); + task.setBasedOn(List.of(new Reference(conditionId))); + IIdType taskId = myTaskDao.update(task, mySrd).getId(); ourLog.info("UPDATED taskId: \n{}", taskId); // Search for the Task using an _include=Task.basedOn and make sure we get the Condition resource in the Response - IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON)); + IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd); assertEquals(2, outcome.size()); List resources = outcome.getResources(0, 2); assertThat(resources.size()).as(resources.stream().map(t -> t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))).isEqualTo(2); @@ -899,20 +920,20 @@ public void testSearchAndIncludeUnersionedReference_Asynchronous() { // Create the patient Patient p = new Patient(); p.addIdentifier().setSystem("http://foo").setValue("1"); - myPatientDao.create(p); + myPatientDao.create(p, mySrd); // Update the patient p.getIdentifier().get(0).setValue("2"); - IIdType patientId = myPatientDao.update(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified(); assertEquals("2", patientId.getVersionIdPart()); Observation observation = new Observation(); observation.getSubject().setReference(patientId.withVersion("1").getValue()); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); // Search - Non Synchronous for * { - IBundleProvider outcome = myObservationDao.search(new 
SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL)); + IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL), mySrd); assertEquals(1, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 1); assertThat(resources).hasSize(2); @@ -922,7 +943,7 @@ public void testSearchAndIncludeUnersionedReference_Asynchronous() { // Search - Non Synchronous for named include { - IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT)); + IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT), mySrd); assertEquals(1, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 1); assertThat(resources).hasSize(2); @@ -940,24 +961,24 @@ public void testSearchAndIncludeUnversionedReference_Synchronous() { // Create the patient Patient p = new Patient(); p.addIdentifier().setSystem("http://foo").setValue("1"); - myPatientDao.create(p); + myPatientDao.create(p, mySrd); // Update the patient p.getIdentifier().get(0).setValue("2"); - IIdType patientId = myPatientDao.update(p).getId().toUnqualified(); + IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified(); assertEquals("2", patientId.getVersionIdPart()); Observation observation = new Observation(); observation.getSubject().setReference(patientId.withVersion("1").getValue()); - IIdType observationId = myObservationDao.create(observation).getId().toUnqualified(); + IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified(); // Read the observation back - observation = myObservationDao.read(observationId); + observation = myObservationDao.read(observationId, mySrd); assertEquals(patientId.toVersionless().getValue(), observation.getSubject().getReference()); // Search - Non Synchronous for * { - IBundleProvider outcome = 
myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL)); + IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL), mySrd); assertEquals(2, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 2); assertThat(resources).hasSize(2); @@ -967,7 +988,7 @@ public void testSearchAndIncludeUnversionedReference_Synchronous() { // Search - Non Synchronous for named include { - IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT)); + IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT), mySrd); assertEquals(2, outcome.sizeOrThrowNpe()); List resources = outcome.getResources(0, 2); assertThat(resources).hasSize(2); @@ -977,7 +998,7 @@ public void testSearchAndIncludeUnversionedReference_Synchronous() { } @Test - public void testNoNpeOnEoBBundle() { + public void testNoNpeOnEoBBundle() throws IOException { myStorageSettings.setAutoCreatePlaceholderReferenceTargets(true); List strings = Arrays.asList( "ExplanationOfBenefit.patient", @@ -989,9 +1010,7 @@ public void testNoNpeOnEoBBundle() { ); myStorageSettings.setAutoVersionReferenceAtPaths(new HashSet<>(strings)); - Bundle bundle = myFhirContext.newJsonParser().parseResource(Bundle.class, - new InputStreamReader( - FhirResourceDaoR4VersionedReferenceTest.class.getResourceAsStream("/npe-causing-bundle.json"))); + Bundle bundle = loadResourceFromClasspath(Bundle.class, "/npe-causing-bundle.json"); Bundle transaction = mySystemDao.transaction(new SystemRequestDetails(), bundle); @@ -1005,12 +1024,12 @@ public void testAutoVersionPathsWithAutoCreatePlaceholders() { Observation obs = new Observation(); obs.setId("Observation/CDE"); obs.setSubject(new Reference("Patient/ABC")); - DaoMethodOutcome update = myObservationDao.create(obs); + DaoMethodOutcome update = 
myObservationDao.create(obs, mySrd); Observation resource = (Observation)update.getResource(); String versionedPatientReference = resource.getSubject().getReference(); assertEquals("Patient/ABC", versionedPatientReference); - Patient p = myPatientDao.read(new IdDt("Patient/ABC")); + Patient p = myPatientDao.read(new IdDt("Patient/ABC"), mySrd); assertNotNull(p); myStorageSettings.setAutoVersionReferenceAtPaths("Observation.subject"); @@ -1018,7 +1037,7 @@ public void testAutoVersionPathsWithAutoCreatePlaceholders() { obs = new Observation(); obs.setId("Observation/DEF"); obs.setSubject(new Reference("Patient/RED")); - update = myObservationDao.create(obs); + update = myObservationDao.create(obs, mySrd); resource = (Observation)update.getResource(); versionedPatientReference = resource.getSubject().getReference(); @@ -1052,7 +1071,7 @@ public void bundleTransaction_withRequestURLWithPrecedingSlash_processesAsExpect IdType idType = new IdType(bundle.getEntry().get(0) .getResource().getId()); // the bundle above contains an observation, so we'll verify it was created here - Observation obs = myObservationDao.read(idType); + Observation obs = myObservationDao.read(idType, mySrd); assertNotNull(obs); } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java index f36a093e147c..36ab6da5ae38 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java @@ -616,7 +616,7 @@ public void testReindexing() { TransactionTemplate template = new TransactionTemplate(myTxManager); template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); template.execute((TransactionCallback) t -> { - ResourceHistoryTable resourceHistoryTable = 
myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), id.getVersionIdPartAsLong()); + ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), id.getVersionIdPartAsLong()); resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON); resourceHistoryTable.setResourceTextVc("{\"resourceType\":\"FOO\"}"); myResourceHistoryTableDao.save(resourceHistoryTable); @@ -661,7 +661,7 @@ public void testReindexingCurrentVersionDeleted() { assertEquals(1, myPatientDao.search(searchParamMap).size().intValue()); runInTransaction(() -> { - ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 3); + ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 3); assertNotNull(historyEntry); myResourceHistoryTableDao.delete(historyEntry); }); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java index 737c764724b4..0e007d9fd800 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java @@ -429,7 +429,7 @@ public void testCreate_ServerId_WithPartition() { assertLocalDateFromDbMatches(myPartitionDate, tags.get(0).getPartitionId().getPartitionDate()); // HFJ_RES_VER - ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, 1L); + ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(patientId, 1L); assertEquals(myPartitionId, version.getPartitionId().getPartitionId().intValue()); assertLocalDateFromDbMatches(myPartitionDate, version.getPartitionId().getPartitionDate()); @@ -439,11 +439,6 @@ public void 
testCreate_ServerId_WithPartition() { assertEquals(myPartitionId, historyTags.get(0).getPartitionId().getPartitionId().intValue()); assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(0).getPartitionId().getPartitionDate()); - // HFJ_RES_VER_PROV - assertNotNull(version.getProvenance()); - assertEquals(myPartitionId, version.getProvenance().getPartitionId().getPartitionId().intValue()); - assertLocalDateFromDbMatches(myPartitionDate, version.getProvenance().getPartitionId().getPartitionDate()); - // HFJ_SPIDX_STRING List strings = myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId); ourLog.info("\n * {}", strings.stream().map(ResourceIndexedSearchParamString::toString).collect(Collectors.joining("\n * "))); @@ -517,7 +512,7 @@ public void testCreate_ServerId_DefaultPartition() { assertLocalDateFromDbMatches(myPartitionDate, tags.get(0).getPartitionId().getPartitionDate()); // HFJ_RES_VER - ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, 1L); + ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(patientId, 1L); assertNull(version.getPartitionId().getPartitionId()); assertLocalDateFromDbMatches(myPartitionDate, version.getPartitionId().getPartitionDate()); @@ -527,11 +522,6 @@ public void testCreate_ServerId_DefaultPartition() { assertNull(historyTags.get(0).getPartitionId().getPartitionId()); assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(0).getPartitionId().getPartitionDate()); - // HFJ_RES_VER_PROV - assertNotNull(version.getProvenance()); - assertNull(version.getProvenance().getPartitionId().getPartitionId()); - assertLocalDateFromDbMatches(myPartitionDate, version.getProvenance().getPartitionId().getPartitionDate()); - // HFJ_SPIDX_STRING List strings = myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId); String stringsDesc = 
strings.stream().map(ResourceIndexedSearchParamString::toString).sorted().collect(Collectors.joining("\n * ")); @@ -778,7 +768,7 @@ public void testUpdateResourceWithPartition() { // HFJ_RES_VER int version = 2; - ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, version); + ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(patientId, version); assertEquals(myPartitionId, resVer.getPartitionId().getPartitionId().intValue()); assertLocalDateFromDbMatches(myPartitionDate, resVer.getPartitionId().getPartitionDate()); @@ -790,12 +780,6 @@ public void testUpdateResourceWithPartition() { assertEquals(myPartitionId, historyTags.get(1).getPartitionId().getPartitionId().intValue()); assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(1).getPartitionId().getPartitionDate()); - // HFJ_RES_VER_PROV - assertNotNull(resVer.getProvenance()); - assertNotNull(resVer.getPartitionId()); - assertEquals(myPartitionId, resVer.getProvenance().getPartitionId().getPartitionId().intValue()); - assertLocalDateFromDbMatches(myPartitionDate, resVer.getProvenance().getPartitionId().getPartitionDate()); - // HFJ_SPIDX_STRING List strings = myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId); ourLog.info("\n * {}", strings.stream().map(ResourceIndexedSearchParamString::toString).collect(Collectors.joining("\n * "))); @@ -856,7 +840,7 @@ public void testUpdateConditionalInPartition() { // HFJ_RES_VER int version = 2; - ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, version); + ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(patientId, version); assertEquals(myPartitionId, resVer.getPartitionId().getPartitionId().intValue()); assertLocalDateFromDbMatches(myPartitionDate, resVer.getPartitionId().getPartitionDate()); @@ -2927,7 +2911,7 @@ public void testTransaction_MultipleConditionalUpdates() { 
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); assertEquals(8, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); - assertEquals(4, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); /* * Third time with mass ingestion mode enabled @@ -2944,6 +2928,7 @@ public void testTransaction_MultipleConditionalUpdates() { assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); assertEquals(8, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); + myCaptureQueriesListener.logDeleteQueries(); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); /* @@ -3007,7 +2992,7 @@ public void testTransactionWithManyInlineMatchUrls() throws IOException { assertEquals(26, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(326, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); - assertEquals(326, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); assertEquals(1, myCaptureQueriesListener.countCommits()); assertEquals(0, myCaptureQueriesListener.countRollbacks()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java index aaaa937a3ddc..d1e8cfdab173 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java @@ 
-80,8 +80,10 @@ public void before() throws Exception { myPartitionSettings.setDefaultPartitionId(ALTERNATE_DEFAULT_ID); } + @Override @AfterEach - public void after() { + public void after() throws Exception { + super.after(); myInterceptorRegistry.unregisterInterceptor(mySvc); myInterceptorRegistry.unregisterInterceptor(myForceOffsetSearchModeInterceptor); @@ -171,7 +173,7 @@ public void testCreateEncounter_ValidMembershipInCompartment() { public void testCreateOrganization_ValidMembershipInCompartment() { Organization org = new Organization(); org.setName("Foo"); - Long id = myOrganizationDao.create(org).getId().getIdPartAsLong(); + Long id = myOrganizationDao.create(org, mySrd).getId().getIdPartAsLong(); runInTransaction(() -> { ResourceTable observation = myResourceTableDao.findById(id).orElseThrow(() -> new IllegalArgumentException()); @@ -222,8 +224,9 @@ public void testReadPatientHistory_Good() { myCaptureQueriesListener.logSelectQueriesForCurrentThread(); List selectQueriesForCurrentThread = myCaptureQueriesListener.getSelectQueriesForCurrentThread(); - assertEquals(3, selectQueriesForCurrentThread.size()); + assertEquals(2, selectQueriesForCurrentThread.size()); assertThat(selectQueriesForCurrentThread.get(0).getSql(false, false)).contains("PARTITION_ID=?"); + assertThat(selectQueriesForCurrentThread.get(1).getSql(false, false)).doesNotContain("PARTITION_ID="); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/RemoteTerminologyServiceJpaR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/RemoteTerminologyServiceJpaR4Test.java index 8ed166482e0c..11d7984baffa 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/RemoteTerminologyServiceJpaR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/RemoteTerminologyServiceJpaR4Test.java @@ -257,7 +257,7 @@ public void testValidateSimpleExtension() { // Verify 1 
myCaptureQueriesListener.logSelectQueries(); - Assertions.assertEquals(4, myCaptureQueriesListener.countGetConnections()); + Assertions.assertEquals(3, myCaptureQueriesListener.countGetConnections()); assertThat(ourValueSetProvider.mySearchUrls).asList().isEmpty(); assertThat(ourCodeSystemProvider.mySearchUrls).asList().isEmpty(); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java index 6c1598602066..9b8917ce97c9 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/DiffProviderR4Test.java @@ -104,7 +104,7 @@ public void testLatestVersion_PreviousVersionExpunged() { createPatient(withId(id), withActiveTrue(), withFamily("SMITH")); runInTransaction(() -> { - ResourceHistoryTable version2 = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2); + ResourceHistoryTable version2 = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 2); myResourceHistoryTableDao.deleteByPid(version2.getId()); }); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/HookInterceptorR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/HookInterceptorR4Test.java index 3fbcb1a8cab1..1f6c8e89d0d0 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/HookInterceptorR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/HookInterceptorR4Test.java @@ -1,10 +1,11 @@ package ca.uhn.fhir.jpa.provider.r4; -import static org.junit.jupiter.api.Assertions.assertTrue; import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.IDao; 
import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test; import ca.uhn.fhir.rest.api.MethodOutcome; @@ -18,20 +19,17 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import java.util.Collections; import java.util.List; import java.util.concurrent.atomic.AtomicLong; -import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; public class HookInterceptorR4Test extends BaseResourceProviderR4Test { - private static final Logger ourLog = LoggerFactory.getLogger(HookInterceptorR4Test.class); @Autowired IIdHelperService myIdHelperService; @@ -116,8 +114,8 @@ public void testSTORAGE_PRECOMMIT_RESOURCE_CREATED_hasCorrectPid() { IIdType savedPatientId = myClient.create().resource(new Patient()).execute().getId(); runInTransaction(() -> { - List pids = myIdHelperService.resolveResourcePersistentIdsWithCache(null, - Collections.singletonList(savedPatientId)); + List pids = myIdHelperService.resolveResourcePids(RequestPartitionId.allPartitions(), + Collections.singletonList(savedPatientId), ResolveIdentityMode.includeDeleted().cacheOk()); Long savedPatientPid = pids.get(0).getId(); assertEquals(savedPatientPid.longValue(), pid.get()); }); @@ -133,7 +131,9 @@ public void testSTORAGE_PRESTORAGE_EXPUNGE_RESOURCE_hasCorrectPid() { pid.set(resourcePid); }); IIdType savedPatientId = myClient.create().resource(new Patient()).execute().getId(); - Long savedPatientPid = runInTransaction(() -> myIdHelperService.resolveResourcePersistentIdsWithCache(null, 
Collections.singletonList(savedPatientId)).get(0).getId()); + Long savedPatientPid = runInTransaction(() -> + myIdHelperService.resolveResourceIdentityPid(RequestPartitionId.allPartitions(), savedPatientId.getResourceType(), savedPatientId.getIdPart(), ResolveIdentityMode.includeDeleted().cacheOk()).getId() + ); myClient.delete().resourceById(savedPatientId).execute(); Parameters parameters = new Parameters(); @@ -170,7 +170,9 @@ public void testSTORAGE_PRECOMMIT_RESOURCE_UPDATED_hasCorrectPid() { patient.setActive(true); myClient.update().resource(patient).execute(); runInTransaction(() -> { - Long savedPatientPid = myIdHelperService.resolveResourcePersistentIdsWithCache(null, Collections.singletonList(savedPatientId)).get(0).getId(); + Long savedPatientPid = runInTransaction(() -> + myIdHelperService.resolveResourceIdentityPid(RequestPartitionId.allPartitions(), savedPatientId.getResourceType(), savedPatientId.getIdPart(), ResolveIdentityMode.includeDeleted().cacheOk()).getId() + ); assertEquals(savedPatientPid.longValue(), pidOld.get()); assertEquals(savedPatientPid.longValue(), pidNew.get()); }); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java index dc00f13b0cca..ccc4062f723c 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java @@ -35,7 +35,7 @@ public void testRetrieveDataSavedWithInvalidDecimal() throws IOException { // Manually set the value to be an invalid decimal number runInTransaction(() -> { - ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id, 1); + ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(id, 1); String 
resourceText = resVer.getResourceTextVc(); resourceText = resourceText.replace("100", "-.100"); resVer.setResourceTextVc(resourceText); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java index 1da8eb0e6dbf..28ee7ab19d1c 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java @@ -3370,7 +3370,7 @@ public void testRetrieveMissingVersionsDoesntCrashHistory() { new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus status) { - ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id1.getIdPartAsLong(), 1); + ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(id1.getIdPartAsLong(), 1); myResourceHistoryTableDao.delete(version); } }); @@ -3395,7 +3395,7 @@ public void testRetrieveMissingVersionsDoesntCrashSearch() { new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus status) { - ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id1.getIdPartAsLong(), 1); + ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(id1.getIdPartAsLong(), 1); myResourceHistoryTableDao.delete(version); } }); @@ -4257,6 +4257,7 @@ private void testSearchReturnsResults(String search) throws IOException { @Test public void testSearchReturnsSearchDate() throws Exception { Date before = new Date(); + sleepAtLeast(10); //@formatter:off Bundle found = myClient @@ -4267,6 +4268,7 @@ public void testSearchReturnsSearchDate() throws Exception { 
.execute(); //@formatter:on + sleepAtLeast(10); Date after = new Date(); InstantType updated = found.getMeta().getLastUpdatedElement(); @@ -5810,14 +5812,14 @@ public void testUpdateRejectsInvalidTypes() { try { myClient.update().resource(p2).withId("Organization/" + p1id.getIdPart()).execute(); fail(); - } catch (UnprocessableEntityException e) { + } catch (InvalidRequestException e) { // good } try { myClient.update().resource(p2).withId("Patient/" + p1id.getIdPart()).execute(); fail(); - } catch (UnprocessableEntityException e) { + } catch (InvalidRequestException e) { // good } @@ -6807,6 +6809,7 @@ public void testSearchHistoryWithAtAndGtParameters() throws Exception { TestUtil.sleepAtLeast(delayInMs + 100); patient.getNameFirstRep().addGiven("Bob"); myClient.update().resource(patient).execute(); + TestUtil.sleepAtLeast(100); Patient unrelatedPatient = (Patient) myClient.create().resource(new Patient()).execute().getResource(); assertThat(patientId).isNotEqualTo(unrelatedPatient.getIdElement().getIdPartAsLong()); @@ -6832,7 +6835,9 @@ public void testSearchHistoryWithAtAndGtParameters() throws Exception { // Issue 3138 test case, verify behavior of _at verifyAtBehaviourWhenQueriedDateDuringTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2); verifyAtBehaviourWhenQueriedDateAfterTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2); + myCaptureQueriesListener.clear(); verifyAtBehaviourWhenQueriedDateBeforeTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2); + myCaptureQueriesListener.logSelectQueries(); // verify behavior of _since verifySinceBehaviourWhenQueriedDateDuringTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2); verifySinceBehaviourWhenQueriedDateAfterTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2); @@ -6854,8 +6859,10 @@ private void verifyAtBehaviourWhenQueriedDateAfterTwoUpdatedDates(Long patientId Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV2, delayInMs); assertTrue(timeBetweenUpdates.after(dateV1)); 
assertTrue(timeBetweenUpdates.after(dateV2)); - List resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates)); - assertThat(resultIds).hasSize(1); + String url = myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates); + myCaptureQueriesListener.clear(); + List resultIds = searchAndReturnUnqualifiedIdValues(url); + assertThat(resultIds).as(()->describeVersionsAndUrl(url)).hasSize(1); assertThat(resultIds).contains("Patient/" + patientId + "/_history/2"); } @@ -6863,8 +6870,10 @@ private void verifyAtBehaviourWhenQueriedDateBeforeTwoUpdatedDates(Long patientI Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, -delayInMs); assertTrue(timeBetweenUpdates.before(dateV1)); assertTrue(timeBetweenUpdates.before(dateV2)); - List resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates)); - assertThat(resultIds).hasSize(2); + String url = myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates); + myCaptureQueriesListener.clear(); + List resultIds = searchAndReturnUnqualifiedIdValues(url); + assertThat(resultIds).as(()->describeVersionsAndUrl(url)).hasSize(2); assertThat(resultIds).contains("Patient/" + patientId + "/_history/1"); assertThat(resultIds).contains("Patient/" + patientId + "/_history/2"); } @@ -6873,11 +6882,22 @@ private void verifySinceBehaviourWhenQueriedDateDuringTwoUpdatedDates(Long patie Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, delayInMs / 2); assertTrue(timeBetweenUpdates.after(dateV1)); assertTrue(timeBetweenUpdates.before(dateV2)); - List resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_since=" + toStr(timeBetweenUpdates)); - assertThat(resultIds).hasSize(1); + String url = myServerBase + "/Patient/" + patientId + "/_history?_since=" + 
toStr(timeBetweenUpdates); + myCaptureQueriesListener.clear(); + List resultIds = searchAndReturnUnqualifiedIdValues(url); + assertThat(resultIds).as(()->describeVersionsAndUrl(url)).hasSize(1); assertThat(resultIds).contains("Patient/" + patientId + "/_history/2"); } + private String describeVersionsAndUrl(String theUrl) { + return runInTransaction(()->{ + return "URL: " + theUrl + "\n\nHistory Entries:\n * " + + myResourceHistoryTableDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * ")) + + "\n\nSQL Queries:\n * " + + myCaptureQueriesListener.getSelectQueries().stream().map(t->t.getSql(true, false)).collect(Collectors.joining("\n * ")); + }); + } + private void verifySinceBehaviourWhenQueriedDateAfterTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException { Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV2, delayInMs); assertTrue(timeBetweenUpdates.after(dateV1)); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java index 97ac4db89c86..b398a191a173 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java @@ -1069,9 +1069,10 @@ public void testExpandUsingHierarchy_PreStored_NotPreCalculated() { .withParameter(Parameters.class, "url", new UrlType(URL_MY_VALUE_SET)) .returnResourceType(ValueSet.class) .execute(); + myCaptureQueriesListener.logSelectQueries(); ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion)); assertThat(toDirectCodes(expansion.getExpansion().getContains())).containsExactlyInAnyOrder("A", "AA", "AB", "AAA"); - 
assertThat(myCaptureQueriesListener.getSelectQueries().size()).as(() -> myCaptureQueriesListener.logSelectQueries().stream().map(t -> t.getSql(true, false)).collect(Collectors.joining("\n * "))).isEqualTo(14); + assertThat(myCaptureQueriesListener.getSelectQueries().size()).as(() -> myCaptureQueriesListener.logSelectQueries().stream().map(t -> t.getSql(true, false)).collect(Collectors.joining("\n * "))).isEqualTo(18); assertEquals("ValueSet \"ValueSet.url[http://example.com/my_value_set]\" has not yet been pre-expanded. Performing in-memory expansion without parameters. Current status: NOT_EXPANDED | The ValueSet is waiting to be picked up and pre-expanded by a scheduled task.", expansion.getMeta().getExtensionString(EXT_VALUESET_EXPANSION_MESSAGE)); // Hierarchical @@ -1111,7 +1112,7 @@ public void testExpandUsingHierarchy_NotPreStored() { .execute(); ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion)); assertThat(toDirectCodes(expansion.getExpansion().getContains())).containsExactlyInAnyOrder("A", "AA", "AB", "AAA"); - assertThat(myCaptureQueriesListener.getSelectQueries().size()).as(() -> myCaptureQueriesListener.logSelectQueries().stream().map(t -> t.getSql(true, false)).collect(Collectors.joining("\n * "))).isEqualTo(10); + assertThat(myCaptureQueriesListener.getSelectQueries().size()).as(() -> myCaptureQueriesListener.logSelectQueries().stream().map(t -> t.getSql(true, false)).collect(Collectors.joining("\n * "))).isEqualTo(12); assertEquals("ValueSet with URL \"Unidentified ValueSet\" was expanded using an in-memory expansion", expansion.getMeta().getExtensionString(EXT_VALUESET_EXPANSION_MESSAGE)); // Hierarchical diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderTransactionSearchR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderTransactionSearchR4Test.java index 275739638bcc..df894f8abc58 100644 --- 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderTransactionSearchR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderTransactionSearchR4Test.java @@ -43,6 +43,7 @@ import org.junit.jupiter.api.Test; import java.util.ArrayList; +import java.util.Comparator; import java.util.List; import java.util.concurrent.TimeUnit; @@ -146,6 +147,7 @@ private List create20Patients() { @Test public void testBatchWithGetHardLimitLargeSynchronous() { List ids = create20Patients(); + ids.sort(Comparator.naturalOrder()); Bundle input = new Bundle(); input.setType(BundleType.BATCH); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskTest.java index 290dfcdf9b4b..f44d1fdd69b0 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexTaskTest.java @@ -12,6 +12,8 @@ import ca.uhn.fhir.jpa.api.dao.ReindexParameters; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse; +import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique; @@ -22,7 +24,6 @@ import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; import jakarta.annotation.PostConstruct; -import jakarta.persistence.Query; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Patient; @@ -67,8 +68,10 @@ public void postConstruct() { @AfterEach public void after() { 
myInterceptorRegistry.unregisterAllAnonymousInterceptors(); - myStorageSettings.setStoreMetaSourceInformation(new JpaStorageSettings().getStoreMetaSourceInformation()); - myStorageSettings.setPreserveRequestIdInResourceBody(new JpaStorageSettings().isPreserveRequestIdInResourceBody()); + JpaStorageSettings defaults = new JpaStorageSettings(); + myStorageSettings.setStoreMetaSourceInformation(defaults.getStoreMetaSourceInformation()); + myStorageSettings.setPreserveRequestIdInResourceBody(defaults.isPreserveRequestIdInResourceBody()); + myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(defaults.isAccessMetaSourceInformationFromProvenanceTable()); } @Test @@ -171,7 +174,7 @@ public void testOptimizeStorage_AllVersions() { runInTransaction(()->{ assertEquals(20, myResourceHistoryTableDao.count()); for (ResourceHistoryTable history : myResourceHistoryTableDao.findAll()) { - assertNotNull(history.getResourceTextVc()); + assertNotNull(history.getResourceTextVc(), ()->"Null history on: " + history); assertNull(history.getResource()); } }); @@ -237,24 +240,34 @@ public void testOptimizeStorage_AllVersions_SingleResourceWithMultipleVersion() public void testOptimizeStorage_AllVersions_CopyProvenanceEntityData() { // Setup myStorageSettings.setStoreMetaSourceInformation(JpaStorageSettings.StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID); + myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(true); myStorageSettings.setPreserveRequestIdInResourceBody(true); for (int i = 0; i < 10; i++) { Patient p = new Patient(); p.setId("PATIENT" + i); - p.getMeta().setSource("http://foo#bar"); p.addIdentifier().setValue(String.valueOf(i)); myPatientDao.update(p, mySrd); - p.addIdentifier().setSystem("http://blah"); + p.setActive(true); myPatientDao.update(p, mySrd); } + runInTransaction(()->{ + List versions = myResourceHistoryTableDao.findAll(); + for (var version : versions) { + ResourceHistoryProvenanceEntity provenance = new 
ResourceHistoryProvenanceEntity(); + provenance.setResourceTable(version.getResourceTable()); + provenance.setResourceHistoryTable(version); + provenance.setSourceUri("http://foo"); + provenance.setRequestId("bar"); + myResourceHistoryProvenanceDao.save(provenance); + } + }); + runInTransaction(()->{ assertEquals(20, myResourceHistoryTableDao.count()); assertEquals(20, myResourceHistoryProvenanceDao.count()); - Query query = myEntityManager.createQuery("UPDATE " + ResourceHistoryTable.class.getSimpleName() + " p SET p.mySourceUri = NULL, p.myRequestId = NULL"); - assertEquals(20, query.executeUpdate()); }); runInTransaction(()-> { @@ -281,6 +294,7 @@ public void testOptimizeStorage_AllVersions_CopyProvenanceEntityData() { // validate runInTransaction(()-> { + assertEquals(0, myResourceHistoryProvenanceDao.count()); for (var next : myResourceHistoryProvenanceDao.findAll()) { assertEquals("bar", next.getRequestId()); assertEquals("http://foo", next.getSourceUri()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java index 081492fb569b..5c9cdf6d139f 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java @@ -360,7 +360,7 @@ public List findAllVersionsForResourceIdInOrder(Long theId } @Override - public ResourceHistoryTable findForIdAndVersionAndFetchProvenance(long theId, long theVersion) { + public ResourceHistoryTable findForIdAndVersion(long theId, long theVersion) { throw new UnsupportedOperationException(); } @@ -370,7 +370,7 @@ public Slice findForResourceId(Pageable thePage, Long theId, Long theDontW } @Override - public Slice findForResourceIdAndReturnEntitiesAndFetchProvenance(Pageable thePage, Long theId, Long theDontWantVersion) { + public 
Slice findAllVersionsExceptSpecificForResourcePid(Pageable thePage, Long theId, Long theDontWantVersion) { throw new UnsupportedOperationException(); } @@ -404,6 +404,11 @@ public void updateNonInlinedContents(byte[] theText, long thePid) { throw new UnsupportedOperationException(); } + @Override + public List findCurrentVersionsByResourcePidsAndFetchResourceTable(List theVersionlessPids) { + throw new UnsupportedOperationException(); + } + @Nonnull @Override public List findAll() { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/RestHookTestR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/RestHookTestR4Test.java index 36856b7b0d7b..7d509c68dfc4 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/RestHookTestR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/RestHookTestR4Test.java @@ -1,15 +1,9 @@ package ca.uhn.fhir.jpa.subscription.resthook; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.assertFalse; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; -import ca.uhn.fhir.jpa.subscription.BaseSubscriptionsR4Test; import ca.uhn.fhir.jpa.model.config.SubscriptionSettings; -import ca.uhn.fhir.jpa.subscription.submit.svc.ResourceModifiedSubmitterSvc; +import ca.uhn.fhir.jpa.subscription.BaseSubscriptionsR4Test; import ca.uhn.fhir.jpa.test.util.StoppableSubscriptionDeliveringRestHookSubscriber; import ca.uhn.fhir.jpa.topic.SubscriptionTopicDispatcher; import ca.uhn.fhir.jpa.topic.SubscriptionTopicRegistry; @@ -49,6 +43,7 @@ import org.springframework.beans.factory.annotation.Autowired; import java.util.List; +import 
java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; @@ -57,9 +52,15 @@ import static ca.uhn.fhir.rest.api.Constants.CT_FHIR_JSON_NEW; import static ca.uhn.fhir.util.HapiExtensions.EX_SEND_DELETE_MESSAGES; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.fail; import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; import static org.awaitility.Awaitility.await; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** * Test the rest-hook subscriptions @@ -69,9 +70,6 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test { public static final String TEST_PATIENT_ID = "topic-test-patient-id"; public static final String PATIENT_REFERENCE = "Patient/" + TEST_PATIENT_ID; - @Autowired - ResourceModifiedSubmitterSvc myResourceModifiedSubmitterSvc; - @Autowired StoppableSubscriptionDeliveringRestHookSubscriber myStoppableSubscriptionDeliveringRestHookSubscriber; @Autowired(required = false) @@ -158,7 +156,7 @@ public void testUpdatesHaveCorrectMetadata() throws Exception { */ Observation obs = sendObservation(code, "SNOMED-CT", "http://source-system.com", null); - obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless()); + obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless(), mySrd); // Should see 1 subscription notification waitForQueueToDrain(); @@ -178,8 +176,8 @@ public void testUpdatesHaveCorrectMetadata() throws Exception { obs.getIdentifierFirstRep().setSystem("foo").setValue("2"); 
obs.getMeta().setSource("http://other-source"); - myObservationDao.update(obs); - obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless()); + myObservationDao.update(obs, mySrd); + obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless(), mySrd); // Should see 1 subscription notification waitForQueueToDrain(); @@ -250,7 +248,7 @@ public void testUpdatesHaveCorrectMetadataUsingTransactions() throws Exception { bundle.addEntry().setResource(observation).getRequest().setMethod(Bundle.HTTPVerb.POST).setUrl("Observation"); Bundle responseBundle = mySystemDao.transaction(null, bundle); - Observation obs = myObservationDao.read(new IdType(responseBundle.getEntry().get(0).getResponse().getLocation())); + Observation obs = myObservationDao.read(new IdType(responseBundle.getEntry().get(0).getResponse().getLocation()), mySrd); // Should see 1 subscription notification waitForQueueToDrain(); @@ -275,7 +273,7 @@ public void testUpdatesHaveCorrectMetadataUsingTransactions() throws Exception { bundle.setType(Bundle.BundleType.TRANSACTION); bundle.addEntry().setResource(observation).getRequest().setMethod(Bundle.HTTPVerb.PUT).setUrl(obs.getIdElement().toUnqualifiedVersionless().getValue()); mySystemDao.transaction(null, bundle); - obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless()); + obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless(), mySrd); // Should see 1 subscription notification waitForQueueToDrain(); @@ -303,7 +301,7 @@ public void testRepeatedDeliveries() throws Exception { observation.getIdentifierFirstRep().setSystem("foo").setValue("ID" + i); observation.getCode().addCoding().setCode(code).setSystem("SNOMED-CT"); observation.setStatus(Observation.ObservationStatus.FINAL); - myObservationDao.create(observation); + myObservationDao.create(observation, mySrd); } ourObservationProvider.waitForUpdateCount(100); @@ -332,7 +330,7 @@ public void 
testSubscriptionRegistryLoadsSubscriptionsFromDatabase() throws Exce observation.getIdentifierFirstRep().setSystem("foo").setValue("ID"); observation.getCode().addCoding().setCode(code).setSystem("SNOMED-CT"); observation.setStatus(Observation.ObservationStatus.FINAL); - myObservationDao.create(observation); + myObservationDao.create(observation, mySrd); ourObservationProvider.waitForUpdateCount(1); } @@ -376,7 +374,7 @@ public void testRestHookSubscriptionMetaAddDoesntTriggerNewDelivery() throws Exc obs.setId(obs.getIdElement().toUnqualifiedVersionless()); myClient.meta().add().onResource(obs.getIdElement()).meta(new Meta().addTag("http://blah", "blah", null)).execute(); - obs = myClient.read().resource(Observation.class).withId(obs.getIdElement().toUnqualifiedVersionless()).execute(); + obs = myClient.read().resource(Observation.class).withId(obs.getIdElement().toVersionless()).execute(); Coding tag = obs.getMeta().getTag("http://blah", "blah"); assertNotNull(tag); @@ -569,8 +567,8 @@ public void testRestHookSubscriptionDoesntGetLatestVersionByDefault() throws Exc assertEquals(0, ourObservationProvider.getCountCreate()); ourObservationProvider.waitForUpdateCount(2); - Observation observation1 = ourObservationProvider.getResourceUpdates().stream().filter(t->t.getIdElement().getVersionIdPart().equals("1")).findFirst().orElseThrow(()->new IllegalArgumentException()); - Observation observation2 = ourObservationProvider.getResourceUpdates().stream().filter(t->t.getIdElement().getVersionIdPart().equals("2")).findFirst().orElseThrow(()->new IllegalArgumentException()); + Observation observation1 = ourObservationProvider.getResourceUpdates().stream().filter(t->t.getIdElement().getVersionIdPart().equals("1")).findFirst().orElseThrow(IllegalArgumentException::new); + Observation observation2 = ourObservationProvider.getResourceUpdates().stream().filter(t->t.getIdElement().getVersionIdPart().equals("2")).findFirst().orElseThrow(IllegalArgumentException::new); 
assertEquals("1", observation1.getIdElement().getVersionIdPart()); assertNull(observation1.getNoteFirstRep().getText()); @@ -598,7 +596,7 @@ public void RestHookSubscriptionWithPayloadSendsDeleteRequest(String theCriteria ourObservationProvider.waitForUpdateCount(1); ourLog.info("** About to delete observation"); - myObservationDao.delete(IdDt.of(observation).toUnqualifiedVersionless()); + myObservationDao.delete(Objects.requireNonNull(IdDt.of(observation)).toUnqualifiedVersionless(), mySrd); ourObservationProvider.waitForDeleteCount(1); } @@ -674,7 +672,7 @@ public void testRestHookSubscriptionApplicationJson() throws Exception { assertEquals("1", ourObservationProvider.getStoredResources().get(0).getIdElement().getVersionIdPart()); - Subscription subscriptionTemp = myClient.read(Subscription.class, subscription2.getId()); + Subscription subscriptionTemp = myClient.read().resource(Subscription.class).withId(subscription2.getId()).execute(); assertNotNull(subscriptionTemp); subscriptionTemp.setCriteria(criteria1); @@ -698,7 +696,7 @@ public void testRestHookSubscriptionApplicationJson() throws Exception { assertEquals(0, ourObservationProvider.getCountCreate()); ourObservationProvider.waitForUpdateCount(4); - Observation observation3 = myClient.read(Observation.class, observationTemp3.getId()); + Observation observation3 = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute(); CodeableConcept codeableConcept = new CodeableConcept(); observation3.setCode(codeableConcept); Coding coding = codeableConcept.addCoding(); @@ -711,21 +709,21 @@ public void testRestHookSubscriptionApplicationJson() throws Exception { assertEquals(0, ourObservationProvider.getCountCreate()); ourObservationProvider.waitForUpdateCount(4); - Observation observation3a = myClient.read(Observation.class, observationTemp3.getId()); + Observation observation3a = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute(); CodeableConcept 
codeableConcept1 = new CodeableConcept(); observation3a.setCode(codeableConcept1); Coding coding1 = codeableConcept1.addCoding(); coding1.setCode(code); coding1.setSystem("SNOMED-CT"); - myClient.update().resource(observation3a).withId(observation3a.getIdElement()).execute(); + myClient.update().resource(observation3a).withId(observation3a.getIdElement().toUnqualifiedVersionless()).execute(); // Should see only one subscription notification waitForQueueToDrain(); assertEquals(0, ourObservationProvider.getCountCreate()); ourObservationProvider.waitForUpdateCount(5); - assertFalse(subscription1.getId().equals(subscription2.getId())); + assertNotEquals(subscription1.getId(), subscription2.getId()); assertThat(observation1.getId()).isNotEmpty(); assertThat(observation2.getId()).isNotEmpty(); } @@ -754,7 +752,7 @@ public void testRestHookSubscriptionApplicationJsonDatabase() throws Exception { assertEquals("1", ourObservationProvider.getStoredResources().get(0).getIdElement().getVersionIdPart()); - Subscription subscriptionTemp = myClient.read(Subscription.class, subscription2.getId()); + Subscription subscriptionTemp = myClient.read().resource(Subscription.class).withId(subscription2.getId()).execute(); assertNotNull(subscriptionTemp); subscriptionTemp.setCriteria(criteria1); @@ -778,7 +776,7 @@ public void testRestHookSubscriptionApplicationJsonDatabase() throws Exception { assertEquals(0, ourObservationProvider.getCountCreate()); ourObservationProvider.waitForUpdateCount(4); - Observation observation3 = myClient.read(Observation.class, observationTemp3.getId()); + Observation observation3 = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute(); CodeableConcept codeableConcept = new CodeableConcept(); observation3.setCode(codeableConcept); Coding coding = codeableConcept.addCoding(); @@ -791,21 +789,21 @@ public void testRestHookSubscriptionApplicationJsonDatabase() throws Exception { assertEquals(0, 
ourObservationProvider.getCountCreate()); ourObservationProvider.waitForUpdateCount(4); - Observation observation3a = myClient.read(Observation.class, observationTemp3.getId()); + Observation observation3a = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute(); CodeableConcept codeableConcept1 = new CodeableConcept(); observation3a.setCode(codeableConcept1); Coding coding1 = codeableConcept1.addCoding(); coding1.setCode(code); coding1.setSystem("SNOMED-CT"); - myClient.update().resource(observation3a).withId(observation3a.getIdElement()).execute(); + myClient.update().resource(observation3a).withId(observation3a.getIdElement().toVersionless()).execute(); // Should see only one subscription notification waitForQueueToDrain(); assertEquals(0, ourObservationProvider.getCountCreate()); ourObservationProvider.waitForUpdateCount(5); - assertFalse(subscription1.getId().equals(subscription2.getId())); + assertNotEquals(subscription1.getId(), subscription2.getId()); assertThat(observation1.getId()).isNotEmpty(); assertThat(observation2.getId()).isNotEmpty(); } @@ -830,7 +828,7 @@ public void testRestHookSubscriptionApplicationXml() throws Exception { ourObservationProvider.waitForUpdateCount(1); assertEquals(Constants.CT_FHIR_XML_NEW, ourRestfulServer.getRequestContentTypes().get(0)); - Subscription subscriptionTemp = myClient.read(Subscription.class, subscription2.getId()); + Subscription subscriptionTemp = myClient.read().resource(Subscription.class).withId(subscription2.getId()).execute(); assertNotNull(subscriptionTemp); subscriptionTemp.setCriteria(criteria1); myClient.update().resource(subscriptionTemp).withId(subscriptionTemp.getIdElement()).execute(); @@ -852,7 +850,7 @@ public void testRestHookSubscriptionApplicationXml() throws Exception { assertEquals(0, ourObservationProvider.getCountCreate()); ourObservationProvider.waitForUpdateCount(4); - Observation observation3 = myClient.read(Observation.class, observationTemp3.getId()); + 
Observation observation3 = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute(); CodeableConcept codeableConcept = new CodeableConcept(); observation3.setCode(codeableConcept); Coding coding = codeableConcept.addCoding(); @@ -865,21 +863,21 @@ public void testRestHookSubscriptionApplicationXml() throws Exception { assertEquals(0, ourObservationProvider.getCountCreate()); ourObservationProvider.waitForUpdateCount(4); - Observation observation3a = myClient.read(Observation.class, observationTemp3.getId()); + Observation observation3a = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute(); CodeableConcept codeableConcept1 = new CodeableConcept(); observation3a.setCode(codeableConcept1); Coding coding1 = codeableConcept1.addCoding(); coding1.setCode(code); coding1.setSystem("SNOMED-CT"); - myClient.update().resource(observation3a).withId(observation3a.getIdElement()).execute(); + myClient.update().resource(observation3a).withId(observation3a.getIdElement().toVersionless()).execute(); // Should see only one subscription notification waitForQueueToDrain(); assertEquals(0, ourObservationProvider.getCountCreate()); ourObservationProvider.waitForUpdateCount(5); - assertFalse(subscription1.getId().equals(subscription2.getId())); + assertNotEquals(subscription1.getId(), subscription2.getId()); assertThat(observation1.getId()).isNotEmpty(); assertThat(observation2.getId()).isNotEmpty(); } @@ -1002,7 +1000,7 @@ public void testUpdateSubscriptionToMatchLater() throws Exception { ourLog.info("** About to send observation that wont match"); - Observation observation1 = sendObservation(code, "SNOMED-CT"); + sendObservation(code, "SNOMED-CT"); // Criteria didn't match, shouldn't see any updates waitForQueueToDrain(); @@ -1018,7 +1016,7 @@ public void testUpdateSubscriptionToMatchLater() throws Exception { waitForQueueToDrain(); ourLog.info("** About to send Observation 2"); - Observation observation2 = 
sendObservation(code, "SNOMED-CT"); + sendObservation(code, "SNOMED-CT"); waitForQueueToDrain(); // Should see a subscription notification this time @@ -1027,7 +1025,7 @@ public void testUpdateSubscriptionToMatchLater() throws Exception { myClient.delete().resourceById(new IdType("Subscription/" + subscription2.getId())).execute(); - Observation observationTemp3 = sendObservation(code, "SNOMED-CT"); + sendObservation(code, "SNOMED-CT"); // No more matches Thread.sleep(1000); @@ -1042,11 +1040,11 @@ public void testRestHookSubscriptionApplicationXmlJson() throws Exception { String criteria1 = "Observation?code=SNOMED-CT|" + code + "&_format=xml"; String criteria2 = "Observation?code=SNOMED-CT|" + code + "111&_format=xml"; - Subscription subscription1 = createSubscription(criteria1, payload); - Subscription subscription2 = createSubscription(criteria2, payload); + createSubscription(criteria1, payload); + createSubscription(criteria2, payload); waitForActivatedSubscriptionCount(2); - Observation observation1 = sendObservation(code, "SNOMED-CT"); + sendObservation(code, "SNOMED-CT"); // Should see 1 subscription notification waitForQueueToDrain(); @@ -1056,7 +1054,7 @@ public void testRestHookSubscriptionApplicationXmlJson() throws Exception { } @Test - public void testRestHookSubscriptionInvalidCriteria() throws Exception { + public void testRestHookSubscriptionInvalidCriteria() { String payload = "application/xml"; String criteria1 = "Observation?codeeeee=SNOMED-CT"; @@ -1235,7 +1233,7 @@ public void testCustomSearchParam() throws Exception { sp.setExpression("Observation.extension('Observation#accessType')"); sp.setXpathUsage(SearchParameter.XPathUsageType.NORMAL); sp.setStatus(Enumerations.PublicationStatus.ACTIVE); - mySearchParameterDao.create(sp); + mySearchParameterDao.create(sp,mySrd); mySearchParamRegistry.forceRefresh(); createSubscription(criteria, "application/json"); waitForActivatedSubscriptionCount(1); diff --git 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImplTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImplTest.java index db45371f0c6e..dbc500972655 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImplTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImplTest.java @@ -29,18 +29,15 @@ import java.util.Collections; import java.util.List; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; - import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +@SuppressWarnings("LoggingSimilarMessage") public class TermConceptMappingSvcImplTest extends BaseTermR4Test { private static final Logger ourLog = LoggerFactory.getLogger(TermConceptMappingSvcImplTest.class); @@ -72,7 +69,7 @@ public void testByCodeSystemsAndSourceCodeOneToMany() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { TranslationRequest translationRequest = new TranslationRequest(); @@ -84,11 +81,10 @@ public void testByCodeSystemsAndSourceCodeOneToMany() { List targets = myConceptMappingSvc.translate(translationRequest).getResults(); assertNotNull(targets); assertEquals(2, targets.size()); - 
assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); TranslateConceptResult target = targets.get(0); - ourLog.info("target(0):\n" + target.toString()); + ourLog.info("target(0):\n{}", target.toString()); assertEquals("56789", target.getCode()); assertEquals("Target Code 56789", target.getDisplay()); @@ -100,7 +96,7 @@ public void testByCodeSystemsAndSourceCodeOneToMany() { target = targets.get(1); - ourLog.info("target(1):\n" + target.toString()); + ourLog.info("target(1):\n{}", target.toString()); assertEquals("67890", target.getCode()); assertEquals("Target Code 67890", target.getDisplay()); @@ -109,12 +105,6 @@ public void testByCodeSystemsAndSourceCodeOneToMany() { assertEquals(Enumerations.ConceptMapEquivalence.WIDER.toCode(), target.getEquivalence()); assertEquals(VS_URL_2, target.getValueSet()); assertEquals(CM_URL, target.getConceptMapUrl()); - - // Test caching. - targets = myConceptMappingSvc.translate(translationRequest).getResults(); - assertNotNull(targets); - assertEquals(2, targets.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); }); } @@ -123,7 +113,7 @@ public void testByCodeSystemsAndSourceCodeOneToOne() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { TranslationRequest translationRequest = new TranslationRequest(); @@ -135,11 +125,10 @@ public void testByCodeSystemsAndSourceCodeOneToOne() { List targets = myConceptMappingSvc.translate(translationRequest).getResults(); assertNotNull(targets); assertEquals(1, targets.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); TranslateConceptResult target = targets.get(0); - 
ourLog.info("ConceptMap.group.element.target:\n" + target.toString()); + ourLog.info("ConceptMap.group.element.target:\n{}", target.toString()); assertEquals("34567", target.getCode()); assertEquals("Target Code 34567", target.getDisplay()); @@ -148,12 +137,6 @@ public void testByCodeSystemsAndSourceCodeOneToOne() { assertEquals(Enumerations.ConceptMapEquivalence.EQUAL.toCode(), target.getEquivalence()); assertEquals(VS_URL_2, target.getValueSet()); assertEquals(CM_URL, target.getConceptMapUrl()); - - // Test caching. - targets = myConceptMappingSvc.translate(translationRequest).getResults(); - assertNotNull(targets); - assertEquals(1, targets.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); }); } @@ -162,7 +145,7 @@ public void testByCodeSystemsAndSourceCodeUnmapped() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { TranslationRequest translationRequest = new TranslationRequest(); @@ -182,7 +165,7 @@ public void testByCodeSystemsAndSourceCodeMatchedWithoutCode() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { TranslationRequest translationRequest = new TranslationRequest(); @@ -232,7 +215,7 @@ public void testUsingPredicatesWithCodeOnly() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + 
myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -246,11 +229,10 @@ public void testUsingPredicatesWithCodeOnly() { List targets = myConceptMappingSvc.translate(translationRequest).getResults(); assertNotNull(targets); assertEquals(3, targets.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); TranslateConceptResult target = targets.get(0); - ourLog.info("target(0):\n" + target.toString()); + ourLog.info("target(0):\n{}", target.toString()); assertEquals("34567", target.getCode()); assertEquals("Target Code 34567", target.getDisplay()); @@ -262,7 +244,7 @@ public void testUsingPredicatesWithCodeOnly() { target = targets.get(1); - ourLog.info("target(1):\n" + target.toString()); + ourLog.info("target(1):\n{}", target.toString()); assertEquals("56789", target.getCode()); assertEquals("Target Code 56789", target.getDisplay()); @@ -274,7 +256,7 @@ public void testUsingPredicatesWithCodeOnly() { target = targets.get(2); - ourLog.info("target(2):\n" + target.toString()); + ourLog.info("target(2):\n{}", target.toString()); assertEquals("67890", target.getCode()); assertEquals("Target Code 67890", target.getDisplay()); @@ -283,12 +265,6 @@ public void testUsingPredicatesWithCodeOnly() { assertEquals(Enumerations.ConceptMapEquivalence.WIDER.toCode(), target.getEquivalence()); assertEquals(VS_URL_2, target.getValueSet()); assertEquals(CM_URL, target.getConceptMapUrl()); - - // Test caching. 
- targets = myConceptMappingSvc.translate(translationRequest).getResults(); - assertNotNull(targets); - assertEquals(3, targets.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); }); } @@ -297,7 +273,7 @@ public void testUsingPredicatesWithSourceAndTargetSystem2() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -315,11 +291,10 @@ public void testUsingPredicatesWithSourceAndTargetSystem2() { List targets = myConceptMappingSvc.translate(translationRequest).getResults(); assertNotNull(targets); assertEquals(1, targets.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); TranslateConceptResult target = targets.get(0); - ourLog.info("target:\n" + target.toString()); + ourLog.info("target:\n{}", target.toString()); assertEquals("34567", target.getCode()); assertEquals("Target Code 34567", target.getDisplay()); @@ -328,12 +303,6 @@ public void testUsingPredicatesWithSourceAndTargetSystem2() { assertEquals(Enumerations.ConceptMapEquivalence.EQUAL.toCode(), target.getEquivalence()); assertEquals(VS_URL_2, target.getValueSet()); assertEquals(CM_URL, target.getConceptMapUrl()); - - // Test caching. 
- targets = myConceptMappingSvc.translate(translationRequest).getResults(); - assertNotNull(targets); - assertEquals(1, targets.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); }); } @@ -342,7 +311,7 @@ public void testUsingPredicatesWithSourceAndTargetSystem3() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -360,11 +329,10 @@ public void testUsingPredicatesWithSourceAndTargetSystem3() { List targets = myConceptMappingSvc.translate(translationRequest).getResults(); assertNotNull(targets); assertEquals(2, targets.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); TranslateConceptResult target = targets.get(0); - ourLog.info("target(0):\n" + target.toString()); + ourLog.info("target(0):\n{}", target.toString()); assertEquals("56789", target.getCode()); assertEquals("Target Code 56789", target.getDisplay()); @@ -376,7 +344,7 @@ public void testUsingPredicatesWithSourceAndTargetSystem3() { target = targets.get(1); - ourLog.info("target(1):\n" + target.toString()); + ourLog.info("target(1):\n{}", target.toString()); assertEquals("67890", target.getCode()); assertEquals("Target Code 67890", target.getDisplay()); @@ -385,12 +353,6 @@ public void testUsingPredicatesWithSourceAndTargetSystem3() { assertEquals(Enumerations.ConceptMapEquivalence.WIDER.toCode(), target.getEquivalence()); assertEquals(VS_URL_2, target.getValueSet()); assertEquals(CM_URL, target.getConceptMapUrl()); - - // Test caching. 
- targets = myConceptMappingSvc.translate(translationRequest).getResults(); - assertNotNull(targets); - assertEquals(2, targets.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); }); } @@ -399,7 +361,7 @@ public void testUsingPredicatesWithSourceSystem() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -415,11 +377,10 @@ public void testUsingPredicatesWithSourceSystem() { List targets = myConceptMappingSvc.translate(translationRequest).getResults(); assertNotNull(targets); assertEquals(3, targets.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); TranslateConceptResult target = targets.get(0); - ourLog.info("target(0):\n" + target.toString()); + ourLog.info("target(0):\n{}", target.toString()); assertEquals("34567", target.getCode()); assertEquals("Target Code 34567", target.getDisplay()); @@ -431,7 +392,7 @@ public void testUsingPredicatesWithSourceSystem() { target = targets.get(1); - ourLog.info("target(1):\n" + target.toString()); + ourLog.info("target(1):\n{}", target.toString()); assertEquals("56789", target.getCode()); assertEquals("Target Code 56789", target.getDisplay()); @@ -443,7 +404,7 @@ public void testUsingPredicatesWithSourceSystem() { target = targets.get(2); - ourLog.info("target(2):\n" + target.toString()); + ourLog.info("target(2):\n{}", target.toString()); assertEquals("67890", target.getCode()); assertEquals("Target Code 67890", target.getDisplay()); @@ -452,12 +413,6 @@ public void testUsingPredicatesWithSourceSystem() { assertEquals(Enumerations.ConceptMapEquivalence.WIDER.toCode(), target.getEquivalence()); assertEquals(VS_URL_2, 
target.getValueSet()); assertEquals(CM_URL, target.getConceptMapUrl()); - - // Test caching. - targets = myConceptMappingSvc.translate(translationRequest).getResults(); - assertNotNull(targets); - assertEquals(3, targets.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); }); } @@ -466,7 +421,7 @@ public void testUsingPredicatesWithSourceSystemAndVersion1() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -484,11 +439,10 @@ public void testUsingPredicatesWithSourceSystemAndVersion1() { List targets = myConceptMappingSvc.translate(translationRequest).getResults(); assertNotNull(targets); assertEquals(1, targets.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); TranslateConceptResult target = targets.get(0); - ourLog.info("target:\n" + target.toString()); + ourLog.info("target:\n{}", target.toString()); assertEquals("34567", target.getCode()); assertEquals("Target Code 34567", target.getDisplay()); @@ -497,12 +451,6 @@ public void testUsingPredicatesWithSourceSystemAndVersion1() { assertEquals(Enumerations.ConceptMapEquivalence.EQUAL.toCode(), target.getEquivalence()); assertEquals(VS_URL_2, target.getValueSet()); assertEquals(CM_URL, target.getConceptMapUrl()); - - // Test caching. 
- targets = myConceptMappingSvc.translate(translationRequest).getResults(); - assertNotNull(targets); - assertEquals(1, targets.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); }); } @@ -511,7 +459,7 @@ public void testUsingPredicatesWithSourceSystemAndVersion3() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -529,11 +477,10 @@ public void testUsingPredicatesWithSourceSystemAndVersion3() { List targets = myConceptMappingSvc.translate(translationRequest).getResults(); assertNotNull(targets); assertEquals(2, targets.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); TranslateConceptResult target = targets.get(0); - ourLog.info("target(0):\n" + target.toString()); + ourLog.info("target(0):\n{}", target.toString()); assertEquals("56789", target.getCode()); assertEquals("Target Code 56789", target.getDisplay()); @@ -545,7 +492,7 @@ public void testUsingPredicatesWithSourceSystemAndVersion3() { target = targets.get(1); - ourLog.info("target(1):\n" + target.toString()); + ourLog.info("target(1):\n{}", target.toString()); assertEquals("67890", target.getCode()); assertEquals("Target Code 67890", target.getDisplay()); @@ -554,12 +501,6 @@ public void testUsingPredicatesWithSourceSystemAndVersion3() { assertEquals(Enumerations.ConceptMapEquivalence.WIDER.toCode(), target.getEquivalence()); assertEquals(VS_URL_2, target.getValueSet()); assertEquals(CM_URL, target.getConceptMapUrl()); - - // Test caching. 
- targets = myConceptMappingSvc.translate(translationRequest).getResults(); - assertNotNull(targets); - assertEquals(2, targets.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); }); } @@ -568,7 +509,7 @@ public void testUsingPredicatesWithSourceValueSet() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -584,11 +525,10 @@ public void testUsingPredicatesWithSourceValueSet() { List targets = myConceptMappingSvc.translate(translationRequest).getResults(); assertNotNull(targets); assertEquals(3, targets.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); TranslateConceptResult target = targets.get(0); - ourLog.info("target(0):\n" + target.toString()); + ourLog.info("target(0):\n{}", target.toString()); assertEquals("34567", target.getCode()); assertEquals("Target Code 34567", target.getDisplay()); @@ -600,7 +540,7 @@ public void testUsingPredicatesWithSourceValueSet() { target = targets.get(1); - ourLog.info("target(1):\n" + target.toString()); + ourLog.info("target(1):\n{}", target.toString()); assertEquals("56789", target.getCode()); assertEquals("Target Code 56789", target.getDisplay()); @@ -612,7 +552,7 @@ public void testUsingPredicatesWithSourceValueSet() { target = targets.get(2); - ourLog.info("target(2):\n" + target.toString()); + ourLog.info("target(2):\n{}", target.toString()); assertEquals("67890", target.getCode()); assertEquals("Target Code 67890", target.getDisplay()); @@ -621,12 +561,6 @@ public void testUsingPredicatesWithSourceValueSet() { assertEquals(Enumerations.ConceptMapEquivalence.WIDER.toCode(), target.getEquivalence()); assertEquals(VS_URL_2, 
target.getValueSet()); assertEquals(CM_URL, target.getConceptMapUrl()); - - // Test caching. - targets = myConceptMappingSvc.translate(translationRequest).getResults(); - assertNotNull(targets); - assertEquals(3, targets.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); }); } @@ -635,7 +569,7 @@ public void testUsingPredicatesWithTargetValueSet() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -651,11 +585,10 @@ public void testUsingPredicatesWithTargetValueSet() { List targets = myConceptMappingSvc.translate(translationRequest).getResults(); assertNotNull(targets); assertEquals(3, targets.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); TranslateConceptResult target = targets.get(0); - ourLog.info("target(0):\n" + target.toString()); + ourLog.info("target(0):\n{}", target.toString()); assertEquals("34567", target.getCode()); assertEquals("Target Code 34567", target.getDisplay()); @@ -667,7 +600,7 @@ public void testUsingPredicatesWithTargetValueSet() { target = targets.get(1); - ourLog.info("target(1):\n" + target.toString()); + ourLog.info("target(1):\n{}", target.toString()); assertEquals("56789", target.getCode()); assertEquals("Target Code 56789", target.getDisplay()); @@ -679,7 +612,7 @@ public void testUsingPredicatesWithTargetValueSet() { target = targets.get(2); - ourLog.info("target(2):\n" + target.toString()); + ourLog.info("target(2):\n{}", target.toString()); assertEquals("67890", target.getCode()); assertEquals("Target Code 67890", target.getDisplay()); @@ -688,12 +621,6 @@ public void testUsingPredicatesWithTargetValueSet() { 
assertEquals(Enumerations.ConceptMapEquivalence.WIDER.toCode(), target.getEquivalence()); assertEquals(VS_URL_2, target.getValueSet()); assertEquals(CM_URL, target.getConceptMapUrl()); - - // Test caching. - targets = myConceptMappingSvc.translate(translationRequest).getResults(); - assertNotNull(targets); - assertEquals(3, targets.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationCache()); }); } @@ -702,7 +629,7 @@ public void testWithReverse() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -722,11 +649,10 @@ public void testWithReverse() { TranslateConceptResults elements = myConceptMappingSvc.translateWithReverse(translationRequest); assertNotNull(elements); assertEquals(1, elements.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); TranslateConceptResult element = elements.getResults().get(0); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("78901", element.getCode()); assertEquals("Source Code 78901", element.getDisplay()); @@ -734,12 +660,6 @@ public void testWithReverse() { assertEquals("Version 5", element.getSystemVersion()); assertEquals(VS_URL, element.getValueSet()); assertEquals(CM_URL, element.getConceptMapUrl()); - - // Test caching. 
- elements = myConceptMappingSvc.translateWithReverse(translationRequest); - assertNotNull(elements); - assertEquals(1, elements.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); }); } @@ -748,7 +668,7 @@ public void testWithReverseByCodeSystemsAndSourceCodeUnmapped() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { TranslationRequest translationRequest = new TranslationRequest(); @@ -768,7 +688,7 @@ public void testWithReverseUsingPredicatesWithCodeOnly() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -784,11 +704,10 @@ public void testWithReverseUsingPredicatesWithCodeOnly() { TranslateConceptResults elements = myConceptMappingSvc.translateWithReverse(translationRequest); assertNotNull(elements); assertEquals(2, elements.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); TranslateConceptResult element = elements.getResults().get(0); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("12345", element.getCode()); assertEquals("Source Code 12345", element.getDisplay()); @@ -799,7 +718,7 @@ public void testWithReverseUsingPredicatesWithCodeOnly() { element = elements.getResults().get(1); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", 
element.toString()); assertEquals("78901", element.getCode()); assertEquals("Source Code 78901", element.getDisplay()); @@ -807,12 +726,6 @@ public void testWithReverseUsingPredicatesWithCodeOnly() { assertEquals("Version 5", element.getSystemVersion()); assertEquals(VS_URL, element.getValueSet()); assertEquals(CM_URL, element.getConceptMapUrl()); - - // Test caching. - elements = myConceptMappingSvc.translateWithReverse(translationRequest); - assertNotNull(elements); - assertEquals(2, elements.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); }); } @@ -821,7 +734,7 @@ public void testWithReverseUsingPredicatesWithSourceAndTargetSystem1() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -841,11 +754,10 @@ public void testWithReverseUsingPredicatesWithSourceAndTargetSystem1() { TranslateConceptResults elements = myConceptMappingSvc.translateWithReverse(translationRequest); assertNotNull(elements); assertEquals(1, elements.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); TranslateConceptResult element = elements.getResults().get(0); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("12345", element.getCode()); assertEquals("Source Code 12345", element.getDisplay()); @@ -853,12 +765,6 @@ public void testWithReverseUsingPredicatesWithSourceAndTargetSystem1() { assertEquals("Version 1", element.getSystemVersion()); assertEquals(VS_URL, element.getValueSet()); assertEquals(CM_URL, element.getConceptMapUrl()); - - // Test caching. 
- elements = myConceptMappingSvc.translateWithReverse(translationRequest); - assertNotNull(elements); - assertEquals(1, elements.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); }); } @@ -867,7 +773,7 @@ public void testWithReverseUsingPredicatesWithSourceAndTargetSystem4() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -887,11 +793,10 @@ public void testWithReverseUsingPredicatesWithSourceAndTargetSystem4() { TranslateConceptResults elements = myConceptMappingSvc.translateWithReverse(translationRequest); assertNotNull(elements); assertEquals(1, elements.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); TranslateConceptResult element = elements.getResults().get(0); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("78901", element.getCode()); assertEquals("Source Code 78901", element.getDisplay()); @@ -899,12 +804,6 @@ public void testWithReverseUsingPredicatesWithSourceAndTargetSystem4() { assertEquals("Version 5", element.getSystemVersion()); assertEquals(VS_URL, element.getValueSet()); assertEquals(CM_URL, element.getConceptMapUrl()); - - // Test caching. 
- elements = myConceptMappingSvc.translateWithReverse(translationRequest); - assertNotNull(elements); - assertEquals(1, elements.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); }); } @@ -913,7 +812,7 @@ public void testWithReverseUsingPredicatesWithSourceSystem() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -931,11 +830,10 @@ public void testWithReverseUsingPredicatesWithSourceSystem() { TranslateConceptResults elements = myConceptMappingSvc.translateWithReverse(translationRequest); assertNotNull(elements); assertEquals(2, elements.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); TranslateConceptResult element = elements.getResults().get(0); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("12345", element.getCode()); assertEquals("Source Code 12345", element.getDisplay()); @@ -946,7 +844,7 @@ public void testWithReverseUsingPredicatesWithSourceSystem() { element = elements.getResults().get(1); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("78901", element.getCode()); assertEquals("Source Code 78901", element.getDisplay()); @@ -954,12 +852,6 @@ public void testWithReverseUsingPredicatesWithSourceSystem() { assertEquals("Version 5", element.getSystemVersion()); assertEquals(VS_URL, element.getValueSet()); assertEquals(CM_URL, element.getConceptMapUrl()); - - // Test caching. 
- elements = myConceptMappingSvc.translateWithReverse(translationRequest); - assertNotNull(elements); - assertEquals(2, elements.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); }); } @@ -968,7 +860,7 @@ public void testWithReverseUsingPredicatesWithSourceSystemAndVersion() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -988,11 +880,10 @@ public void testWithReverseUsingPredicatesWithSourceSystemAndVersion() { TranslateConceptResults elements = myConceptMappingSvc.translateWithReverse(translationRequest); assertNotNull(elements); assertEquals(2, elements.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); TranslateConceptResult element = elements.getResults().get(0); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("12345", element.getCode()); assertEquals("Source Code 12345", element.getDisplay()); @@ -1003,7 +894,7 @@ public void testWithReverseUsingPredicatesWithSourceSystemAndVersion() { element = elements.getResults().get(1); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("78901", element.getCode()); assertEquals("Source Code 78901", element.getDisplay()); @@ -1011,12 +902,6 @@ public void testWithReverseUsingPredicatesWithSourceSystemAndVersion() { assertEquals("Version 5", element.getSystemVersion()); assertEquals(VS_URL, element.getValueSet()); assertEquals(CM_URL, element.getConceptMapUrl()); - - // Test caching. 
- elements = myConceptMappingSvc.translateWithReverse(translationRequest); - assertNotNull(elements); - assertEquals(2, elements.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); }); } @@ -1025,7 +910,7 @@ public void testWithReverseUsingPredicatesWithSourceValueSet() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -1043,11 +928,10 @@ public void testWithReverseUsingPredicatesWithSourceValueSet() { TranslateConceptResults elements = myConceptMappingSvc.translateWithReverse(translationRequest); assertNotNull(elements); assertEquals(2, elements.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); TranslateConceptResult element = elements.getResults().get(0); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("12345", element.getCode()); assertEquals("Source Code 12345", element.getDisplay()); @@ -1058,7 +942,7 @@ public void testWithReverseUsingPredicatesWithSourceValueSet() { element = elements.getResults().get(1); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("78901", element.getCode()); assertEquals("Source Code 78901", element.getDisplay()); @@ -1066,12 +950,6 @@ public void testWithReverseUsingPredicatesWithSourceValueSet() { assertEquals("Version 5", element.getSystemVersion()); assertEquals(VS_URL, element.getValueSet()); assertEquals(CM_URL, element.getConceptMapUrl()); - - // Test caching. 
- elements = myConceptMappingSvc.translateWithReverse(translationRequest); - assertNotNull(elements); - assertEquals(2, elements.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); }); } @@ -1080,7 +958,7 @@ public void testWithReverseUsingPredicatesWithTargetValueSet() { createAndPersistConceptMap(); ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); runInTransaction(() -> { /* @@ -1098,11 +976,10 @@ public void testWithReverseUsingPredicatesWithTargetValueSet() { TranslateConceptResults elements = myConceptMappingSvc.translateWithReverse(translationRequest); assertNotNull(elements); assertEquals(2, elements.size()); - assertFalse(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); TranslateConceptResult element = elements.getResults().get(0); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("12345", element.getCode()); assertEquals("Source Code 12345", element.getDisplay()); @@ -1113,7 +990,7 @@ public void testWithReverseUsingPredicatesWithTargetValueSet() { element = elements.getResults().get(1); - ourLog.info("element:\n" + element.toString()); + ourLog.info("element:\n{}", element.toString()); assertEquals("78901", element.getCode()); assertEquals("Source Code 78901", element.getDisplay()); @@ -1121,12 +998,6 @@ public void testWithReverseUsingPredicatesWithTargetValueSet() { assertEquals("Version 5", element.getSystemVersion()); assertEquals(VS_URL, element.getValueSet()); assertEquals(CM_URL, element.getConceptMapUrl()); - - // Test caching. 
- elements = myConceptMappingSvc.translateWithReverse(translationRequest); - assertNotNull(elements); - assertEquals(2, elements.size()); - assertTrue(TermConceptMappingSvcImpl.isOurLastResultsFromTranslationWithReverseCache()); }); } @@ -1145,10 +1016,10 @@ public void testDuplicateConceptMapUrls() { @Test public void testDuplicateConceptMapUrlsAndVersions() { - createAndPersistConceptMap("v1"); + createAndPersistConceptMapWithVersion(); try { - createAndPersistConceptMap("v1"); + createAndPersistConceptMapWithVersion(); fail(); } catch (UnprocessableEntityException e) { assertEquals(Msg.code(841) + "Can not create multiple ConceptMap resources with ConceptMap.url \"http://example.com/my_concept_map\" and ConceptMap.version \"v1\", already have one with resource ID: ConceptMap/" + myConceptMapId.getIdPart(), e.getMessage()); @@ -1162,7 +1033,7 @@ public void testStoreTermConceptMapAndChildren() { createAndPersistConceptMap(); ConceptMap originalConceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(originalConceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(originalConceptMap)); runInTransaction(() -> { Pageable page = PageRequest.of(0, 1); @@ -1171,7 +1042,7 @@ public void testStoreTermConceptMapAndChildren() { TermConceptMap conceptMap = optionalConceptMap.get(0); - ourLog.info("ConceptMap:\n" + conceptMap.toString()); + ourLog.info("ConceptMap:\n{}", conceptMap.toString()); assertEquals(VS_URL, conceptMap.getSource()); assertEquals(VS_URL_2, conceptMap.getTarget()); @@ -1180,7 +1051,7 @@ public void testStoreTermConceptMapAndChildren() { TermConceptMapGroup group = conceptMap.getConceptMapGroups().get(0); - ourLog.info("ConceptMap.group(0):\n" + group.toString()); + ourLog.info("ConceptMap.group(0):\n{}", group.toString()); assertGroupHasValues( CS_URL,"Version 1", CS_URL_2, "Version 
2", group); @@ -1190,7 +1061,7 @@ public void testStoreTermConceptMapAndChildren() { TermConceptMapGroupElement element = group.getConceptMapGroupElements().get(0); - ourLog.info("ConceptMap.group(0).element(0):\n" + element.toString()); + ourLog.info("ConceptMap.group(0).element(0):\n{}", element.toString()); assertElementHasValues( "12345", "Source Code 12345", CS_URL, "Version 1", element); @@ -1198,14 +1069,14 @@ public void testStoreTermConceptMapAndChildren() { TermConceptMapGroupElementTarget target = element.getConceptMapGroupElementTargets().get(0); - ourLog.info("ConceptMap.group(0).element(0).target(0):\n" + target.toString()); + ourLog.info("ConceptMap.group(0).element(0).target(0):\n{}", target.toString()); assertTargetHasValues( "34567", "Target Code 34567", CS_URL_2, "Version 2", Enumerations.ConceptMapEquivalence.EQUAL, target); element = group.getConceptMapGroupElements().get(1); - ourLog.info("ConceptMap.group(0).element(1):\n" + element.toString()); + ourLog.info("ConceptMap.group(0).element(1):\n{}", element.toString()); assertElementHasValues( "23456", "Source Code 23456", CS_URL, "Version 1", element); @@ -1213,19 +1084,19 @@ public void testStoreTermConceptMapAndChildren() { assertEquals(2, element.getConceptMapGroupElementTargets().size()); target = element.getConceptMapGroupElementTargets().get(0); - ourLog.info("ConceptMap.group(0).element(1).target(0):\n" + target.toString()); + ourLog.info("ConceptMap.group(0).element(1).target(0):\n{}", target.toString()); assertTargetHasValues( "45678", "Target Code 45678", CS_URL_2, "Version 2", Enumerations.ConceptMapEquivalence.WIDER, target); // We had deliberately added a duplicate, and here it is... 
target = element.getConceptMapGroupElementTargets().get(1); - ourLog.info("ConceptMap.group(0).element(1).target(1):\n" + target.toString()); + ourLog.info("ConceptMap.group(0).element(1).target(1):\n{}", target.toString()); assertTargetHasValues( "45678", "Target Code 45678", CS_URL_2, "Version 2", Enumerations.ConceptMapEquivalence.WIDER, target); group = conceptMap.getConceptMapGroups().get(1); - ourLog.info("ConceptMap.group(1):\n" + group.toString()); + ourLog.info("ConceptMap.group(1):\n{}", group.toString()); assertGroupHasValues( CS_URL, "Version 3", CS_URL_3, "Version 4", group); @@ -1233,7 +1104,7 @@ public void testStoreTermConceptMapAndChildren() { element = group.getConceptMapGroupElements().get(0); - ourLog.info("ConceptMap.group(1).element(0):\n" + element.toString()); + ourLog.info("ConceptMap.group(1).element(0):\n{}", element.toString()); assertElementHasValues( "12345", "Source Code 12345", CS_URL, "Version 3", element); @@ -1241,21 +1112,21 @@ public void testStoreTermConceptMapAndChildren() { target = element.getConceptMapGroupElementTargets().get(0); - ourLog.info("ConceptMap.group(1).element(0).target(0):\n" + target.toString()); + ourLog.info("ConceptMap.group(1).element(0).target(0):\n{}", target.toString()); assertTargetHasValues( "56789", "Target Code 56789", CS_URL_3, "Version 4", Enumerations.ConceptMapEquivalence.EQUAL, target); target = element.getConceptMapGroupElementTargets().get(1); - ourLog.info("ConceptMap.group(1).element(0).target(1):\n" + target.toString()); + ourLog.info("ConceptMap.group(1).element(0).target(1):\n{}", target.toString()); assertTargetHasValues( "67890", "Target Code 67890", CS_URL_3, "Version 4", Enumerations.ConceptMapEquivalence.WIDER, target); group = conceptMap.getConceptMapGroups().get(2); - ourLog.info("ConceptMap.group(2):\n" + group.toString()); + ourLog.info("ConceptMap.group(2):\n{}", group.toString()); assertGroupHasValues( CS_URL_4, "Version 5", CS_URL_2, "Version 2", group); @@ -1263,7 +1134,7 
@@ public void testStoreTermConceptMapAndChildren() { element = group.getConceptMapGroupElements().get(0); - ourLog.info("ConceptMap.group(2).element(0):\n" + element.toString()); + ourLog.info("ConceptMap.group(2).element(0):\n{}", element.toString()); assertElementHasValues( "78901", "Source Code 78901", CS_URL_4, "Version 5", element); @@ -1271,7 +1142,7 @@ public void testStoreTermConceptMapAndChildren() { target = element.getConceptMapGroupElementTargets().get(0); - ourLog.info("ConceptMap.group(2).element(0).target(0):\n" + target.toString()); + ourLog.info("ConceptMap.group(2).element(0).target(0):\n{}", target.toString()); }); } @@ -1280,7 +1151,7 @@ public void testStoreTermConceptMapAndChildren_handleUnmappedElements() { createAndPersistConceptMap(); ConceptMap originalConceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(originalConceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(originalConceptMap)); runInTransaction(() -> { Pageable page = PageRequest.of(0, 1); @@ -1290,7 +1161,7 @@ public void testStoreTermConceptMapAndChildren_handleUnmappedElements() { TermConceptMap conceptMap = optionalConceptMap.get(0); TermConceptMapGroup group = conceptMap.getConceptMapGroups().get(3); - ourLog.info("ConceptMap.group(3):\n" + group.toString()); + ourLog.info("ConceptMap.group(3):\n{}", group.toString()); assertGroupHasValues( CS_URL_4, "Version 1", CS_URL_3, "Version 1", group); @@ -1298,7 +1169,7 @@ public void testStoreTermConceptMapAndChildren_handleUnmappedElements() { TermConceptMapGroupElement element = group.getConceptMapGroupElements().get(0); - ourLog.info("ConceptMap.group(3).element(0):\n" + element.toString()); + ourLog.info("ConceptMap.group(3).element(0):\n{}", element.toString()); assertElementHasValues( "89012", "Source Code 89012", CS_URL_4, "Version 1", element); @@ 
-1306,7 +1177,7 @@ public void testStoreTermConceptMapAndChildren_handleUnmappedElements() { element = group.getConceptMapGroupElements().get(1); - ourLog.info("ConceptMap.group(3).element(1):\n" + element.toString()); + ourLog.info("ConceptMap.group(3).element(1):\n{}", element.toString()); assertElementHasValues( "89123", "Source Code 89123", CS_URL_4, "Version 1", element); @@ -1314,7 +1185,7 @@ public void testStoreTermConceptMapAndChildren_handleUnmappedElements() { TermConceptMapGroupElementTarget target = element.getConceptMapGroupElementTargets().get(0); - ourLog.info("ConceptMap.group(3).element(1).target(0):\n" + target.toString()); + ourLog.info("ConceptMap.group(3).element(1).target(0):\n{}", target.toString()); assertTargetHasValues( null, null, CS_URL_3, "Version 1", Enumerations.ConceptMapEquivalence.UNMATCHED, target); @@ -1366,7 +1237,7 @@ public void testStoreTermConceptMapAndChildrenWithClientAssignedId() { createAndPersistConceptMap(); ConceptMap originalConceptMap = myConceptMapDao.read(myConceptMapId, mySrd); - ourLog.debug("ConceptMap:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(originalConceptMap)); + ourLog.debug("ConceptMap:\n{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(originalConceptMap)); runInTransaction(() -> { Pageable page = PageRequest.of(0, 1); @@ -1375,7 +1246,7 @@ public void testStoreTermConceptMapAndChildrenWithClientAssignedId() { TermConceptMap conceptMap = optionalConceptMap.get(0); - ourLog.info("ConceptMap:\n" + conceptMap.toString()); + ourLog.info("ConceptMap:\n{}", conceptMap.toString()); assertEquals(VS_URL, conceptMap.getSource()); assertEquals(VS_URL_2, conceptMap.getTarget()); @@ -1384,7 +1255,7 @@ public void testStoreTermConceptMapAndChildrenWithClientAssignedId() { TermConceptMapGroup group = conceptMap.getConceptMapGroups().get(0); - ourLog.info("ConceptMap.group(0):\n" + group.toString()); + ourLog.info("ConceptMap.group(0):\n{}", 
group.toString()); assertEquals(CS_URL, group.getSource()); assertEquals("Version 1", group.getSourceVersion()); @@ -1397,77 +1268,77 @@ public void testStoreTermConceptMapAndChildrenWithClientAssignedId() { TermConceptMapGroupElement element = group.getConceptMapGroupElements().get(0); - ourLog.info("ConceptMap.group(0).element(0):\n" + element.toString()); + ourLog.info("ConceptMap.group(0).element(0):\n{}", element.toString()); assertElementHasValues("12345", "Source Code 12345", CS_URL, "Version 1", element); assertEquals(1, element.getConceptMapGroupElementTargets().size()); TermConceptMapGroupElementTarget target = element.getConceptMapGroupElementTargets().get(0); - ourLog.info("ConceptMap.group(0).element(0).target(0):\n" + target.toString()); + ourLog.info("ConceptMap.group(0).element(0).target(0):\n{}", target.toString()); assertTargetHasValues("34567", "Target Code 34567", CS_URL_2, "Version 2", Enumerations.ConceptMapEquivalence.EQUAL, target); element = group.getConceptMapGroupElements().get(1); - ourLog.info("ConceptMap.group(0).element(1):\n" + element.toString()); + ourLog.info("ConceptMap.group(0).element(1):\n{}", element.toString()); assertElementHasValues("23456", "Source Code 23456", CS_URL, "Version 1", element); assertEquals(2, element.getConceptMapGroupElementTargets().size()); target = element.getConceptMapGroupElementTargets().get(0); - ourLog.info("ConceptMap.group(0).element(1).target(0):\n" + target.toString()); + ourLog.info("ConceptMap.group(0).element(1).target(0):\n{}", target.toString()); assertTargetHasValues("45678", "Target Code 45678", CS_URL_2, "Version 2", Enumerations.ConceptMapEquivalence.WIDER, target); // We had deliberately added a duplicate, and here it is... 
target = element.getConceptMapGroupElementTargets().get(1); - ourLog.info("ConceptMap.group(0).element(1).target(1):\n" + target.toString()); + ourLog.info("ConceptMap.group(0).element(1).target(1):\n{}", target.toString()); assertTargetHasValues("45678", "Target Code 45678", CS_URL_2, "Version 2", Enumerations.ConceptMapEquivalence.WIDER, target); group = conceptMap.getConceptMapGroups().get(1); - ourLog.info("ConceptMap.group(1):\n" + group.toString()); + ourLog.info("ConceptMap.group(1):\n{}", group.toString()); assertGroupHasValues(CS_URL, "Version 3", CS_URL_3, "Version 4", group); assertEquals(1, group.getConceptMapGroupElements().size()); element = group.getConceptMapGroupElements().get(0); - ourLog.info("ConceptMap.group(1).element(0):\n" + element.toString()); + ourLog.info("ConceptMap.group(1).element(0):\n{}", element.toString()); assertElementHasValues("12345", "Source Code 12345", CS_URL, "Version 3", element); assertEquals(2, element.getConceptMapGroupElementTargets().size()); target = element.getConceptMapGroupElementTargets().get(0); - ourLog.info("ConceptMap.group(1).element(0).target(0):\n" + target.toString()); + ourLog.info("ConceptMap.group(1).element(0).target(0):\n{}", target.toString()); assertTargetHasValues("56789", "Target Code 56789", CS_URL_3, "Version 4", Enumerations.ConceptMapEquivalence.EQUAL, target); target = element.getConceptMapGroupElementTargets().get(1); - ourLog.info("ConceptMap.group(1).element(0).target(1):\n" + target.toString()); + ourLog.info("ConceptMap.group(1).element(0).target(1):\n{}", target.toString()); assertTargetHasValues("67890", "Target Code 67890", CS_URL_3, "Version 4", Enumerations.ConceptMapEquivalence.WIDER, target); group = conceptMap.getConceptMapGroups().get(2); - ourLog.info("ConceptMap.group(2):\n" + group.toString()); + ourLog.info("ConceptMap.group(2):\n{}", group.toString()); assertGroupHasValues(CS_URL_4, "Version 5", CS_URL_2, "Version 2", group); assertEquals(1, 
group.getConceptMapGroupElements().size()); element = group.getConceptMapGroupElements().get(0); - ourLog.info("ConceptMap.group(2).element(0):\n" + element.toString()); + ourLog.info("ConceptMap.group(2).element(0):\n{}", element.toString()); assertElementHasValues("78901", "Source Code 78901", CS_URL_4, "Version 5", element); assertEquals(1, element.getConceptMapGroupElementTargets().size()); target = element.getConceptMapGroupElementTargets().get(0); - ourLog.info("ConceptMap.group(2).element(0).target(0):\n" + target.toString()); + ourLog.info("ConceptMap.group(2).element(0).target(0):\n{}", target.toString()); assertTargetHasValues("34567", "Target Code 34567", CS_URL_2, "Version 2", Enumerations.ConceptMapEquivalence.NARROWER, target); }); @@ -1574,10 +1445,10 @@ private void createAndPersistConceptMap() { persistConceptMap(conceptMap, HttpVerb.POST); } - private void createAndPersistConceptMap(String version) { + private void createAndPersistConceptMapWithVersion() { ConceptMap conceptMap = createConceptMap(); conceptMap.setId("ConceptMap/cm"); - conceptMap.setVersion(version); + conceptMap.setVersion("v1"); persistConceptMap(conceptMap, HttpVerb.POST); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index bd3503b9bc03..6dd11c5ee9ed 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -270,37 +270,38 @@ public void testValidateCodeIsInPreExpandedValueSet() throws Exception { ourLog.debug("ValueSet:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + ValidationSupportContext valCtx = new ValidationSupportContext(myValidationSupport); - 
IValidationSupport.CodeValidationResult result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, null, null); + IValidationSupport.CodeValidationResult result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, null, null); assertNull(result); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, "BOGUS", null, null, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, "BOGUS", null, null, null); assertFalse(result.isOk()); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, "11378-7", null, null, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, "11378-7", null, null, null); assertFalse(result.isOk()); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsGuess, valueSet, null, "11378-7", null, null, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsGuess, valueSet, null, "11378-7", null, null, null); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsGuess, valueSet, null, "11378-7", "Systolic blood pressure at First encounter", null, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsGuess, valueSet, null, "11378-7", "Systolic blood pressure at First encounter", null, null); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, "http://acme.org", "11378-7", null, null, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, "http://acme.org", "11378-7", null, null, null); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", 
result.getDisplay()); Coding coding = new Coding("http://acme.org", "11378-7", "Systolic blood pressure at First encounter"); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, coding, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, coding, null); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); CodeableConcept codeableConcept = new CodeableConcept(); codeableConcept.addCoding(new Coding("BOGUS", "BOGUS", "BOGUS")); codeableConcept.addCoding(coding); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, null, codeableConcept); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, null, codeableConcept); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); } @@ -318,38 +319,39 @@ public void testValidateCodeIsInPreExpandedValueSetWithClientAssignedId() throws ourLog.debug("ValueSet:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + ValidationSupportContext valCtx = new ValidationSupportContext(myValidationSupport); - IValidationSupport.CodeValidationResult result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, null, null); + IValidationSupport.CodeValidationResult result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, null, null); assertNull(result); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, "BOGUS", null, null, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, "BOGUS", null, null, null); assertFalse(result.isOk()); - result = 
myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, "11378-7", null, null, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, "11378-7", null, null, null); assertFalse(result.isOk()); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsGuess, valueSet, null, "11378-7", null, null, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsGuess, valueSet, null, "11378-7", null, null, null); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsGuess, valueSet, null, "11378-7", "Systolic blood pressure at First encounter", null, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsGuess, valueSet, null, "11378-7", "Systolic blood pressure at First encounter", null, null); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, "http://acme.org", "11378-7", null, null, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, "http://acme.org", "11378-7", null, null, null); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); Coding coding = new Coding("http://acme.org", "11378-7", "Systolic blood pressure at First encounter"); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, coding, null); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, coding, null); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); CodeableConcept codeableConcept = new CodeableConcept(); codeableConcept.addCoding(new Coding("BOGUS", "BOGUS", "BOGUS")); 
codeableConcept.addCoding(coding); - result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, null, codeableConcept); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, null, codeableConcept); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java index e9c5bba1a805..a334173346d1 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java @@ -88,6 +88,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet @AfterEach public void afterEach() { SearchBuilder.setMaxPageSizeForTest(null); + TermReadSvcImpl.setForceDisableHibernateSearchForUnitTest(false); } @Override @@ -502,7 +503,7 @@ public void testExpandNonPersistedValueSet() { code = "AAA"; outcome = myValueSetDao.validateCode(new CodeType(valueSetUrl), null, new CodeType(code), new CodeType(codeSystemUrl), null, null, null, mySrd); assertFalse(outcome.isOk()); - assertEquals("Unknown code 'http://hl7.org/fhir/administrative-gender#AAA' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/administrative-gender'", outcome.getMessage()); + assertThat(outcome.getMessage()).contains("Unknown code 'http://hl7.org/fhir/administrative-gender#AAA' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/administrative-gender'"); assertEquals("error", outcome.getSeverityCode()); } @@ -986,7 +987,7 @@ public void testExpandValueSetWithUnknownCodeSystem() { String code = "28571000087109"; IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(new CodeType(valueSetUrl), 
null, new CodeType(code), new CodeType(codeSystemUrl), null, null, null, mySrd); assertFalse(outcome.isOk()); - assertEquals("Unknown code 'http://invalid-cs#28571000087109' for in-memory expansion of ValueSet 'http://vs-with-invalid-cs'", outcome.getMessage()); + assertThat(outcome.getMessage()).contains("Unknown code 'http://invalid-cs#28571000087109' for in-memory expansion of ValueSet 'http://vs-with-invalid-cs'"); assertEquals("error", outcome.getSeverityCode()); // Try validating a code that is in the missing CS that is imported by the VS @@ -1390,7 +1391,7 @@ public void testStoreTermValueSetAndChildren() throws Exception { TermValueSet termValueSet = optionalValueSetByUrl.get(); assertSame(optionalValueSetByResourcePid.get(), termValueSet); - ourLog.info("ValueSet:\n" + termValueSet.toString()); + ourLog.info("ValueSet:\n" + termValueSet); assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName()); assertEquals(0, termValueSet.getConcepts().size()); @@ -1408,7 +1409,7 @@ public void testStoreTermValueSetAndChildren() throws Exception { TermValueSet termValueSet = optionalValueSetByUrl.get(); assertSame(optionalValueSetByResourcePid.get(), termValueSet); - ourLog.info("ValueSet:\n" + termValueSet.toString()); + ourLog.info("ValueSet:\n" + termValueSet); assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName()); assertEquals(codeSystem.getConcept().size(), termValueSet.getConcepts().size()); @@ -1451,7 +1452,7 @@ public void testStoreTermValueSetAndChildrenWithClientAssignedId() throws Except TermValueSet termValueSet = optionalValueSetByUrl.get(); assertSame(optionalValueSetByResourcePid.get(), termValueSet); - ourLog.info("ValueSet:\n" + termValueSet.toString()); 
+ ourLog.info("ValueSet:\n" + termValueSet); assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName()); assertEquals(0, termValueSet.getConcepts().size()); @@ -1469,7 +1470,7 @@ public void testStoreTermValueSetAndChildrenWithClientAssignedId() throws Except TermValueSet termValueSet = optionalValueSetByUrl.get(); assertSame(optionalValueSetByResourcePid.get(), termValueSet); - ourLog.info("ValueSet:\n" + termValueSet.toString()); + ourLog.info("ValueSet:\n" + termValueSet); assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName()); assertEquals(codeSystem.getConcept().size(), termValueSet.getConcepts().size()); @@ -1509,7 +1510,7 @@ public void testStoreTermValueSetAndChildrenWithExclude() throws Exception { TermValueSet termValueSet = optionalValueSetByUrl.get(); assertSame(optionalValueSetByResourcePid.get(), termValueSet); - ourLog.info("ValueSet:\n" + termValueSet.toString()); + ourLog.info("ValueSet:\n" + termValueSet); assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName()); assertEquals(0, termValueSet.getConcepts().size()); @@ -1527,7 +1528,7 @@ public void testStoreTermValueSetAndChildrenWithExclude() throws Exception { TermValueSet termValueSet = optionalValueSetByUrl.get(); assertSame(optionalValueSetByResourcePid.get(), termValueSet); - ourLog.info("ValueSet:\n" + termValueSet.toString()); + ourLog.info("ValueSet:\n" + termValueSet); assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); assertEquals("Terminology Services Connectation #1 Extensional case 
#2", termValueSet.getName()); assertEquals(codeSystem.getConcept().size() - 2, termValueSet.getConcepts().size()); @@ -1569,7 +1570,7 @@ public void testStoreTermValueSetAndChildrenWithExcludeWithClientAssignedId() th TermValueSet termValueSet = optionalValueSetByUrl.get(); assertSame(optionalValueSetByResourcePid.get(), termValueSet); - ourLog.info("ValueSet:\n" + termValueSet.toString()); + ourLog.info("ValueSet:\n" + termValueSet); assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName()); assertEquals(0, termValueSet.getConcepts().size()); @@ -1587,7 +1588,7 @@ public void testStoreTermValueSetAndChildrenWithExcludeWithClientAssignedId() th TermValueSet termValueSet = optionalValueSetByUrl.get(); assertSame(optionalValueSetByResourcePid.get(), termValueSet); - ourLog.info("ValueSet:\n" + termValueSet.toString()); + ourLog.info("ValueSet:\n" + termValueSet); assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName()); assertEquals(codeSystem.getConcept().size() - 2, termValueSet.getConcepts().size()); @@ -1641,6 +1642,8 @@ public void testExpandValueSet_VsIsEnumeratedWithVersionedSystem_CsOnlyDifferent ValueSet expansion; IdType vsId = new IdType("ValueSet/vaccinecode"); + TermReadSvcImpl.setForceDisableHibernateSearchForUnitTest(true); + // Expand VS expansion = myValueSetDao.expand(vsId, new ValueSetExpansionOptions(), mySrd); assertThat(myValueSetTestUtil.extractExpansionMessage(expansion)).contains("Current status: NOT_EXPANDED"); @@ -1651,10 +1654,11 @@ public void testExpandValueSet_VsIsEnumeratedWithVersionedSystem_CsOnlyDifferent code = "28571000087109"; String display = null; IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(null, 
vsId, new CodeType(code), new UriType(codeSystemUrl), new StringType(display), null, null, mySrd); - assertTrue(outcome.isOk()); + assertTrue(outcome.isOk(), outcome.getMessage() + "\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.toParameters(myFhirContext)) + "\n" + outcome.getSourceDetails()); assertEquals("28571000087109", outcome.getCode()); assertEquals("MODERNA COVID-19 mRNA-1273", outcome.getDisplay()); assertEquals("0.17", outcome.getCodeSystemVersion()); + assertThat(outcome.getSourceDetails()).contains("Code was validated against in-memory expansion of ValueSet: http://ehealthontario.ca/fhir/ValueSet/vaccinecode"); // Validate code - good code, bad display codeSystemUrl = "http://snomed.info/sct"; @@ -1664,7 +1668,7 @@ public void testExpandValueSet_VsIsEnumeratedWithVersionedSystem_CsOnlyDifferent assertTrue(outcome.isOk()); assertEquals("28571000087109", outcome.getCode()); assertEquals("MODERNA COVID-19 mRNA-1273", outcome.getDisplay()); - assertEquals("Concept Display \"BLAH\" does not match expected \"MODERNA COVID-19 mRNA-1273\" for 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet: http://ehealthontario.ca/fhir/ValueSet/vaccinecode", outcome.getMessage()); + assertThat(outcome.getMessage()).contains("Concept Display \"BLAH\" does not match expected \"MODERNA COVID-19 mRNA-1273\" for 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'"); assertEquals("Code was validated against in-memory expansion of ValueSet: http://ehealthontario.ca/fhir/ValueSet/vaccinecode", outcome.getSourceDetails()); assertEquals("0.17", outcome.getCodeSystemVersion()); @@ -1734,6 +1738,50 @@ public void testExpandValueSet_VsIsEnumeratedWithVersionedSystem_CsOnlyDifferent } + @Test + public void testExpandValueSet_VsHasNoCodes() { + CodeSystem cs = new CodeSystem(); + cs.setId("snomed-ct-ca-imm"); + 
cs.setStatus(Enumerations.PublicationStatus.ACTIVE); + cs.setContent(CodeSystem.CodeSystemContentMode.FRAGMENT); + cs.setUrl("http://snomed.info/sct"); + cs.setVersion("http://snomed.info/sct/20611000087101/version/20210331"); + cs.addConcept().setCode("28571000087109").setDisplay("MODERNA COVID-19 mRNA-1273"); + myCodeSystemDao.update(cs); + + // No codes in this valueset + ValueSet vs = new ValueSet(); + vs.setId("vaccinecode"); + vs.setUrl("http://ehealthontario.ca/fhir/ValueSet/vaccinecode"); + vs.setVersion("0.1.17"); + vs.setStatus(Enumerations.PublicationStatus.ACTIVE); + myValueSetDao.update(vs); + + ConceptValidationOptions options = new ConceptValidationOptions(); + options.setValidateDisplay(true); + + String codeSystemUrl; + String code; + ValueSet expansion; + IdType vsId = new IdType("ValueSet/vaccinecode"); + + TermReadSvcImpl.setForceDisableHibernateSearchForUnitTest(true); + + // Expand VS + expansion = myValueSetDao.expand(vsId, new ValueSetExpansionOptions(), mySrd); + assertThat(myValueSetTestUtil.extractExpansionMessage(expansion)).contains("Current status: NOT_EXPANDED"); + assertThat(myValueSetTestUtil.toCodes(expansion)).isEmpty(); + + // Validate code + codeSystemUrl = "http://snomed.info/sct"; + code = "38765352"; + String display = null; + IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(null, vsId, new CodeType(code), new UriType(codeSystemUrl), new StringType(display), null, null, mySrd); + assertFalse(outcome.isOk()); + assertThat(outcome.getMessage()).contains("in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'"); + assertThat(outcome.getMessage()).contains("Empty compose list for include"); + } + @Test public void testExpandValueSet_VsIsEnumeratedWithVersionedSystem_CsIsFragmentWithWrongVersion() { CodeSystem cs = new CodeSystem(); @@ -1776,7 +1824,7 @@ public void testExpandValueSet_VsIsEnumeratedWithVersionedSystem_CsIsFragmentWit code = "28571000087109"; 
IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(new CodeType(valueSetUrl), null, new CodeType(code), new CodeType(codeSystemUrl), null, null, null, mySrd); assertFalse(outcome.isOk()); - assertEquals("Unknown code 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'", outcome.getMessage()); + assertThat(outcome.getMessage()).contains("Unknown code 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'"); assertEquals("error", outcome.getSeverityCode()); // Perform Pre-Expansion @@ -1835,7 +1883,7 @@ public void testExpandValueSet_VsIsNonEnumeratedWithVersionedSystem_CsIsFragment code = "28571000087109"; IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(new CodeType(valueSetUrl), null, new CodeType(code), new CodeType(codeSystemUrl), null, null, null, mySrd); assertFalse(outcome.isOk()); - assertEquals("Unknown code 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'", outcome.getMessage()); + assertThat(outcome.getMessage()).contains("Unknown code 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'"); assertEquals("error", outcome.getSeverityCode()); // Perform Pre-Expansion @@ -2087,12 +2135,12 @@ public void testRequestValueSetReExpansion() { // Validate code that is good IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(vs.getUrlElement(), null, new StringType("B"), cs.getUrlElement(), null, null, null, mySrd); - assertEquals(true, outcome.isOk()); + assertTrue(outcome.isOk()); assertThat(outcome.getMessage()).contains("Code validation occurred using a ValueSet expansion that was pre-calculated"); // Validate code that is bad outcome = myValueSetDao.validateCode(vs.getUrlElement(), 
null, new StringType("A"), cs.getUrlElement(), null, null, null, mySrd); - assertEquals(false, outcome.isOk()); + assertFalse(outcome.isOk()); assertThat(outcome.getMessage()).contains("Code validation occurred using a ValueSet expansion that was pre-calculated"); } diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml index bb5bfc312e18..b0d3162e6e44 100644 --- a/hapi-fhir-jpaserver-test-r4b/pom.xml +++ b/hapi-fhir-jpaserver-test-r4b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml index d17bb5b2f291..e9b7954dd811 100644 --- a/hapi-fhir-jpaserver-test-r5/pom.xml +++ b/hapi-fhir-jpaserver-test-r5/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -47,6 +47,18 @@ + + ca.uhn.hapi.fhir + hapi-tinder-test + ${project.version} + test + + + + com.github.jsqlparser + jsqlparser + test + diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java index 81e03a692b3b..d1b670004c7e 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java @@ -19,6 +19,7 @@ import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; +import ca.uhn.fhir.jpa.dao.TestDaoSearch; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; @@ -56,9 +57,7 @@ import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc; import 
ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry; -import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl; import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl; -import ca.uhn.fhir.jpa.term.TermReadSvcImpl; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc; @@ -142,7 +141,7 @@ import static org.mockito.Mockito.mock; @ExtendWith(SpringExtension.class) -@ContextConfiguration(classes = {TestR5Config.class}) +@ContextConfiguration(classes = {TestR5Config.class, TestDaoSearch.Config.class}) public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuilder { @Autowired protected IJobCoordinator myJobCoordinator; @@ -421,12 +420,15 @@ public FhirContext getFhirContext() { @AfterEach() public void afterCleanupDao() { - myStorageSettings.setExpireSearchResults(new JpaStorageSettings().isExpireSearchResults()); - myStorageSettings.setEnforceReferentialIntegrityOnDelete(new JpaStorageSettings().isEnforceReferentialIntegrityOnDelete()); - myStorageSettings.setExpireSearchResultsAfterMillis(new JpaStorageSettings().getExpireSearchResultsAfterMillis()); - myStorageSettings.setReuseCachedSearchResultsForMillis(new JpaStorageSettings().getReuseCachedSearchResultsForMillis()); - myStorageSettings.setSuppressUpdatesWithNoChange(new JpaStorageSettings().isSuppressUpdatesWithNoChange()); - myStorageSettings.setAllowContainsSearches(new JpaStorageSettings().isAllowContainsSearches()); + JpaStorageSettings defaults = new JpaStorageSettings(); + myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(defaults.isAccessMetaSourceInformationFromProvenanceTable()); + myStorageSettings.setAllowContainsSearches(defaults.isAllowContainsSearches()); + myStorageSettings.setEnforceReferentialIntegrityOnDelete(defaults.isEnforceReferentialIntegrityOnDelete()); + 
myStorageSettings.setExpireSearchResults(defaults.isExpireSearchResults()); + myStorageSettings.setExpireSearchResultsAfterMillis(defaults.getExpireSearchResultsAfterMillis()); + myStorageSettings.setReuseCachedSearchResultsForMillis(defaults.getReuseCachedSearchResultsForMillis()); + myStorageSettings.setSuppressUpdatesWithNoChange(defaults.isSuppressUpdatesWithNoChange()); + myStorageSettings.setAutoCreatePlaceholderReferenceTargets(defaults.isAutoCreatePlaceholderReferenceTargets()); myPagingProvider.setDefaultPageSize(BasePagingProvider.DEFAULT_DEFAULT_PAGE_SIZE); myPagingProvider.setMaximumPageSize(BasePagingProvider.DEFAULT_MAX_PAGE_SIZE); @@ -434,10 +436,6 @@ public void afterCleanupDao() { @AfterEach public void afterClearTerminologyCaches() { - TermReadSvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc); - baseHapiTerminologySvc.clearCaches(); - TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationCache(); - TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationWithReverseCache(); TermDeferredStorageSvcImpl deferredStorageSvc = AopTestUtils.getTargetObject(myTermDeferredStorageSvc); deferredStorageSvc.clearDeferred(); } @@ -446,7 +444,6 @@ public void afterClearTerminologyCaches() { @Override protected void afterResetInterceptors() { super.afterResetInterceptors(); -// myInterceptorRegistry.unregisterInterceptor(myPerformanceTracingLoggingInterceptor); } @BeforeEach diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java index 6608d2763a69..2719deb84a54 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java @@ -35,7 +35,7 @@ import org.springframework.beans.factory.annotation.Autowired; import 
org.springframework.transaction.annotation.Propagation; -import jakarta.annotation.Nonnull; +import java.util.List; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -98,28 +98,28 @@ public void testSamePartitionReference_Create() { Patient p1 = new Patient(); p1.setActive(true); IIdType patient1Id = myPatientDao.create(p1, mySrd).getId().toUnqualifiedVersionless(); - initializeCrossReferencesInterceptor(); + logAllResources(); + // Test myCaptureQueriesListener.clear(); Patient p2 = new Patient(); p2.setActive(true); p2.addLink().setOther(new Reference(patient1Id)); - - // Test - myCaptureQueriesListener.clear(); IIdType patient2Id = myPatientDao.create(p2, mySrd).getId().toUnqualifiedVersionless(); // Verify - myCaptureQueriesListener.logSelectQueries(); assertEquals(1, myCaptureQueriesListener.countCommits()); assertEquals(0, myCaptureQueriesListener.countRollbacks()); + myCaptureQueriesListener.clear(); SearchParameterMap params = SearchParameterMap .newSynchronous(Constants.PARAM_ID, new TokenParam(patient2Id.getValue())) .addInclude(Patient.INCLUDE_LINK); IBundleProvider search = myPatientDao.search(params, mySrd); - assertThat(toUnqualifiedVersionlessIdValues(search)).containsExactly(patient2Id.getValue(), patient1Id.getValue()); + List values = toUnqualifiedVersionlessIdValues(search); + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).containsExactly(patient2Id.getValue(), patient1Id.getValue()); assertThat(search.getAllResources()).hasSize(2); search.getAllResources().forEach(p -> assertTrue(((Patient) p).getActive())); } @@ -190,7 +190,7 @@ public void testCrossPartitionReference_Create() { } private void initializeCrossReferencesInterceptor() { - when(myCrossPartitionReferencesDetectedInterceptor.handle(any(),any())).thenAnswer(t->{ + when(myCrossPartitionReferencesDetectedInterceptor.handle(any(), any())).thenAnswer(t -> { CrossPartitionReferenceDetails theDetails 
= t.getArgument(1, CrossPartitionReferenceDetails.class); IIdType targetId = theDetails.getPathAndRef().getRef().getReferenceElement(); RequestPartitionId referenceTargetPartition = myPartitionHelperSvc.determineReadPartitionForRequestForRead(theDetails.getRequestDetails(), targetId.getResourceType(), targetId); @@ -232,11 +232,12 @@ public RequestPartitionId selectPartitionRead(ReadPartitionIdRequestDetails theR private static RequestPartitionId selectPartition(String resourceType) { switch (resourceType) { case "Patient": + case "RelatedPerson": return PARTITION_PATIENT; case "Observation": return PARTITION_OBSERVATION; default: - throw new InternalErrorException("Don't know how to handle resource type"); + throw new InternalErrorException("Don't know how to handle resource type: " + resourceType); } } diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java index 6637cddd6c82..5f89ed6271fd 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java @@ -52,7 +52,7 @@ public void testCreate() { runInTransaction(()->{ ResourceTable resource = myResourceTableDao.getReferenceById(id.getIdPartAsLong()); assertNotNull(resource); - ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L); + ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1L); assertNotNull(history); assertEquals(ResourceEncodingEnum.ESR, history.getEncoding()); assertEquals(MY_PROVIDER_ID + ":" + ADDRESS_123, history.getResourceTextVc()); diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5HistoryDisabledTest.java 
b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5HistoryDisabledTest.java index 1bf29740ff7f..4a45fca83940 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5HistoryDisabledTest.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5HistoryDisabledTest.java @@ -17,6 +17,7 @@ import org.hl7.fhir.r5.model.Patient; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import jakarta.annotation.Nonnull; @@ -298,33 +299,6 @@ public void testUpdate_NonVersionedTagsMode_TagsCanBeDeleted() { }); } - @Test - public void testUpdate_ProvenanceIsUpdatedInPlace() { - // Setup - myStorageSettings.setStoreMetaSourceInformation(JpaStorageSettings.StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID); - Patient p = new Patient(); - p.getMeta().setSource("source-1"); - p.setActive(true); - when(mySrd.getRequestId()).thenReturn("request-id-1"); - IIdType id1 = myPatientDao.create(p, mySrd).getId(); - runInTransaction(()-> assertEquals(1, myResourceHistoryProvenanceDao.count())); - - // Test - p = new Patient(); - p.setId(id1); - p.addIdentifier().setValue("foo"); - p.getMeta().setSource("source-2"); - p.setActive(true); - when(mySrd.getRequestId()).thenReturn("request-id-2"); - DaoMethodOutcome outcome = myPatientDao.update(p, mySrd); - - // Verify - assertEquals("source-2#request-id-2", ((Patient) outcome.getResource()).getMeta().getSource()); - p = myPatientDao.read(outcome.getId(), mySrd); - assertEquals("source-2#request-id-2", p.getMeta().getSource()); - runInTransaction(()-> assertEquals(1, myResourceHistoryProvenanceDao.count())); - } - @Nonnull private static List toTagTokens(IBaseResource resource) { List tags = resource.getMeta() diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5ValueSetTest.java 
b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5ValueSetTest.java index b1ed82428263..39040d74febe 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5ValueSetTest.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5ValueSetTest.java @@ -113,7 +113,7 @@ public void testValidateCodeOperationByIdentifierAndCodeAndSystemAndBadDisplay() IValidationSupport.CodeValidationResult result = myValueSetDao.validateCode(valueSetIdentifier, id, code, system, display, coding, codeableConcept, mySrd); assertTrue(result.isOk()); assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); - assertEquals("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7' for in-memory expansion of ValueSet: http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", result.getMessage()); + assertThat(result.getMessage()).contains("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7' for in-memory expansion of ValueSet 'http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2'"); } @Test diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java index 63e6ab0b5182..9762ce2889c0 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java @@ -162,7 +162,7 @@ public void testInlineMatchUrlMatchesConditionalUpdate(@SuppressWarnings("unused assertEquals(theMatchUrlCacheEnabled ? 
3 : 4, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(4, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); - assertEquals(4, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); assertEquals(1, myCaptureQueriesListener.countCommits()); assertEquals(0, myCaptureQueriesListener.countRollbacks()); diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/BaseDbpmJpaR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/BaseDbpmJpaR5Test.java new file mode 100644 index 000000000000..a8b1531e657c --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/BaseDbpmJpaR5Test.java @@ -0,0 +1,53 @@ +package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode; + +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.jpa.dao.r5.BaseJpaR5Test; +import ca.uhn.fhir.jpa.entity.PartitionEntity; +import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc; +import ca.uhn.fhir.jpa.util.TestPartitionSelectorInterceptor; +import org.junit.jupiter.api.AfterEach; +import org.springframework.beans.factory.annotation.Autowired; + +import static org.junit.jupiter.api.Assertions.assertFalse; + +public class BaseDbpmJpaR5Test extends BaseJpaR5Test { + + public static final String PARTITION_NAME_1 = "Partition_1"; + public static final String PARTITION_NAME_2 = "Partition_2"; + public static final int PARTITION_1 = 1; + public static final int PARTITION_2 = 2; + + protected final TestPartitionSelectorInterceptor myPartitionSelectorInterceptor = new TestPartitionSelectorInterceptor(); + + @Autowired + private IPartitionLookupSvc myPartitionConfigSvc; + + @Override + @AfterEach + protected void afterResetInterceptors() { + super.afterResetInterceptors(); + 
myPartitionSettings.setPartitioningEnabled(false); + myInterceptorRegistry.unregisterInterceptor(myPartitionSelectorInterceptor); + } + + protected void registerPartitionInterceptorAndCreatePartitions() { + assertFalse(myInterceptorRegistry.hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ), ()->myInterceptorRegistry.getAllRegisteredInterceptors().toString()); + myInterceptorRegistry.registerInterceptor(myPartitionSelectorInterceptor); + + myPartitionConfigSvc.createPartition(new PartitionEntity().setId(PARTITION_1).setName(PARTITION_NAME_1), null); + myPartitionConfigSvc.createPartition(new PartitionEntity().setId(PARTITION_2).setName(PARTITION_NAME_2), null); + + // Load to pre-cache and avoid adding SQL queries + preFetchPartitionsIntoCache(); + } + + protected void preFetchPartitionsIntoCache() { + if (myPartitionSettings.isPartitioningEnabled()) { + myPartitionConfigSvc.getPartitionById(PARTITION_1); + myPartitionConfigSvc.getPartitionById(PARTITION_2); + myPartitionConfigSvc.getPartitionByName(PARTITION_NAME_1); + myPartitionConfigSvc.getPartitionByName(PARTITION_NAME_2); + } + } + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningDisabledTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningDisabledTest.java new file mode 100644 index 000000000000..f5323ff9cd15 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningDisabledTest.java @@ -0,0 +1,19 @@ +package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode; + +import ca.uhn.fhir.jpa.util.TestPartitionSelectorInterceptor; +import org.junit.jupiter.api.Nested; + +/** + * This is a test verifying that we emit the right SQL for HAPI FHIR running in + * full legacy mode - No partitioning, no partition IDs in PKs. 
+ */ +public class DbpmDisabledPartitioningDisabledTest extends BaseDbpmJpaR5Test { + + @Nested + public class MyTestDefinitions extends TestDefinitions { + MyTestDefinitions() { + super(DbpmDisabledPartitioningDisabledTest.this, new TestPartitionSelectorInterceptor(), false, false); + } + } + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledNullDefaultPartitionTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledNullDefaultPartitionTest.java new file mode 100644 index 000000000000..307d542d031d --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledNullDefaultPartitionTest.java @@ -0,0 +1,32 @@ +package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; + +/** + * This is a test verifying that we emit the right SQL when running in + * legacy partition mode with DEFAULT partition value of null (the default if + * not configured otherwise) - Partition IDs are in use, but they aren't + * included in primary keys or joins. 
+ */ +public class DbpmDisabledPartitioningEnabledNullDefaultPartitionTest extends BaseDbpmJpaR5Test { + + @Override + @BeforeEach + public void before() throws Exception { + super.before(); + myPartitionSettings.setPartitioningEnabled(true); + myPartitionSettings.setDefaultPartitionId(null); + + registerPartitionInterceptorAndCreatePartitions(); + } + + @Nested + public class MyTestDefinitions extends TestDefinitions { + MyTestDefinitions() { + super(DbpmDisabledPartitioningEnabledNullDefaultPartitionTest.this, myPartitionSelectorInterceptor, true, false); + } + } + + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledTest.java new file mode 100644 index 000000000000..70585212356b --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/DbpmDisabledPartitioningEnabledTest.java @@ -0,0 +1,31 @@ +package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; + +/** + * This is a test verifying that we emit the right SQL when running in + * legacy partition mode - Partition IDs are in use, but they aren't + * included in primary keys or joins. 
+ */ +public class DbpmDisabledPartitioningEnabledTest extends BaseDbpmJpaR5Test { + + @Override + @BeforeEach + public void before() throws Exception { + super.before(); + myPartitionSettings.setPartitioningEnabled(true); + myPartitionSettings.setDefaultPartitionId(0); + + registerPartitionInterceptorAndCreatePartitions(); + } + + @Nested + public class MyTestDefinitions extends TestDefinitions { + MyTestDefinitions() { + super(DbpmDisabledPartitioningEnabledTest.this, myPartitionSelectorInterceptor, true, false); + } + } + + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/TestDefinitions.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/TestDefinitions.java new file mode 100644 index 000000000000..5eb53eeb62b0 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/dbpartitionmode/TestDefinitions.java @@ -0,0 +1,1882 @@ +package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode; + +import ca.uhn.fhir.batch2.api.IJobDataSink; +import ca.uhn.fhir.batch2.api.VoidModel; +import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeStep; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.support.ValueSetExpansionOptions; +import ca.uhn.fhir.interceptor.executor.InterceptorService; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; +import ca.uhn.fhir.jpa.dao.TestDaoSearch; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; +import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao; +import 
ca.uhn.fhir.jpa.dao.data.IResourceTableDao; +import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity; +import ca.uhn.fhir.jpa.model.entity.ResourceLink; +import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; +import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; +import ca.uhn.fhir.jpa.term.api.ITermReadSvc; +import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet; +import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; +import ca.uhn.fhir.jpa.util.MemoryCacheService; +import ca.uhn.fhir.jpa.util.SqlQuery; +import ca.uhn.fhir.jpa.util.TestPartitionSelectorInterceptor; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.SortSpec; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.param.HasParam; +import ca.uhn.fhir.rest.param.HistorySearchDateRangeParam; +import ca.uhn.fhir.rest.param.ReferenceParam; +import ca.uhn.fhir.rest.param.TokenOrListParam; +import ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.rest.param.TokenParamModifier; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.test.utilities.ITestDataBuilder; +import jakarta.annotation.Nonnull; +import net.sf.jsqlparser.JSQLParserException; +import net.sf.jsqlparser.parser.CCJSqlParserUtil; +import net.sf.jsqlparser.statement.insert.Insert; +import org.assertj.core.api.Assertions; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r5.model.Bundle; +import org.hl7.fhir.r5.model.CodeSystem; +import org.hl7.fhir.r5.model.DateTimeType; +import org.hl7.fhir.r5.model.Encounter; +import 
org.hl7.fhir.r5.model.Enumerations; +import org.hl7.fhir.r5.model.IdType; +import org.hl7.fhir.r5.model.Meta; +import org.hl7.fhir.r5.model.Observation; +import org.hl7.fhir.r5.model.Organization; +import org.hl7.fhir.r5.model.Patient; +import org.hl7.fhir.r5.model.Questionnaire; +import org.hl7.fhir.r5.model.QuestionnaireResponse; +import org.hl7.fhir.r5.model.Reference; +import org.hl7.fhir.r5.model.ValueSet; +import org.intellij.lang.annotations.Language; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.Mock; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.stream.Collectors; + +import static ca.uhn.fhir.jpa.dao.r5.dbpartitionmode.DbpmDisabledPartitioningEnabledTest.PARTITION_1; +import static ca.uhn.fhir.jpa.dao.r5.dbpartitionmode.DbpmDisabledPartitioningEnabledTest.PARTITION_2; +import static ca.uhn.fhir.rest.api.Constants.PARAM_HAS; +import static ca.uhn.fhir.rest.api.Constants.PARAM_SOURCE; +import static ca.uhn.fhir.rest.api.Constants.PARAM_TAG; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; +import static org.hl7.fhir.instance.model.api.IAnyResource.SP_RES_ID; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +/** + * This class is a set of test that are run as {@literal @Nested} by several + * test classes. 
It verifies that we emit appropriate SQL for various + * scenarios including non-partitioned mode, partitioned mode, and + * database partitioning mode. + */ +abstract class TestDefinitions implements ITestDataBuilder { + + private final TestPartitionSelectorInterceptor myPartitionSelectorInterceptor; + private final boolean myIncludePartitionIdsInSql; + private final BaseDbpmJpaR5Test myParentTest; + private final boolean myIncludePartitionIdsInPks; + @Autowired + protected ITermCodeSystemStorageSvc myTermCodeSystemStorageSvc; + @Autowired + protected ITermDeferredStorageSvc myTerminologyDeferredStorageSvc; + @Autowired + protected ITermReadSvc myTermSvc; + @Autowired + private TestDaoSearch myTestDaoSearch; + @Autowired + private InterceptorService myInterceptorService; + @Autowired + protected CircularQueueCaptureQueriesListener myCaptureQueriesListener; + @Autowired + private IFhirResourceDaoPatient myPatientDao; + @Autowired + private IFhirResourceDaoObservation myObservationDao; + @Autowired + private IFhirResourceDao myCodeSystemDao; + @Autowired + private IFhirResourceDao myValueSetDao; + @Autowired + private IFhirResourceDao myEncounterDao; + @Autowired + private IFhirResourceDao myOrganizationDao; + @Autowired + private IFhirResourceDao myQuestionnaireDao; + @Autowired + private IFhirResourceDao myQuestionnaireResponseDao; + @Autowired + private IFhirSystemDao mySystemDao; + @Autowired + private IResourceTableDao myResourceTableDao; + @Autowired + private IResourceHistoryTableDao myResourceHistoryTableDao; + @Autowired + private IResourceHistoryProvenanceDao myResourceHistoryProvenanceTableDao; + @Autowired + private IResourceLinkDao myResourceLinkDao; + @Autowired + private FhirContext myFhirCtx; + @Autowired + private DaoRegistry myDaoRegistry; + @Autowired + private PartitionSettings myPartitionSettings; + @Autowired + private MemoryCacheService myMemoryCache; + @Autowired + private JpaStorageSettings myStorageSettings; + @Autowired + private 
DeleteExpungeStep myDeleteExpungeStep; + + @Mock + private IJobDataSink myVoidSink; + @Autowired + private ExpungeEverythingService myExpungeEverythingService; + + public TestDefinitions(@Nonnull BaseDbpmJpaR5Test theParentTest, @Nonnull TestPartitionSelectorInterceptor thePartitionSelectorInterceptor, boolean theIncludePartitionIdsInSql, boolean theIncludePartitionIdsInPks) { + myParentTest = theParentTest; + myPartitionSelectorInterceptor = thePartitionSelectorInterceptor; + myIncludePartitionIdsInSql = theIncludePartitionIdsInSql; + myIncludePartitionIdsInPks = theIncludePartitionIdsInPks; + assert myIncludePartitionIdsInSql && myIncludePartitionIdsInPks || myIncludePartitionIdsInSql || !myIncludePartitionIdsInPks; + } + + @AfterEach + public void after() { + { + JpaStorageSettings defaults = new JpaStorageSettings(); + myStorageSettings.setTagStorageMode(defaults.getTagStorageMode()); + myStorageSettings.setIndexOnContainedResources(defaults.isIndexOnContainedResources()); + } + { + PartitionSettings defaults = new PartitionSettings(); + myPartitionSettings.setConditionalCreateDuplicateIdentifiersEnabled(defaults.isConditionalCreateDuplicateIdentifiersEnabled()); + } + } + + @Test + public void testCreate_Conditional() throws JSQLParserException { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_2); + createPatient(withActiveTrue()); // Just to pre-fetch the partition details + myPartitionSettings.setConditionalCreateDuplicateIdentifiersEnabled(true); + + // Test + myCaptureQueriesListener.clear(); + Patient patient = new Patient(); + patient.addIdentifier().setSystem("http://foo").setValue("bar"); + DaoMethodOutcome outcome = myPatientDao.create(patient, "Patient?identifier=http://foo|bar", new SystemRequestDetails()); + long id = outcome.getId().getIdPartAsLong(); + + // Verify + assertTrue(outcome.getCreated()); + + // Verify Select Queries + + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInSql) { + 
assertThat(getSelectSql(0)).startsWith("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE ((t0.PARTITION_ID = '2') AND (t0.HASH_SYS_AND_VALUE = '-2780914544385068076'))"); + } else { + assertThat(getSelectSql(0)).startsWith("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_SYS_AND_VALUE = '-2780914544385068076')"); + } + assertEquals(1, myCaptureQueriesListener.countSelectQueries()); + + // Verify Insert Queries + + myCaptureQueriesListener.logInsertQueries(); + assertEquals(5, myCaptureQueriesListener.countInsertQueries()); + assertEquals("HFJ_RESOURCE", parseInsertStatementTableName(getInsertSql(0))); + assertEquals("HFJ_RES_VER", parseInsertStatementTableName(getInsertSql(1))); + for (int i = 0; i < 4; i++) { + String insertSql = getInsertSql(i); + Map insertColumns = parseInsertStatementParams(insertSql); + String tableName = parseInsertStatementTableName(getInsertSql(i)); + if (myIncludePartitionIdsInSql) { + assertEquals("'2'", insertColumns.get("PARTITION_ID"), insertSql); + assertEquals("'" + id + "'", insertColumns.get("RES_ID"), insertSql); + } else { + if ("HFJ_RES_SEARCH_URL".equals(tableName)) { + assertEquals("'-1'", insertColumns.get("PARTITION_ID"), insertSql); + } else { + assertEquals("NULL", insertColumns.get("PARTITION_ID"), insertSql); + } + assertEquals("'" + id + "'", insertColumns.get("RES_ID"), insertSql); + } + } + + // Verify no other queries + + assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); + assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); + } + + @ParameterizedTest + @EnumSource(PartitionSettings.CrossPartitionReferenceMode.class) + public void testCreate_ReferenceToResourceInOtherPartition(PartitionSettings.CrossPartitionReferenceMode theAllowReferencesToCrossPartition) { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_2); + myPartitionSettings.setAllowReferencesAcrossPartitions(theAllowReferencesToCrossPartition); + IIdType patientId = 
createPatient(withActiveTrue()); + + // Test + ourLog.info("Starting to test testCreate_ReferenceToResourceInOtherPartition"); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + try { + IIdType obsId = createObservation(withSubject(patientId)); + if (myIncludePartitionIdsInSql && theAllowReferencesToCrossPartition == PartitionSettings.CrossPartitionReferenceMode.NOT_ALLOWED) { + runInTransaction(()->{ + List resources = myResourceTableDao.findAll(); + String failMessage = "Resources:\n * " + resources.stream().map(ResourceTable::toString).collect(Collectors.joining("\n * ")); + List resourceLinks = myResourceLinkDao.findAll(); + failMessage += "\n\nResource Links:\n * " + resourceLinks.stream().map(ResourceLink::toString).collect(Collectors.joining("\n * ")); + failMessage += "\n\nRegistered Interceptors:\n * " + myInterceptorService.getAllRegisteredInterceptors().stream().map(Object::toString).collect(Collectors.joining("\n * ")); + fail(failMessage); + }); + } else { + assertNotNull(obsId); + } + } catch (InvalidRequestException e) { + if (myIncludePartitionIdsInSql) { + assertEquals(PartitionSettings.CrossPartitionReferenceMode.NOT_ALLOWED, theAllowReferencesToCrossPartition); + assertThat(e.getMessage()).contains("not found, specified in path: Observation.subject"); + } else { + fail(); + } + } + } + + @Test + public void testDelete() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + IIdType orgId = createOrganization(withName("ORG")).toUnqualifiedVersionless(); + IIdType id = createPatient(withActiveTrue(), withFamily("HOMER"), withOrganization(orgId)).toUnqualifiedVersionless(); + long idLong = id.getIdPartAsLong(); + + // Test + myCaptureQueriesListener.clear(); + myPatientDao.delete(id, new SystemRequestDetails()); + + // Verify + + // Verify Select + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).endsWith(" from HFJ_RESOURCE rt1_0 where 
(rt1_0.RES_ID,rt1_0.PARTITION_ID) in (('" + idLong + "','1'))"); + } else { + assertThat(getSelectSql(0)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.RES_ID='" + idLong + "'"); + } + assertEquals(4, myCaptureQueriesListener.countSelectQueries()); + + // Verify Insert + myCaptureQueriesListener.logInsertQueries(); + assertThat(getInsertSql(0)).startsWith("insert into HFJ_RES_VER "); + assertEquals(1, myCaptureQueriesListener.countInsertQueries()); + + // Verify Update + myCaptureQueriesListener.logUpdateQueries(); + if (myIncludePartitionIdsInPks) { + assertThat(getUpdateSql(0)).contains("where RES_ID='" + idLong + "' and PARTITION_ID='1' and RES_VER='1'"); + } else { + assertThat(getUpdateSql(0)).contains("where RES_ID='" + idLong + "' and RES_VER='1'"); + } + assertEquals(1, myCaptureQueriesListener.countUpdateQueries()); + + // Verify Delete + myCaptureQueriesListener.logDeleteQueries(); + String deleteWhere; + assertEquals("delete from HFJ_RES_SEARCH_URL where (RES_ID='" + idLong + "')", getDeleteSql(0)); + if (myIncludePartitionIdsInPks) { + deleteWhere = "(RES_ID,PARTITION_ID)=('" + idLong + "','1')"; + } else { + deleteWhere = "RES_ID='" + idLong + "'"; + } + assertEquals("delete from HFJ_SPIDX_STRING where " + deleteWhere, getDeleteSql(1)); + assertEquals("delete from HFJ_SPIDX_TOKEN where " + deleteWhere, getDeleteSql(2)); + if (myIncludePartitionIdsInPks) { + assertEquals("delete from HFJ_RES_LINK where (SRC_RESOURCE_ID,PARTITION_ID)=('" + idLong + "','1')", getDeleteSql(3)); + } else { + assertEquals("delete from HFJ_RES_LINK where SRC_RESOURCE_ID='" + idLong + "'", getDeleteSql(3)); + } + assertEquals(4, myCaptureQueriesListener.countDeleteQueries()); + } + + @Test + public void testHistory_Instance() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + Patient p = new Patient(); + p.addIdentifier().setSystem("http://foo").setValue("1"); + IIdType id = myPatientDao.create(p, newRequest()).getId(); + assertEquals("1", 
id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("2"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("2", id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("3"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("3", id.getVersionIdPart()); + id = id.toUnqualifiedVersionless(); + + // Test + myCaptureQueriesListener.clear(); + IBundleProvider outcome; + outcome = myPatientDao.history(id, new HistorySearchDateRangeParam(), newRequest()); + + // Verify + List actualIds = toUnqualifiedIdValues(outcome); + myCaptureQueriesListener.logSelectQueries(); + assertThat(actualIds).asList().containsExactlyInAnyOrder("Patient/" + id.getIdPart() + "/_history/3", "Patient/" + id.getIdPart() + "/_history/2", "Patient/" + id.getIdPart() + "/_history/1"); + + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith("from HFJ_RESOURCE rt1_0 where rt1_0.PARTITION_ID='1' and rt1_0.RES_ID='" + id.getIdPartAsLong() + "'"); + } else { + assertThat(getSelectSql(0)).endsWith("from HFJ_RESOURCE rt1_0 where rt1_0.RES_ID='" + id.getIdPartAsLong() + "'"); + } + + if (myIncludePartitionIdsInSql) { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0 where rht1_0.PARTITION_ID in ('1') and rht1_0.RES_ID='" + id.getIdPartAsLong() + "'", getSelectSql(1)); + } else { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0 where rht1_0.RES_ID='" + id.getIdPartAsLong() + "'", getSelectSql(1)); + } + + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(2)).contains(" from HFJ_RES_VER rht1_0 where rht1_0.PARTITION_ID in ('1') and rht1_0.RES_ID='" + id.getIdPartAsLong() + "'"); + } else { + assertThat(getSelectSql(2)).contains(" from HFJ_RES_VER rht1_0 where rht1_0.RES_ID='" + id.getIdPartAsLong() + "' "); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testHistory_Type() { + // Setup + 
myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + Patient p = new Patient(); + p.addIdentifier().setSystem("http://foo").setValue("1"); + IIdType id = myPatientDao.create(p, newRequest()).getId(); + assertEquals("1", id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("2"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("2", id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("3"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("3", id.getVersionIdPart()); + id = id.toUnqualifiedVersionless(); + + // Test + myCaptureQueriesListener.clear(); + IBundleProvider outcome; + outcome = myPatientDao.history(null, null, null, newRequest()); + + // Verify + List actualIds = toUnqualifiedIdValues(outcome); + myCaptureQueriesListener.logSelectQueries(); + assertThat(actualIds).asList().containsExactlyInAnyOrder("Patient/" + id.getIdPart() + "/_history/3", "Patient/" + id.getIdPart() + "/_history/2", "Patient/" + id.getIdPart() + "/_history/1"); + + if (myIncludePartitionIdsInSql) { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0 where rht1_0.PARTITION_ID in ('1') and rht1_0.RES_TYPE='Patient'", getSelectSql(0)); + } else { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0 where rht1_0.RES_TYPE='Patient'", getSelectSql(0)); + } + + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(1)).contains(" from HFJ_RES_VER rht1_0 where rht1_0.PARTITION_ID in ('1') and rht1_0.RES_TYPE='Patient' "); + } else { + assertThat(getSelectSql(1)).contains(" from HFJ_RES_VER rht1_0 where rht1_0.RES_TYPE='Patient' "); + } + + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testHistory_Server() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + Patient p = new Patient(); + p.addIdentifier().setSystem("http://foo").setValue("1"); + IIdType id = myPatientDao.create(p, newRequest()).getId(); + assertEquals("1", 
id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("2"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("2", id.getVersionIdPart()); + + p.getIdentifierFirstRep().setValue("3"); + id = myPatientDao.update(p, newRequest()).getId(); + assertEquals("3", id.getVersionIdPart()); + id = id.toUnqualifiedVersionless(); + + // Test + myCaptureQueriesListener.clear(); + IBundleProvider outcome; + outcome = mySystemDao.history(null, null, null, newRequest()); + + // Verify + List actualIds = toUnqualifiedIdValues(outcome); + myCaptureQueriesListener.logSelectQueries(); + assertThat(actualIds).asList().containsExactlyInAnyOrder("Patient/" + id.getIdPart() + "/_history/3", "Patient/" + id.getIdPart() + "/_history/2", "Patient/" + id.getIdPart() + "/_history/1"); + + if (myIncludePartitionIdsInSql) { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0 where rht1_0.PARTITION_ID in ('1')", getSelectSql(0)); + } else { + assertEquals("select count(rht1_0.PID) from HFJ_RES_VER rht1_0", getSelectSql(0)); + } + + assertThat(getSelectSql(1)).contains(" from HFJ_RES_VER rht1_0 "); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(1)).contains(" where rht1_0.PARTITION_ID in ('1') "); + } else { + assertThat(getSelectSql(1)).doesNotContain(" where "); + } + + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testOperation_ExpungeEverything() { + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + createPatient(withActiveTrue()); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_2); + createPatient(withFamily("SIMPSON"), withBirthdate("2024-01-01")); + + // Test + myCaptureQueriesListener.clear(); + myExpungeEverythingService.expungeEverything(new SystemRequestDetails()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + String sql; + + // Select HFJ_SPIDX_TOKEN + List selectTokenQueries = getSqlSelectQueriesWithString(" HFJ_SPIDX_TOKEN "); + if 
(myIncludePartitionIdsInPks) { + sql = "select rispt1_0.SP_ID,rispt1_0.PARTITION_ID from HFJ_SPIDX_TOKEN rispt1_0 fetch first '800' rows only"; + } else { + sql = "select rispt1_0.SP_ID from HFJ_SPIDX_TOKEN rispt1_0 fetch first '800' rows only"; + } + assertThat(selectTokenQueries.get(0).getSql(true, false)).isEqualTo(sql); + assertThat(selectTokenQueries.get(1).getSql(true, false)).isEqualTo(sql); + assertEquals(2, selectTokenQueries.size()); + + // Delete HFJ_SPIDX_TOKEN + List deleteTokenQueries = getSqlDeleteQueriesWithString(" HFJ_SPIDX_TOKEN "); + if (myIncludePartitionIdsInPks) { + assertThat(deleteTokenQueries.get(0).getSql(true, false)).startsWith("delete from HFJ_SPIDX_TOKEN where (SP_ID,PARTITION_ID) in "); + } else { + assertThat(deleteTokenQueries.get(0).getSql(true, false)).startsWith("delete from HFJ_SPIDX_TOKEN where SP_ID in "); + } + assertEquals(1, deleteTokenQueries.size(), ()-> "SQL:\n * " + deleteTokenQueries.stream().map(t->t.getSql(true, false)).collect(Collectors.joining("\n * "))); + + // Select HFJ_RES_VER + List selectResVerQueries = getSqlSelectQueriesWithString(" HFJ_RES_VER "); + if (myIncludePartitionIdsInPks) { + sql = "select rht1_0.PARTITION_ID,rht1_0.PID from HFJ_RES_VER rht1_0 fetch first '400' rows only"; + } else { + sql = "select rht1_0.PID from HFJ_RES_VER rht1_0 fetch first '800' rows only"; + } + assertThat(selectResVerQueries.get(0).getSql(true, false)).isEqualTo(sql); + assertThat(selectResVerQueries.get(1).getSql(true, false)).isEqualTo(sql); + assertEquals(2, selectResVerQueries.size()); + + // Select HFJ_RES_VER + List deleteResVerQueries = getSqlDeleteQueriesWithString(" HFJ_RES_VER "); + if (myIncludePartitionIdsInPks) { + assertThat(deleteResVerQueries.get(0).getSql(true, false)).startsWith("delete from HFJ_RES_VER where (PARTITION_ID,PID) in "); + } else { + assertThat(deleteResVerQueries.get(0).getSql(true, false)).startsWith("delete from HFJ_RES_VER where PID in "); + } + assertEquals(1, 
deleteResVerQueries.size()); + } + + @Test + public void testRead_DefaultPartition() { + // Setup + myPartitionSelectorInterceptor.addNonPartitionableResource("Organization"); + IIdType id = createOrganization(withId("O"), withName("PARENT")); + long pid = findId("Organization", "O").getId(); + + // Test + myCaptureQueriesListener.clear(); + myMemoryCache.invalidateAllCaches(); + Organization actual = myOrganizationDao.read(id, new SystemRequestDetails()); + + // Verify + assertEquals("PARENT", actual.getName()); + myCaptureQueriesListener.logSelectQueries(); + + if (myIncludePartitionIdsInSql) { + if (myPartitionSettings.getDefaultPartitionId() == null) { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.PARTITION_ID is null and (rt1_0.RES_TYPE='Organization' and rt1_0.FHIR_ID='O')"); + } else { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.PARTITION_ID='0' and (rt1_0.RES_TYPE='Organization' and rt1_0.FHIR_ID='O')"); + } + } else { + assertThat(getSelectSql(0)).endsWith(" where (rt1_0.RES_TYPE='Organization' and rt1_0.FHIR_ID='O')"); + } + + if (myIncludePartitionIdsInSql) { + if (myPartitionSettings.getDefaultPartitionId() == null) { + assertThat(getSelectSql(1)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.PARTITION_ID is null and rt1_0.RES_ID='" + pid + "'"); + } else { + assertThat(getSelectSql(1)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.PARTITION_ID='0' and rt1_0.RES_ID='" + pid + "'"); + } + } else { + assertThat(getSelectSql(1)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.RES_ID='" + pid + "'"); + } + + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(2)).endsWith(" from HFJ_RES_VER rht1_0 where (rht1_0.RES_ID,rht1_0.PARTITION_ID)=('" + pid + "','0') and rht1_0.RES_VER='1'"); + } else { + assertThat(getSelectSql(2)).endsWith(" from HFJ_RES_VER rht1_0 where rht1_0.RES_ID='" + pid + "' and rht1_0.RES_VER='1'"); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void 
testRead_ServerAssignedId() { + // Setup + myCaptureQueriesListener.clear(); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + long id = createPatient(withActiveTrue()).getIdPartAsLong(); + myParentTest.logAllResources(); + myCaptureQueriesListener.logInsertQueries(); + + // Test + myCaptureQueriesListener.clear(); + myPatientDao.read(new IdType("Patient/" + id), newRequest()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.PARTITION_ID='1' and rt1_0.RES_ID='" + id + "'"); + } else { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.RES_ID='" + id + "'"); + } + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(1)).endsWith("where (rht1_0.RES_ID,rht1_0.PARTITION_ID)=('" + id + "','1') and rht1_0.RES_VER='1'"); + } else { + assertThat(getSelectSql(1)).endsWith(" where rht1_0.RES_ID='" + id + "' and rht1_0.RES_VER='1'"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testRead_ClientAssignedId() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + createPatient(withId("A"), withActiveTrue()); + + long id = runInTransaction(() -> myResourceTableDao.findByTypeAndFhirId("Patient", "A").orElseThrow().getPersistentId().getId()); + + // Test + myCaptureQueriesListener.clear(); + myPatientDao.read(new IdType("Patient/A"), newRequest()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.PARTITION_ID='1' and rt1_0.RES_ID='" + id + "'"); + } else { + assertThat(getSelectSql(0)).endsWith(" from HFJ_RESOURCE rt1_0 where rt1_0.RES_ID='" + id + "'"); + } + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(1)).endsWith(" where (rht1_0.RES_ID,rht1_0.PARTITION_ID)=('" + id + 
"','1') and rht1_0.RES_VER='1'"); + } else { + assertThat(getSelectSql(1)).endsWith(" where rht1_0.RES_ID='" + id + "' and rht1_0.RES_VER='1'"); + } + } + + @Test + public void testSearch_Contained() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + myStorageSettings.setIndexOnContainedResources(true); + Patient p = new Patient(); + p.addName().setFamily("Smith"); + Observation obs = new Observation(); + obs.setSubject(new Reference(p)); + IIdType id = myObservationDao.create(obs, new SystemRequestDetails()).getId().toUnqualifiedVersionless(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap map = SearchParameterMap.newSynchronous(); + map.add("subject", new ReferenceParam("name", "Smith")); + IBundleProvider outcome = myObservationDao.search(map, new SystemRequestDetails()); + List results = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + Assertions.assertThat(results).containsExactlyInAnyOrder(id.getValue()); + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).startsWith("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.PARTITION_ID,t0.RES_ID) IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 "); + } else if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).startsWith("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (t0.RES_ID IN (SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 "); + } else { + assertThat(getSelectSql(0)).startsWith("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (t0.RES_ID IN (SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 "); + } + + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).contains("t0.PARTITION_ID = '1'"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + + } + + @Test + public void testSearch_Chained() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + IIdType patientId = 
createPatient(withFamily("NAME")).toUnqualifiedVersionless(); + IIdType observationId = createObservation(withSubject(patientId)).toUnqualifiedVersionless(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous(Observation.SP_PATIENT, new ReferenceParam("family", "NAME")); + IBundleProvider outcome = myObservationDao.search(params, new SystemRequestDetails()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactly(observationId.getValue()); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).contains("SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 "); + } else { + assertThat(getSelectSql(0)).contains("SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 "); + } + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains("INNER JOIN HFJ_SPIDX_STRING t1 ON ((t0.TARGET_RES_PARTITION_ID = t1.PARTITION_ID) AND (t0.TARGET_RESOURCE_ID = t1.RES_ID))"); + } else { + assertThat(getSelectSql(0)).contains("INNER JOIN HFJ_SPIDX_STRING t1 ON (t0.TARGET_RESOURCE_ID = t1.RES_ID)"); + } + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).contains("t1.PARTITION_ID = '1'"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + + } + + @Test + public void testSearch_Has() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + + IIdType patientId = createPatient(withActiveTrue()).toUnqualifiedVersionless(); + IIdType observationId = createObservation(withSubject(patientId)).toUnqualifiedVersionless(); + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous(); + params.add(PARAM_HAS, new HasParam("Observation", "patient", "_id", observationId.getValue())); + IBundleProvider outcome = 
myPatientDao.search(params, new SystemRequestDetails()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactly(patientId.getValue()); + } + + @ParameterizedTest + @ValueSource(booleans = {false}) // TODO: True will be added in the next PR + public void testSearch_IdParam(boolean theIncludeOtherParameter) { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + + IIdType id0 = createPatient(withActiveTrue()).toUnqualifiedVersionless(); + IIdType id1 = createPatient(withId("A"), withActiveTrue()).toUnqualifiedVersionless(); + + myMemoryCache.invalidateAllCaches(); + myParentTest.preFetchPartitionsIntoCache(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + if (theIncludeOtherParameter) { + params.add(Patient.SP_ACTIVE, new TokenParam("true")); + } + params.add(SP_RES_ID, new TokenOrListParam().add(id0.getValue()).add(id1.getValue())); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + assertThat(toUnqualifiedVersionlessIdValues(outcome)).asList().containsExactlyInAnyOrder(id0.getValue(), id1.getValue()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.PARTITION_ID='1' and (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='" + id0.getIdPart() + "' or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A')"); + } else { + assertThat(getSelectSql(0)).endsWith(" where (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='" + id0.getIdPart() + "' or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A')"); + } + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(1)).contains(" WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = '1') AND (t0.RES_ID IN "); + } else { + assertThat(getSelectSql(1)).contains(" 
WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.RES_ID IN "); + } + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + + } + + @Test + public void testSearch_ListParam() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + IIdType patId0 = createPatient(withActiveTrue()).toUnqualifiedVersionless(); + IIdType patId1 = createPatient(withActiveTrue()).toUnqualifiedVersionless(); + IIdType listId = createList(withListItem(patId0), withListItem(patId1)).toUnqualifiedVersionless(); + Long listIdLong = listId.getIdPartAsLong(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.add(Constants.PARAM_LIST, new TokenParam(listId.getValue())); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + assertThat(toUnqualifiedVersionlessIdValues(outcome)).asList().containsExactlyInAnyOrder(patId0.getValue(), patId1.getValue()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(getSelectSql(0)).contains(" FROM HFJ_RESOURCE t1 "); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_LINK t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.TARGET_RESOURCE_ID)) "); + assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.SRC_PATH = 'List.entry.item') AND (t0.TARGET_RESOURCE_TYPE = 'Patient') AND ((t0.PARTITION_ID,t0.SRC_RESOURCE_ID) IN (('1','" + listIdLong + "')) ))"); + } else { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_LINK t0 ON (t1.RES_ID = t0.TARGET_RESOURCE_ID) "); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.PARTITION_ID = '1') AND (t0.SRC_PATH = 'List.entry.item') AND (t0.TARGET_RESOURCE_TYPE = 'Patient') AND (t0.SRC_RESOURCE_ID = '" + listIdLong + "'))"); + } else { + assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.SRC_PATH = 'List.entry.item') AND 
(t0.TARGET_RESOURCE_TYPE = 'Patient') AND (t0.SRC_RESOURCE_ID = '" + listIdLong + "'))"); + } + } + + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + /** + * Perform a search where the request partition ID includes multiple partitions + */ + @Test + public void testSearch_MultiPartition() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + IIdType id0 = createPatient(withActiveTrue(), withFamily("A")).toUnqualifiedVersionless(); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_2); + IIdType id1 = createPatient(withActiveFalse(), withFamily("B")).toUnqualifiedVersionless(); + + // Test + myPartitionSelectorInterceptor.setNextPartition(RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2)); + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous() + .setSort(new SortSpec(Patient.SP_FAMILY)); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + assertThat(toUnqualifiedVersionlessIdValues(outcome)).asList().containsExactlyInAnyOrder(id0.getValue(), id1.getValue()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInPks) { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = '-9208284524139093953')) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID IN ('1','2') )) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", getSelectSql(0)); + assertThat(getSelectSql(1)).contains(" where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in (('" + id0.getIdPartAsLong() + "','1'),('" + id1.getIdPartAsLong() + "','2'),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL)) and mrt1_0.RES_VER=rht1_0.RES_VER"); + } else if (myIncludePartitionIdsInSql) { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM 
HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = '-9208284524139093953')) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID IN ('1','2') )) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", getSelectSql(0)); + assertThat(getSelectSql(1)).contains(" where rht1_0.RES_ID in ('" + id0.getIdPartAsLong() + "','" + id1.getIdPartAsLong() + "','-1','-1','-1','-1','-1','-1','-1','-1') and mrt1_0.RES_VER=rht1_0.RES_VER"); + } else { + assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = '-9208284524139093953')) WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", getSelectSql(0)); + assertThat(getSelectSql(1)).contains(" where rht1_0.RES_ID in ('" + id0.getIdPartAsLong() + "','" + id1.getIdPartAsLong() + "','-1','-1','-1','-1','-1','-1','-1','-1') and mrt1_0.RES_VER=rht1_0.RES_VER"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSearch_Source(boolean theAccessMetaSourceInformationFromProvenanceTable) { + // Setup + myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(theAccessMetaSourceInformationFromProvenanceTable); + myStorageSettings.setStoreMetaSourceInformation(JpaStorageSettings.StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + long idFoo = createPatient(withActiveTrue(), withSource("http://foo")).getIdPartAsLong(); + long idBar = createPatient(withActiveTrue(), withSource("http://bar")).getIdPartAsLong(); + + runInTransaction(()->{ + ResourceTable table = myResourceTableDao.getReferenceById(idFoo); + ResourceHistoryProvenanceEntity prov = new ResourceHistoryProvenanceEntity(); + prov.setResourceTable(table); + 
prov.setResourceHistoryTable(myResourceHistoryTableDao.findForIdAndVersion(table.getResourceId(), 1)); + prov.setSourceUri("http://foo"); + myResourceHistoryProvenanceTableDao.save(prov); + + table = myResourceTableDao.getReferenceById(idBar); + prov = new ResourceHistoryProvenanceEntity(); + prov.setResourceTable(table); + prov.setResourceHistoryTable(myResourceHistoryTableDao.findForIdAndVersion(table.getResourceId(), 1)); + prov.setSourceUri("http://bar"); + myResourceHistoryProvenanceTableDao.save(prov); + }); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous(); + params.add(PARAM_SOURCE, new TokenParam("http://foo")); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactly("Patient/" + idFoo); + + if (myIncludePartitionIdsInPks) { + if (theAccessMetaSourceInformationFromProvenanceTable) { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER_PROV t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_PID)) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); + } else { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID)) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); + } + } else if (myIncludePartitionIdsInSql) { + if (theAccessMetaSourceInformationFromProvenanceTable) { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER_PROV t1 ON (t0.RES_ID = t1.RES_PID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); 
+ } else { + assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER t1 ON (t0.RES_ID = t1.RES_ID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); + } + } else { + if (theAccessMetaSourceInformationFromProvenanceTable) { + assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER_PROV t1 ON (t0.RES_ID = t1.RES_PID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); + } else { + assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER t1 ON (t0.RES_ID = t1.RES_ID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0)); + } + } + + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSearch_Tags_Versioned(boolean theNegate) { + // Setup + myStorageSettings.setTagStorageMode(JpaStorageSettings.TagStorageModeEnum.VERSIONED); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + long idBar = createPatient(withActiveTrue(), withTag("http://foo", "bar")).getIdPartAsLong(); + long idBaz = createPatient(withActiveTrue(), withTag("http://foo", "baz")).getIdPartAsLong(); + long id = theNegate ? 
idBaz : idBar; + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + TokenParam bar = new TokenParam("http://foo", "bar"); + if (theNegate) { + bar.setModifier(TokenParamModifier.NOT); + } + params.add(PARAM_TAG, bar); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactly("Patient/" + id); + + if (theNegate) { + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains("((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RES_TAG t0"); + } else { + assertThat(getSelectSql(0)).contains("t0.RES_ID NOT IN (SELECT t0.RES_ID FROM HFJ_RES_TAG t0 "); + } + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_TAG_DEF t1 ON (t0.TAG_ID = t1.TAG_ID) "); + } else { + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_TAG t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID)) INNER"); + } else { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER"); + } + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) "); + } + + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).contains("PARTITION_ID = '1')"); + } + + // Query 1 is the HFJ_RES_VER fetch + assertThat(getSelectSql(1)).contains(" from HFJ_RES_VER "); + + assertThat(getSelectSql(2)).contains(" from HFJ_HISTORY_TAG rht1_0 "); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(2)).contains(" where (rht1_0.PARTITION_ID,rht1_0.RES_VER_PID) in (('1',"); + } else { + assertThat(getSelectSql(2)).contains(" where rht1_0.RES_VER_PID in ('"); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void 
testSearch_Tags_Unversioned() { + // Setup + myStorageSettings.setTagStorageMode(JpaStorageSettings.TagStorageModeEnum.NON_VERSIONED); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + long id = createPatient(withActiveTrue(), withTag("http://foo", "bar")).getIdPartAsLong(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.add(PARAM_TAG, new TokenParam("http://foo", "bar")); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactly("Patient/" + id); + + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_TAG t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID)) INNER"); + } else { + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER"); + } + assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) "); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).contains("(t1.PARTITION_ID = '1')"); + } + + // Query 1 is the HFJ_RES_VER fetch + assertThat(getSelectSql(1)).contains(" from HFJ_RES_VER "); + + assertThat(getSelectSql(2)).contains(" from HFJ_RES_TAG rt1_0 "); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(2)).contains(" where (rt1_0.RES_ID,rt1_0.PARTITION_ID) in (('" + id + "','1'))"); + } else { + assertThat(getSelectSql(2)).contains(" where rt1_0.RES_ID in ('" + id + "')"); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Token() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + long id = createPatient(withActiveTrue()).getIdPartAsLong(); + + // Test + myCaptureQueriesListener.clear(); + SearchParameterMap 
params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.add(Patient.SP_ACTIVE, new TokenParam().setValue("true")); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + assertThat(toUnqualifiedVersionlessIdValues(outcome)).asList().containsExactly("Patient/" + id); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.PARTITION_ID = '1') AND (t0.HASH_VALUE = '7943378963388545453'))"); + } else { + assertThat(getSelectSql(0)).endsWith(" WHERE (t0.HASH_VALUE = '7943378963388545453')"); + } + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(1)).endsWith(" where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in (('" + id + "','1')) and mrt1_0.RES_VER=rht1_0.RES_VER"); + } else { + assertThat(getSelectSql(1)).endsWith(" where rht1_0.RES_ID in ('" + id + "') and mrt1_0.RES_VER=rht1_0.RES_VER"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Token_Not() { + // Setup + + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + createObservation(withId("A"), withObservationCode("http://foo", "A")); + createObservation(withId("B"), withObservationCode("http://foo", "B")); + + // Test + // Search with a :not modifier on code; only Observation/A should match + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.add(Observation.SP_CODE, new TokenParam("http://foo", "B").setModifier(TokenParamModifier.NOT)); + IBundleProvider outcome = myObservationDao.search(params, newRequest()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(toUnqualifiedVersionlessIdValues(outcome)).asList().containsExactly("Observation/A"); + if (myIncludePartitionIdsInPks) { + assertThat(getSelectSql(0)).contains("((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN"); + } else { + 
assertThat(getSelectSql(0)).contains("((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Includes_Forward_Star() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + CreatedResourceIds ids = createPatientWithOrganizationReferences(); + + // Test + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.addInclude(IBaseResource.INCLUDE_ALL.asRecursive()); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + assertThat(values).asList().containsExactlyInAnyOrder("Patient/" + ids.patientPid(), "Organization/" + ids.parentOrgId().getIdPart(), "Organization/" + ids.childOrgId().getIdPart()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false); + if (myIncludePartitionIdsInSql) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1'))"); + } else { + assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL))"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("where rl1_0.PARTITION_ID='1' and rl1_0.SRC_RESOURCE_ID in ('" + ids.patientPid() + "') fetch"); + } else { + assertThat(sql).contains("where rl1_0.SRC_RESOURCE_ID in ('" + ids.patientPid() + "') fetch "); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(2).getSql(true, false); + if (myIncludePartitionIdsInPks) { + 
assertThat(sql).contains("where rl1_0.PARTITION_ID='0' and rl1_0.SRC_RESOURCE_ID in ('" + ids.childOrgPid() + "') "); + } else { + assertThat(sql).contains("where rl1_0.SRC_RESOURCE_ID in ('" + ids.childOrgPid() + "') fetch "); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(3).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("where rl1_0.PARTITION_ID='0' and rl1_0.SRC_RESOURCE_ID in ('" + ids.parentOrgPid() + "') "); + } else { + assertThat(sql).contains("where rl1_0.SRC_RESOURCE_ID in ('" + ids.parentOrgPid() + "') fetch "); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(4).getSql(true, false); + assertThat(sql).contains("from HFJ_RES_VER rht1_0"); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID and mrt1_0.PARTITION_ID=rht1_0.PARTITION_ID where"); + assertThat(sql).contains("where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in"); + } else { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID where"); + assertThat(sql).contains("where rht1_0.RES_ID in"); + } + + assertEquals(5, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Includes_Forward_Star_UsingCanonicalUrl() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + CreatedQuestionnaireAndResponseIds ids = createQuestionnaireAndQuestionnaireResponseWithCanonicalUrlLink(); + + // Test + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + myParentTest.logAllUriIndexes(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.addInclude(IBaseResource.INCLUDE_ALL.asRecursive()); + IBundleProvider outcome = myQuestionnaireResponseDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + 
assertThat(values).asList().containsExactlyInAnyOrder(ids.qId().getValue(), ids.qrId().getValue()); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false); + if (myIncludePartitionIdsInSql) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'QuestionnaireResponse') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1'))"); + } else { + assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'QuestionnaireResponse') AND (t0.RES_DELETED_AT IS NULL))"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).isEqualTo("select rl1_0.TARGET_RESOURCE_ID,rl1_0.TARGET_RESOURCE_TYPE,rl1_0.TARGET_RESOURCE_URL,rl1_0.TARGET_RES_PARTITION_ID from HFJ_RES_LINK rl1_0 where rl1_0.PARTITION_ID='1' and rl1_0.SRC_RESOURCE_ID in ('" + ids.qrId.getIdPart() + "') fetch first '1000' rows only"); + } else { + assertThat(sql).isEqualTo("select rl1_0.TARGET_RESOURCE_ID,rl1_0.TARGET_RESOURCE_TYPE,rl1_0.TARGET_RESOURCE_URL from HFJ_RES_LINK rl1_0 where rl1_0.SRC_RESOURCE_ID in ('" + ids.qrId().getIdPart() + "') fetch first '1000' rows only"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(2).getSql(true, false); + if (myIncludePartitionIdsInSql) { + assertThat(sql).startsWith("select rispu1_0.PARTITION_ID,rispu1_0.RES_ID from HFJ_SPIDX_URI rispu1_0 where rispu1_0.HASH_IDENTITY in ("); + } else { + assertThat(sql).startsWith("select rispu1_0.RES_ID from HFJ_SPIDX_URI rispu1_0 where rispu1_0.HASH_IDENTITY in ("); + } + + assertEquals(5, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Includes_Forward_Specific() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + CreatedResourceIds ids = createPatientWithOrganizationReferences(); + + // Test + myParentTest.logAllResources(); + 
myParentTest.logAllResourceLinks(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.setLoadSynchronous(true); + params.addInclude(Patient.INCLUDE_ORGANIZATION.asRecursive()); + params.addInclude(Organization.INCLUDE_PARTOF.asRecursive()); + IBundleProvider outcome = myPatientDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + assertThat(values).asList().containsExactlyInAnyOrder("Patient/" + ids.patientPid(), "Organization/" + ids.parentOrgId.getIdPart(), "Organization/" + ids.childOrgId.getIdPart()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false); + if (myIncludePartitionIdsInSql) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1'))"); + } else { + assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL))"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("WHERE r.src_path = 'Organization.partOf' AND r.target_resource_id IS NOT NULL AND r.src_resource_id IN ('" + ids.patientPid + "') AND r.partition_id = '1' AND r.target_resource_type = 'Organization' UNION"); + } else { + assertThat(sql).contains("WHERE r.src_path = 'Organization.partOf' AND r.target_resource_id IS NOT NULL AND r.src_resource_id IN ('" + ids.patientPid + "') AND r.target_resource_type = 'Organization' UNION"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(2).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("WHERE r.src_path = 'Patient.managingOrganization' AND r.target_resource_id IS NOT NULL AND r.src_resource_id IN ('" + ids.patientPid + "') AND 
r.partition_id = '1' AND r.target_resource_type = 'Organization' UNION"); + } else { + assertThat(sql).contains("WHERE r.src_path = 'Patient.managingOrganization' AND r.target_resource_id IS NOT NULL AND r.src_resource_id IN ('" + ids.patientPid + "') AND r.target_resource_type = 'Organization' UNION"); + } + + // Index 3-6 are just more includes loading + assertThat(myCaptureQueriesListener.getSelectQueries().get(3).getSql(true, false)).contains(" FROM hfj_res_link r "); + assertThat(myCaptureQueriesListener.getSelectQueries().get(4).getSql(true, false)).contains(" FROM hfj_res_link r "); + assertThat(myCaptureQueriesListener.getSelectQueries().get(5).getSql(true, false)).contains(" FROM hfj_res_link r "); + assertThat(myCaptureQueriesListener.getSelectQueries().get(6).getSql(true, false)).contains(" FROM hfj_res_link r "); + + sql = myCaptureQueriesListener.getSelectQueries().get(7).getSql(true, false); + assertThat(sql).contains("from HFJ_RES_VER rht1_0"); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID and mrt1_0.PARTITION_ID=rht1_0.PARTITION_ID where"); + assertThat(sql).contains("where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in"); + } else { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID where"); + assertThat(sql).contains("where rht1_0.RES_ID in"); + } + + assertEquals(8, myCaptureQueriesListener.countSelectQueries()); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testSearch_Includes_Forward_Specific_UsingCanonicalUrl(boolean theIncludePartitionInSearchHashes) { + // Setup + myPartitionSettings.setIncludePartitionInSearchHashes(theIncludePartitionInSearchHashes); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + CreatedQuestionnaireAndResponseIds result = createQuestionnaireAndQuestionnaireResponseWithCanonicalUrlLink(); + + // Test + myParentTest.logAllResources(); + 
myParentTest.logAllResourceLinks(); + myParentTest.logAllUriIndexes(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous(); + params.addInclude(QuestionnaireResponse.INCLUDE_QUESTIONNAIRE); + IBundleProvider outcome = myQuestionnaireResponseDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactlyInAnyOrder(result.qrId().getValue(), result.qId().getValue()); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + sql = sql.substring(sql.indexOf("UNION")); + long expectedHash; + if (theIncludePartitionInSearchHashes && myIncludePartitionIdsInSql && myPartitionSettings.getDefaultPartitionId() != null) { + expectedHash = -2559752747310040606L; + } else { + expectedHash = -600769180185160063L; + } + if (myIncludePartitionIdsInPks) { + assertEquals("UNION SELECT rUri.res_id, rUri.partition_id as partition_id FROM hfj_res_link r JOIN hfj_spidx_uri rUri ON (rUri.partition_id IN ('0') AND rUri.hash_identity = '" + expectedHash + "' AND r.target_resource_url = rUri.sp_uri) WHERE r.src_path = 'QuestionnaireResponse.questionnaire' AND r.target_resource_id IS NULL AND r.partition_id = '1' AND r.src_resource_id IN ('" + result.qrId.getIdPart() + "') fetch first '1000' rows only", sql); + } else { + assertEquals("UNION SELECT rUri.res_id FROM hfj_res_link r JOIN hfj_spidx_uri rUri ON (rUri.hash_identity = '" + expectedHash + "' AND r.target_resource_url = rUri.sp_uri) WHERE r.src_path = 'QuestionnaireResponse.questionnaire' AND r.target_resource_id IS NULL AND r.src_resource_id IN ('" + result.qrId().getIdPart() + "') fetch first '1000' rows only", sql); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Test + public void testSearch_Includes_Reverse_Specific_UsingCanonicalUrl() { + // Setup + 
myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + CreatedQuestionnaireAndResponseIds result = createQuestionnaireAndQuestionnaireResponseWithCanonicalUrlLink(); + + // Test + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + myParentTest.logAllUriIndexes(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = SearchParameterMap.newSynchronous(); + params.addRevInclude(QuestionnaireResponse.INCLUDE_QUESTIONNAIRE); + IBundleProvider outcome = myQuestionnaireDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactlyInAnyOrder(result.qrId().getValue(), result.qId().getValue()); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + sql = sql.substring(sql.indexOf("UNION")); + if (myIncludePartitionIdsInPks) { + assertEquals("UNION SELECT r.src_resource_id, r.partition_id as partition_id FROM hfj_res_link r JOIN hfj_spidx_uri rUri ON (rUri.partition_id IN ('0') AND rUri.hash_identity = '-600769180185160063' AND r.target_resource_url = rUri.sp_uri) WHERE r.src_path = 'QuestionnaireResponse.questionnaire' AND r.target_resource_id IS NULL AND rUri.partition_id = '0' AND rUri.res_id IN ('" + result.qId.getIdPart() + "') fetch first '1000' rows only", sql); + } else { + assertEquals("UNION SELECT r.src_resource_id FROM hfj_res_link r JOIN hfj_spidx_uri rUri ON (rUri.hash_identity = '-600769180185160063' AND r.target_resource_url = rUri.sp_uri) WHERE r.src_path = 'QuestionnaireResponse.questionnaire' AND r.target_resource_id IS NULL AND rUri.res_id IN ('" + result.qId().getIdPart() + "') fetch first '1000' rows only", sql); + } + + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + } + + @Nonnull + private CreatedQuestionnaireAndResponseIds createQuestionnaireAndQuestionnaireResponseWithCanonicalUrlLink() { + Questionnaire q = 
new Questionnaire(); + q.setUrl("http://foo"); + IIdType qId = myQuestionnaireDao.create(q, newRequest()).getId().toUnqualifiedVersionless(); + + QuestionnaireResponse qr = new QuestionnaireResponse(); + qr.setQuestionnaire("http://foo"); + IIdType qrId = myQuestionnaireResponseDao.create(qr, newRequest()).getId().toUnqualifiedVersionless(); + CreatedQuestionnaireAndResponseIds result = new CreatedQuestionnaireAndResponseIds(qId, qrId); + return result; + } + + private record CreatedQuestionnaireAndResponseIds(IIdType qId, IIdType qrId) { + } + + @Test + public void testSearch_Includes_Reverse_Star() { + // Setup + myPartitionSettings.setAllowReferencesAcrossPartitions(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + myPartitionSelectorInterceptor.addNonPartitionableResource("Organization"); + CreatedResourceIds ids = createPatientWithOrganizationReferences(); + + // Test + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.add(SP_RES_ID, new TokenParam("Organization/" + ids.parentOrgPid())); + params.setLoadSynchronous(true); + params.addRevInclude(IBaseResource.INCLUDE_ALL.asRecursive()); + IBundleProvider outcome = myOrganizationDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + assertThat(values).asList().containsExactlyInAnyOrder("Patient/" + ids.patientPid(), "Organization/" + ids.parentOrgId().getIdPart(), "Organization/" + ids.childOrgId().getIdPart()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertEquals(5, myCaptureQueriesListener.countSelectQueries()); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false); + if (myIncludePartitionIdsInSql && myPartitionSettings.getDefaultPartitionId() == null) { + assertThat(sql).isEqualTo("SELECT 
t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IS NULL) AND (t0.RES_ID = '" + ids.parentOrgPid() + "')))"); + } else if (myIncludePartitionIdsInSql) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = '0') AND (t0.RES_ID = '" + ids.parentOrgPid() + "')))"); + } else { + assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.RES_ID = '" + ids.parentOrgPid() + "'))"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("where rl1_0.TARGET_RES_PARTITION_ID='0' and rl1_0.TARGET_RESOURCE_ID in ('" + ids.parentOrgPid() + "') fetch"); + } else { + assertThat(sql).contains("where rl1_0.TARGET_RESOURCE_ID in ('" + ids.parentOrgPid() + "') fetch"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(2).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("where rl1_0.TARGET_RES_PARTITION_ID='0' and rl1_0.TARGET_RESOURCE_ID in ('" + ids.childOrgPid() + "') "); + } else { + assertThat(sql).contains("where rl1_0.TARGET_RESOURCE_ID in ('" + ids.childOrgPid() + "') "); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(3).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("where rl1_0.TARGET_RES_PARTITION_ID='1' and rl1_0.TARGET_RESOURCE_ID in ('" + ids.patientPid() + "') fetch"); + } else { + assertThat(sql).contains("where rl1_0.TARGET_RESOURCE_ID in ('" + ids.patientPid() + "') fetch"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(4).getSql(true, false); + assertThat(sql).contains("from HFJ_RES_VER rht1_0"); + if (myIncludePartitionIdsInPks) { + 
assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID and mrt1_0.PARTITION_ID=rht1_0.PARTITION_ID where"); + assertThat(sql).contains("where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in"); + } else { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID where"); + assertThat(sql).contains("where rht1_0.RES_ID in"); + } + } + + @Test + public void testSearch_Includes_Reverse_Specific() { + // Setup + myPartitionSettings.setAllowReferencesAcrossPartitions(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + myPartitionSelectorInterceptor.addNonPartitionableResource("Organization"); + CreatedResourceIds ids = createPatientWithOrganizationReferences(); + + // Test + myParentTest.logAllResources(); + myParentTest.logAllResourceLinks(); + myCaptureQueriesListener.clear(); + SearchParameterMap params = new SearchParameterMap(); + params.add(SP_RES_ID, new TokenParam("Organization/" + ids.parentOrgPid())); + params.setLoadSynchronous(true); + params.addRevInclude(Patient.INCLUDE_ORGANIZATION.asRecursive()); + params.addRevInclude(Organization.INCLUDE_PARTOF.asRecursive()); + IBundleProvider outcome = myOrganizationDao.search(params, newRequest()); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(values).asList().containsExactlyInAnyOrder("Patient/" + ids.patientPid(), "Organization/" + ids.parentOrgId.getIdPart(), "Organization/" + ids.childOrgId.getIdPart()); + + String sql; + + sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false); + if (myIncludePartitionIdsInSql && myPartitionSettings.getDefaultPartitionId() == null) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IS NULL) AND (t0.RES_ID = '" + 
ids.parentOrgPid() + "')))"); + } else if (myIncludePartitionIdsInSql) { + assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = '0') AND (t0.RES_ID = '" + ids.parentOrgPid() + "')))"); + } else { + assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.RES_ID = '" + ids.parentOrgPid() + "'))"); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("WHERE r.src_path = 'Organization.partOf' AND r.target_resource_id IN ('" + ids.parentOrgPid() + "') AND r.target_res_partition_id = '0' AND r.target_resource_type = 'Organization' "); + } else { + assertThat(sql).contains("WHERE r.src_path = 'Organization.partOf' AND r.target_resource_id IN ('" + ids.parentOrgPid() + "') AND r.target_resource_type = 'Organization' "); + } + + sql = myCaptureQueriesListener.getSelectQueries().get(2).getSql(true, false); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("WHERE r.src_path = 'Patient.managingOrganization' AND r.target_resource_id IN ('" + ids.parentOrgPid + "') AND r.target_res_partition_id = '0' AND r.target_resource_type = 'Organization' UNION"); + } else { + assertThat(sql).contains("WHERE r.src_path = 'Patient.managingOrganization' AND r.target_resource_id IN ('" + ids.parentOrgPid + "') AND r.target_resource_type = 'Organization' UNION"); + } + + // Index 3-6 are just more includes loading + assertThat(myCaptureQueriesListener.getSelectQueries().get(3).getSql(true, false)).contains(" FROM hfj_res_link r "); + assertThat(myCaptureQueriesListener.getSelectQueries().get(4).getSql(true, false)).contains(" FROM hfj_res_link r "); + assertThat(myCaptureQueriesListener.getSelectQueries().get(5).getSql(true, false)).contains(" FROM hfj_res_link r "); + 
assertThat(myCaptureQueriesListener.getSelectQueries().get(6).getSql(true, false)).contains(" FROM hfj_res_link r "); + + sql = myCaptureQueriesListener.getSelectQueries().get(7).getSql(true, false); + assertThat(sql).contains("from HFJ_RES_VER rht1_0"); + if (myIncludePartitionIdsInPks) { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID and mrt1_0.PARTITION_ID=rht1_0.PARTITION_ID where"); + assertThat(sql).contains("where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in"); + } else { + assertThat(sql).contains("join HFJ_RESOURCE mrt1_0 on mrt1_0.RES_ID=rht1_0.RES_ID where"); + assertThat(sql).contains("where rht1_0.RES_ID in"); + } + + assertEquals(8, myCaptureQueriesListener.countSelectQueries()); + } + + + /** + * Searching for all partitions or multiple partitions + */ + @ParameterizedTest(name = "[{index}] - {0}") + @MethodSource("searchMultiPartitionTestCases") + public void testSearch_MultiplePartitions(SearchMultiPartitionTestCase theTestCase) { + myPartitionSelectorInterceptor.setNextPartition(theTestCase.requestPartitionId); + String sql = getSqlForRestQuery(theTestCase.restQuery); + + if (myIncludePartitionIdsInPks) { + assertEquals(theTestCase.expectedPartitionedPksSql, sql, theTestCase.comment); + } else if (myIncludePartitionIdsInSql) { + assertEquals(theTestCase.expectedPartitionedSql, sql, theTestCase.comment); + } else { + assertEquals(theTestCase.expectedSql, sql, theTestCase.comment); + } + } + + /** + * Make sure _sort incorporates the partition ID on joins + */ + @ParameterizedTest(name = "[{index}] - {0}") + @MethodSource("searchSortTestCases") + public void testSearch_Sort(SqlGenerationTestCase theTestCase) { + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + String sql = getSqlForRestQuery(theTestCase.restQuery); + + if (myIncludePartitionIdsInPks) { + assertEquals(theTestCase.expectedPartitionedPksSql, sql, theTestCase.comment); + } else if (myIncludePartitionIdsInSql) { + 
assertEquals(theTestCase.expectedPartitionedSql, sql, theTestCase.comment); + } else { + assertEquals(theTestCase.expectedSql, sql, theTestCase.comment); + } + } + + + @Test + public void testValuesetExpansion_IncludePreExpandedVsWithFilter() { + // Setup + myStorageSettings.setPreExpandValueSets(true); + + CodeSystem cs = new CodeSystem(); + cs.setUrl("http://cs"); + cs.setContent(Enumerations.CodeSystemContentMode.NOTPRESENT); + myCodeSystemDao.create(cs, newRequest()); + + CustomTerminologySet additions = new CustomTerminologySet(); + additions.addRootConcept("A", "HELLO"); + additions.addRootConcept("B", "HELLO"); + additions.addRootConcept("C", "GOODBYE"); + myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://cs", additions); + myTerminologyDeferredStorageSvc.saveAllDeferred(); + + ValueSet valueSet = new ValueSet(); + valueSet.setUrl("http://vs"); + valueSet + .getCompose() + .addInclude().setSystem("http://cs"); + myValueSetDao.create(valueSet, newRequest()); + + myCaptureQueriesListener.clear(); + myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + + myParentTest.logAllCodeSystemsAndVersionsCodeSystemsAndVersions(); + myParentTest.logAllConcepts(); + myParentTest.logAllValueSetConcepts(); + + // Test + ValueSet input = new ValueSet(); + input.getCompose() + .addInclude() + .addValueSet("http://vs"); + + ValueSetExpansionOptions expansionOptions = new ValueSetExpansionOptions(); + expansionOptions.setFilter("HELLO"); + myCaptureQueriesListener.clear(); + ValueSet outcome = (ValueSet) myTermSvc.expandValueSet(expansionOptions, valueSet); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertThat(outcome.getExpansion().getContains().stream().map(ValueSet.ValueSetExpansionContainsComponent::getCode).toList()).asList().containsExactly("A", "B"); + } + + + @Test + public void testUpdateAsCreate() { + // Setup + myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1); + createPatient(withId("A"), withActiveTrue()); + + // 
Test + myCaptureQueriesListener.clear(); + + Observation obs = new Observation(); + obs.setId("Observation/O"); + obs.setSubject(new Reference("Patient/A")); + obs.setEffective(new DateTimeType("2022")); + myObservationDao.update(obs, newRequest()); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + if (myIncludePartitionIdsInSql) { + assertThat(getSelectSql(0)).endsWith(" where rt1_0.PARTITION_ID='1' and (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='O')"); + assertThat(getSelectSql(1)).endsWith(" where rt1_0.PARTITION_ID='1' and (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A')"); + } else { + assertThat(getSelectSql(0)).endsWith(" where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='O')"); + assertThat(getSelectSql(1)).endsWith(" where (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A')"); + } + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + } + + private SystemRequestDetails newRequest() { + return new SystemRequestDetails(); + } + + private JpaPid findId(String theResourceType, String theIdPart) { + return myParentTest.runInTransaction(() -> myResourceTableDao + .findAll() + .stream() + .filter(t -> t.getResourceType().equals(theResourceType)) + .filter(t -> t.getFhirId().equals(theIdPart)) + .findFirst() + .orElseThrow() + .getPersistentId()); + } + + @Language("SQL") + private String getSelectSql(int theIndex) { + return myCaptureQueriesListener.getSelectQueries().get(theIndex).getSql(true, false); + } + + @Language("SQL") + private String getDeleteSql(int theIndex) { + return myCaptureQueriesListener.getDeleteQueries().get(theIndex).getSql(true, false); + } + + @Language("SQL") + private String getUpdateSql(int theIndex) { + return myCaptureQueriesListener.getUpdateQueries().get(theIndex).getSql(true, false); + } + + @Language("SQL") + private String getInsertSql(int theIndex) { + return myCaptureQueriesListener.getInsertQueries().get(theIndex).getSql(true, false); + } + + @SuppressWarnings({"rawtypes", "unchecked"}) + 
@Override + public IIdType doCreateResource(IBaseResource theResource) { + IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource.getClass()); + return dao.create(theResource, newRequest()).getId().toUnqualifiedVersionless(); + } + + @SuppressWarnings({"rawtypes", "unchecked"}) + @Override + public IIdType doUpdateResource(IBaseResource theResource) { + IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource.getClass()); + return dao.update(theResource, newRequest()).getId().toUnqualifiedVersionless(); + } + + @Override + public FhirContext getFhirContext() { + return myFhirCtx; + } + + public void runInTransaction(Runnable theRunnable) { + myParentTest.runInTransaction(theRunnable); + } + public T runInTransaction(Callable theRunnable) { + return myParentTest.runInTransaction(theRunnable); + } + + @Nonnull + private CreatedResourceIds createPatientWithOrganizationReferences() { + IIdType parentOrgId = createOrganization(withName("PARENT")).toUnqualifiedVersionless(); + IIdType childOrgId = createOrganization(withName("CHILD"), withReference("partOf", parentOrgId)).toUnqualifiedVersionless(); + IIdType patientId = createPatient(withActiveTrue(), withOrganization(childOrgId)).toUnqualifiedVersionless(); + long patientPid = patientId.getIdPartAsLong(); + long childPid = childOrgId.getIdPartAsLong(); + long parentPid = parentOrgId.getIdPartAsLong(); + CreatedResourceIds result = new CreatedResourceIds(parentOrgId, childOrgId, patientId, null, patientPid, childPid, parentPid, null); + return result; + } + + @Nonnull + private CreatedResourceIds createPatientWithOrganizationAndEncounterReferences() { + CreatedResourceIds createdResourceIds = createPatientWithOrganizationReferences(); + + Encounter encounter = new Encounter(); + encounter.setSubject(new Reference(createdResourceIds.patientId)); + IIdType encounterId = myEncounterDao.create(encounter).getId().toUnqualifiedVersionless(); + Long encounterPid = encounterId.getIdPartAsLong(); + + return new 
CreatedResourceIds( + createdResourceIds.parentOrgId, + createdResourceIds.childOrgId, + createdResourceIds.patientId, + encounterId, + createdResourceIds.patientPid, + createdResourceIds.childOrgPid, + createdResourceIds.parentOrgPid, + encounterPid + ); + } + + @Nonnull + private List getSqlSelectQueriesWithString(String tableName) { + List selectTokenQueries = myCaptureQueriesListener.getSelectQueries() + .stream() + .filter(t -> t.getSql(false, false).contains(tableName)) + .toList(); + return selectTokenQueries; + } + + @Nonnull + private List getSqlDeleteQueriesWithString(String tableName) { + List selectTokenQueries = myCaptureQueriesListener.getDeleteQueries() + .stream() + .filter(t -> t.getSql(false, false).contains(tableName)) + .toList(); + return selectTokenQueries; + } + + private static Map parseInsertStatementParams(String theInsertSql) throws JSQLParserException { + Insert parsedStatement = (Insert) CCJSqlParserUtil.parse(theInsertSql); + + Map retVal = new HashMap<>(); + + for (int i = 0; i < parsedStatement.getColumns().size(); i++) { + String columnName = parsedStatement.getColumns().get(i).getColumnName(); + String columnValue = parsedStatement.getValues().getExpressions().get(i).toString(); + retVal.put(columnName, columnValue); + } + + return retVal; + } + + private static String parseInsertStatementTableName(String theInsertSql) throws JSQLParserException { + Insert parsedStatement = (Insert) CCJSqlParserUtil.parse(theInsertSql); + return parsedStatement.getTable().getName(); + } + + private static List toUnqualifiedVersionlessIdValues(IBundleProvider theFound) { + int fromIndex = 0; + Integer toIndex = theFound.size(); + return toUnqualifiedVersionlessIdValues(theFound, fromIndex, toIndex, true); + } + + private static List toUnqualifiedIdValues(IBundleProvider theFound) { + return toIdValues(theFound, false); + } + + private static List toUnqualifiedVersionlessIdValues(IBundleProvider theFound, int theFromIndex, Integer theToIndex, boolean 
theFirstCall) { + return toIdValues(theFound, true); + } + + @Nonnull + private static List toIdValues(IBundleProvider theFound, boolean theVersionless) { + List retVal = new ArrayList<>(); + + IBundleProvider bundleProvider; + bundleProvider = theFound; + + List resources = bundleProvider.getResources(0, 99999); + for (IBaseResource next : resources) { + IIdType id = next.getIdElement(); + if (theVersionless) { + id = id.toUnqualifiedVersionless(); + } else { + id = id.toUnqualified(); + } + retVal.add(id.getValue()); + } + return retVal; + } + + private record CreatedResourceIds(IIdType parentOrgId, IIdType childOrgId, IIdType patientId, IIdType encounterId, + Long patientPid, Long childOrgPid, Long parentOrgPid, Long encounterPid) { + + public Set allIdValues() { + Set retVal = new HashSet<>(); + addIfNotNull(retVal, parentOrgId); + addIfNotNull(retVal, childOrgId); + addIfNotNull(retVal, patientId); + addIfNotNull(retVal, encounterId); + return retVal; + } + + private static void addIfNotNull(Set theList, IIdType theObject) { + if (theObject != null) { + theList.add(theObject.getValue()); + } + } + } + + public record SearchMultiPartitionTestCase(String comment, RequestPartitionId requestPartitionId, String restQuery, String expectedSql, String expectedPartitionedSql, String expectedPartitionedPksSql) { + @Override + public String toString() { + return comment; + } + + public static void add(List theTarget, RequestPartitionId theRequestPartitionId, String theName, String theRestQuery, String theExpectedSql, String theExpectedPartitionedSql, String theExpectedPartitionedPksSql) { + theTarget.add(new SearchMultiPartitionTestCase( + theName, + theRequestPartitionId, + theRestQuery, + theExpectedSql, + theExpectedPartitionedSql, + theExpectedPartitionedPksSql)); + } + } + + static List searchMultiPartitionTestCases() { + List retVal = new ArrayList<>(); + + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.allPartitions(), + "single string - no 
hfj_resource root - all partitions", + "Patient?name=FOO", + "SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))" + ); + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2), + "single string - no hfj_resource root - multiple partitions", + "Patient?name=FOO", + "SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)))" + ); + + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.allPartitions(), + "two regular params - should use hfj_resource as root - all partitions", + "Patient?name=smith&active=true", + "SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) 
AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))" + ); + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2), + "two regular params - should use hfj_resource as root - multiple partitions", + "Patient?name=smith&active=true", + "SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID IN (?,?) ) AND (t2.HASH_VALUE = ?)))", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID IN (?,?) ) AND (t2.HASH_VALUE = ?)))" + ); + + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.allPartitions(), + "token not as a NOT IN subselect - all partitions", + "Encounter?class:not=not-there", + "SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) 
AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))" + ); + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2), + "token not as a NOT IN subselect - multiple partitions", + "Encounter?class:not=not-there", + "SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))" + ); + + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.allPartitions(), + "token not on chain join - NOT IN from hfj_res_link target columns - all partitions", + "Observation?encounter.class:not=not-there", + "SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) 
AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))" + ); + SearchMultiPartitionTestCase.add( + retVal, + RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2), + "token not on chain join - NOT IN from hfj_res_link target columns - multiple partitions", + "Observation?encounter.class:not=not-there", + "SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))", + "SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))", + "SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID IN (?,?) 
) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))" + ); + + return retVal; + } + + public record SqlGenerationTestCase(String comment, String restQuery, String expectedSql, String expectedPartitionedSql, String expectedPartitionedPksSql) { + @Override + public String toString() { + return comment; + } + } + + static List searchSortTestCases() { + return List.of( + new SqlGenerationTestCase( + "bare sort", + "Patient?_sort=name", + "SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST" + ) + , new SqlGenerationTestCase( + "sort with predicate", + "Patient?active=true&_sort=name", + "SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) 
AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST" + ) + , new SqlGenerationTestCase( + "chained sort", + "Patient?_sort=Practitioner:general-practitioner.name", + "SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST", + "SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RES_PARTITION_ID = t2.PARTITION_ID) AND (t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) 
AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST" + ) + ); + } + + private String getSqlForRestQuery(String theFhirRestQuery) { + myCaptureQueriesListener.clear(); + myTestDaoSearch.searchForIds(theFhirRestQuery); + myCaptureQueriesListener.logSelectQueries(); + assertEquals(1, myCaptureQueriesListener.countSelectQueries()); + return myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false); + } + +} + + diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java index 96cb87762055..5ac282f59a61 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java @@ -1253,7 +1253,8 @@ public void testValidateCodeAgainstBuiltInSystem() { assertEquals(false, ((BooleanType) respParam.getParameter().get(0).getValue()).getValue()); assertEquals("message", respParam.getParameter().get(1).getName()); - assertEquals("Unknown code 'http://hl7.org/fhir/administrative-gender#male' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/marital-status'", ((StringType) respParam.getParameter().get(1).getValue()).getValue()); + String message = ((StringType) respParam.getParameter().get(1).getValue()).getValue(); + assertThat(message).contains("Unknown code 'http://hl7.org/fhir/administrative-gender#male' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/marital-status'"); } } diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ServerR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ServerR5Test.java index eae8946a08e9..f46858a89338 100644 ---
a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ServerR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ServerR5Test.java @@ -1,31 +1,45 @@ package ca.uhn.fhir.jpa.provider.r5; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.jpa.util.SqlQuery; import ca.uhn.fhir.rest.api.EncodingEnum; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.util.ExtensionConstants; import org.apache.commons.io.IOUtils; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; +import org.hl7.fhir.r5.model.Bundle; import org.hl7.fhir.r5.model.CapabilityStatement; import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceComponent; import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent; +import org.hl7.fhir.r5.model.CodeableConcept; +import org.hl7.fhir.r5.model.Coding; +import org.hl7.fhir.r5.model.DateTimeType; +import org.hl7.fhir.r5.model.Enumerations; import org.hl7.fhir.r5.model.Extension; +import org.hl7.fhir.r5.model.Identifier; +import org.hl7.fhir.r5.model.Observation; import org.hl7.fhir.r5.model.Patient; +import org.hl7.fhir.r5.model.Quantity; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.HashSet; +import java.util.List; import java.util.Set; -import static 
org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; @@ -145,6 +159,99 @@ public void testMetadataIncludesResourceCounts() { } + @Test + public void updateOrCreate_batchesSearchUrlDelete() { + int numOfObs = 10; + int txSize = 5; + List deleteQueries; + myCaptureQueriesListener.clear(); + + String prefix = "p1"; + + // initial insert + doUpdateOrCreate(prefix, numOfObs, txSize); + + // verify we have created the observations (sanity check) + @SuppressWarnings("unchecked") + IFhirResourceDao obsDao = myDaoRegistry.getResourceDao("Observation"); + IBundleProvider result = obsDao.search(new SearchParameterMap().setLoadSynchronous(true), new SystemRequestDetails()); + assertFalse(result.isEmpty()); + + // creates create the initial search urls, so we expect no deletes + deleteQueries = myCaptureQueriesListener.getDeleteQueries(); + assertTrue(deleteQueries.isEmpty()); + myCaptureQueriesListener.clear(); + + // update + doUpdateOrCreate(prefix, numOfObs, txSize); + + // the searchURLs should be deleted now, so we expect some deletes + // specifically, as many deletes as there were "transaction bundles" to process + deleteQueries = myCaptureQueriesListener.getDeleteQueries(); + assertFalse(deleteQueries.isEmpty()); + assertEquals(numOfObs / txSize, deleteQueries.size()); + myCaptureQueriesListener.clear(); + } + + private void doUpdateOrCreate(String prefix, int numOfObs, int txSize) { + for (int i = 0; i < numOfObs / txSize; i++) { + Bundle bundle = new Bundle(); + bundle.setType(Bundle.BundleType.TRANSACTION); + List bundleEntryComponents = createObservations(prefix, 10000 + i * txSize, txSize); + 
bundle.setEntry(bundleEntryComponents); + mySystemDao.transaction(new SystemRequestDetails(), + bundle); + } + } + + private List createObservations(String prefix, int observationId, int num) { + List list = new ArrayList<>(); + for (int i = 0; i < num; i++) { + Bundle.BundleEntryComponent bundleEntryComponent = new Bundle.BundleEntryComponent(); + Observation obs = new Observation(); + List identifierList = new ArrayList<>(); + identifierList.add(new Identifier().setValue(prefix + observationId + i)); + obs.setIdentifier(identifierList); + obs.setStatus(Enumerations.ObservationStatus.FINAL); + Coding code = new Coding("http://loinc.org", "85354-9", "Blood pressure"); + obs.setCode(new CodeableConcept().addCoding(code)); + obs.setEffective(createDateTime("2020-01-01T12:00:00-05:00")); + Coding code1 = new Coding("http://loinc.org", "8480-6", "Systolic blood pressure"); + List obsccList = new ArrayList<>(); + Observation.ObservationComponentComponent obsvC = new Observation.ObservationComponentComponent(); + CodeableConcept cc = new CodeableConcept().addCoding(code1); + obsvC.setValue(cc); + Quantity quantity = new Quantity(); + quantity.setUnit("mmHg"); + quantity.setValue(170); + quantity.setSystem("http://unitsofmeasure.org"); + quantity.setCode("mm[Hg]"); + obsvC.setValue(quantity); + obsccList.add(obsvC); + Observation.ObservationComponentComponent obsvC1 = new Observation.ObservationComponentComponent(); + CodeableConcept cc1 = new CodeableConcept(new Coding("http://loinc.org", "8462-4", "Diastolic blood pressure")); + Quantity quantity1 = new Quantity(); + quantity1.setValue(110); + quantity1.setUnit("mmHg"); + quantity1.setSystem("http://unitsofmeasure.org"); + quantity1.setCode("mm[Hg]"); + obsvC1.setCode(cc1); + obsvC1.setValue(quantity1); + obsccList.add(obsvC1); + bundleEntryComponent.setResource(obs); + Bundle.BundleEntryRequestComponent bundleEntryRequestComponent = new Bundle.BundleEntryRequestComponent(); + 
bundleEntryRequestComponent.setMethod(Bundle.HTTPVerb.PUT); + bundleEntryRequestComponent.setUrl("Observation?identifier=" + prefix + observationId + i); + bundleEntryComponent.setRequest(bundleEntryRequestComponent); + list.add(bundleEntryComponent); + + } + return list; + } + + private DateTimeType createDateTime(String theDateString) { + return new DateTimeType(theDateString); + } } diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index c942a7a4ab97..6e1aaf490978 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/CompositeSearchParameterTestCases.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/CompositeSearchParameterTestCases.java index 8d7b696c4927..9ee298bf90e1 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/CompositeSearchParameterTestCases.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/CompositeSearchParameterTestCases.java @@ -223,7 +223,7 @@ void testComboSearch_withTokenAndNumber_returnsMatchingResources(Extension theEx searchParameter.addComponent( componentFrom("http://hl7.org/fhir/SearchParameter/RiskAssessment-probability", "RiskAssessment")); searchParameter.setExtension(List.of(theExtension)); - doCreateResource(searchParameter); + ourLog.info("Created SP: {}", doCreateResource(searchParameter).toUnqualifiedVersionless()); // enable this sp. 
myTestDaoSearch.getSearchParamRegistry().forceRefresh(); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaDstu3Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaDstu3Test.java index bc7d9225eba6..dd9825de053f 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaDstu3Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaDstu3Test.java @@ -51,9 +51,7 @@ import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc; -import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl; import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl; -import ca.uhn.fhir.jpa.term.TermReadSvcImpl; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc; @@ -365,10 +363,6 @@ public void afterResetInterceptors() { @AfterEach public void afterClearTerminologyCaches() { - TermReadSvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc); - baseHapiTerminologySvc.clearCaches(); - TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationCache(); - TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationWithReverseCache(); TermDeferredStorageSvcImpl deferredSvc = AopTestUtils.getTargetObject(myTerminologyDeferredStorageSvc); deferredSvc.clearDeferred(); } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java index b29c4a76ea2b..df6fe2233b2c 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java @@ -598,10 +598,6 @@ public void 
afterResetInterceptors() { @AfterEach public void afterClearTerminologyCaches() { - TermReadSvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc); - baseHapiTerminologySvc.clearCaches(); - TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationCache(); - TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationWithReverseCache(); TermDeferredStorageSvcImpl termDeferredStorageSvc = AopTestUtils.getTargetObject(myTerminologyDeferredStorageSvc); termDeferredStorageSvc.clearDeferred(); @@ -707,7 +703,7 @@ protected PlatformTransactionManager getTxManager() { protected void relocateResourceTextToCompressedColumn(Long theResourcePid, Long theVersion) { runInTransaction(()->{ - ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theResourcePid, theVersion); + ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersion(theResourcePid, theVersion); byte[] contents = GZipUtil.compress(historyEntity.getResourceTextVc()); myResourceHistoryTableDao.updateNonInlinedContents(contents, historyEntity.getId()); }); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java index 8423f9e899a7..410ef62c6482 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java @@ -40,6 +40,7 @@ import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport; import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboStringUniqueDao; import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboTokensNonUniqueDao; import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamCoordsDao; @@ 
-49,33 +50,42 @@ import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao; import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao; import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao; +import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao; +import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; +import ca.uhn.fhir.jpa.dao.mdm.MdmLinkDaoJpaImpl; import ca.uhn.fhir.jpa.entity.MdmLink; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptDesignation; +import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; import ca.uhn.fhir.jpa.entity.TermConceptProperty; import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetConcept; import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamCoords; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber; +import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken; import 
ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri; import ca.uhn.fhir.jpa.model.entity.ResourceLink; +import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.model.entity.ResourceTag; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc; @@ -220,6 +230,8 @@ public abstract class BaseJpaTest extends BaseTest { @Autowired protected ISearchResultCacheSvc mySearchResultCacheSvc; @Autowired + protected PartitionSettings myPartitionSettings; + @Autowired protected ITermCodeSystemDao myTermCodeSystemDao; @Autowired protected ITermCodeSystemVersionDao myTermCodeSystemVersionDao; @@ -256,6 +268,8 @@ public abstract class BaseJpaTest extends BaseTest { @Autowired protected ITermConceptDao myTermConceptDao; @Autowired + protected ITermConceptParentChildLinkDao myTermConceptParentChildLinkDao; + @Autowired protected ITermValueSetConceptDao myTermValueSetConceptDao; @Autowired protected ITermValueSetDao myTermValueSetDao; @@ -277,6 +291,8 @@ public abstract class BaseJpaTest extends BaseTest { @Autowired private IResourceTableDao myResourceTableDao; @Autowired + private IResourceSearchUrlDao myResourceSearchUrlDao; + @Autowired private IResourceTagDao myResourceTagDao; @Autowired private IResourceHistoryTableDao myResourceHistoryTableDao; @@ -285,6 +301,8 @@ public abstract class BaseJpaTest extends BaseTest { @Autowired protected ITermDeferredStorageSvc myTermDeferredStorageSvc; private final List myRegisteredInterceptors = new ArrayList<>(1); + @Autowired + private IResourceHistoryTagDao myResourceHistoryTagDao; @SuppressWarnings("BusyWait") public static void waitForSize(int theTarget, List theList) { @@ -394,9 +412,6 @@ public void afterPerformCleanup() { if (myMemoryCacheService != null) { myMemoryCacheService.invalidateAllCaches(); } - if (myJpaPersistedValidationSupport != null) { - 
ProxyUtil.getSingletonTarget(myJpaPersistedValidationSupport, JpaPersistedResourceValidationSupport.class).clearCaches(); - } if (myFhirInstanceValidator != null) { myFhirInstanceValidator.invalidateCaches(); } @@ -405,15 +420,22 @@ public void afterPerformCleanup() { } JpaStorageSettings defaultConfig = new JpaStorageSettings(); + myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(defaultConfig.isAccessMetaSourceInformationFromProvenanceTable()); myStorageSettings.setAdvancedHSearchIndexing(defaultConfig.isAdvancedHSearchIndexing()); myStorageSettings.setAllowContainsSearches(defaultConfig.isAllowContainsSearches()); myStorageSettings.setDeleteEnabled(defaultConfig.isDeleteEnabled()); myStorageSettings.setIncludeHashIdentityForTokenSearches(defaultConfig.isIncludeHashIdentityForTokenSearches()); + myStorageSettings.setMarkResourcesForReindexingUponSearchParameterChange(defaultConfig.isMarkResourcesForReindexingUponSearchParameterChange()); myStorageSettings.setMaximumIncludesToLoadPerPage(defaultConfig.getMaximumIncludesToLoadPerPage()); + myStorageSettings.setPreExpandValueSets(defaultConfig.isPreExpandValueSets()); myStorageSettings.getTreatBaseUrlsAsLocal().clear(); ParserOptions defaultParserOptions = new ParserOptions(); myFhirContext.getParserOptions().setStripVersionsFromReferences(defaultParserOptions.isStripVersionsFromReferences()); + + PartitionSettings defaultPartConfig = new PartitionSettings(); + myPartitionSettings.setIncludePartitionInSearchHashes(defaultPartConfig.isIncludePartitionInSearchHashes()); + myPartitionSettings.setAllowReferencesAcrossPartitions(defaultPartConfig.getAllowReferencesAcrossPartitions()); } @AfterEach @@ -487,7 +509,7 @@ protected void purgeHibernateSearch(EntityManager theEntityManager) { protected abstract PlatformTransactionManager getTxManager(); - protected void logAllCodeSystemsAndVersionsCodeSystemsAndVersions() { + public void logAllCodeSystemsAndVersionsCodeSystemsAndVersions() { 
runInTransaction(() -> { ourLog.info("CodeSystems:\n * " + myTermCodeSystemDao.findAll() .stream() @@ -522,13 +544,13 @@ protected int logAllMdmLinks() { }); } - protected void logAllResourceLinks() { + public void logAllResourceLinks() { runInTransaction(() -> { ourLog.info("Resource Links:\n * {}", myResourceLinkDao.findAll().stream().map(ResourceLink::toString).collect(Collectors.joining("\n * "))); }); } - protected int logAllResources() { + public int logAllResources() { return runInTransaction(() -> { List resources = myResourceTableDao.findAll(); ourLog.info("Resources:\n * {}", resources.stream().map(ResourceTable::toString).collect(Collectors.joining("\n * "))); @@ -536,6 +558,14 @@ protected int logAllResources() { }); } + public int logAllResourceSearchUrls() { + return runInTransaction(() -> { + List resources = myResourceSearchUrlDao.findAll(); + ourLog.info("Search URLs:\n * {}", resources.stream().map(ResourceSearchUrlEntity::toString).collect(Collectors.joining("\n * "))); + return resources.size(); + }); + } + protected int logAllConceptDesignations() { return runInTransaction(() -> { List resources = myTermConceptDesignationDao.findAll(); @@ -547,12 +577,12 @@ protected int logAllConceptDesignations() { protected int logAllConceptProperties() { return runInTransaction(() -> { List resources = myTermConceptPropertyDao.findAll(); - ourLog.info("Concept Designations:\n * {}", resources.stream().map(TermConceptProperty::toString).collect(Collectors.joining("\n * "))); + ourLog.info("Concept Properties:\n * {}", resources.stream().map(TermConceptProperty::toString).collect(Collectors.joining("\n * "))); return resources.size(); }); } - protected int logAllConcepts() { + public int logAllConcepts() { return runInTransaction(() -> { List resources = myTermConceptDao.findAll(); ourLog.info("Concepts:\n * {}", resources.stream().map(TermConcept::toString).collect(Collectors.joining("\n * "))); @@ -560,10 +590,18 @@ protected int logAllConcepts() { }); } - 
protected int logAllValueSetConcepts() { + protected int logAllConceptParentChildLinks() { + return runInTransaction(() -> { + List resources = myTermConceptParentChildLinkDao.findAll(); + ourLog.info("Concept Parent/Child Links:\n * {}", resources.stream().map(TermConceptParentChildLink::toString).collect(Collectors.joining("\n * "))); + return resources.size(); + }); + } + + public int logAllValueSetConcepts() { return runInTransaction(() -> { List resources = myTermValueSetConceptDao.findAll(); - ourLog.info("Concepts:\n * {}", resources.stream().map(TermValueSetConcept::toString).collect(Collectors.joining("\n * "))); + ourLog.info("ValueSet Concepts:\n * {}", resources.stream().map(TermValueSetConcept::toString).collect(Collectors.joining("\n * "))); return resources.size(); }); } @@ -594,12 +632,26 @@ protected void logAllUniqueIndexes() { }); } - protected void logAllTokenIndexes() { + protected void logAllTokenIndexes(String... theParamNames) { + String messageSuffix = theParamNames.length > 0 ? " containing " + Arrays.asList(theParamNames) : ""; runInTransaction(() -> { - ourLog.info("Token indexes:\n * {}", myResourceIndexedSearchParamTokenDao.findAll().stream().map(ResourceIndexedSearchParamToken::toString).collect(Collectors.joining("\n * "))); + String message = getAllTokenIndexes(theParamNames) + .stream() + .map(ResourceIndexedSearchParamToken::toString) + .collect(Collectors.joining("\n * ")); + ourLog.info("Token indexes{}:\n * {}", messageSuffix, message); }); } + @Nonnull + protected List getAllTokenIndexes(String... 
theParamNames) { + return runInTransaction(()->myResourceIndexedSearchParamTokenDao + .findAll() + .stream() + .filter(t -> theParamNames.length == 0 || Arrays.asList(theParamNames).contains(t.getParamName())) + .toList()); + } + protected void logAllCoordsIndexes() { runInTransaction(() -> { ourLog.info("Coords indexes:\n * {}", myResourceIndexedSearchParamCoordsDao.findAll().stream().map(ResourceIndexedSearchParamCoords::toString).collect(Collectors.joining("\n * "))); @@ -612,7 +664,7 @@ protected void logAllNumberIndexes() { }); } - protected void logAllUriIndexes() { + public void logAllUriIndexes() { runInTransaction(() -> { ourLog.info("URI indexes:\n * {}", myResourceIndexedSearchParamUriDao.findAll().stream().map(ResourceIndexedSearchParamUri::toString).collect(Collectors.joining("\n * "))); }); @@ -621,19 +673,33 @@ protected void logAllUriIndexes() { protected void logAllStringIndexes(String... theParamNames) { String messageSuffix = theParamNames.length > 0 ? " containing " + Arrays.asList(theParamNames) : ""; runInTransaction(() -> { - String message = myResourceIndexedSearchParamStringDao - .findAll() + String message = getAllStringIndexes(theParamNames) .stream() - .filter(t -> theParamNames.length == 0 ? true : Arrays.asList(theParamNames).contains(t.getParamName())) - .map(t -> t.toString()) + .map(ResourceIndexedSearchParamString::toString) .collect(Collectors.joining("\n * ")); ourLog.info("String indexes{}:\n * {}", messageSuffix, message); }); } + @Nonnull + protected List getAllStringIndexes(String... 
theParamNames) { + return runInTransaction(()->myResourceIndexedSearchParamStringDao + .findAll() + .stream() + .filter(t -> theParamNames.length == 0 || Arrays.asList(theParamNames).contains(t.getParamName())) + .toList()); + } + + protected void logAllResourceTags() { runInTransaction(() -> { - ourLog.info("Token tags:\n * {}", myResourceTagDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * "))); + ourLog.info("Resource tags:\n * {}", myResourceTagDao.findAll().stream().map(ResourceTag::toString).collect(Collectors.joining("\n * "))); + }); + } + + protected void logAllResourceHistoryTags() { + runInTransaction(() -> { + ourLog.info("Resource history tags:\n * {}", myResourceHistoryTagDao.findAll().stream().map(ResourceHistoryTag::toString).collect(Collectors.joining("\n * "))); }); } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseValueSetHSearchExpansionR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseValueSetHSearchExpansionR4Test.java index b2ddd8cb2fbd..78d3a8966e61 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseValueSetHSearchExpansionR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseValueSetHSearchExpansionR4Test.java @@ -37,9 +37,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; import ca.uhn.fhir.jpa.term.IValueSetConceptAccumulator; -import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl; import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl; -import ca.uhn.fhir.jpa.term.TermReadSvcImpl; import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; @@ -179,10 +177,6 @@ public void afterCleanupDao() { @AfterEach public void afterClearTerminologyCaches() { - TermReadSvcImpl baseHapiTerminologySvc = 
AopTestUtils.getTargetObject(myTermSvc); - baseHapiTerminologySvc.clearCaches(); - TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationCache(); - TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationWithReverseCache(); TermDeferredStorageSvcImpl deferredSvc = AopTestUtils.getTargetObject(myTerminologyDeferredStorageSvc); deferredSvc.clearDeferred(); } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PreventDanglingInterceptorsExtension.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PreventDanglingInterceptorsExtension.java index 73a34c4b9325..b1626d5a1d23 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PreventDanglingInterceptorsExtension.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/PreventDanglingInterceptorsExtension.java @@ -41,28 +41,42 @@ public class PreventDanglingInterceptorsExtension implements BeforeEachCallback, AfterEachCallback { private static final Logger ourLog = LoggerFactory.getLogger(PreventDanglingInterceptorsExtension.class); - private final Supplier myInterceptorServiceSuplier; + private final Supplier myIInterceptorServiceSupplier; private List myBeforeInterceptors; - public PreventDanglingInterceptorsExtension(Supplier theInterceptorServiceSuplier) { - myInterceptorServiceSuplier = theInterceptorServiceSuplier; + public PreventDanglingInterceptorsExtension(Supplier theIInterceptorServiceSupplier) { + myIInterceptorServiceSupplier = theIInterceptorServiceSupplier; } @Override public void beforeEach(ExtensionContext theExtensionContext) throws Exception { - myBeforeInterceptors = myInterceptorServiceSuplier.get().getAllRegisteredInterceptors(); + myBeforeInterceptors = myIInterceptorServiceSupplier.get().getAllRegisteredInterceptors(); - ourLog.info("Registered interceptors:\n * " + myBeforeInterceptors.stream().map(t -> t.toString()).collect(Collectors.joining("\n * "))); + 
ourLog.info("Registered interceptors:\n * {}", myBeforeInterceptors.stream().map(Object::toString).collect(Collectors.joining("\n * "))); } @Override public void afterEach(ExtensionContext theExtensionContext) throws Exception { - List afterInterceptors = myInterceptorServiceSuplier.get().getAllRegisteredInterceptors(); - Map delta = new IdentityHashMap<>(); - afterInterceptors.forEach(t -> delta.put(t, t)); - myBeforeInterceptors.forEach(t -> delta.remove(t)); - delta.keySet().forEach(t->myInterceptorServiceSuplier.get().unregisterInterceptor(t)); - assertThat(delta.isEmpty()).as(() -> "Test added interceptor(s) and did not clean them up:\n * " + delta.keySet().stream().map(t -> t.toString()).collect(Collectors.joining("\n * "))).isTrue(); + List afterInterceptors = myIInterceptorServiceSupplier.get().getAllRegisteredInterceptors(); + // Handle interceptors added by the test + { + Map delta = new IdentityHashMap<>(); + afterInterceptors.forEach(t -> delta.put(t, t)); + myBeforeInterceptors.forEach(delta::remove); + delta.keySet().forEach(t -> myIInterceptorServiceSupplier.get().unregisterInterceptor(t)); + assertThat(delta.isEmpty()).as(() -> "Test added interceptor(s) and did not clean them up:\n * " + delta.keySet().stream().map(Object::toString).collect(Collectors.joining("\n * "))).isTrue(); + } + + // Handle interceptors removed by the test + { + IdentityHashMap delta = new IdentityHashMap<>(); + myBeforeInterceptors.forEach(t -> delta.put(t, t)); + afterInterceptors.forEach(t -> delta.remove(t, t)); + for (Object t : delta.keySet()) { + ourLog.warn("Interceptor {} was removed by test, re-adding", t); + myIInterceptorServiceSupplier.get().registerInterceptor(t); + } + } } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/DatabaseSupportUtil.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/DatabaseSupportUtil.java index 91c41fba19ba..f46e4493b291 100644 --- 
a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/DatabaseSupportUtil.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/DatabaseSupportUtil.java @@ -19,9 +19,12 @@ */ package ca.uhn.fhir.jpa.util; +import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.SystemUtils; +import java.util.UUID; + public final class DatabaseSupportUtil { private DatabaseSupportUtil() {} @@ -50,4 +53,12 @@ private static boolean isColimaConfigured() { && StringUtils.isNotBlank(System.getenv("DOCKER_HOST")) && System.getenv("DOCKER_HOST").contains("colima"); } + + /** + * Create a new connection to a randomized H2 database for testing + */ + public static DriverTypeEnum.ConnectionProperties newConnection() { + String url = "jdbc:h2:mem:test_migration-" + UUID.randomUUID() + ";CASE_INSENSITIVE_IDENTIFIERS=TRUE;"; + return DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA"); + } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/TestPartitionSelectorInterceptor.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/TestPartitionSelectorInterceptor.java new file mode 100644 index 000000000000..36649816b846 --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/TestPartitionSelectorInterceptor.java @@ -0,0 +1,87 @@ +/*- + * #%L + * HAPI FHIR JPA Server Test Utilities + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.jpa.util; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.interceptor.api.Hook; +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.partition.BaseRequestPartitionHelperSvc; +import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc; +import jakarta.annotation.Nonnull; +import org.apache.commons.lang3.Validate; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.util.HashSet; +import java.util.Set; + +public class TestPartitionSelectorInterceptor { + private RequestPartitionId myNextPartition; + private final Set myNonPartitionableResources = new HashSet<>(); + private BaseRequestPartitionHelperSvc myHelperSvc = new RequestPartitionHelperSvc(); + + /** + * Constructor + */ + public TestPartitionSelectorInterceptor() { + super(); + } + + public TestPartitionSelectorInterceptor addNonPartitionableResource(@Nonnull String theResourceName) { + Validate.notBlank(theResourceName, "Must not be blank"); + myNonPartitionableResources.add(theResourceName); + return this; + } + + public void setNextPartitionId(Integer theNextPartitionId) { + myNextPartition = RequestPartitionId.fromPartitionId(theNextPartitionId); + } + + public void setNextPartition(RequestPartitionId theNextPartition) { + myNextPartition = theNextPartition; + } + + @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE) + public RequestPartitionId selectPartitionCreate(IBaseResource 
theResource) { + String resourceType = FhirContext.forR5Cached().getResourceType(theResource); + return selectPartition(resourceType); + } + + @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ) + public RequestPartitionId selectPartitionRead(ReadPartitionIdRequestDetails theDetails) { + return selectPartition(theDetails.getResourceType()); + } + + @Nonnull + private RequestPartitionId selectPartition(String theResourceType) { + if (theResourceType != null) { + if (!myHelperSvc.isResourcePartitionable(theResourceType)) { + return RequestPartitionId.defaultPartition(); + } + if (myNonPartitionableResources.contains(theResourceType)) { + return RequestPartitionId.defaultPartition(); + } + } + + assert myNextPartition != null; + return myNextPartition; + } +} diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcTest.java index 2e357229d150..7f889f65fc6a 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcTest.java @@ -3,9 +3,12 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.cross.IResourceLookup; +import ca.uhn.fhir.jpa.model.cross.JpaResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.model.primitive.IdDt; @@ -15,6 +18,7 @@ import jakarta.persistence.criteria.Path; import jakarta.persistence.criteria.Root; import org.hl7.fhir.instance.model.api.IIdType; 
+import org.hl7.fhir.r4.model.IdType; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -28,6 +32,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Map; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -91,11 +96,12 @@ private void mock_resolveResourcePersistentIdsWithCache_toReturnNothing() { * @param theResourcePacks */ private void mockReturnsFor_getIdsOfExistingResources(ResourceIdPackage... theResourcePacks) { - List resourcePersistentIds = new ArrayList<>(); + List> resourcePersistentIds = new ArrayList<>(); List matches = new ArrayList<>(); for (ResourceIdPackage pack : theResourcePacks) { resourcePersistentIds.add(pack.myPid); + pack.myPid.setAssociatedResourceId(pack.MyResourceId); matches.add(getResourceTableRecordForResourceTypeAndPid( pack.MyResourceId.getResourceType(), @@ -104,11 +110,19 @@ private void mockReturnsFor_getIdsOfExistingResources(ResourceIdPackage... 
theRe )); } - IResourcePersistentId first = resourcePersistentIds.remove(0); + IResourcePersistentId first = resourcePersistentIds.remove(0); if (resourcePersistentIds.isEmpty()) { - when(myIdHelperService.resolveResourcePersistentIdsWithCache(any(), any())).thenReturn(Collections.singletonList(first)); + when(myIdHelperService.resolveResourceIdentities(any(), any(), any())) + .thenReturn(Map.of(first.getAssociatedResourceId(), new JpaResourceLookup(first.getResourceType(), first.getAssociatedResourceId().getIdPart(), (JpaPid) first, null, null))); } else { - when(myIdHelperService.resolveResourcePersistentIdsWithCache(any(), any())).thenReturn(resourcePersistentIds); + + HashMap> map = new HashMap<>(); + for (var next : resourcePersistentIds) { + map.put(next.getAssociatedResourceId(), new JpaResourceLookup(next.getResourceType(),next.getAssociatedResourceId().getIdPart() ,(JpaPid) next, null, null)); + } + + when(myIdHelperService.resolveResourceIdentities(any(), any(), any())) + .thenReturn(map); } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java index 8f515b7ecbc8..184371ca7d2f 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParserTest.java @@ -1,8 +1,8 @@ package ca.uhn.fhir.jpa.dao; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.entity.ResourceSearchView; import ca.uhn.fhir.jpa.model.entity.BaseTag; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.model.primitive.IdDt; import org.hl7.fhir.r4.hapi.ctx.FhirR4; import org.hl7.fhir.r4.model.Coding; @@ -27,15 +27,16 @@ public class JpaStorageResourceParserTest { @Mock private FhirContext myFhirContext; - @Mock - private ResourceSearchView 
patientSearchView; - @InjectMocks + @Mock + ResourceHistoryTable myEntity; + + @InjectMocks private final JpaStorageResourceParser jpaStorageResourceParser = new JpaStorageResourceParser(); @Test public void testPopulateResourceMeta_doesNotRemoveTags_whenTagListIsEmpty() { Mockito.when(myFhirContext.getVersion()).thenReturn(new FhirR4()); - Mockito.when(patientSearchView.getIdDt()).thenReturn(new IdDt("Patient/test-patient/_history/1")); + Mockito.when(myEntity.getIdDt()).thenReturn(new IdDt("Patient/test-patient/_history/1")); Coding coding = new Coding("system", "code", "display"); List tagList = Collections.emptyList(); @@ -44,8 +45,8 @@ public void testPopulateResourceMeta_doesNotRemoveTags_whenTagListIsEmpty() { Patient resourceTarget = new Patient(); resourceTarget.getMeta().addTag(coding); - Patient actualResult = jpaStorageResourceParser - .populateResourceMetadata(patientSearchView, forHistoryOperation, tagList, version, resourceTarget); + Patient actualResult = jpaStorageResourceParser + .populateResourceMetadata(myEntity, forHistoryOperation, tagList, version, resourceTarget); List actualTagList = actualResult.getMeta().getTag(); assertFalse(actualTagList.isEmpty()); diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilderTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilderTest.java index cac4107d9864..aadcfb8e4d0b 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilderTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilderTest.java @@ -5,6 +5,7 @@ import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import 
ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.primitive.IdDt; @@ -70,20 +71,20 @@ public void init() { @Test public void createEverythingPredicate_withListOfPids_returnsInPredicate() { when(myResourceLinkPredicateBuilder.generatePlaceholders(anyCollection())).thenReturn(List.of(PLACEHOLDER_BASE + "1", PLACEHOLDER_BASE + "2")); - Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), 1L, 2L); + Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), JpaPid.fromId(1L), JpaPid.fromId(2L)); assertEquals(InCondition.class, condition.getClass()); } @Test public void createEverythingPredicate_withSinglePid_returnsInCondition() { when(myResourceLinkPredicateBuilder.generatePlaceholders(anyCollection())).thenReturn(List.of(PLACEHOLDER_BASE + "1")); - Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), 1L); + Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), JpaPid.fromId(1L)); assertEquals(BinaryCondition.class, condition.getClass()); } @Test public void createEverythingPredicate_withNoPids_returnsBinaryCondition() { - Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), new Long[0]); + Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), new JpaPid[0]); assertEquals(BinaryCondition.class, condition.getClass()); } diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java index 473f88ec95d1..4ca1b57f0fbd 100644 --- 
a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java @@ -4,6 +4,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.dialect.HapiFhirMariaDBDialect; import ca.uhn.fhir.jpa.model.dialect.HapiFhirOracleDialect; import ca.uhn.fhir.jpa.model.entity.StorageSettings; @@ -55,7 +56,7 @@ public void testRangeSqlServer2005_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new SQLServerDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -81,7 +82,7 @@ public void testRangeSqlServer2005_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new SQLServerDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -109,7 +110,7 @@ public void testRangeSqlServer2012_NoSort() { 
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new SQLServer2012Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -135,7 +136,7 @@ public void testRangeSqlServer2012_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new SQLServer2012Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -162,7 +163,7 @@ public void testRangePostgreSQL95_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new PostgreSQLDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -188,7 +189,7 @@ public void testRangePostgreSQL95_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); 
dialectProvider.setDialectForUnitTest(new PostgreSQLDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -215,7 +216,7 @@ public void testRangeOracle12c_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new HapiFhirOracleDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -241,7 +242,7 @@ public void testRangeOracle12c_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new HapiFhirOracleDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -268,7 +269,7 @@ public void testRangeMySQL8_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); 
dialectProvider.setDialectForUnitTest(new MySQL8Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -294,7 +295,7 @@ public void testRangeMySQL8_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new MySQL8Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -325,7 +326,7 @@ public void testRangeMariaDB103_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new HapiFhirMariaDBDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -351,7 +352,7 @@ public void testRangeMariaDB103_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new HapiFhirMariaDBDialect()); SearchQueryBuilder builder = new 
SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; @@ -382,7 +383,7 @@ public void testRangeDerbyTenSeven_NoSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new DerbyDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); GeneratedSql generated; // No range @@ -408,7 +409,7 @@ public void testRangeDerbyTenSeven_WithSort() { HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new DerbyDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); - builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); + builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L))); builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true); GeneratedSql generated; diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/util/MemoryCacheServiceTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/util/MemoryCacheServiceTest.java index 797cb126f2ff..4b14db933c08 100644 --- 
a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/util/MemoryCacheServiceTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/util/MemoryCacheServiceTest.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.jpa.util; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.jpa.model.entity.TagTypeEnum; @@ -237,5 +238,11 @@ void assertNotDone() { } } + @Test + public void testToString() { + String actual = new MemoryCacheService.ForcedIdCacheKey("Patient", "12", RequestPartitionId.allPartitions()).toString(); + assertEquals("MemoryCacheService.ForcedIdCacheKey[resType=Patient,resId=12,partId=RequestPartitionId[allPartitions=true]]", actual); + } + } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index c0b49b0e3800..4bf857b3e9d1 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-server-cds-hooks/pom.xml b/hapi-fhir-server-cds-hooks/pom.xml index cca741e8e440..0dbcc1353303 100644 --- a/hapi-fhir-server-cds-hooks/pom.xml +++ b/hapi-fhir-server-cds-hooks/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 8aa2a2d33a97..6b734c190f9f 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmReadVirtualizationInterceptor.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmReadVirtualizationInterceptor.java index 
fa37a2c4f496..afed47c86918 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmReadVirtualizationInterceptor.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmReadVirtualizationInterceptor.java @@ -32,6 +32,7 @@ import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; +import ca.uhn.fhir.rest.server.util.ICachedSearchDetails; import ca.uhn.fhir.util.FhirTerser; import ca.uhn.fhir.util.ResourceReferenceInfo; import org.hl7.fhir.instance.model.api.IAnyResource; @@ -90,7 +91,10 @@ public class MdmReadVirtualizationInterceptor

@Hook( value = Pointcut.STORAGE_PRESEARCH_REGISTERED, order = MdmConstants.ORDER_PRESEARCH_REGISTERED_MDM_READ_VIRTUALIZATION_INTERCEPTOR) - public void preSearchRegistered(RequestDetails theRequestDetails, SearchParameterMap theSearchParameterMap) { + public void preSearchRegistered( + RequestDetails theRequestDetails, + SearchParameterMap theSearchParameterMap, + ICachedSearchDetails theSearchDetails) { ourMdmTroubleshootingLog .atTrace() .setMessage("MDM virtualization original search: {}{}") @@ -98,14 +102,16 @@ public void preSearchRegistered(RequestDetails theRequestDetails, SearchParamete .addArgument(() -> theSearchParameterMap.toNormalizedQueryString(myFhirContext)) .log(); + String resourceType = theSearchDetails.getResourceType(); + if (theSearchParameterMap.hasIncludes() || theSearchParameterMap.hasRevIncludes()) { myMdmSearchExpansionSvc.expandSearchAndStoreInRequestDetails( - theRequestDetails, theSearchParameterMap, PARAM_TESTER_ALL); + resourceType, theRequestDetails, theSearchParameterMap, PARAM_TESTER_ALL); } else { // If we don't have any includes, it's not worth auto-expanding the _id parameter since we'll only end // up filtering out the extra resources afterward myMdmSearchExpansionSvc.expandSearchAndStoreInRequestDetails( - theRequestDetails, theSearchParameterMap, PARAM_TESTER_NO_RES_ID); + resourceType, theRequestDetails, theSearchParameterMap, PARAM_TESTER_NO_RES_ID); } ourMdmTroubleshootingLog @@ -116,7 +122,6 @@ public void preSearchRegistered(RequestDetails theRequestDetails, SearchParamete .log(); } - @SuppressWarnings("EnumSwitchStatementWhichMissesCases") @Hook(Pointcut.STORAGE_PRESHOW_RESOURCES) public void preShowResources(RequestDetails theRequestDetails, IPreResourceShowDetails theDetails) { MdmSearchExpansionResults expansionResults = MdmSearchExpansionSvc.getCachedExpansionResults(theRequestDetails); diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmSearchExpandingInterceptor.java 
b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmSearchExpandingInterceptor.java index c7449beffd9e..52ed8293be8e 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmSearchExpandingInterceptor.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmSearchExpandingInterceptor.java @@ -29,6 +29,7 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.rest.server.util.ICachedSearchDetails; import org.springframework.beans.factory.annotation.Autowired; /** @@ -57,11 +58,15 @@ public class MdmSearchExpandingInterceptor { @Hook( value = Pointcut.STORAGE_PRESEARCH_REGISTERED, order = MdmConstants.ORDER_PRESEARCH_REGISTERED_MDM_SEARCH_EXPANDING_INTERCEPTOR) - public void hook(RequestDetails theRequestDetails, SearchParameterMap theSearchParameterMap) { + public void hook( + RequestDetails theRequestDetails, + SearchParameterMap theSearchParameterMap, + ICachedSearchDetails theSearchDetails) { if (myStorageSettings.isAllowMdmExpansion()) { + String resourceType = theSearchDetails.getResourceType(); myMdmSearchExpansionSvc.expandSearchAndStoreInRequestDetails( - theRequestDetails, theSearchParameterMap, PARAM_TESTER); + resourceType, theRequestDetails, theSearchParameterMap, PARAM_TESTER); } } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSearchExpansionSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSearchExpansionSvc.java index 6b2d141d580c..fffece55721a 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSearchExpansionSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSearchExpansionSvc.java @@ -43,6 +43,8 @@ import java.util.Objects; import java.util.Set; +import static org.apache.commons.lang3.StringUtils.isBlank; + public class MdmSearchExpansionSvc { private static final String EXPANSION_RESULTS = 
MdmSearchExpansionSvc.class.getName() + "_EXPANSION_RESULTS"; private static final String RESOURCE_NAME = MdmSearchExpansionSvc.class.getName() + "_RESOURCE_NAME"; @@ -75,6 +77,7 @@ public class MdmSearchExpansionSvc { * @since 8.0.0 */ public MdmSearchExpansionResults expandSearchAndStoreInRequestDetails( + String theResourceName, @Nullable RequestDetails theRequestDetails, @Nonnull SearchParameterMap theSearchParameterMap, IParamTester theParamTester) { @@ -113,12 +116,7 @@ public MdmSearchExpansionResults expandSearchAndStoreInRequestDetails( // here we will know if it's an _id param or not // from theSearchParameterMap.keySet() expandAnyReferenceParameters( - requestPartitionId, - theRequestDetails.getResourceName(), - paramName, - orList, - theParamTester, - expansionResults); + requestPartitionId, theResourceName, paramName, orList, theParamTester, expansionResults); } } @@ -128,9 +126,8 @@ public MdmSearchExpansionResults expandSearchAndStoreInRequestDetails( * Note: Do this at the end so that the query string reflects the post-translated * query string */ - String resourceName = theRequestDetails.getResourceName(); String queryString = theSearchParameterMap.toNormalizedQueryString(myFhirContext); - theRequestDetails.getUserData().put(RESOURCE_NAME, resourceName); + theRequestDetails.getUserData().put(RESOURCE_NAME, theResourceName); theRequestDetails.getUserData().put(QUERY_STRING, queryString); return expansionResults; @@ -195,7 +192,7 @@ private IIdType newId(String value) { } private String addResourceTypeIfNecessary(String theResourceType, String theResourceId) { - if (theResourceId.contains("/")) { + if (theResourceId.contains("/") || isBlank(theResourceType)) { return theResourceId; } else { return theResourceType + "/" + theResourceId; diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml index 052fc447623e..576df87d8163 100644 --- a/hapi-fhir-server-openapi/pom.xml +++ b/hapi-fhir-server-openapi/pom.xml @@ -5,7 +5,7 @@ 
ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index 642311c8ff52..7b5dec38f45d 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java index 2c6e3f36cb38..bc466e466a8a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java @@ -66,6 +66,7 @@ public class TransactionDetails { private Map myResolvedMatchUrls = Collections.emptyMap(); private Map> myResolvedResources = Collections.emptyMap(); private Set myDeletedResourceIds = Collections.emptySet(); + private Set myUpdatedResourceIds = Collections.emptySet(); private Map myUserData; private ListMultimap myDeferredInterceptorBroadcasts; private EnumSet myDeferredInterceptorBroadcastPointcuts; @@ -118,9 +119,40 @@ public void clearRollbackUndoActions() { } } + /** + * @since 7.6.0 + */ + @SuppressWarnings("rawtypes") + public void addUpdatedResourceId(@Nonnull IResourcePersistentId theResourceId) { + Validate.notNull(theResourceId, "theResourceId must not be null"); + if (myUpdatedResourceIds.isEmpty()) { + myUpdatedResourceIds = new HashSet<>(); + } + myUpdatedResourceIds.add(theResourceId); + } + + /** + * @since 7.6.0 + */ + @SuppressWarnings("rawtypes") + public void addUpdatedResourceIds(Collection theResourceIds) { + for (IResourcePersistentId id : theResourceIds) { + addUpdatedResourceId(id); + } + } + + /** + * @since 7.6.0 + */ + @SuppressWarnings("rawtypes") + public Set getUpdatedResourceIds() { + return myUpdatedResourceIds; 
+ } + /** * @since 6.8.0 */ + @SuppressWarnings("rawtypes") public void addDeletedResourceId(@Nonnull IResourcePersistentId theResourceId) { Validate.notNull(theResourceId, "theResourceId must not be null"); if (myDeletedResourceIds.isEmpty()) { @@ -132,6 +164,7 @@ public void addDeletedResourceId(@Nonnull IResourcePersistentId theResourceId) { /** * @since 6.8.0 */ + @SuppressWarnings("rawtypes") public void addDeletedResourceIds(Collection theResourceIds) { for (IResourcePersistentId next : theResourceIds) { addDeletedResourceId(next); @@ -141,6 +174,7 @@ public void addDeletedResourceIds(Collection th /** * @since 6.8.0 */ + @SuppressWarnings("rawtypes") @Nonnull public Set getDeletedResourceIds() { return Collections.unmodifiableSet(myDeletedResourceIds); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java index a6fd1c07e770..86bd7aab2960 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java @@ -21,6 +21,8 @@ public interface ICachedSearchDetails { + String getResourceType(); + String getUuid(); void setUuid(String theUuid); diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml index 334ada1b88ad..0a090f3d386d 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml index 55c95b5b01eb..a135318926be 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml +++ 
b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml @@ -21,7 +21,7 @@ ca.uhn.hapi.fhir hapi-fhir-caching-api - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml index 8bdee9dffc11..f291cb829946 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml index 6a6f3b5a6240..b3487107f998 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml @@ -7,7 +7,7 @@ hapi-fhir ca.uhn.hapi.fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../../pom.xml diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml index 4eaa08227ee3..d221ae16a1bf 100644 --- a/hapi-fhir-serviceloaders/pom.xml +++ b/hapi-fhir-serviceloaders/pom.xml @@ -5,7 +5,7 @@ hapi-deployable-pom ca.uhn.hapi.fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index 9fb961a7de33..10c35cc32cc5 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml 
b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index c0404daee270..c63595ae55f6 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index a03857982c37..ca3ebe2c73fa 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index 97caf2be8aea..c1a220aab156 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index 00431db20ca4..2f98aaf2ccac 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir 
hapi-fhir-spring-boot - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index 1cb18798c2af..1d9502c5793e 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index 8485992ca068..a934d0ed31cc 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml index 9db01829031a..c42255dec20c 100644 --- a/hapi-fhir-sql-migrate/pom.xml +++ b/hapi-fhir-sql-migrate/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml index 61df69802a87..5f652af7c0da 100644 --- a/hapi-fhir-storage-batch2-jobs/pom.xml +++ b/hapi-fhir-storage-batch2-jobs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStep.java index 6200ffdd8c01..56a7b66fb074 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStep.java @@ -191,7 +191,7 @@ private void fetchResourcesByIdAndConsumeThem( allIds = 
allIds.subList(batchSize, allIds.size()); PersistentIdToForcedIdMap nextBatchOfResourceIds = myTransactionService - .withRequest(null) + .withSystemRequestOnPartition(theRequestPartitionId) .execute(() -> myIdHelperService.translatePidsToForcedIds(nextBatchOfPids)); TokenOrListParam idListParam = new TokenOrListParam(); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/ConsumeFilesStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/ConsumeFilesStep.java index f158a8c5841d..ddf73264ef2f 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/ConsumeFilesStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/ConsumeFilesStep.java @@ -32,7 +32,9 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.rest.api.server.RequestDetails; @@ -71,7 +73,7 @@ public class ConsumeFilesStep implements ILastJobStepWorker myIdHelperService; @Autowired private IFhirSystemDao mySystemDao; @@ -139,18 +141,24 @@ private Void storeResourcesInsideTransaction( ids.put(id, next); } - List idsList = new ArrayList<>(ids.keySet()); - List resolvedIds = myIdHelperService.resolveResourcePersistentIdsWithCache( - theRequestDetails.getRequestPartitionId(), idsList, true); - for (IResourcePersistentId next : resolvedIds) { - IIdType resId = next.getAssociatedResourceId(); - theTransactionDetails.addResolvedResourceId(resId, next); - ids.remove(resId); - } for (IIdType next : ids.keySet()) { theTransactionDetails.addResolvedResourceId(next, null); } + List idsList = new ArrayList<>(ids.keySet()); + Map> 
resolvedIdentities = myIdHelperService.resolveResourceIdentities( + theRequestDetails.getRequestPartitionId(), + idsList, + ResolveIdentityMode.includeDeleted().cacheOk()); + List> resolvedIds = new ArrayList<>(resolvedIdentities.size()); + for (Map.Entry> next : resolvedIdentities.entrySet()) { + IIdType resId = next.getKey(); + IResourcePersistentId persistentId = next.getValue().getPersistentId(); + resolvedIds.add(persistentId); + theTransactionDetails.addResolvedResourceId(resId, persistentId); + ids.remove(resId); + } + mySystemDao.preFetchResources(resolvedIds, true); for (IBaseResource next : theResources) { diff --git a/hapi-fhir-storage-batch2-test-utilities/pom.xml b/hapi-fhir-storage-batch2-test-utilities/pom.xml index 1c4f88740560..f9563c3200d0 100644 --- a/hapi-fhir-storage-batch2-test-utilities/pom.xml +++ b/hapi-fhir-storage-batch2-test-utilities/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml index 7c8c05123588..85b854f600db 100644 --- a/hapi-fhir-storage-batch2/pom.xml +++ b/hapi-fhir-storage-batch2/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml index 5f7d299e4a18..847811a9394b 100644 --- a/hapi-fhir-storage-cr/pom.xml +++ b/hapi-fhir-storage-cr/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml index 5a1e19e54e9b..0e9263667e7b 100644 --- a/hapi-fhir-storage-mdm/pom.xml +++ b/hapi-fhir-storage-mdm/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-test-utilities/pom.xml 
b/hapi-fhir-storage-test-utilities/pom.xml index e76bfbc8bbd3..e6d0c29d4ef7 100644 --- a/hapi-fhir-storage-test-utilities/pom.xml +++ b/hapi-fhir-storage-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml index 804588a75558..c916bbb9efaf 100644 --- a/hapi-fhir-storage/pom.xml +++ b/hapi-fhir-storage/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java index 5e3b05a6ad38..bacb96416e3f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java @@ -368,6 +368,10 @@ public class JpaStorageSettings extends StorageSettings { * @since 7.2.0 */ private boolean myWriteToLegacyLobColumns = false; + /** + * @since 8.0.0 + */ + private boolean myAccessMetaSourceInformationFromProvenanceTable = false; /** * If this is enabled (default is {@literal false}), searches on token indexes will @@ -1764,6 +1768,37 @@ public void setStoreMetaSourceInformation(StoreMetaSourceInformationEnum theStor myStoreMetaSourceInformation = theStoreMetaSourceInformation; } + /** + * If set to true (default is false), the system will read + * Resource.meta.source values from the HFJ_RES_VER_PROV + * table. This table was replaced by dedicated columns in the HFJ_RES_VER + * table as of HAPI FHIR 6.8.0 (Smile CDR 2023.08.R01) and as of that version + * there is no need to read from the dedicated table. However, if old data still + * remains and has not been migrated (using a $reindex operation) then you can + * enable this setting in order to read from the old table. 
+ * + * @since 8.0.0 + */ + public boolean isAccessMetaSourceInformationFromProvenanceTable() { + return myAccessMetaSourceInformationFromProvenanceTable; + } + + /** + * If set to true (default is false), the system will read + * Resource.meta.source values from the HFJ_RES_VER_PROV + * table. This table was replaced by dedicated columns in the HFJ_RES_VER + * table as of HAPI FHIR 6.8.0 (Smile CDR 2023.08.R01) and as of that version + * there is no need to read from the dedicated table. However, if old data still + * remains and has not been migrated (using a $reindex operation) then you can + * enable this setting in order to read from the old table. + * + * @since 8.0.0 + */ + public void setAccessMetaSourceInformationFromProvenanceTable( + boolean theAccessMetaSourceInformationFromProvenanceTable) { + myAccessMetaSourceInformationFromProvenanceTable = theAccessMetaSourceInformationFromProvenanceTable; + } + /** *

* If set to {@code true}, ValueSets and expansions are stored in terminology tables. This is to facilitate diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java index 18c1d8eef169..79bf35c4d196 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java @@ -20,11 +20,13 @@ package ca.uhn.fhir.jpa.api.svc; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.model.PersistentIdToForcedIdMap; import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; @@ -38,6 +40,7 @@ import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.stream.Collectors; /** * This interface is used to translate between {@link IResourcePersistentId} @@ -45,43 +48,6 @@ */ public interface IIdHelperService> { - /** - * Given a collection of resource IDs (resource type + id), resolves the internal persistent IDs. - *

- * This implementation will always try to use a cache for performance, meaning that it can resolve resources that - * are deleted (but note that forced IDs can't change, so the cache can't return incorrect results) - * - * @param theOnlyForcedIds If true, resources which are not existing forced IDs will not be resolved - */ - @Nonnull - List resolveResourcePersistentIdsWithCache( - @Nonnull RequestPartitionId theRequestPartitionId, List theIds, boolean theOnlyForcedIds); - - /** - * Given a resource type and ID, determines the internal persistent ID for a resource. - * Optionally filters out deleted resources. - * - * @throws ResourceNotFoundException If the ID can not be found - */ - @Nonnull - T resolveResourcePersistentIds( - @Nonnull RequestPartitionId theRequestPartitionId, - String theResourceType, - String theId, - ResolveIdentityMode theMode); - - /** - * Returns a mapping of Id -> IResourcePersistentId. - * If any resource is not found, it will throw ResourceNotFound exception (and no map will be returned) - * Optionally filters out deleted resources. - */ - @Nonnull - Map resolveResourcePersistentIds( - @Nonnull RequestPartitionId theRequestPartitionId, - String theResourceType, - List theIds, - ResolveIdentityMode theMode); - /** * Given a persistent ID, returns the associated resource ID */ @@ -89,6 +55,9 @@ Map resolveResourcePersistentIds( IIdType translatePidIdToForcedId(FhirContext theCtx, String theResourceType, T theId); /** + * @param theResourceType Note that it is inefficient to call this method + * with a null resource type, so this should be avoided + * unless strictly necessary. * @throws ResourceNotFoundException If the ID can not be found */ @Nonnull @@ -100,6 +69,9 @@ IResourceLookup resolveResourceIdentity( throws ResourceNotFoundException; /** + * @param theResourceType Note that it is inefficient to call this method + * with a null resource type, so this should be avoided + * unless strictly necessary. 
* @throws ResourceNotFoundException If the ID can not be found */ @Nonnull @@ -114,9 +86,8 @@ default T resolveResourceIdentityPid( } /** - * Given a forced ID, convert it to it's Long value. Since you are allowed to use string IDs for resources, we need to - * convert those to the underlying Long values that are stored, for lookup and comparison purposes. - * Optionally filters out deleted resources. + * Given a collection of resource IDs, resolve the resource identities, including the persistent ID, + * deleted status, resource type, etc. * * @since 8.0.0 */ @@ -124,6 +95,20 @@ default T resolveResourceIdentityPid( Map> resolveResourceIdentities( @Nonnull RequestPartitionId theRequestPartitionId, Collection theIds, ResolveIdentityMode theMode); + /** + * Given a collection of resource IDs, resolve the resource persistent IDs. + * + * @since 8.0.0 + */ + default List resolveResourcePids( + RequestPartitionId theRequestPartitionId, + List theTargetIds, + ResolveIdentityMode theResolveIdentityMode) { + return resolveResourceIdentities(theRequestPartitionId, theTargetIds, theResolveIdentityMode).values().stream() + .map(IResourceLookup::getPersistentId) + .collect(Collectors.toList()); + } + /** * Returns true if the given resource ID should be stored in a forced ID. Under default config * (meaning client ID strategy is {@link JpaStorageSettings.ClientIdStrategyEnum#ALPHANUMERIC}) @@ -133,15 +118,6 @@ Map> resolveResourceIdentities( */ boolean idRequiresForcedId(String theId); - /** - * Given a collection of resource IDs (resource type + id), resolves the internal persistent IDs. - *

- * This implementation will always try to use a cache for performance, meaning that it can resolve resources that - * are deleted (but note that forced IDs can't change, so the cache can't return incorrect results) - */ - @Nonnull - List resolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List theIds); - /** * Value will be an empty Optional if the PID doesn't exist, or * a typed resource ID if so (Patient/ABC). @@ -154,23 +130,34 @@ Map> resolveResourceIdentities( PersistentIdToForcedIdMap translatePidsToForcedIds(Set theResourceIds); /** - * Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods + * This method can be called to pre-emptively add entries to the ID cache. It should + * be called by DAO methods if they are creating or changing the deleted status + * of a resource. This method returns immediately, but the data is not + * added to the internal caches until the current DB transaction is successfully + * committed, and nothing is added if the transaction rolls back. 
*/ - void addResolvedPidToFhirId( + void addResolvedPidToFhirIdAfterCommit( @Nonnull T theResourcePersistentId, @Nonnull RequestPartitionId theRequestPartitionId, @Nonnull String theResourceType, @Nonnull String theFhirId, @Nullable Date theDeletedAt); - @Nonnull - List getPidsOrThrowException(RequestPartitionId theRequestPartitionId, List theIds); - @Nullable T getPidOrNull(RequestPartitionId theRequestPartitionId, IBaseResource theResource); @Nonnull - T getPidOrThrowException(RequestPartitionId theRequestPartitionId, IIdType theId); + default T getPidOrThrowException(RequestPartitionId theRequestPartitionId, IIdType theId) { + IResourceLookup identity = resolveResourceIdentity( + theRequestPartitionId, + theId.getResourceType(), + theId.getIdPart(), + ResolveIdentityMode.includeDeleted().cacheOk()); + if (identity == null) { + throw new InvalidRequestException(Msg.code(2295) + "Invalid ID was provided: [" + theId.getIdPart() + "]"); + } + return identity.getPersistentId(); + } @Nonnull T getPidOrThrowException(@Nonnull IAnyResource theResource); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java index 6705175d2e3f..777f87a456f8 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java @@ -1356,7 +1356,8 @@ protected EntriesToProcessMap doTransactionWriteOperations( } IFhirResourceDao dao = toDao(parts, verb, url); - IIdType patchId = myContext.getVersion().newIdType().setValue(parts.getResourceId()); + IIdType patchId = + myContext.getVersion().newIdType(parts.getResourceType(), parts.getResourceId()); String conditionalUrl; if (isNull(patchId.getIdPart())) { @@ -1408,6 +1409,8 @@ protected EntriesToProcessMap doTransactionWriteOperations( theTransactionStopWatch.endCurrentTask(); } + 
postTransactionProcess(theTransactionDetails); + /* * Make sure that there are no conflicts from deletions. E.g. we can't delete something * if something else has a reference to it.. Unless the thing that has a reference to it @@ -1517,6 +1520,13 @@ protected void handleVerbChangeInTransactionWriteOperations() { // nothing } + /** + * Implement to handle post transaction processing + */ + protected void postTransactionProcess(TransactionDetails theTransactionDetails) { + // nothing + } + /** * Check for if a resource id should be matched in a conditional update * If the FHIR version is older than R4, it follows the old specifications and does not match diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java index 71179781a6be..a2972490a5ec 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.dao; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import org.hl7.fhir.instance.model.api.IBaseResource; /** @@ -30,10 +31,10 @@ * Currently only DB->FHIR is enabled through this interface but the aim * eventually is to handle both directions */ -public interface IStorageResourceParser { +public interface IStorageResourceParser> { // TODO: JA2 - Remove theForHistoryOperation flag - It toggles adding a bit of extra // metadata but there's no reason to not always just add that, and this would // simplify this interface - IBaseResource toResource(IBasePersistedResource theEntity, boolean theForHistoryOperation); + IBaseResource toResource(IBasePersistedResource theEntity, boolean theForHistoryOperation); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java index 87ee74ae6e2f..178fd024fc00 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java @@ -450,6 +450,11 @@ public String getResourceType() { return myPersistentId.getAssociatedResourceId().getResourceType(); } + @Override + public String getFhirId() { + return myPersistentId.getAssociatedResourceId().getIdPart(); + } + @Override public Date getDeleted() { return null; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java index 5d56284f450b..d64bbdbe60ca 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java @@ -30,11 +30,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Queue; import java.util.concurrent.atomic.AtomicInteger; -import java.util.stream.Collectors; import static org.apache.commons.lang3.StringUtils.trim; @@ -87,10 +87,16 @@ public void execute(ExecutionInfo theExecutionInfo, List theQueryInfo && next.getParametersList().get(0).size() > 0) { size = next.getParametersList().size(); List values = next.getParametersList().get(0); - params = values.stream() - .map(t -> t.getArgs()[1]) - .map(t -> t != null ? t.toString() : "NULL") - .collect(Collectors.toList()); + params = new ArrayList<>(); + for (ParameterSetOperation t : values) { + if (t.getMethod().getName().equals("setNull")) { + params.add(null); + } else { + Object arg = t.getArgs()[1]; + String s = arg != null ? 
arg.toString() : null; + params.add(s); + } + } } else { params = Collections.emptyList(); size = next.getParametersList().size(); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java index c020f57141f9..97d6754908a7 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java @@ -22,7 +22,6 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; -import ca.uhn.fhir.jpa.api.model.TranslationQuery; import ca.uhn.fhir.jpa.model.entity.TagTypeEnum; import ca.uhn.fhir.sl.cache.Cache; import ca.uhn.fhir.sl.cache.CacheFactory; @@ -30,6 +29,8 @@ import jakarta.annotation.Nullable; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.instance.model.api.IIdType; import org.springframework.transaction.support.TransactionSynchronization; import org.springframework.transaction.support.TransactionSynchronizationManager; @@ -69,14 +70,9 @@ private void populateCaches() { int maximumSize; switch (next) { - case CONCEPT_TRANSLATION: - case CONCEPT_TRANSLATION_REVERSE: - timeoutSeconds = - SECONDS.convert(myStorageSettings.getTranslationCachesExpireAfterWriteInMinutes(), MINUTES); - maximumSize = 500000; - break; + case NAME_TO_PARTITION: + case ID_TO_PARTITION: case PID_TO_FORCED_ID: - case FORCED_ID_TO_PID: case MATCH_URL: case RESOURCE_LOOKUP_BY_FORCED_ID: case HISTORY_COUNT: @@ -207,22 +203,13 @@ public enum CacheEnum { * Value type: {@literal JpaResourceLookup} */ RESOURCE_LOOKUP_BY_FORCED_ID(ForcedIdCacheKey.class), - FORCED_ID_TO_PID(String.class), 
FHIRPATH_EXPRESSION(String.class), /** * Key type: {@literal Long} * Value type: {@literal Optional} */ PID_TO_FORCED_ID(Long.class), - /** - * TODO: JA this is duplicate with the CachingValidationSupport cache. - * A better solution would be to drop this cache for this item, and to - * create a new CachingValidationSupport implementation which uses - * the MemoryCacheService for all of its caches. - */ - CONCEPT_TRANSLATION(TranslationQuery.class), MATCH_URL(String.class), - CONCEPT_TRANSLATION_REVERSE(TranslationQuery.class), RESOURCE_CONDITIONAL_CREATE_VERSION(Long.class), HISTORY_COUNT(HistoryCountKey.class), NAME_TO_PARTITION(String.class), @@ -339,6 +326,15 @@ public static class ForcedIdCacheKey { private final RequestPartitionId myRequestPartitionId; private final int myHashCode; + @Override + public String toString() { + return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) + .append("resType", myResourceType) + .append("resId", myResourceId) + .append("partId", myRequestPartitionId) + .toString(); + } + public ForcedIdCacheKey( @Nullable String theResourceType, @Nonnull String theResourceId, @@ -372,21 +368,11 @@ public int hashCode() { * Creates and returns a new unqualified versionless IIdType instance */ public IIdType toIdType(FhirContext theFhirCtx) { - if (myResourceType == null) { - return toIdTypeWithoutResourceType(theFhirCtx); - } - IIdType retVal = theFhirCtx.getVersion().newIdType(); - retVal.setValue(myResourceType + "/" + myResourceId); - return retVal; + return theFhirCtx.getVersion().newIdType(myResourceType, myResourceId); } - /** - * Creates and returns a new unqualified versionless IIdType instance - */ public IIdType toIdTypeWithoutResourceType(FhirContext theFhirCtx) { - IIdType retVal = theFhirCtx.getVersion().newIdType(); - retVal.setValue(myResourceId); - return retVal; + return theFhirCtx.getVersion().newIdType(null, myResourceId); } } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java index abc653b1a0bb..c1e7979fe5e2 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java @@ -138,10 +138,15 @@ public String getSql(boolean theInlineParams, boolean theFormat, boolean theSani break; } String nextParamValue = nextParams.remove(0); - if (theSanitizeParams) { - nextParamValue = UrlUtil.sanitizeUrlPart(nextParamValue); + String nextSubstitution; + if (nextParamValue != null) { + if (theSanitizeParams) { + nextParamValue = UrlUtil.sanitizeUrlPart(nextParamValue); + } + nextSubstitution = "'" + nextParamValue + "'"; + } else { + nextSubstitution = "NULL"; } - String nextSubstitution = "'" + nextParamValue + "'"; retVal = retVal.substring(0, idx) + nextSubstitution + retVal.substring(idx + 1); idx += nextSubstitution.length(); } diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index c14dc3a7001f..12b771a9daf4 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 7cc77115a18b..f11e2b2269f0 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index 6d1abc83f128..7591774de33f 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml 
index 2d678f794fc8..24c995b74118 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index 81332f0b2856..2615521e1320 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml index e9bb68a0ced9..11d920a432dd 100644 --- a/hapi-fhir-structures-r4b/pom.xml +++ b/hapi-fhir-structures-r4b/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index 20fc936b45e1..b44863f813cb 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index 8f3810005ca9..28d655f7d971 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java index 5660322de55d..c125433d6a8c 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java @@ -21,6 +21,7 @@ import 
ca.uhn.fhir.context.BaseRuntimeChildDefinition; import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition; +import ca.uhn.fhir.context.BaseRuntimeElementDefinition; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.util.FhirTerser; @@ -87,6 +88,13 @@ default ICreationArgument withLanguage(String theLanguage) { return t -> __setPrimitiveChild(getFhirContext(), t, "language", "string", theLanguage); } + /** + * List.entry.item + */ + default ICreationArgument withListItem(IIdType theReference) { + return withElementAt("entry", withReference("item", theReference)); + } + /** * Set Patient.gender */ @@ -239,6 +247,10 @@ default IBaseResource buildPatient(ICreationArgument... theModifiers) { return buildResource("Patient", theModifiers); } + default IIdType createList(ICreationArgument... theModifiers) { + return createResource("List", theModifiers); + } + default IIdType createPatient(ICreationArgument... theModifiers) { return createResource("Patient", theModifiers); } @@ -321,7 +333,7 @@ default ICreationArgument withReference(String theReferenceName, @Nullable IIdTy IBaseReference reference = (IBaseReference) getFhirContext().getElementDefinition("Reference").newInstance(); reference.setReference(theReferenceValue.getValue()); - RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition((IBaseResource) t); + BaseRuntimeElementDefinition resourceDef = getFhirContext().getElementDefinition(t.getClass()); resourceDef.getChildByName(theReferenceName).getMutator().addValue(t, reference); } }; diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/MockInvoker.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/MockInvoker.java index 7c84733f4c18..fd8489068893 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/MockInvoker.java +++ 
b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/MockInvoker.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR Test Utilities + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.test.utilities; import ca.uhn.fhir.interceptor.api.HookParams; diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index b726d4f06b90..bc27a2bd1a38 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index 771c5dfce903..f1525bfc71a6 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index 97d409315ead..c6fa72c34ec9 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml 
index a049c6e0326d..c3bba7a71c58 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index 8758c8a3f273..e59e8ed12c25 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4b/pom.xml b/hapi-fhir-validation-resources-r4b/pom.xml index 70968ba423b2..2595ffa9fee7 100644 --- a/hapi-fhir-validation-resources-r4b/pom.xml +++ b/hapi-fhir-validation-resources-r4b/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index a08bea7582d8..ee0b10ded030 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index 7991424e435f..20548a7c0fe9 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/DefaultProfileValidationSupportNpmStrategy.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/DefaultProfileValidationSupportNpmStrategy.java index 78c5ea21eb05..7ac183ff6b72 100644 --- 
a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/DefaultProfileValidationSupportNpmStrategy.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/DefaultProfileValidationSupportNpmStrategy.java @@ -12,6 +12,8 @@ import java.io.IOException; +// This is instantiated through reflection from DefaultProfileValidationSupport +@SuppressWarnings("unused") public class DefaultProfileValidationSupportNpmStrategy extends NpmPackageValidationSupport { private static final Logger ourLog = LoggerFactory.getLogger(DefaultProfileValidationSupportNpmStrategy.class); diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupport.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupport.java index a617e04c41b7..83d1734701cf 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupport.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupport.java @@ -131,7 +131,8 @@ private ValueSetExpansionOutcome expandValueSet( org.hl7.fhir.r5.model.ValueSet expansionR5; try { expansionR5 = expandValueSetToCanonical( - theValidationSupportContext, theValueSetToExpand, theWantSystemAndVersion, theWantCode); + theValidationSupportContext, theValueSetToExpand, theWantSystemAndVersion, theWantCode) + .getValueSet(); } catch (ExpansionCouldNotBeCompletedInternallyException e) { return new ValueSetExpansionOutcome(e.getMessage(), false); } @@ -176,17 +177,17 @@ private ValueSetExpansionOutcome expandValueSet( return new ValueSetExpansionOutcome(expansion); } - private org.hl7.fhir.r5.model.ValueSet expandValueSetToCanonical( + private ValueSetAndMessages expandValueSetToCanonical( ValidationSupportContext theValidationSupportContext, IBaseResource 
theValueSetToExpand, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode) throws ExpansionCouldNotBeCompletedInternallyException { - org.hl7.fhir.r5.model.ValueSet expansionR5; + ValueSetAndMessages expansion; switch (getFhirVersionEnum( theValidationSupportContext.getRootValidationSupport().getFhirContext(), theValueSetToExpand)) { case DSTU2: { - expansionR5 = expandValueSetDstu2( + expansion = expandValueSetDstu2( theValidationSupportContext, (ca.uhn.fhir.model.dstu2.resource.ValueSet) theValueSetToExpand, theWantSystemUrlAndVersion, @@ -194,7 +195,7 @@ private org.hl7.fhir.r5.model.ValueSet expandValueSetToCanonical( break; } case DSTU2_HL7ORG: { - expansionR5 = expandValueSetDstu2Hl7Org( + expansion = expandValueSetDstu2Hl7Org( theValidationSupportContext, (ValueSet) theValueSetToExpand, theWantSystemUrlAndVersion, @@ -202,7 +203,7 @@ private org.hl7.fhir.r5.model.ValueSet expandValueSetToCanonical( break; } case DSTU3: { - expansionR5 = expandValueSetDstu3( + expansion = expandValueSetDstu3( theValidationSupportContext, (org.hl7.fhir.dstu3.model.ValueSet) theValueSetToExpand, theWantSystemUrlAndVersion, @@ -210,7 +211,7 @@ private org.hl7.fhir.r5.model.ValueSet expandValueSetToCanonical( break; } case R4: { - expansionR5 = expandValueSetR4( + expansion = expandValueSetR4( theValidationSupportContext, (org.hl7.fhir.r4.model.ValueSet) theValueSetToExpand, theWantSystemUrlAndVersion, @@ -218,7 +219,7 @@ private org.hl7.fhir.r5.model.ValueSet expandValueSetToCanonical( break; } case R4B: { - expansionR5 = expandValueSetR4B( + expansion = expandValueSetR4B( theValidationSupportContext, (org.hl7.fhir.r4b.model.ValueSet) theValueSetToExpand, theWantSystemUrlAndVersion, @@ -226,7 +227,7 @@ private org.hl7.fhir.r5.model.ValueSet expandValueSetToCanonical( break; } case R5: { - expansionR5 = expandValueSetR5( + expansion = expandValueSetR5( theValidationSupportContext, (org.hl7.fhir.r5.model.ValueSet) theValueSetToExpand, 
theWantSystemUrlAndVersion, @@ -239,7 +240,7 @@ private org.hl7.fhir.r5.model.ValueSet expandValueSetToCanonical( + myCtx.getVersion().getVersion()); } - return expansionR5; + return expansion; } @Override @@ -250,7 +251,7 @@ public CodeValidationResult validateCodeInValueSet( String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) { - org.hl7.fhir.r5.model.ValueSet expansion; + ValueSetAndMessages expansion; String vsUrl = CommonCodeSystemsTerminologyService.getValueSetUrl(getFhirContext(), theValueSet); try { expansion = expandValueSetToCanonical( @@ -270,17 +271,34 @@ public CodeValidationResult validateCodeInValueSet( return codeValidationResult; } - if (expansion == null) { + if (expansion == null || expansion.getValueSet() == null) { return null; } + if (expansion.getValueSet().getExpansion().getContains().isEmpty()) { + IssueSeverity severity = IssueSeverity.ERROR; + String message = "Unknown code '" + + getFormattedCodeSystemAndCodeForMessage(theCodeSystemUrlAndVersion, theCode) + + "'" + + createInMemoryExpansionMessageSuffix(vsUrl) + + (expansion.getMessages().isEmpty() ? 
"" : " Expansion result: " + expansion.getMessages()); + CodeValidationIssueCoding issueCoding = CodeValidationIssueCoding.NOT_IN_VS; + CodeValidationIssueCode notFound = CodeValidationIssueCode.NOT_FOUND; + CodeValidationResult codeValidationResult = new CodeValidationResult() + .setSeverity(severity) + .setMessage(message) + .setSourceDetails(null) + .addIssue(new CodeValidationIssue(message, severity, notFound, issueCoding)); + return codeValidationResult; + } + return validateCodeInExpandedValueSet( theValidationSupportContext, theOptions, theCodeSystemUrlAndVersion, theCode, theDisplay, - expansion, + expansion.getValueSet(), vsUrl); } @@ -568,19 +586,33 @@ private CodeValidationResult validateCodeInExpandedValueSet( + "'"; } if (isNotBlank(theValueSetUrl)) { - message += " for in-memory expansion of ValueSet '" + theValueSetUrl + "'"; + message += createInMemoryExpansionMessageSuffix(theValueSetUrl); issueCoding = CodeValidationIssueCoding.NOT_IN_VS; } + String sourceDetails = "In-memory expansion containing " + codes.size() + " codes"; + if (!codes.isEmpty() && codes.size() < 10) { + sourceDetails += ": " + + codes.stream() + .map(t -> t.getSystem() + "#" + t.getCode()) + .collect(Collectors.joining(", ")); + } + codeValidationResult = new CodeValidationResult() .setSeverity(severity) .setMessage(message) + .setSourceDetails(sourceDetails) .addIssue(new CodeValidationIssue(message, severity, issueCode, issueCoding)); } return codeValidationResult; } + @Nonnull + private static String createInMemoryExpansionMessageSuffix(String theValueSetUrl) { + return " for in-memory expansion of ValueSet '" + theValueSetUrl + "'"; + } + private static String getFormattedCodeSystemAndCodeForMessage( String theCodeSystemUrlAndVersionToValidate, String theCodeToValidate) { return (isNotBlank(theCodeSystemUrlAndVersionToValidate) ? 
theCodeSystemUrlAndVersionToValidate + "#" : "") @@ -631,7 +663,7 @@ private CodeValidationResult findCodeInExpansion( } else { String messageAppend = ""; if (isNotBlank(theValueSetUrl)) { - messageAppend = " for in-memory expansion of ValueSet: " + theValueSetUrl; + messageAppend = createInMemoryExpansionMessageSuffix(theValueSetUrl); } CodeValidationResult codeValidationResult = createResultForDisplayMismatch( myCtx, @@ -672,7 +704,7 @@ public LookupCodeResult lookupCode( } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetDstu2Hl7Org( + private ValueSetAndMessages expandValueSetDstu2Hl7Org( ValidationSupportContext theValidationSupportContext, ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @@ -684,7 +716,7 @@ private org.hl7.fhir.r5.model.ValueSet expandValueSetDstu2Hl7Org( } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetDstu2( + private ValueSetAndMessages expandValueSetDstu2( ValidationSupportContext theValidationSupportContext, ca.uhn.fhir.model.dstu2.resource.ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @@ -753,7 +785,7 @@ private void addCodesDstu2( } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetDstu3( + private ValueSetAndMessages expandValueSetDstu3( ValidationSupportContext theValidationSupportContext, org.hl7.fhir.dstu3.model.ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @@ -765,7 +797,7 @@ private org.hl7.fhir.r5.model.ValueSet expandValueSetDstu3( } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetR4( + private ValueSetAndMessages expandValueSetR4( ValidationSupportContext theValidationSupportContext, org.hl7.fhir.r4.model.ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @@ -777,7 +809,7 @@ private org.hl7.fhir.r5.model.ValueSet expandValueSetR4( } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetR4B( + private ValueSetAndMessages expandValueSetR4B( ValidationSupportContext 
theValidationSupportContext, org.hl7.fhir.r4b.model.ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @@ -789,12 +821,14 @@ private org.hl7.fhir.r5.model.ValueSet expandValueSetR4B( } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetR5( + private ValueSetAndMessages expandValueSetR5( ValidationSupportContext theValidationSupportContext, org.hl7.fhir.r5.model.ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode) throws ExpansionCouldNotBeCompletedInternallyException { + + ValueSetAndMessages retVal = new ValueSetAndMessages(); Set concepts = new HashSet<>(); expandValueSetR5IncludeOrExcludes( @@ -803,19 +837,22 @@ private org.hl7.fhir.r5.model.ValueSet expandValueSetR5( theInput.getCompose().getInclude(), true, theWantSystemUrlAndVersion, - theWantCode); + theWantCode, + retVal); expandValueSetR5IncludeOrExcludes( theValidationSupportContext, concepts, theInput.getCompose().getExclude(), false, theWantSystemUrlAndVersion, - theWantCode); + theWantCode, + retVal); - org.hl7.fhir.r5.model.ValueSet retVal = new org.hl7.fhir.r5.model.ValueSet(); + org.hl7.fhir.r5.model.ValueSet vs = new org.hl7.fhir.r5.model.ValueSet(); + retVal.setValueSet(vs); for (FhirVersionIndependentConcept next : concepts) { org.hl7.fhir.r5.model.ValueSet.ValueSetExpansionContainsComponent contains = - retVal.getExpansion().addContains(); + vs.getExpansion().addContains(); contains.setSystem(next.getSystem()); contains.setCode(next.getCode()); contains.setDisplay(next.getDisplay()); @@ -835,7 +872,8 @@ public void expandValueSetIncludeOrExclude( Consumer theConsumer, org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent theIncludeOrExclude) throws ExpansionCouldNotBeCompletedInternallyException { - expandValueSetR5IncludeOrExclude(theValidationSupportContext, theConsumer, null, null, theIncludeOrExclude); + expandValueSetR5IncludeOrExclude( + theValidationSupportContext, theConsumer, null, null, theIncludeOrExclude, new 
ValueSetAndMessages()); } private void expandValueSetR5IncludeOrExcludes( @@ -844,7 +882,8 @@ private void expandValueSetR5IncludeOrExcludes( List theComposeList, boolean theComposeListIsInclude, @Nullable String theWantSystemUrlAndVersion, - @Nullable String theWantCode) + @Nullable String theWantCode, + ValueSetAndMessages theResponseBuilder) throws ExpansionCouldNotBeCompletedInternallyException { Consumer consumer = c -> { if (theComposeListIsInclude) { @@ -854,21 +893,40 @@ private void expandValueSetR5IncludeOrExcludes( } }; expandValueSetR5IncludeOrExcludes( - theValidationSupportContext, consumer, theComposeList, theWantSystemUrlAndVersion, theWantCode); + theComposeListIsInclude, + theValidationSupportContext, + consumer, + theComposeList, + theWantSystemUrlAndVersion, + theWantCode, + theResponseBuilder); } private void expandValueSetR5IncludeOrExcludes( + boolean theComposeListIsInclude, ValidationSupportContext theValidationSupportContext, Consumer theConsumer, List theComposeList, @Nullable String theWantSystemUrlAndVersion, - @Nullable String theWantCode) + @Nullable String theWantCode, + ValueSetAndMessages theResponseBuilder) throws ExpansionCouldNotBeCompletedInternallyException { ExpansionCouldNotBeCompletedInternallyException caughtException = null; + if (theComposeList.isEmpty()) { + if (theComposeListIsInclude) { + theResponseBuilder.addMessage("Empty compose list for includes"); + } + return; + } for (org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent nextInclude : theComposeList) { try { boolean outcome = expandValueSetR5IncludeOrExclude( - theValidationSupportContext, theConsumer, theWantSystemUrlAndVersion, theWantCode, nextInclude); + theValidationSupportContext, + theConsumer, + theWantSystemUrlAndVersion, + theWantCode, + nextInclude, + theResponseBuilder); if (isNotBlank(theWantCode)) { if (outcome) { return; @@ -895,7 +953,8 @@ private boolean expandValueSetR5IncludeOrExclude( Consumer theConsumer, @Nullable String 
theWantSystemUrlAndVersion, @Nullable String theWantCode, - org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent theInclude) + org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent theInclude, + ValueSetAndMessages theResponseBuilder) throws ExpansionCouldNotBeCompletedInternallyException { String wantSystemUrl = null; @@ -1099,8 +1158,9 @@ private boolean expandValueSetR5IncludeOrExclude( for (CanonicalType nextValueSetInclude : theInclude.getValueSet()) { org.hl7.fhir.r5.model.ValueSet vs = valueSetLoader.apply(nextValueSetInclude.getValueAsString()); if (vs != null) { - org.hl7.fhir.r5.model.ValueSet subExpansion = - expandValueSetR5(theValidationSupportContext, vs, theWantSystemUrlAndVersion, theWantCode); + org.hl7.fhir.r5.model.ValueSet subExpansion = expandValueSetR5( + theValidationSupportContext, vs, theWantSystemUrlAndVersion, theWantCode) + .getValueSet(); if (subExpansion == null) { String theMessage = "Failed to expand ValueSet: " + nextValueSetInclude.getValueAsString(); throw new ExpansionCouldNotBeCompletedInternallyException( @@ -1446,4 +1506,26 @@ public CodeValidationIssue getCodeValidationIssue() { return myCodeValidationIssue; } } + + private static class ValueSetAndMessages { + + private org.hl7.fhir.r5.model.ValueSet myValueSet; + private List myMessages = new ArrayList<>(); + + public void setValueSet(org.hl7.fhir.r5.model.ValueSet theValueSet) { + myValueSet = theValueSet; + } + + public void addMessage(String theMessage) { + myMessages.add(theMessage); + } + + public org.hl7.fhir.r5.model.ValueSet getValueSet() { + return myValueSet; + } + + public List getMessages() { + return myMessages; + } + } } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationSupportChain.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationSupportChain.java index 16b3dadb3da9..b77ec4cb9acb 100644 --- 
a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationSupportChain.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationSupportChain.java @@ -431,8 +431,8 @@ public IBaseResource generateSnapshot( if (retVal != null) { ourLog.atDebug() .setMessage("Profile snapshot for {} generated by {}") - .addArgument(() -> theInput.getIdElement()) - .addArgument(() -> next.getName()) + .addArgument(theInput::getIdElement) + .addArgument(next::getName) .log(); return retVal; } @@ -1167,7 +1167,7 @@ public boolean equals(Object theO) { if (this == theO) return true; if (!(theO instanceof TypedResourceByUrlKey)) return false; if (!super.equals(theO)) return false; - TypedResourceByUrlKey that = (TypedResourceByUrlKey) theO; + TypedResourceByUrlKey that = (TypedResourceByUrlKey) theO; return Objects.equals(myType, that.myType); } diff --git a/hapi-fhir-validation/src/test/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupportTest.java b/hapi-fhir-validation/src/test/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupportTest.java index 6d273a05d41f..4c4ba2a3ecd1 100644 --- a/hapi-fhir-validation/src/test/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupportTest.java +++ b/hapi-fhir-validation/src/test/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupportTest.java @@ -190,7 +190,7 @@ public void testValidateCode_customMimetypeVSCodeNotInVS_returnsError(String the IValidationSupport.CodeValidationResult outcome = mySvc.validateCode(valCtx, options, theCodeSystem, codeToValidate, null, vs.getUrl()); assertNotNull(outcome); assertFalse(outcome.isOk()); - assertEquals("Unknown code '" + theCodeSystem + "#" + codeToValidate + "' for in-memory expansion of ValueSet '" + vs.getUrl() + "'", outcome.getMessage()); + 
assertThat(outcome.getMessage()).contains("Unknown code '" + theCodeSystem + "#" + codeToValidate + "' for in-memory expansion of ValueSet '" + vs.getUrl() + "'"); } @Test @@ -240,7 +240,7 @@ public void testValidateCode_UnknownCodeSystem_EnumeratedValueSet() { outcome = myChain.validateCodeInValueSet(valCtx, options, "http://cs", "code99", null, vs); assertNotNull(outcome); assertFalse(outcome.isOk()); - assertEquals("Unknown code 'http://cs#code99' for in-memory expansion of ValueSet 'http://vs'", outcome.getMessage()); + assertThat(outcome.getMessage()).contains("Unknown code 'http://cs#code99' for in-memory expansion of ValueSet 'http://vs'"); assertEquals(IValidationSupport.IssueSeverity.ERROR, outcome.getSeverity()); } @@ -506,6 +506,8 @@ public void testExpandValueSet_VsUsesVersionedSystem_CsIsFragmentWithCode() { code = "123"; outcome = mySvc.validateCode(valCtx, options, codeSystemUrl, code, null, valueSetUrl); assertFalse(outcome.isOk()); + assertThat(outcome.getMessage()).contains("for in-memory expansion of ValueSet"); + assertThat(outcome.getSourceDetails()).contains("In-memory expansion containing 0 codes"); IValidationSupport.ValueSetExpansionOutcome expansion = mySvc.expandValueSet(valCtx, new ValueSetExpansionOptions(), vs); assertNull(expansion.getError()); diff --git a/hapi-fhir-validation/src/test/java/org/hl7/fhir/r4/validation/FhirInstanceValidatorR4Test.java b/hapi-fhir-validation/src/test/java/org/hl7/fhir/r4/validation/FhirInstanceValidatorR4Test.java index ff5d8e2e1fe0..c5c434b0b672 100644 --- a/hapi-fhir-validation/src/test/java/org/hl7/fhir/r4/validation/FhirInstanceValidatorR4Test.java +++ b/hapi-fhir-validation/src/test/java/org/hl7/fhir/r4/validation/FhirInstanceValidatorR4Test.java @@ -1349,7 +1349,7 @@ public void testValidateResourceWithDefaultValuesetBadCode() { ValidationResult output = myFhirValidator.validateWithResult(input); logResultsAndReturnAll(output); assertThat(output.getMessages().get(0).getMessage()).contains("Unknown 
code 'http://hl7.org/fhir/observation-status#notvalidcode'"); - assertThat(output.getMessages().get(1).getMessage()).contains("The value provided ('notvalidcode') was not found in the value set 'ObservationStatus' (http://hl7.org/fhir/ValueSet/observation-status|4.0.1), and a code is required from this value set (error message = Unknown code 'http://hl7.org/fhir/observation-status#notvalidcode' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/observation-status')"); + assertThat(output.getMessages().get(1).getMessage()).contains("The value provided ('notvalidcode') was not found in the value set 'ObservationStatus' (http://hl7.org/fhir/ValueSet/observation-status|4.0.1), and a code is required from this value set (error message = Unknown code 'http://hl7.org/fhir/observation-status#notvalidcode' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/observation-status'"); } @Test diff --git a/hapi-fhir-validation/src/test/java/org/hl7/fhir/r4b/validation/FhirInstanceValidatorR4BTest.java b/hapi-fhir-validation/src/test/java/org/hl7/fhir/r4b/validation/FhirInstanceValidatorR4BTest.java index 8b1d4df1051b..fe416ee36ca5 100644 --- a/hapi-fhir-validation/src/test/java/org/hl7/fhir/r4b/validation/FhirInstanceValidatorR4BTest.java +++ b/hapi-fhir-validation/src/test/java/org/hl7/fhir/r4b/validation/FhirInstanceValidatorR4BTest.java @@ -1371,7 +1371,7 @@ public void testValidateWithUcum() throws IOException { // so first error has `Unknown code for ValueSet` error message assertThat(all.get(0).getMessage()).contains("The Coding provided (http://unitsofmeasure.org#Heck) was not found in the value set 'Vital Signs Units' " + "(http://hl7.org/fhir/ValueSet/ucum-vitals-common|4.3.0), and a code should come from this value set unless it has no suitable code (note that the validator cannot judge what is suitable). 
" + - " (error message = Unknown code 'http://unitsofmeasure.org#Heck' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/ucum-vitals-common')"); + " (error message = Unknown code 'http://unitsofmeasure.org#Heck' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/ucum-vitals-common'"); assertThat(all.get(0).getLocationString()).contains("Observation.value.ofType(Quantity)"); // validate second error assertThat(all.get(1).getMessage()).contains("Error processing unit 'Heck': The unit 'Heck' is unknown' at position 0 (for 'http://unitsofmeasure.org#Heck')"); diff --git a/hapi-fhir-validation/src/test/java/org/hl7/fhir/r5/validation/FhirInstanceValidatorR5Test.java b/hapi-fhir-validation/src/test/java/org/hl7/fhir/r5/validation/FhirInstanceValidatorR5Test.java index 7a58d4f7d25c..74bac818a149 100644 --- a/hapi-fhir-validation/src/test/java/org/hl7/fhir/r5/validation/FhirInstanceValidatorR5Test.java +++ b/hapi-fhir-validation/src/test/java/org/hl7/fhir/r5/validation/FhirInstanceValidatorR5Test.java @@ -910,7 +910,7 @@ public void testValidateResourceWithDefaultValuesetBadCode() { ValidationResult output = myVal.validateWithResult(input); logResultsAndReturnAll(output); assertThat(output.getMessages().get(0).getMessage()).contains("Unknown code 'http://hl7.org/fhir/observation-status#notvalidcode'"); - assertThat(output.getMessages().get(1).getMessage()).contains("The value provided ('notvalidcode') was not found in the value set 'Observation Status' (http://hl7.org/fhir/ValueSet/observation-status|5.0.0), and a code is required from this value set (error message = Unknown code 'http://hl7.org/fhir/observation-status#notvalidcode' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/observation-status')"); + assertThat(output.getMessages().get(1).getMessage()).contains("The value provided ('notvalidcode') was not found in the value set 'Observation Status' (http://hl7.org/fhir/ValueSet/observation-status|5.0.0), and a code is 
required from this value set (error message = Unknown code 'http://hl7.org/fhir/observation-status#notvalidcode' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/observation-status'"); } diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index 93b151848b7d..b443b2371ea5 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index 15b14e92ac82..48be9ae937bd 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index 05227daf415b..8d12a0c89b0e 100644 --- a/pom.xml +++ b/pom.xml @@ -8,7 +8,7 @@ ca.uhn.hapi.fhir hapi-fhir pom - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. @@ -1170,6 +1170,11 @@ caffeine ${caffeine_version} + + com.github.jsqlparser + jsqlparser + 5.0 + com.googlecode.owasp-java-html-sanitizer owasp-java-html-sanitizer @@ -1361,7 +1366,7 @@ org.jetbrains annotations - 23.0.0 + 24.0.1 commons-io @@ -2639,7 +2644,7 @@ ca.uhn.hapi.fhir hapi-tinder-plugin - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index 7310102bfc5f..ae2b56367f6d 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index df98eac11cc5..b057bb129406 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 
7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index 180c4f6347fe..b89953fadb0e 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.8-SNAPSHOT + 7.7.9-SNAPSHOT ../../pom.xml