IFhirVersionServer for this version.
* Note that this method may only be called if the hapi-fhir-server
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java
index 0703f080c7e1..3959d3d6a29b 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java
@@ -19,9 +19,22 @@
*/
package ca.uhn.fhir.rest.param;
+import java.util.Collections;
import java.util.Map;
public class HistorySearchDateRangeParam extends DateRangeParam {
+ /**
+ * Constructor
+ *
+ * @since 8.0.0
+ */
+ public HistorySearchDateRangeParam() {
+ this(Collections.emptyMap(), new DateRangeParam(), null);
+ }
+
+ /**
+ * Constructor
+ */
public HistorySearchDateRangeParam(
Map theParameters, DateRangeParam theDateRange, Integer theOffset) {
super(theDateRange);
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java
index 2f3dac411e47..84ed3e2303b1 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FileUtil.java
@@ -29,7 +29,7 @@ public static String formatFileSize(long theBytes) {
if (theBytes <= 0) {
return "0 " + UNITS[0];
}
- int digitGroups = (int) (Math.log10(theBytes) / Math.log10(1024));
+ int digitGroups = (int) (Math.log10((double) theBytes) / Math.log10(1024));
digitGroups = Math.min(digitGroups, UNITS.length - 1);
return new DecimalFormat("###0.#").format(theBytes / Math.pow(1024, digitGroups)) + " " + UNITS[digitGroups];
}
diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml
index 615928fa4229..09650eb318c9 100644
--- a/hapi-fhir-bom/pom.xml
+++ b/hapi-fhir-bom/pom.xml
@@ -4,7 +4,7 @@
4.0.0
ca.uhn.hapi.fhir
hapi-fhir-bom
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
pom
HAPI FHIR BOM
@@ -12,7 +12,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml
index db4657026c13..03c8c95000f1 100644
--- a/hapi-fhir-checkstyle/pom.xml
+++ b/hapi-fhir-checkstyle/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
index 33782915ddc0..0d9588d571b1 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
index 301353ac697f..b35c0adeb379 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-fhir-cli
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml
index 746014589d2c..b39413e22de3 100644
--- a/hapi-fhir-cli/pom.xml
+++ b/hapi-fhir-cli/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml
index f6dcd6b81a24..07962556b50c 100644
--- a/hapi-fhir-client-okhttp/pom.xml
+++ b/hapi-fhir-client-okhttp/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml
index f9b12517d257..77d53a2c2410 100644
--- a/hapi-fhir-client/pom.xml
+++ b/hapi-fhir-client/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml
index 28d3078c0876..99288856f0b2 100644
--- a/hapi-fhir-converter/pom.xml
+++ b/hapi-fhir-converter/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml
index 0749e3faaff4..990e45623903 100644
--- a/hapi-fhir-dist/pom.xml
+++ b/hapi-fhir-dist/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml
index a8d553276c60..c01781c6e11a 100644
--- a/hapi-fhir-docs/pom.xml
+++ b/hapi-fhir-docs/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-fix-timezone-issue-for-history-at-query.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-fix-timezone-issue-for-history-at-query.yaml
new file mode 100644
index 000000000000..f6a1e9b750fd
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-fix-timezone-issue-for-history-at-query.yaml
@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 6409
+title: "When performing a `_history` query using the `_at` parameter, the time value
+ is now converted to a zoned-date before being passed to the database. This should
+ avoid conflicts around date changes on some databases.
+ "
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-optimize-versioned-tag-loading.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-optimize-versioned-tag-loading.yaml
new file mode 100644
index 000000000000..37faa975eeb8
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-optimize-versioned-tag-loading.yaml
@@ -0,0 +1,7 @@
+---
+type: perf
+issue: 6409
+title: "When searching in versioned tag mode, the JPA server now avoids a redundant
+ lookup of the un-versioned tags, avoiding an extra unnecessary database query
+ in some cases.
+ "
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-stop-using-provenance-table.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-stop-using-provenance-table.yaml
new file mode 100644
index 000000000000..2f845786de91
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6409-stop-using-provenance-table.yaml
@@ -0,0 +1,11 @@
+---
+type: perf
+issue: 6409
+title: "The JPA server will no longer use the HFJ_RES_VER_PROV table to store and index values from
+ the `Resource.meta.source` element. Beginning in HAPI FHIR 6.8.0 (and Smile CDR 2023.08.R01), a
+ new pair of columns has been used to store data for this element, so this change only affects
+ data which was stored in HAPI FHIR prior to version 6.8.0 (released August 2023). If you have
+ FHIR resources which were stored in a JPA server prior to this version, and you use the
+ Resource.meta.source element and/or the `_source` search parameter, you should perform a complete
+ reindex of your server to ensure that data is not lost. See the upgrade notes for more information.
+ "
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6478-transaction-bundle-updates-with-multiple-conditional-urls.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6478-transaction-bundle-updates-with-multiple-conditional-urls.yaml
new file mode 100644
index 000000000000..f431323b068e
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6478-transaction-bundle-updates-with-multiple-conditional-urls.yaml
@@ -0,0 +1,8 @@
+---
+type: perf
+issue: 6478
+jira: SMILE-8955
+title: "Transactions with multiple saved search urls will have the saved search urls
+ deleted in a batch, instead of 1 at a time.
+ This is a minor performance update.
+"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6522-cache-cleanup.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6522-cache-cleanup.yaml
new file mode 100644
index 000000000000..eb04499d8b6f
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6522-cache-cleanup.yaml
@@ -0,0 +1,7 @@
+---
+type: perf
+issue: 6522
+title: "Several memory caches in various parts of the JPA server have been removed in an
+ effort to consolidate caching in this system into two places: the MemoryCacheService and the
+ ValidationSupportChain. This should make the system easier to manage."
+
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md
index 58969977a1b5..b8fa6a5af5d7 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md
@@ -1,4 +1,20 @@
+# Upgrade Notes
+
+The JPA server stores values for the field `Resource.meta.source` in dedicated columns in its database so that they can be indexed and searched as needed using the `_source` Search Parameter.
+
+Prior to HAPI FHIR 6.8.0 (and Smile CDR 2023.08.R01), these values were stored in a dedicated table called `HFJ_RES_VER_PROV`. Beginning in HAPI FHIR 6.8.0 (Smile CDR 2023.08.R01), two new columns were added to the `HFJ_RES_VER`
+table which store the same data and make it available for searches.
+
+As of HAPI FHIR 8.0.0, the legacy table is no longer searched by default. If you do not have Resource.meta.source data stored in HAPI FHIR that was last created/updated prior to version 6.8.0, this change will not affect you and no action needs to be taken.
+
+If you do have such data, follow these steps:
+
+* Enable the `JpaStorageSettings` setting `setAccessMetaSourceInformationFromProvenanceTable(true)` to configure the server to continue using the legacy table (a configuration sketch is shown below).
+
+* Perform a server resource reindex by invoking the [$reindex Operation (server)](https://smilecdr.com/docs/fhir_repository/search_parameter_reindexing.html#reindex-server) with the `optimizeStorage` parameter set to `ALL_VERSIONS`.
+
+* When this reindex operation has successfully completed, the setting above can be disabled. Disabling this setting avoids an extra database round-trip when loading data, so this change will have a positive performance impact on your server.
+
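+A minimal sketch of the first step, assuming the server is configured through a `JpaStorageSettings` bean (the surrounding class and method names are illustrative; only `setAccessMetaSourceInformationFromProvenanceTable` comes from the notes above):
+
+```java
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+
+public class LegacyMetaSourceConfig {
+
+	public JpaStorageSettings storageSettings() {
+		JpaStorageSettings settings = new JpaStorageSettings();
+		// Keep reading Resource.meta.source / _source from the legacy HFJ_RES_VER_PROV
+		// table until the optimizeStorage=ALL_VERSIONS reindex has completed.
+		settings.setAccessMetaSourceInformationFromProvenanceTable(true);
+		return settings;
+	}
+}
+```
+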
# Fulltext Search with _lastUpdated Filter
-Fulltext searches have been updated to support `_lastUpdated` search parameter. A reindexing of Search Parameters
-is required to migrate old data to support the `_lastUpdated` search parameter.
+Fulltext searches have been updated to support the `_lastUpdated` search parameter. If you are using Advanced Hibernate Search indexing and wish to use the `_lastUpdated` search parameter with this feature, a full reindex of your repository is required.
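+
+For example, after that reindex an Advanced Hibernate Search query can combine fulltext and date criteria, e.g. `GET [base]/Observation?_content=amoxicillin&_lastUpdated=ge2024-01-01` (an illustrative request: the resource type and values are placeholders).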
diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml
index e6a3f64d67d2..cef258e3a670 100644
--- a/hapi-fhir-jacoco/pom.xml
+++ b/hapi-fhir-jacoco/pom.xml
@@ -11,7 +11,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml
index f1ce7ec0ce88..ce4f586cd916 100644
--- a/hapi-fhir-jaxrsserver-base/pom.xml
+++ b/hapi-fhir-jaxrsserver-base/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml
index e894f9877787..97b4947eafd3 100644
--- a/hapi-fhir-jpa/pom.xml
+++ b/hapi-fhir-jpa/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml
index 02f29b40e6ec..2ec6079eb2e8 100644
--- a/hapi-fhir-jpaserver-base/pom.xml
+++ b/hapi-fhir-jpaserver-base/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.8-SNAPSHOT
+ 7.7.9-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java
index 6cae759b3dcb..8b34e38699da 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java
@@ -23,7 +23,9 @@
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
+import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
+import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
@@ -37,6 +39,7 @@
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.Optional;
import static org.slf4j.LoggerFactory.getLogger;
@@ -90,20 +93,9 @@ public ResourceVersionMap getVersionMap(
public ResourcePersistentIdMap getLatestVersionIdsForResourceIds(
RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
ResourcePersistentIdMap idToPID = new ResourcePersistentIdMap();
- HashMap<String, List<IIdType>> resourceTypeToIds = new HashMap<>();
- for (IIdType id : theIds) {
- String resourceType = id.getResourceType();
- if (!resourceTypeToIds.containsKey(resourceType)) {
- resourceTypeToIds.put(resourceType, new ArrayList<>());
- }
- resourceTypeToIds.get(resourceType).add(id);
- }
-
- for (List<IIdType> nextIds : resourceTypeToIds.values()) {
- ResourcePersistentIdMap idAndPID = getIdsOfExistingResources(theRequestPartitionId, nextIds);
- idToPID.putAll(idAndPID);
- }
+ ResourcePersistentIdMap idAndPID = getIdsOfExistingResources(theRequestPartitionId, theIds);
+ idToPID.putAll(idAndPID);
return idToPID;
}
@@ -124,14 +116,17 @@ private ResourcePersistentIdMap getIdsOfExistingResources(
return retval;
}
- List<JpaPid> jpaPids =
- myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId, new ArrayList<>(theIds));
+ Map<IIdType, IResourceLookup<JpaPid>> identities = myIdHelperService.resolveResourceIdentities(
+ thePartitionId,
+ new ArrayList<>(theIds),
+ ResolveIdentityMode.includeDeleted().cacheOk());
// we'll use this map to fetch pids that require versions
HashMap<Long, JpaPid> pidsToVersionToResourcePid = new HashMap<>();
// fill in our map
- for (JpaPid pid : jpaPids) {
+ for (IResourceLookup<JpaPid> identity : identities.values()) {
+ JpaPid pid = identity.getPersistentId();
if (pid.getVersion() == null) {
pidsToVersionToResourcePid.put(pid.getId(), pid);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
index a17e3425f466..b889b27e10ff 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
@@ -121,11 +121,12 @@
import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder;
+import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryPredicateBuilder;
+import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryProvenancePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder;
-import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder;
@@ -699,8 +700,15 @@ public TokenPredicateBuilder newTokenPredicateBuilder(SearchQueryBuilder theSear
@Bean
@Scope("prototype")
- public SourcePredicateBuilder newSourcePredicateBuilder(SearchQueryBuilder theSearchBuilder) {
- return new SourcePredicateBuilder(theSearchBuilder);
+ public ResourceHistoryPredicateBuilder newResourceHistoryPredicateBuilder(SearchQueryBuilder theSearchBuilder) {
+ return new ResourceHistoryPredicateBuilder(theSearchBuilder);
+ }
+
+ @Bean
+ @Scope("prototype")
+ public ResourceHistoryProvenancePredicateBuilder newResourceHistoryProvenancePredicateBuilder(
+ SearchQueryBuilder theSearchBuilder) {
+ return new ResourceHistoryProvenancePredicateBuilder(theSearchBuilder);
}
@Bean
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java
index 80d8665362a7..8bac9ba0312c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java
@@ -29,7 +29,6 @@
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
-import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
@@ -89,9 +88,6 @@ public class SearchConfig {
@Autowired
private DaoRegistry myDaoRegistry;
- @Autowired
- private IResourceSearchViewDao myResourceSearchViewDao;
-
@Autowired
private FhirContext myContext;
@@ -169,7 +165,6 @@ public ISearchBuilder newSearchBuilder(String theResourceName, Class extends I
myInterceptorBroadcaster,
myResourceTagDao,
myDaoRegistry,
- myResourceSearchViewDao,
myContext,
myIdHelperService,
theResourceType);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index 5dc46fc61380..396eee7275b3 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -57,7 +57,6 @@
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
-import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@@ -561,8 +560,8 @@ protected EncodedResource populateResourceIntoEntity(
} else {
ResourceHistoryTable currentHistoryVersion = theEntity.getCurrentVersionEntity();
if (currentHistoryVersion == null) {
- currentHistoryVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
- theEntity.getId(), theEntity.getVersion());
+ currentHistoryVersion =
+ myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
}
if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) {
changed = true;
@@ -883,7 +882,11 @@ public ResourceTable updateEntity(
}
}
if (!StringUtils.isBlank(entity.getResourceType())) {
- validateIncomingResourceTypeMatchesExisting(theResource, entity);
+ String resourceType = myContext.getResourceType(theResource);
+ // This is just a sanity check and should never actually fail.
+ // We resolve the ID using IdLookupService, and there should be
+ // no way to get it to give you a mismatched type for an ID.
+ Validate.isTrue(resourceType.equals(entity.getResourceType()));
}
}
@@ -1079,7 +1082,7 @@ public ResourceTable updateEntity(
*/
if (thePerformIndexing) {
if (newParams == null) {
- myExpungeService.deleteAllSearchParams(JpaPid.fromId(entity.getId()));
+ myExpungeService.deleteAllSearchParams(entity.getPersistentId());
entity.clearAllParamsPopulated();
} else {
@@ -1215,7 +1218,13 @@ public IBasePersistedResource updateHistoryEntity(
} else {
historyEntity = (ResourceHistoryTable) theHistoryEntity;
if (!StringUtils.isBlank(historyEntity.getResourceType())) {
- validateIncomingResourceTypeMatchesExisting(theResource, historyEntity);
+ String resourceType = myContext.getResourceType(theResource);
+ if (!resourceType.equals(historyEntity.getResourceType())) {
+ throw new UnprocessableEntityException(Msg.code(930) + "Existing resource ID["
+ + historyEntity.getIdDt().toUnqualifiedVersionless() + "] is of type["
+ + historyEntity.getResourceType()
+ + "] - Cannot update with [" + resourceType + "]");
+ }
}
historyEntity.setDeleted(null);
@@ -1305,8 +1314,8 @@ private void createHistoryEntry(
* this could return null if the current resourceVersion has been expunged
* in which case we'll still create a new one
*/
- historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
- theEntity.getResourceId(), resourceVersion - 1);
+ historyEntry =
+ myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), resourceVersion - 1);
if (historyEntry != null) {
reusingHistoryEntity = true;
theEntity.populateHistoryEntityVersionAndDates(historyEntry);
@@ -1364,29 +1373,12 @@ private void createHistoryEntry(
boolean haveSource = isNotBlank(source) && shouldStoreSource;
boolean haveRequestId = isNotBlank(requestId) && shouldStoreRequestId;
if (haveSource || haveRequestId) {
- ResourceHistoryProvenanceEntity provenance = null;
- if (reusingHistoryEntity) {
- /*
- * If version history is disabled, then we may be reusing
- * a previous history entity. If that's the case, let's try
- * to reuse the previous provenance entity too.
- */
- provenance = historyEntry.getProvenance();
- }
- if (provenance == null) {
- provenance = historyEntry.toProvenance();
- }
- provenance.setResourceHistoryTable(historyEntry);
- provenance.setResourceTable(theEntity);
- provenance.setPartitionId(theEntity.getPartitionId());
if (haveRequestId) {
String persistedRequestId = left(requestId, Constants.REQUEST_ID_LENGTH);
- provenance.setRequestId(persistedRequestId);
historyEntry.setRequestId(persistedRequestId);
}
if (haveSource) {
String persistedSource = left(source, ResourceHistoryTable.SOURCE_URI_LENGTH);
- provenance.setSourceUri(persistedSource);
historyEntry.setSourceUri(persistedSource);
}
if (theResource != null) {
@@ -1396,8 +1388,6 @@ private void createHistoryEntry(
shouldStoreRequestId ? requestId : null,
theResource);
}
-
- myEntityManager.persist(provenance);
}
}
@@ -1408,15 +1398,6 @@ private String getRequestId(RequestDetails theRequest, String theSource) {
return theRequest != null ? theRequest.getRequestId() : null;
}
- private void validateIncomingResourceTypeMatchesExisting(IBaseResource theResource, BaseHasResource entity) {
- String resourceType = myContext.getResourceType(theResource);
- if (!resourceType.equals(entity.getResourceType())) {
- throw new UnprocessableEntityException(Msg.code(930) + "Existing resource ID["
- + entity.getIdDt().toUnqualifiedVersionless() + "] is of type[" + entity.getResourceType()
- + "] - Cannot update with [" + resourceType + "]");
- }
- }
-
@Override
public DaoMethodOutcome updateInternal(
RequestDetails theRequestDetails,
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
index ece727ab61b5..7457467562df 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
@@ -44,6 +44,7 @@
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
+import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictUtil;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
@@ -52,6 +53,7 @@
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
@@ -206,6 +208,9 @@ public abstract class BaseHapiFhirResourceDao extends B
@Autowired
private IJobCoordinator myJobCoordinator;
+ @Autowired
+ private IResourceHistoryProvenanceDao myResourceHistoryProvenanceDao;
+
private IInstanceValidatorModule myInstanceValidator;
private String myResourceName;
private Class myResourceType;
@@ -562,7 +567,7 @@ private DaoMethodOutcome doCreateForPostOrPut(
thePerformIndexing);
// Store the resource forced ID if necessary
- JpaPid jpaPid = JpaPid.fromId(updatedEntity.getResourceId());
+ JpaPid jpaPid = updatedEntity.getPersistentId();
// Populate the resource with its actual final stored ID from the entity
theResource.setId(entity.getIdDt());
@@ -570,10 +575,9 @@ private DaoMethodOutcome doCreateForPostOrPut(
// Pre-cache the resource ID
jpaPid.setAssociatedResourceId(entity.getIdType(myFhirContext));
String fhirId = entity.getFhirId();
- if (fhirId == null) {
- fhirId = Long.toString(entity.getId());
- }
- myIdHelperService.addResolvedPidToFhirId(jpaPid, theRequestPartitionId, getResourceName(), fhirId, null);
+ assert fhirId != null;
+ myIdHelperService.addResolvedPidToFhirIdAfterCommit(
+ jpaPid, theRequestPartitionId, getResourceName(), fhirId, null);
theTransactionDetails.addResolvedResourceId(jpaPid.getAssociatedResourceId(), jpaPid);
theTransactionDetails.addResolvedResource(jpaPid.getAssociatedResourceId(), theResource);
@@ -723,9 +727,12 @@ public DaoMethodOutcome delete(
validateIdPresentForDelete(theId);
validateDeleteEnabled();
+ RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(
+ theRequestDetails, getResourceName(), theId);
+
final ResourceTable entity;
try {
- entity = readEntityLatestVersion(theId, theRequestDetails, theTransactionDetails);
+ entity = readEntityLatestVersion(theId, requestPartitionId, theTransactionDetails);
} catch (ResourceNotFoundException ex) {
// we don't want to throw 404s.
// if not found, return an outcome anyways.
@@ -803,6 +810,13 @@ public DaoMethodOutcome delete(
.getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", w.getMillis());
outcome.setOperationOutcome(createInfoOperationOutcome(msg, StorageResponseCodeEnum.SUCCESSFUL_DELETE));
+ myIdHelperService.addResolvedPidToFhirIdAfterCommit(
+ entity.getPersistentId(),
+ requestPartitionId,
+ entity.getResourceType(),
+ entity.getFhirId(),
+ entity.getDeleted());
+
return outcome;
}
@@ -1005,7 +1019,7 @@ public void beforeCommit(boolean readOnly) {
protected ResourceTable updateEntityForDelete(
RequestDetails theRequest, TransactionDetails theTransactionDetails, ResourceTable theEntity) {
- myResourceSearchUrlSvc.deleteByResId(theEntity.getId());
+ myResourceSearchUrlSvc.deleteByResId(theEntity.getPersistentId());
Date updateTime = new Date();
return updateEntity(theRequest, null, theEntity, updateTime, true, true, theTransactionDetails, false, true);
}
@@ -1144,15 +1158,15 @@ private void doMetaDelete(
}
@Override
- @Transactional(propagation = Propagation.NEVER)
public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest) {
+ HapiTransactionService.noTransactionAllowed();
validateExpungeEnabled();
return forceExpungeInExistingTransaction(theId, theExpungeOptions, theRequest);
}
@Override
- @Transactional(propagation = Propagation.NEVER)
public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) {
+ HapiTransactionService.noTransactionAllowed();
ourLog.info("Beginning TYPE[{}] expunge operation", getResourceName());
validateExpungeEnabled();
return myExpungeService.expunge(getResourceName(), null, theExpungeOptions, theRequestDetails);
@@ -1250,7 +1264,7 @@ public IBundleProvider history(
return myPersistedJpaBundleProviderFactory.history(
theRequest,
myResourceName,
- entity.getId(),
+ entity.getPersistentId(),
theSince,
theUntil,
theOffset,
@@ -1280,7 +1294,7 @@ public IBundleProvider history(
return myPersistedJpaBundleProviderFactory.history(
theRequest,
myResourceName,
- entity.getId(),
+ JpaPid.fromId(entity.getId()),
theHistorySearchDateRangeParam.getLowerBoundAsInstant(),
theHistorySearchDateRangeParam.getUpperBoundAsInstant(),
theHistorySearchDateRangeParam.getOffset(),
@@ -1380,8 +1394,8 @@ protected void doMetaAddOperation(
doMetaAdd(theMetaAdd, latestVersion, theRequest, transactionDetails);
// Also update history entry
- ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
- entity.getId(), entity.getVersion());
+ ResourceHistoryTable history =
+ myResourceHistoryTableDao.findForIdAndVersion(entity.getId(), entity.getVersion());
doMetaAdd(theMetaAdd, history, theRequest, transactionDetails);
}
@@ -1428,8 +1442,8 @@ public void doMetaDeleteOperation(
} else {
doMetaDelete(theMetaDel, latestVersion, theRequest, transactionDetails);
// Also update history entry
- ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
- entity.getId(), entity.getVersion());
+ ResourceHistoryTable history =
+ myResourceHistoryTableDao.findForIdAndVersion(entity.getId(), entity.getVersion());
doMetaDelete(theMetaDel, history, theRequest, transactionDetails);
}
@@ -1694,7 +1708,7 @@ private void reindexOptimizeStorage(
ResourceTable entity, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageMode) {
ResourceHistoryTable historyEntity = entity.getCurrentVersionEntity();
if (historyEntity != null) {
- reindexOptimizeStorageHistoryEntity(entity, historyEntity);
+ reindexOptimizeStorageHistoryEntityThenDetachIt(entity, historyEntity);
if (theOptimizeStorageMode == ReindexParameters.OptimizeStorageModeEnum.ALL_VERSIONS) {
int pageSize = 100;
for (int page = 0; ((long) page * pageSize) < entity.getVersion(); page++) {
@@ -1704,39 +1718,44 @@ private void reindexOptimizeStorage(
// different pages as the underlying data gets updated.
PageRequest pageRequest = PageRequest.of(page, pageSize, Sort.by("myId"));
Slice<ResourceHistoryTable> historyEntities =
- myResourceHistoryTableDao.findForResourceIdAndReturnEntitiesAndFetchProvenance(
+ myResourceHistoryTableDao.findAllVersionsExceptSpecificForResourcePid(
pageRequest, entity.getId(), historyEntity.getVersion());
for (ResourceHistoryTable next : historyEntities) {
- reindexOptimizeStorageHistoryEntity(entity, next);
+ reindexOptimizeStorageHistoryEntityThenDetachIt(entity, next);
}
}
}
}
}
- private void reindexOptimizeStorageHistoryEntity(ResourceTable entity, ResourceHistoryTable historyEntity) {
- boolean changed = false;
+ /**
+ * Note that if the entity has changed, it will be detached after being saved,
+ * in order to avoid keeping too many resources in memory.
+ */
+ private void reindexOptimizeStorageHistoryEntityThenDetachIt(
+ ResourceTable entity, ResourceHistoryTable historyEntity) {
if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC
|| historyEntity.getEncoding() == ResourceEncodingEnum.JSON) {
byte[] resourceBytes = historyEntity.getResource();
if (resourceBytes != null) {
String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding());
- if (myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText)) {
- changed = true;
- }
+ myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText);
}
}
- if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
- if (historyEntity.getProvenance() != null) {
- historyEntity.setSourceUri(historyEntity.getProvenance().getSourceUri());
- historyEntity.setRequestId(historyEntity.getProvenance().getRequestId());
- changed = true;
+ if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
+ if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
+ Long id = historyEntity.getId();
+ Optional<ResourceHistoryProvenanceEntity> provenanceEntityOpt =
+ myResourceHistoryProvenanceDao.findById(id);
+ if (provenanceEntityOpt.isPresent()) {
+ ResourceHistoryProvenanceEntity provenanceEntity = provenanceEntityOpt.get();
+ historyEntity.setSourceUri(provenanceEntity.getSourceUri());
+ historyEntity.setRequestId(provenanceEntity.getRequestId());
+ myResourceHistoryProvenanceDao.delete(provenanceEntity);
+ }
}
}
- if (changed) {
- myResourceHistoryTableDao.save(historyEntity);
- }
}
private BaseHasResource readEntity(
@@ -1873,47 +1892,37 @@ private ResourceTable readEntityLatestVersion(
IIdType theId,
@Nonnull RequestPartitionId theRequestPartitionId,
TransactionDetails theTransactionDetails) {
- validateResourceTypeAndThrowInvalidRequestException(theId);
+ HapiTransactionService.requireTransaction();
+
+ IIdType id = theId;
+ validateResourceTypeAndThrowInvalidRequestException(id);
+ if (!id.hasResourceType()) {
+ id = id.withResourceType(getResourceName());
+ }
JpaPid persistentId = null;
if (theTransactionDetails != null) {
- if (theTransactionDetails.isResolvedResourceIdEmpty(theId.toUnqualifiedVersionless())) {
- throw new ResourceNotFoundException(Msg.code(1997) + theId);
+ if (theTransactionDetails.isResolvedResourceIdEmpty(id.toUnqualifiedVersionless())) {
+ throw new ResourceNotFoundException(Msg.code(1997) + id);
}
if (theTransactionDetails.hasResolvedResourceIds()) {
- persistentId = (JpaPid) theTransactionDetails.getResolvedResourceId(theId);
+ persistentId = (JpaPid) theTransactionDetails.getResolvedResourceId(id);
}
}
if (persistentId == null) {
- String resourceName = getResourceName();
- if (myStorageSettings.getResourceClientIdStrategy()
- == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC) {
- if (theId.isIdPartValidLong()) {
- /*
- * If it's a pure numeric ID and we are in ALPHANUMERIC mode, then the number
- * corresponds to a DB PID. In this case we want to resolve it regardless of
- * which type the client has supplied. This is because DB PIDs are unique across
- * all resource types (unlike FHIR_IDs which are namespaced to the resource type).
- * We want to load the resource with that PID regardless of type because if
- * the user is trying to update it we want to fail if the type is wrong, as
- * opposed to trying to create a new instance.
- */
- resourceName = null;
- }
- }
persistentId = myIdHelperService.resolveResourceIdentityPid(
theRequestPartitionId,
- resourceName,
- theId.getIdPart(),
+ id.getResourceType(),
+ id.getIdPart(),
ResolveIdentityMode.includeDeleted().cacheOk());
}
ResourceTable entity = myEntityManager.find(ResourceTable.class, persistentId.getId());
if (entity == null) {
- throw new ResourceNotFoundException(Msg.code(1998) + theId);
+ throw new ResourceNotFoundException(Msg.code(1998) + id);
}
- validateGivenIdIsAppropriateToRetrieveResource(theId, entity);
+ validateGivenIdIsAppropriateToRetrieveResource(id, entity);
return entity;
}
@@ -2351,13 +2360,13 @@ private DaoMethodOutcome doUpdate(
RequestDetails theRequest,
TransactionDetails theTransactionDetails,
RequestPartitionId theRequestPartitionId) {
-
+ DaoMethodOutcome outcome = null;
preProcessResourceForStorage(theResource);
preProcessResourceForStorage(theResource, theRequest, theTransactionDetails, thePerformIndexing);
ResourceTable entity = null;
- IIdType resourceId;
+ IIdType resourceId = null;
RestOperationTypeEnum update = RestOperationTypeEnum.UPDATE;
if (isNotBlank(theMatchUrl)) {
// Validate that the supplied resource matches the conditional.
@@ -2399,7 +2408,7 @@ && getStorageSettings().getResourceServerIdStrategy()
theResource.setId(UUID.randomUUID().toString());
theResource.setUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED, Boolean.TRUE);
}
- DaoMethodOutcome outcome = doCreateForPostOrPut(
+ outcome = doCreateForPostOrPut(
theRequest,
theResource,
theMatchUrl,
@@ -2414,8 +2423,6 @@ && getStorageSettings().getResourceServerIdStrategy()
myMatchResourceUrlService.matchUrlResolved(
theTransactionDetails, getResourceName(), theMatchUrl, (JpaPid) outcome.getPersistentId());
}
-
- return outcome;
}
} else {
/*
@@ -2427,6 +2434,10 @@ && getStorageSettings().getResourceServerIdStrategy()
assert resourceId != null;
assert resourceId.hasIdPart();
+ if (!resourceId.hasResourceType()) {
+ resourceId = resourceId.withResourceType(getResourceName());
+ }
+
boolean create = false;
if (theRequest != null) {
@@ -2445,7 +2456,7 @@ && getStorageSettings().getResourceServerIdStrategy()
}
if (create) {
- return doCreateForPostOrPut(
+ outcome = doCreateForPostOrPut(
theRequest,
theResource,
null,
@@ -2458,16 +2469,35 @@ && getStorageSettings().getResourceServerIdStrategy()
}
// Start
- return doUpdateForUpdateOrPatch(
- theRequest,
- resourceId,
- theMatchUrl,
- thePerformIndexing,
- theForceUpdateVersion,
- theResource,
- entity,
- update,
- theTransactionDetails);
+ if (outcome == null) {
+ outcome = doUpdateForUpdateOrPatch(
+ theRequest,
+ resourceId,
+ theMatchUrl,
+ thePerformIndexing,
+ theForceUpdateVersion,
+ theResource,
+ entity,
+ update,
+ theTransactionDetails);
+ }
+
+ postUpdateTransaction(theTransactionDetails);
+
+ return outcome;
+ }
+
+ @SuppressWarnings("rawtypes")
+ protected void postUpdateTransaction(TransactionDetails theTransactionDetails) {
+ // Transactions will delete these at the end of the entire transaction
+ if (!theTransactionDetails.isFhirTransaction()) {
+ Set<IResourcePersistentId> resourceIds = theTransactionDetails.getUpdatedResourceIds();
+ if (resourceIds != null && !resourceIds.isEmpty()) {
+ List<Long> ids = resourceIds.stream().map(r -> (Long) r.getId()).collect(Collectors.toList());
+
+ myResourceSearchUrlSvc.deleteByResIds(ids);
+ }
+ }
}
@Override
@@ -2489,9 +2519,22 @@ protected DaoMethodOutcome doUpdateForUpdateOrPatch(
*/
ResourceTable entity = (ResourceTable) theEntity;
if (entity.isSearchUrlPresent()) {
- myResourceSearchUrlSvc.deleteByResId(
- (Long) theEntity.getPersistentId().getId());
- entity.setSearchUrlPresent(false);
+ JpaPid persistentId = JpaPid.fromId(entity.getResourceId());
+ theTransactionDetails.addUpdatedResourceId(persistentId);
+
+ entity.setSearchUrlPresent(false); // it will be removed at the end
+ }
+
+ if (entity.isDeleted()) {
+ // We're un-deleting this entity so let's inform the memory cache service
+ myIdHelperService.addResolvedPidToFhirIdAfterCommit(
+ entity.getPersistentId(),
+ entity.getPartitionId() == null
+ ? RequestPartitionId.defaultPartition()
+ : entity.getPartitionId().toPartitionId(),
+ entity.getResourceType(),
+ entity.getFhirId(),
+ null);
}
return super.doUpdateForUpdateOrPatch(
@@ -2571,7 +2614,6 @@ private DaoMethodOutcome doUpdateWithHistoryRewrite(
}
@Override
- @Transactional(propagation = Propagation.SUPPORTS)
public MethodOutcome validate(
T theResource,
IIdType theId,
@@ -2587,19 +2629,30 @@ public MethodOutcome validate(
throw new InvalidRequestException(
Msg.code(991) + "No ID supplied. ID is required when validating with mode=DELETE");
}
- final ResourceTable entity = readEntityLatestVersion(theId, theRequest, transactionDetails);
- // Validate that there are no resources pointing to the candidate that
- // would prevent deletion
- DeleteConflictList deleteConflicts = new DeleteConflictList();
- if (getStorageSettings().isEnforceReferentialIntegrityOnDelete()) {
- myDeleteConflictService.validateOkToDelete(
- deleteConflicts, entity, true, theRequest, new TransactionDetails());
- }
- DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
+ RequestPartitionId requestPartitionId =
+ myRequestPartitionHelperService.determineReadPartitionForRequestForRead(
+ theRequest, getResourceName(), theId);
+
+ return myTransactionService
+ .withRequest(theRequest)
+ .withRequestPartitionId(requestPartitionId)
+ .execute(() -> {
+ final ResourceTable entity =
+ readEntityLatestVersion(theId, requestPartitionId, transactionDetails);
+
+ // Validate that there are no resources pointing to the candidate that
+ // would prevent deletion
+ DeleteConflictList deleteConflicts = new DeleteConflictList();
+ if (getStorageSettings().isEnforceReferentialIntegrityOnDelete()) {
+ myDeleteConflictService.validateOkToDelete(
+ deleteConflicts, entity, true, theRequest, new TransactionDetails());
+ }
+ DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
- IBaseOperationOutcome oo = createInfoOperationOutcome("Ok to delete");
- return new MethodOutcome(new IdDt(theId.getValue()), oo);
+ IBaseOperationOutcome oo = createInfoOperationOutcome("Ok to delete");
+ return new MethodOutcome(new IdDt(theId.getValue()), oo);
+ });
}
FhirValidator validator = getContext().newValidator();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
index d21dff545d32..0aad2196720e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
@@ -206,7 +206,7 @@ public void preFetchResources(
* However, for realistic average workloads, this should reduce the number of round trips.
*/
if (!idChunk.isEmpty()) {
- List entityChunk = prefetchResourceTableHistoryAndProvenance(idChunk);
+ List entityChunk = prefetchResourceTableAndHistory(idChunk);
if (thePreFetchIndexes) {
@@ -244,14 +244,13 @@ public void preFetchResources(
}
@Nonnull
- private List prefetchResourceTableHistoryAndProvenance(List idChunk) {
+ private List prefetchResourceTableAndHistory(List idChunk) {
assert idChunk.size() < SearchConstants.MAX_PAGE_SIZE : "assume pre-chunked";
Query query = myEntityManager.createQuery("select r, h "
+ " FROM ResourceTable r "
+ " LEFT JOIN fetch ResourceHistoryTable h "
+ " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId "
- + " left join fetch h.myProvenance "
+ " WHERE r.myId IN ( :IDS ) ");
query.setParameter("IDS", idChunk);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java
index c5a5dba6d94b..2493e97e4eca 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java
@@ -219,7 +219,7 @@ private ISearchQueryExecutor doSearch(
// indicate param was already processed, otherwise queries DB to process it
theParams.setOffset(null);
- return SearchQueryExecutors.from(longs);
+ return SearchQueryExecutors.from(JpaPid.fromLongList(longs));
}
private int getMaxFetchSize(SearchParameterMap theParams, Integer theMax) {
@@ -386,7 +386,6 @@ public List search(
@SuppressWarnings("rawtypes")
private List toList(ISearchQueryExecutor theSearchResultStream, long theMaxSize) {
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(theSearchResultStream, 0), false)
- .map(JpaPid::fromId)
.limit(theMaxSize)
.collect(Collectors.toList());
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java
index dcf578ee5c9d..4d679594475a 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java
@@ -40,7 +40,6 @@
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Expression;
-import jakarta.persistence.criteria.JoinType;
import jakarta.persistence.criteria.Predicate;
import jakarta.persistence.criteria.Root;
import jakarta.persistence.criteria.Subquery;
@@ -125,8 +124,6 @@ public List fetchEntities(
addPredicatesToQuery(cb, thePartitionId, criteriaQuery, from, theHistorySearchStyle);
- from.fetch("myProvenance", JoinType.LEFT);
-
/*
* The sort on myUpdated is the important one for _history operations, but there are
* cases where multiple pages of results all have the exact same myUpdated value (e.g.
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java
index fe3ae4824df2..3f7334572913 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java
@@ -19,15 +19,15 @@
*/
package ca.uhn.fhir.jpa.dao;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
-import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import jakarta.annotation.Nullable;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.Collection;
-public interface IJpaStorageResourceParser extends IStorageResourceParser {
+public interface IJpaStorageResourceParser extends IStorageResourceParser<JpaPid> {
/**
* Convert a storage entity into a FHIR resource model instance. This method may return null if the entity is not
@@ -36,7 +36,7 @@ public interface IJpaStorageResourceParser extends IStorageResourceParser {
<R extends IBaseResource> R toResource(
Class<R> theResourceType,
IBaseResourceEntity theEntity,
- Collection<ResourceTag> theTagList,
+ Collection<BaseTag> theTagList,
boolean theForHistoryOperation);
/**
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java
index 48a04cf38fe2..b2a9f45ffa49 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java
@@ -26,8 +26,6 @@
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.term.TermReadSvcUtil;
-import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
-import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
@@ -35,76 +33,57 @@
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.UriParam;
-import ca.uhn.fhir.sl.cache.Cache;
-import ca.uhn.fhir.sl.cache.CacheFactory;
import jakarta.annotation.Nullable;
import jakarta.annotation.PostConstruct;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.ImplementationGuide;
import org.hl7.fhir.r4.model.Questionnaire;
import org.hl7.fhir.r4.model.StructureDefinition;
-import org.hl7.fhir.r4.model.UriType;
import org.hl7.fhir.r4.model.ValueSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.transaction.annotation.Propagation;
-import org.springframework.transaction.annotation.Transactional;
import java.util.Arrays;
import java.util.List;
+import java.util.Objects;
import java.util.Optional;
-import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
+import static org.hl7.fhir.instance.model.api.IAnyResource.SP_RES_LAST_UPDATED;
/**
* This class is a {@link IValidationSupport Validation support} module that loads
* validation resources (StructureDefinition, ValueSet, CodeSystem, etc.) from the resources
* persisted in the JPA server.
*/
-@Transactional(propagation = Propagation.REQUIRED)
public class JpaPersistedResourceValidationSupport implements IValidationSupport {
private static final Logger ourLog = LoggerFactory.getLogger(JpaPersistedResourceValidationSupport.class);
private final FhirContext myFhirContext;
- private final IBaseResource myNoMatch;
@Autowired
private DaoRegistry myDaoRegistry;
- @Autowired
- private ITermReadSvc myTermReadSvc;
-
private Class<? extends IBaseResource> myCodeSystemType;
private Class<? extends IBaseResource> myStructureDefinitionType;
private Class<? extends IBaseResource> myValueSetType;
- // TODO: JA2 We shouldn't need to cache here, but we probably still should since the
- // TermReadSvcImpl calls these methods as a part of its "isCodeSystemSupported" calls.
- // We should modify CachingValidationSupport to cache the results of "isXXXSupported"
- // at which point we could do away with this cache
- // TODO: LD: This cache seems to supersede the cache in CachingValidationSupport, as that cache is set to
- // 10 minutes, but this 1 minute cache now determines the expiry.
- // This new behaviour was introduced between the 7.0.0 release and the current master (7.2.0)
- private Cache<String, IBaseResource> myLoadCache = CacheFactory.build(TimeUnit.MINUTES.toMillis(1), 1000);
-
/**
* Constructor
*/
public JpaPersistedResourceValidationSupport(FhirContext theFhirContext) {
super();
- Validate.notNull(theFhirContext);
+ Validate.notNull(theFhirContext, "theFhirContext must not be null");
myFhirContext = theFhirContext;
-
- myNoMatch = myFhirContext.getResourceDefinition("Basic").newInstance();
}
@Override
@@ -115,51 +94,44 @@ public String getName() {
@Override
public IBaseResource fetchCodeSystem(String theSystem) {
if (TermReadSvcUtil.isLoincUnversionedCodeSystem(theSystem)) {
- Optional<IBaseResource> currentCSOpt = getCodeSystemCurrentVersion(new UriType(theSystem));
- if (!currentCSOpt.isPresent()) {
- ourLog.info("Couldn't find current version of CodeSystem: " + theSystem);
- }
- return currentCSOpt.orElse(null);
+ IIdType id = myFhirContext.getVersion().newIdType("CodeSystem", LOINC_LOW);
+ return findResourceByIdWithNoException(id, myCodeSystemType);
}
return fetchResource(myCodeSystemType, theSystem);
}
- /**
- * Obtains the current version of a CodeSystem using the fact that the current
- * version is always pointed by the ForcedId for the no-versioned CS
- */
- private Optional<IBaseResource> getCodeSystemCurrentVersion(UriType theUrl) {
- if (!theUrl.getValueAsString().contains(LOINC_LOW)) {
- return Optional.empty();
- }
-
- return myTermReadSvc.readCodeSystemByForcedId(LOINC_LOW);
- }
-
@Override
public IBaseResource fetchValueSet(String theSystem) {
if (TermReadSvcUtil.isLoincUnversionedValueSet(theSystem)) {
- Optional<IBaseResource> currentVSOpt = getValueSetCurrentVersion(new UriType(theSystem));
- return currentVSOpt.orElse(null);
+ Optional<String> vsIdOpt = TermReadSvcUtil.getValueSetId(theSystem);
+ if (vsIdOpt.isEmpty()) {
+ return null;
+ }
+ IIdType id = myFhirContext.getVersion().newIdType("ValueSet", vsIdOpt.get());
+ return findResourceByIdWithNoException(id, myValueSetType);
}
return fetchResource(myValueSetType, theSystem);
}
/**
- * Obtains the current version of a ValueSet using the fact that the current
- * version is always pointed by the ForcedId for the no-versioned VS
+ * Performs a lookup by ID, with no exception thrown (since a thrown exception can mark
+ * the active transaction as rollback-only).
*/
- private Optional<IBaseResource> getValueSetCurrentVersion(UriType theUrl) {
- Optional<String> vsIdOpt = TermReadSvcUtil.getValueSetId(theUrl.getValueAsString());
- if (!vsIdOpt.isPresent()) {
- return Optional.empty();
+ @Nullable
+ private IBaseResource findResourceByIdWithNoException(IIdType id, Class<? extends IBaseResource> type) {
+ SearchParameterMap map = SearchParameterMap.newSynchronous()
+ .setLoadSynchronousUpTo(1)
+ .add(IAnyResource.SP_RES_ID, new TokenParam(id.getValue()));
+ IFhirResourceDao<? extends IBaseResource> dao = myDaoRegistry.getResourceDao(type);
+ IBundleProvider outcome = dao.search(map, new SystemRequestDetails());
+ List<IBaseResource> resources = outcome.getResources(0, 1);
+ if (resources.isEmpty()) {
+ return null;
+ } else {
+ return resources.get(0);
}
-
- IFhirResourceDao<? extends IBaseResource> valueSetResourceDao = myDaoRegistry.getResourceDao(myValueSetType);
- IBaseResource valueSet = valueSetResourceDao.read(new IdDt("ValueSet", vsIdOpt.get()));
- return Optional.ofNullable(valueSet);
}
@Override
@@ -188,17 +160,7 @@ public T fetchResource(@Nullable Class theClass, St
return null;
}
- String key = theClass + " " + theUri;
- IBaseResource fetched = myLoadCache.get(key, t -> doFetchResource(theClass, theUri));
-
- if (fetched == myNoMatch) {
- ourLog.debug(
- "Invalidating cache entry for URI: {} since the result of the underlying query is empty", theUri);
- myLoadCache.invalidate(key);
- return null;
- }
-
- return (T) fetched;
+ return (T) doFetchResource(theClass, theUri);
}
private IBaseResource doFetchResource(@Nullable Class theClass, String theUri) {
@@ -209,17 +171,14 @@ private IBaseResource doFetchResource(@Nullable Class<
() -> doFetchResource(StructureDefinition.class, theUri)
};
return Arrays.stream(fetchers)
- .map(t -> t.get())
- .filter(t -> t != myNoMatch)
+ .map(Supplier::get)
+ .filter(Objects::nonNull)
.findFirst()
- .orElse(myNoMatch);
+ .orElse(null);
}
IdType id = new IdType(theUri);
- boolean localReference = false;
- if (id.hasBaseUrl() == false && id.hasIdPart() == true) {
- localReference = true;
- }
+ boolean localReference = id.hasBaseUrl() == false && id.hasIdPart() == true;
String resourceName = myFhirContext.getResourceType(theClass);
IBundleProvider search;
@@ -230,7 +189,7 @@ private IBaseResource doFetchResource(@Nullable Class<
params.setLoadSynchronousUpTo(1);
params.add(IAnyResource.SP_RES_ID, new StringParam(theUri));
search = myDaoRegistry.getResourceDao(resourceName).search(params);
- if (search.size() == 0) {
+ if (search.isEmpty()) {
params = new SearchParameterMap();
params.setLoadSynchronousUpTo(1);
params.add(ValueSet.SP_URL, new UriParam(theUri));
@@ -246,7 +205,7 @@ private IBaseResource doFetchResource(@Nullable Class<
} else {
params.add(ValueSet.SP_URL, new UriParam(theUri));
}
- params.setSort(new SortSpec("_lastUpdated").setOrder(SortOrderEnum.DESC));
+ params.setSort(new SortSpec(SP_RES_LAST_UPDATED).setOrder(SortOrderEnum.DESC));
search = myDaoRegistry.getResourceDao(resourceName).search(params);
if (search.isEmpty()
@@ -255,11 +214,13 @@ private IBaseResource doFetchResource(@Nullable Class<
params.setLoadSynchronousUpTo(1);
if (versionSeparator != -1) {
params.add(ValueSet.SP_VERSION, new TokenParam(theUri.substring(versionSeparator + 1)));
- params.add("system", new UriParam(theUri.substring(0, versionSeparator)));
+ params.add(
+ ca.uhn.fhir.model.dstu2.resource.ValueSet.SP_SYSTEM,
+ new UriParam(theUri.substring(0, versionSeparator)));
} else {
- params.add("system", new UriParam(theUri));
+ params.add(ca.uhn.fhir.model.dstu2.resource.ValueSet.SP_SYSTEM, new UriParam(theUri));
}
- params.setSort(new SortSpec("_lastUpdated").setOrder(SortOrderEnum.DESC));
+ params.setSort(new SortSpec(SP_RES_LAST_UPDATED).setOrder(SortOrderEnum.DESC));
search = myDaoRegistry.getResourceDao(resourceName).search(params);
}
}
@@ -269,7 +230,7 @@ private IBaseResource doFetchResource(@Nullable Class<
if (theUri.startsWith("http://hl7.org/fhir/StructureDefinition/")) {
String typeName = theUri.substring("http://hl7.org/fhir/StructureDefinition/".length());
if (myFhirContext.getElementDefinition(typeName) != null) {
- return myNoMatch;
+ return null;
}
}
SearchParameterMap params = new SearchParameterMap();
@@ -299,7 +260,7 @@ private IBaseResource doFetchResource(@Nullable Class<
} else {
params.add(CodeSystem.SP_URL, new UriParam(theUri));
}
- params.setSort(new SortSpec("_lastUpdated").setOrder(SortOrderEnum.DESC));
+ params.setSort(new SortSpec(SP_RES_LAST_UPDATED).setOrder(SortOrderEnum.DESC));
search = myDaoRegistry.getResourceDao(resourceName).search(params);
break;
}
@@ -322,7 +283,7 @@ private IBaseResource doFetchResource(@Nullable Class<
Integer size = search.size();
if (size == null || size == 0) {
- return myNoMatch;
+ return null;
}
if (size > 1) {
@@ -349,8 +310,4 @@ public void start() {
myCodeSystemType = myFhirContext.getResourceDefinition("ValueSet").getImplementingClass();
}
}
-
- public void clearCaches() {
- myLoadCache.invalidateAll();
- }
}
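Illustrative sketch (not part of the patch itself): the hunks above remove the no-match sentinel and the per-URI load cache, so each fetcher simply returns null when it finds nothing and the chain takes the first non-null result. A minimal, self-contained sketch of that pattern, with invented names:

    import java.util.Arrays;
    import java.util.Objects;
    import java.util.function.Supplier;

    class FirstNonNullFetcher {
        // Returns the first non-null value produced by the fetchers, or null if none match.
        @SafeVarargs
        static <T> T fetchFirst(Supplier<T>... theFetchers) {
            return Arrays.stream(theFetchers)
                    .map(Supplier::get)       // invoke each candidate fetcher in order
                    .filter(Objects::nonNull) // null now means "no match"; no sentinel object needed
                    .findFirst()
                    .orElse(null);
        }
    }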
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java
index 09d46a557e43..4069b91b7c50 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java
@@ -26,9 +26,9 @@
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
-import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
import ca.uhn.fhir.jpa.esr.IExternallyStoredResourceService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
@@ -37,9 +37,9 @@
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
@@ -71,12 +71,13 @@
import java.util.ArrayList;
import java.util.Collection;
-import java.util.Collections;
import java.util.Date;
import java.util.List;
+import java.util.Optional;
import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.decodeResource;
import static java.util.Objects.nonNull;
+import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class JpaStorageResourceParser implements IJpaStorageResourceParser {
@@ -92,6 +93,9 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
+ @Autowired
+ private IResourceHistoryProvenanceDao myResourceHistoryProvenanceDao;
+
@Autowired
private PartitionSettings myPartitionSettings;
@@ -115,14 +119,14 @@ public IBaseResource toResource(IBasePersistedResource theEntity, boolean theFor
public R toResource(
Class theResourceType,
IBaseResourceEntity theEntity,
- Collection theTagList,
+ Collection theTagList,
boolean theForHistoryOperation) {
// 1. get resource, it's encoding and the tags if any
byte[] resourceBytes;
String resourceText;
ResourceEncodingEnum resourceEncoding;
- @Nullable Collection<? extends BaseTag> tagList = Collections.emptyList();
+ @Nullable Collection<? extends BaseTag> tagList;
long version;
String provenanceSourceUri = null;
String provenanceRequestId = null;
@@ -132,25 +136,42 @@ public R toResource(
resourceBytes = history.getResource();
resourceText = history.getResourceTextVc();
resourceEncoding = history.getEncoding();
- switch (myStorageSettings.getTagStorageMode()) {
- case VERSIONED:
- default:
- if (history.isHasTags()) {
- tagList = history.getTags();
- }
- break;
- case NON_VERSIONED:
- if (history.getResourceTable().isHasTags()) {
- tagList = history.getResourceTable().getTags();
- }
- break;
- case INLINE:
- tagList = null;
+
+ // For search results we get the list of tags passed in because we load it
+ // in bulk for all resources we're going to return, but for read results
+ // we don't get the list passed in so we need to load it here.
+ tagList = theTagList;
+ if (tagList == null) {
+ switch (myStorageSettings.getTagStorageMode()) {
+ case VERSIONED:
+ default:
+ if (history.isHasTags()) {
+ tagList = history.getTags();
+ }
+ break;
+ case NON_VERSIONED:
+ if (history.getResourceTable().isHasTags()) {
+ tagList = history.getResourceTable().getTags();
+ }
+ break;
+ case INLINE:
+ tagList = null;
+ }
}
+
version = history.getVersion();
- if (history.getProvenance() != null) {
- provenanceRequestId = history.getProvenance().getRequestId();
- provenanceSourceUri = history.getProvenance().getSourceUri();
+ provenanceSourceUri = history.getSourceUri();
+ provenanceRequestId = history.getRequestId();
+ if (isBlank(provenanceSourceUri) && isBlank(provenanceRequestId)) {
+ if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
+ Optional<ResourceHistoryProvenanceEntity> provenanceOpt =
+ myResourceHistoryProvenanceDao.findById(history.getId());
+ if (provenanceOpt.isPresent()) {
+ ResourceHistoryProvenanceEntity provenance = provenanceOpt.get();
+ provenanceRequestId = provenance.getRequestId();
+ provenanceSourceUri = provenance.getSourceUri();
+ }
+ }
}
} else if (theEntity instanceof ResourceTable) {
ResourceTable resource = (ResourceTable) theEntity;
@@ -159,14 +180,13 @@ public R toResource(
history = resource.getCurrentVersionEntity();
} else {
version = theEntity.getVersion();
- history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
+ history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), version);
((ResourceTable) theEntity).setCurrentVersionEntity(history);
while (history == null) {
if (version > 1L) {
version--;
- history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
- theEntity.getId(), version);
+ history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), version);
} else {
return null;
}
@@ -181,36 +201,28 @@ public R toResource(
case NON_VERSIONED:
if (resource.isHasTags()) {
tagList = resource.getTags();
+ } else {
+ tagList = List.of();
}
break;
case INLINE:
+ default:
tagList = null;
break;
}
version = history.getVersion();
- if (history.getProvenance() != null) {
- provenanceRequestId = history.getProvenance().getRequestId();
- provenanceSourceUri = history.getProvenance().getSourceUri();
- }
- } else if (theEntity instanceof ResourceSearchView) {
- // This is the search View
- ResourceSearchView view = (ResourceSearchView) theEntity;
- resourceBytes = view.getResource();
- resourceText = view.getResourceTextVc();
- resourceEncoding = view.getEncoding();
- version = view.getVersion();
- provenanceRequestId = view.getProvenanceRequestId();
- provenanceSourceUri = view.getProvenanceSourceUri();
- switch (myStorageSettings.getTagStorageMode()) {
- case VERSIONED:
- case NON_VERSIONED:
- if (theTagList != null) {
- tagList = theTagList;
+ provenanceSourceUri = history.getSourceUri();
+ provenanceRequestId = history.getRequestId();
+ if (isBlank(provenanceSourceUri) && isBlank(provenanceRequestId)) {
+ if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
+ Optional<ResourceHistoryProvenanceEntity> provenanceOpt =
+ myResourceHistoryProvenanceDao.findById(history.getId());
+ if (provenanceOpt.isPresent()) {
+ ResourceHistoryProvenanceEntity provenance = provenanceOpt.get();
+ provenanceRequestId = provenance.getRequestId();
+ provenanceSourceUri = provenance.getSourceUri();
}
- break;
- case INLINE:
- tagList = null;
- break;
+ }
}
} else {
// something wrong
@@ -277,7 +289,7 @@ private R parseResource(
} else if (theResourceEncoding != ResourceEncodingEnum.DEL) {
IParser parser = new TolerantJsonParser(
- getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getId());
+ getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getResourceId());
try {
retVal = parser.parseResource(theResourceType, theDecodedResourceText);
@@ -519,8 +531,8 @@ public void updateResourceMetadata(IBaseResourceEntity theEntitySource, IBaseRes
theResourceTarget.setId(id);
if (theResourceTarget instanceof IResource) {
- ResourceMetadataKeyEnum.VERSION.put((IResource) theResourceTarget, id.getVersionIdPart());
- ResourceMetadataKeyEnum.UPDATED.put((IResource) theResourceTarget, theEntitySource.getUpdated());
+ ResourceMetadataKeyEnum.VERSION.put(theResourceTarget, id.getVersionIdPart());
+ ResourceMetadataKeyEnum.UPDATED.put(theResourceTarget, theEntitySource.getUpdated());
} else {
IBaseMetaType meta = theResourceTarget.getMeta();
meta.setVersionId(id.getVersionIdPart());
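Illustrative sketch (not part of the patch itself): after this change the parser reads meta source information directly from the history row and only falls back to the HFJ_RES_VER_PROV table when the storage setting checked above is enabled. A hedged configuration sketch, assuming JpaStorageSettings exposes a setter mirroring the isAccessMetaSourceInformationFromProvenanceTable() getter used in the hunk:

    JpaStorageSettings storageSettings = new JpaStorageSettings();
    // Assumed setter name: keep resolving source URIs / request IDs from legacy provenance rows.
    storageSettings.setAccessMetaSourceInformationFromProvenanceTable(true);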
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java
index 6e0280143bb0..c6d7ebdb7cf4 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java
@@ -33,10 +33,12 @@
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
+import ca.uhn.fhir.jpa.search.ResourceSearchUrlSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.util.FhirTerser;
@@ -114,6 +116,9 @@ public class TransactionProcessor extends BaseTransactionProcessor {
@Autowired
private MatchUrlService myMatchUrlService;
+ @Autowired
+ private ResourceSearchUrlSvc myResourceSearchUrlSvc;
+
@Autowired
private IRequestPartitionHelperSvc myRequestPartitionSvc;
@@ -224,6 +229,16 @@ private void preFetch(
systemDao.preFetchResources(JpaPid.fromLongList(idsToPreFetch), true);
}
+ @SuppressWarnings("rawtypes")
+ protected void postTransactionProcess(TransactionDetails theTransactionDetails) {
+ Set resourceIds = theTransactionDetails.getUpdatedResourceIds();
+ if (resourceIds != null && !resourceIds.isEmpty()) {
+ List ids = resourceIds.stream().map(r -> (Long) r.getId()).collect(Collectors.toList());
+
+ myResourceSearchUrlSvc.deleteByResIds(ids);
+ }
+ }
+
private void preFetchResourcesById(
TransactionDetails theTransactionDetails,
List theEntries,
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java
index 765dc33f09b5..f189daf38475 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java
@@ -38,10 +38,8 @@ public interface IResourceHistoryTableDao extends JpaRepository findAllVersionsForResourceIdInOrder(@Param("resId") Long theId);
- @Query(
- "SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :id AND t.myResourceVersion = :version")
- ResourceHistoryTable findForIdAndVersionAndFetchProvenance(
- @Param("id") long theId, @Param("version") long theVersion);
+ @Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :id AND t.myResourceVersion = :version")
+ ResourceHistoryTable findForIdAndVersion(@Param("id") long theId, @Param("version") long theVersion);
@Query(
"SELECT t.myId FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion")
@@ -49,8 +47,8 @@ Slice findForResourceId(
Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion);
@Query(
- "SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion")
- Slice findForResourceIdAndReturnEntitiesAndFetchProvenance(
+ "SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion")
+ Slice findAllVersionsExceptSpecificForResourcePid(
Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion);
@Query("" + "SELECT v.myId FROM ResourceHistoryTable v "
@@ -91,4 +89,10 @@ void updateVersion(
@Query(
"UPDATE ResourceHistoryTable r SET r.myResourceTextVc = null, r.myResource = :text, r.myEncoding = 'JSONC' WHERE r.myId = :pid")
void updateNonInlinedContents(@Param("text") byte[] theText, @Param("pid") long thePid);
+
+ @Query("SELECT v FROM ResourceHistoryTable v " + "JOIN FETCH v.myResourceTable t "
+ + "WHERE v.myResourceId IN (:pids) "
+ + "AND t.myVersion = v.myResourceVersion")
+ List findCurrentVersionsByResourcePidsAndFetchResourceTable(
+ @Param("pids") List theVersionlessPids);
}
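Illustrative sketch (not part of the patch itself): a hypothetical caller of the bulk query added above, loading the current version row for each versionless pid with its ResourceTable pre-fetched; the pid values are invented:

    List<Long> versionlessPids = List.of(101L, 102L, 103L);
    List<ResourceHistoryTable> currentVersions =
            myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable(versionlessPids);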
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java
index 01f2bb3e8f31..4b72b9730d86 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java
@@ -25,9 +25,15 @@
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
+import java.util.Collection;
+
public interface IResourceHistoryTagDao extends JpaRepository, IHapiFhirJpaRepository {
@Modifying
@Query("DELETE FROM ResourceHistoryTag t WHERE t.myResourceHistoryPid = :historyPid")
void deleteByPid(@Param("historyPid") Long theResourceHistoryTablePid);
+
+ @Query(
+ "SELECT t FROM ResourceHistoryTag t INNER JOIN FETCH t.myTag WHERE t.myResourceHistory.myId IN (:historyPids)")
+ Collection findByVersionIds(@Param("historyPids") Collection theIdList);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchUrlDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchUrlDao.java
index e73db69698fc..3ea3431cf445 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchUrlDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchUrlDao.java
@@ -26,6 +26,7 @@
import org.springframework.data.repository.query.Param;
import java.util.Date;
+import java.util.List;
public interface IResourceSearchUrlDao extends JpaRepository, IHapiFhirJpaRepository {
@@ -36,4 +37,8 @@ public interface IResourceSearchUrlDao extends JpaRepository theIds);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java
index 98e9471a18ca..e82ca24df13e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchResultDao.java
@@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.SearchResult;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
@@ -28,6 +29,7 @@
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@@ -55,4 +57,16 @@ int deleteBySearchIdInRange(
@Query("SELECT count(r) FROM SearchResult r WHERE r.mySearchPid = :search")
int countForSearch(@Param("search") Long theSearchPid);
+
+ /**
+ * Converts a response from {@link #findWithSearchPid(Long, Pageable)} to
+ * a List of JpaPid objects
+ */
+ static List toJpaPidList(List theArrays) {
+ List retVal = new ArrayList<>(theArrays.size());
+ for (Long next : theArrays) {
+ retVal.add(JpaPid.fromId(next));
+ }
+ return retVal;
+ }
}
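Illustrative sketch (not part of the patch itself): a hypothetical use of the static helper added above, converting one page of raw search-result pids into JpaPid objects (findWithSearchPid is assumed to return the raw Long pids):

    List<Long> rawPids = mySearchResultDao.findWithSearchPid(theSearchPid, Pageable.ofSize(500));
    List<JpaPid> pids = ISearchResultDao.toJpaPidList(rawPids);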
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java
index f8d63002793a..81d394319fcb 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java
@@ -321,10 +321,12 @@ protected int expungeEverythingByTypeWithoutPurging(
nativeQuery.setMaxResults(800);
List pids = nativeQuery.getResultList();
- nativeQuery = myEntityManager.createQuery("DELETE FROM " + theEntityType.getSimpleName()
- + " WHERE " + idProperty + " IN (:pids)");
- nativeQuery.setParameter("pids", pids);
- nativeQuery.executeUpdate();
+ if (!pids.isEmpty()) {
+ nativeQuery = myEntityManager.createQuery("DELETE FROM " + theEntityType.getSimpleName()
+ + " WHERE " + idProperty + " IN (:pids)");
+ nativeQuery.setParameter("pids", pids);
+ nativeQuery.executeUpdate();
+ }
return pids.size();
});
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java
index 860113cec067..ab47125ca870 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java
@@ -45,6 +45,7 @@
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
@@ -71,6 +72,7 @@
import java.util.Collections;
import java.util.List;
+import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
@Service
@@ -159,8 +161,7 @@ public List findHistoricalVersionsOfNonDeletedResources(
Slice ids;
if (theJpaPid != null && theJpaPid.getId() != null) {
if (theJpaPid.getVersion() != null) {
- ids = toSlice(myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
- theJpaPid.getId(), theJpaPid.getVersion()));
+ ids = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theJpaPid.getId(), theJpaPid.getVersion()));
} else {
ids = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResourceId(page, theJpaPid.getId());
}
@@ -239,9 +240,10 @@ private void expungeHistoricalVersion(
callHooks(theRequestDetails, theRemainingCount, version, id);
- if (version.getProvenance() != null) {
- myResourceHistoryProvenanceTableDao.deleteByPid(
- version.getProvenance().getId());
+ if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
+ Optional<ResourceHistoryProvenanceEntity> provenanceOpt =
+ myResourceHistoryProvenanceTableDao.findById(theNextVersionId);
+ provenanceOpt.ifPresent(entity -> myResourceHistoryProvenanceTableDao.deleteByPid(entity.getId()));
}
myResourceHistoryTagDao.deleteByPid(version.getId());
@@ -302,8 +304,8 @@ protected void expungeCurrentVersionOfResource(
RequestDetails theRequestDetails, Long theResourceId, AtomicInteger theRemainingCount) {
ResourceTable resource = myResourceTableDao.findById(theResourceId).orElseThrow(IllegalStateException::new);
- ResourceHistoryTable currentVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
- resource.getId(), resource.getVersion());
+ ResourceHistoryTable currentVersion =
+ myResourceHistoryTableDao.findForIdAndVersion(resource.getId(), resource.getVersion());
if (currentVersion != null) {
expungeHistoricalVersion(theRequestDetails, currentVersion.getId(), theRemainingCount);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java
index c4542ea8aaca..9ad2c34aa3ef 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java
@@ -61,7 +61,6 @@
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
-import org.hl7.fhir.r4.model.IdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -70,13 +69,12 @@
import java.util.ArrayList;
import java.util.Collection;
-import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
@@ -87,7 +85,7 @@
/**
* This class is used to convert between PIDs (the internal primary key for a particular resource as
- * stored in the {@link ca.uhn.fhir.jpa.model.entity.ResourceTable HFJ_RESOURCE} table), and the
+ * stored in the {@link ResourceTable HFJ_RESOURCE} table), and the
* public ID that a resource has.
*
* These IDs are sometimes one and the same (by default, a resource that the server assigns the ID of
@@ -150,20 +148,44 @@ public IResourceLookup resolveResourceIdentity(
throws ResourceNotFoundException {
IIdType id;
+ boolean untyped;
if (theResourceType != null) {
+ untyped = false;
id = newIdType(theResourceType + "/" + theResourceId);
} else {
+ /*
+ * This shouldn't be common, but we need to be able to handle it.
+ * The only real known use case currently is when handing references
+ * in searches where the client didn't qualify the ID. E.g.
+ * /Provenance?target=A,B,C
+ * We emit a warning in this case that they should be qualfying the
+ * IDs, but we do stil allow it.
+ */
+ untyped = true;
id = newIdType(theResourceId);
}
List ids = List.of(id);
Map> outcome = resolveResourceIdentities(theRequestPartitionId, ids, theMode);
// We only pass 1 input in so only 0..1 will come back
- if (!outcome.containsKey(id)) {
+ Validate.isTrue(outcome.size() <= 1, "Unexpected output size %s for ID: %s", outcome.size(), ids);
+
+ IResourceLookup retVal;
+ if (untyped) {
+ if (outcome.isEmpty()) {
+ retVal = null;
+ } else {
+ retVal = outcome.values().iterator().next();
+ }
+ } else {
+ retVal = outcome.get(id);
+ }
+
+ if (retVal == null) {
throw new ResourceNotFoundException(Msg.code(2001) + "Resource " + id + " is not known");
}
- return outcome.get(id);
+ return retVal;
}
@Nonnull
@@ -180,7 +202,11 @@ public Map> resolveResourceIdentities(
}
Collection ids = new ArrayList<>(theIds);
- ids.forEach(id -> Validate.isTrue(id.hasIdPart()));
+ for (IIdType id : theIds) {
+ if (!id.hasIdPart()) {
+ throw new InvalidRequestException(Msg.code(1101) + "Parameter value missing in request");
+ }
+ }
RequestPartitionId requestPartitionId = replaceDefault(theRequestPartitionId);
ListMultimap> idToLookup =
@@ -197,16 +223,16 @@ public Map> resolveResourceIdentities(
}
// Convert the multimap into a simple map
- Map> retVal = new HashMap<>();
+ Map> retVal = new HashMap<>(idToLookup.size());
for (Map.Entry> next : idToLookup.entries()) {
- if (next.getValue().getDeleted() != null) {
+ IResourceLookup nextLookup = next.getValue();
+
+ IIdType resourceId = myFhirCtx.getVersion().newIdType(nextLookup.getResourceType(), nextLookup.getFhirId());
+ if (nextLookup.getDeleted() != null) {
if (theMode.isFailOnDeleted()) {
String msg = myFhirCtx
.getLocalizer()
- .getMessageSanitized(
- IdHelperService.class,
- "deletedId",
- next.getKey().getValue());
+ .getMessageSanitized(IdHelperService.class, "deletedId", resourceId.getValue());
throw new ResourceGoneException(Msg.code(2572) + msg);
}
if (!theMode.isIncludeDeleted()) {
@@ -214,7 +240,9 @@ public Map> resolveResourceIdentities(
}
}
- IResourceLookup previousValue = retVal.put(next.getKey(), next.getValue());
+ nextLookup.getPersistentId().setAssociatedResourceId(resourceId);
+
+ IResourceLookup previousValue = retVal.put(resourceId, nextLookup);
if (previousValue != null) {
/*
* This means that either:
@@ -224,11 +252,7 @@ public Map> resolveResourceIdentities(
* with the same ID.
* 2. The unique constraint on the FHIR_ID column has been dropped
*/
- ourLog.warn(
- "Resource ID[{}] corresponds to lookups: {} and {}",
- next.getKey(),
- previousValue,
- next.getValue());
+ ourLog.warn("Resource ID[{}] corresponds to lookups: {} and {}", resourceId, previousValue, nextLookup);
String msg = myFhirCtx.getLocalizer().getMessage(IdHelperService.class, "nonUniqueForcedId");
throw new PreconditionFailedException(Msg.code(1099) + msg);
}
@@ -320,26 +344,15 @@ private void resolveResourceIdentitiesForFhirIdsUsingDatabase(
// create one clause per id.
List innerIdPredicates = new ArrayList<>(theIdsToResolve.size());
- boolean haveUntypedIds = false;
for (IIdType next : theIdsToResolve) {
- if (!next.hasResourceType()) {
- haveUntypedIds = true;
- }
-
List idPredicates = new ArrayList<>(2);
- if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC
- && next.isIdPartValidLong()) {
- Predicate typeCriteria = cb.equal(from.get("myId"), next.getIdPartAsLong());
+ if (isNotBlank(next.getResourceType())) {
+ Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType());
idPredicates.add(typeCriteria);
- } else {
- if (isNotBlank(next.getResourceType())) {
- Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType());
- idPredicates.add(typeCriteria);
- }
- Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart());
- idPredicates.add(idCriteria);
}
+ Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart());
+ idPredicates.add(idCriteria);
innerIdPredicates.add(cb.and(idPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
}
@@ -357,18 +370,13 @@ private void resolveResourceIdentitiesForFhirIdsUsingDatabase(
Integer partitionId = nextId.get(4, Integer.class);
if (resourcePid != null) {
JpaResourceLookup lookup = new JpaResourceLookup(
- resourceType, resourcePid, deletedAd, PartitionablePartitionId.with(partitionId, null));
+ resourceType, fhirId, resourcePid, deletedAd, PartitionablePartitionId.with(partitionId, null));
MemoryCacheService.ForcedIdCacheKey nextKey =
new MemoryCacheService.ForcedIdCacheKey(resourceType, fhirId, theRequestPartitionId);
IIdType id = nextKey.toIdType(myFhirCtx);
theMapToPopulate.put(id, lookup);
- if (haveUntypedIds) {
- id = nextKey.toIdTypeWithoutResourceType(myFhirCtx);
- theMapToPopulate.put(id, lookup);
- }
-
List> valueToCache = theMapToPopulate.get(id);
myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, nextKey, valueToCache);
@@ -376,78 +384,6 @@ private void resolveResourceIdentitiesForFhirIdsUsingDatabase(
}
}
- /**
- * Returns a mapping of Id -> IResourcePersistentId.
- * If any resource is not found, it will throw ResourceNotFound exception (and no map will be returned)
- * Optionally filters out deleted resources.
- */
- @Override
- @Nonnull
- public Map resolveResourcePersistentIds(
- @Nonnull RequestPartitionId theRequestPartitionId,
- String theResourceType,
- List theIds,
- ResolveIdentityMode theMode) {
- assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
- Validate.notNull(theIds, "theIds cannot be null");
- Validate.isTrue(!theIds.isEmpty(), "theIds must not be empty");
-
- Map retVals = new HashMap<>();
- for (String id : theIds) {
- JpaPid retVal;
- if (!idRequiresForcedId(id)) {
- // is already a PID
- retVal = JpaPid.fromId(Long.parseLong(id));
- retVals.put(id, retVal);
- } else {
- // is a forced id
- // we must resolve!
- if (myStorageSettings.isDeleteEnabled()) {
- retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theMode)
- .getPersistentId();
- retVals.put(id, retVal);
- } else {
- // fetch from cache... adding to cache if not available
- String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, id);
- retVal = myMemoryCacheService.getThenPutAfterCommit(
- MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, t -> {
- List ids = Collections.singletonList(new IdType(theResourceType, id));
- // fetches from cache using a function that checks cache first...
- List resolvedIds =
- resolveResourcePersistentIdsWithCache(theRequestPartitionId, ids);
- if (resolvedIds.isEmpty()) {
- throw new ResourceNotFoundException(Msg.code(1100) + ids.get(0));
- }
- return resolvedIds.get(0);
- });
- retVals.put(id, retVal);
- }
- }
- }
-
- return retVals;
- }
-
- /**
- * Given a resource type and ID, determines the internal persistent ID for the resource.
- * Optionally filters out deleted resources.
- *
- * @throws ResourceNotFoundException If the ID can not be found
- */
- @Nonnull
- @Override
- public JpaPid resolveResourcePersistentIds(
- @Nonnull RequestPartitionId theRequestPartitionId,
- String theResourceType,
- String theId,
- ResolveIdentityMode theMode) {
- Validate.notNull(theId, "theId must not be null");
-
- Map retVal = resolveResourcePersistentIds(
- theRequestPartitionId, theResourceType, Collections.singletonList(theId), theMode);
- return retVal.get(theId); // should be only one
- }
-
/**
* Returns true if the given resource ID should be stored in a forced ID. Under default config
* (meaning client ID strategy is {@link JpaStorageSettings.ClientIdStrategyEnum#ALPHANUMERIC})
@@ -461,132 +397,6 @@ public boolean idRequiresForcedId(String theId) {
|| !isValidPid(theId);
}
- @Nonnull
- private String toForcedIdToPidKey(
- @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) {
- return RequestPartitionId.stringifyForKey(theRequestPartitionId) + "/" + theResourceType + "/" + theId;
- }
-
- /**
- * Given a collection of resource IDs (resource type + id), resolves the internal persistent IDs.
- *
- * This implementation will always try to use a cache for performance, meaning that it can resolve resources that
- * are deleted (but note that forced IDs can't change, so the cache can't return incorrect results)
- */
- @Override
- @Nonnull
- public List resolveResourcePersistentIdsWithCache(
- RequestPartitionId theRequestPartitionId, List theIds) {
- boolean onlyForcedIds = false;
- return resolveResourcePersistentIdsWithCache(theRequestPartitionId, theIds, onlyForcedIds);
- }
-
- /**
- * Given a collection of resource IDs (resource type + id), resolves the internal persistent IDs.
- *
- * This implementation will always try to use a cache for performance, meaning that it can resolve resources that
- * are deleted (but note that forced IDs can't change, so the cache can't return incorrect results)
- *
- * @param theOnlyForcedIds If true, resources which are not existing forced IDs will not be resolved
- */
- @Override
- @Nonnull
- public List resolveResourcePersistentIdsWithCache(
- @Nonnull RequestPartitionId theRequestPartitionId, List theIds, boolean theOnlyForcedIds) {
- assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
-
- List retVal = new ArrayList<>(theIds.size());
-
- for (IIdType id : theIds) {
- if (!id.hasIdPart()) {
- throw new InvalidRequestException(Msg.code(1101) + "Parameter value missing in request");
- }
- }
-
- if (!theIds.isEmpty()) {
- Set idsToCheck = new HashSet<>(theIds.size());
- for (IIdType nextId : theIds) {
- if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) {
- if (nextId.isIdPartValidLong()) {
- if (!theOnlyForcedIds) {
- JpaPid jpaPid = JpaPid.fromId(nextId.getIdPartAsLong());
- jpaPid.setAssociatedResourceId(nextId);
- retVal.add(jpaPid);
- }
- continue;
- }
- }
-
- String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getIdPart());
- JpaPid cachedId = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key);
- if (cachedId != null) {
- retVal.add(cachedId);
- continue;
- }
-
- idsToCheck.add(nextId);
- }
- new QueryChunker();
- TaskChunker.chunk(
- idsToCheck,
- SearchBuilder.getMaximumPageSize() / 2,
- ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal));
- }
-
- return retVal;
- }
-
- private void doResolvePersistentIds(
- RequestPartitionId theRequestPartitionId, List theIds, List theOutputListToPopulate) {
- CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
- CriteriaQuery criteriaQuery = cb.createTupleQuery();
- Root from = criteriaQuery.from(ResourceTable.class);
-
- /*
- * IDX_RES_FHIR_ID covers these columns, but RES_ID is only INCLUDEd.
- * Only PG, and MSSql support INCLUDE COLUMNS.
- * @see AddIndexTask.generateSql
- */
- criteriaQuery.multiselect(from.get("myId"), from.get("myResourceType"), from.get("myFhirId"));
-
- // one create one clause per id.
- List predicates = new ArrayList<>(theIds.size());
- for (IIdType next : theIds) {
-
- List andPredicates = new ArrayList<>(3);
-
- if (isNotBlank(next.getResourceType())) {
- Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType());
- andPredicates.add(typeCriteria);
- }
-
- Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart());
- andPredicates.add(idCriteria);
- getOptionalPartitionPredicate(theRequestPartitionId, cb, from).ifPresent(andPredicates::add);
- predicates.add(cb.and(andPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
- }
-
- // join all the clauses as OR
- criteriaQuery.where(cb.or(predicates.toArray(EMPTY_PREDICATE_ARRAY)));
-
- TypedQuery query = myEntityManager.createQuery(criteriaQuery);
- List results = query.getResultList();
- for (Tuple nextId : results) {
- // Check if the nextId has a resource ID. It may have a null resource ID if a commit is still pending.
- Long resourceId = nextId.get(0, Long.class);
- String resourceType = nextId.get(1, String.class);
- String forcedId = nextId.get(2, String.class);
- if (resourceId != null) {
- JpaPid jpaPid = JpaPid.fromId(resourceId);
- populateAssociatedResourceId(resourceType, forcedId, jpaPid);
- theOutputListToPopulate.add(jpaPid);
-
- String key = toForcedIdToPidKey(theRequestPartitionId, resourceType, forcedId);
- myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, jpaPid);
- }
- }
- }
-
/**
* Return optional predicate for searching on forcedId
* 1. If the partition mode is ALLOWED_UNQUALIFIED, the return optional predicate will be empty, so search is across all partitions.
@@ -609,7 +419,7 @@ private Optional getOptionalPartitionPredicate(
return Optional.of(partitionIdNullCriteria);
} else {
Predicate partitionIdCriteria = from.get("myPartitionIdValue")
- .in(partitionIds.stream().filter(t -> t != null).collect(Collectors.toList()));
+ .in(partitionIds.stream().filter(Objects::nonNull).collect(Collectors.toList()));
return Optional.of(cb.or(partitionIdCriteria, partitionIdNullCriteria));
}
} else {
@@ -719,10 +529,14 @@ public PersistentIdToForcedIdMap translatePidsToForcedIds(Set th
}
/**
- * Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods
+ * This method can be called to pre-emptively add entries to the ID cache. It should
+ * be called by DAO methods if they are creating or changing the deleted status
+ * of a resource. This method returns immediately, but the data is not
+ * added to the internal caches until the current DB transaction is successfully
+ * committed, and nothing is added if the transaction rolls back.
*/
@Override
- public void addResolvedPidToFhirId(
+ public void addResolvedPidToFhirIdAfterCommit(
@Nonnull JpaPid theJpaPid,
@Nonnull RequestPartitionId theRequestPartitionId,
@Nonnull String theResourceType,
@@ -736,11 +550,9 @@ public void addResolvedPidToFhirId(
MemoryCacheService.CacheEnum.PID_TO_FORCED_ID,
theJpaPid.getId(),
Optional.of(theResourceType + "/" + theFhirId));
- String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theFhirId);
- myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theJpaPid);
JpaResourceLookup lookup = new JpaResourceLookup(
- theResourceType, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId());
+ theResourceType, theFhirId, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId());
MemoryCacheService.ForcedIdCacheKey fhirIdKey =
new MemoryCacheService.ForcedIdCacheKey(theResourceType, theFhirId, theRequestPartitionId);
@@ -763,13 +575,6 @@ public void setPartitionSettingsForUnitTest(PartitionSettings thePartitionSettin
myPartitionSettings = thePartitionSettings;
}
- @Override
- @Nonnull
- public List getPidsOrThrowException(
- @Nonnull RequestPartitionId theRequestPartitionId, List theIds) {
- return resolveResourcePersistentIdsWithCache(theRequestPartitionId, theIds);
- }
-
@Override
@Nullable
public JpaPid getPidOrNull(@Nonnull RequestPartitionId theRequestPartitionId, IBaseResource theResource) {
@@ -792,17 +597,6 @@ public JpaPid getPidOrNull(@Nonnull RequestPartitionId theRequestPartitionId, IB
return retVal;
}
- @Override
- @Nonnull
- public JpaPid getPidOrThrowException(@Nonnull RequestPartitionId theRequestPartitionId, IIdType theId) {
- List ids = Collections.singletonList(theId);
- List resourcePersistentIds = resolveResourcePersistentIdsWithCache(theRequestPartitionId, ids);
- if (resourcePersistentIds.isEmpty()) {
- throw new InvalidRequestException(Msg.code(2295) + "Invalid ID was provided: [" + theId.getIdPart() + "]");
- }
- return resourcePersistentIds.get(0);
- }
-
@Override
@Nonnull
public JpaPid getPidOrThrowException(@Nonnull IAnyResource theResource) {
@@ -861,15 +655,6 @@ private IIdType newIdType(String theValue) {
return retVal;
}
- public static boolean isValidPid(IIdType theId) {
- if (theId == null) {
- return false;
- }
-
- String idPart = theId.getIdPart();
- return isValidPid(idPart);
- }
-
public static boolean isValidPid(String theIdPart) {
return StringUtils.isNumeric(theIdPart);
}
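Illustrative sketch (not part of the patch itself): a hypothetical call into the resolution path shown above. A typed lookup is keyed on the qualified id, while an untyped id (for example a reference from /Provenance?target=A,B,C) is still resolved, with a warning that clients should qualify their ids:

    IResourceLookup<JpaPid> lookup = myIdHelperService.resolveResourceIdentity(
            RequestPartitionId.allPartitions(),
            "Patient", // pass null here for the untyped case described in the comment above
            "A",
            ResolveIdentityMode.includeDeleted().cacheOk());
    JpaPid pid = lookup.getPersistentId();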
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java
index a6efdc8fc3ee..dd97f814907e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java
@@ -22,6 +22,7 @@
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
+import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkJpaRepository;
import ca.uhn.fhir.jpa.entity.HapiFhirEnversRevision;
import ca.uhn.fhir.jpa.entity.MdmLink;
@@ -455,9 +456,13 @@ public List> getHistoryForIds(
@Nonnull
private List convertToLongIds(List theMdmHistorySearchParameters) {
return myIdHelperService
- .getPidsOrThrowException(RequestPartitionId.allPartitions(), theMdmHistorySearchParameters)
+ .resolveResourceIdentities(
+ RequestPartitionId.allPartitions(),
+ theMdmHistorySearchParameters,
+ ResolveIdentityMode.includeDeleted().cacheOk())
+ .values()
.stream()
- .map(JpaPid::getId)
+ .map(t -> t.getPersistentId().getId())
.collect(Collectors.toUnmodifiableList());
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java
index 8b0b17b7f9fd..42285f6f071c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java
@@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.jpa.dao.search;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor;
import org.hibernate.search.engine.search.query.SearchScroll;
import org.hibernate.search.engine.search.query.SearchScrollResult;
@@ -57,12 +58,12 @@ public boolean hasNext() {
}
@Override
- public Long next() {
+ public JpaPid next() {
Long result = myCurrentIterator.next();
// was this the last in the current scroll page?
if (!myCurrentIterator.hasNext()) {
advanceNextScrollPage();
}
- return result;
+ return JpaPid.fromId(result);
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java
deleted file mode 100644
index e53fa57ecad3..000000000000
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java
+++ /dev/null
@@ -1,244 +0,0 @@
-/*
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2024 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.jpa.entity;
-
-import ca.uhn.fhir.context.FhirVersionEnum;
-import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
-import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
-import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
-import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
-import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.model.primitive.IdDt;
-import ca.uhn.fhir.model.primitive.InstantDt;
-import ca.uhn.fhir.rest.api.Constants;
-import jakarta.annotation.Nullable;
-import jakarta.persistence.Column;
-import jakarta.persistence.Entity;
-import jakarta.persistence.EnumType;
-import jakarta.persistence.Enumerated;
-import jakarta.persistence.Id;
-import jakarta.persistence.Lob;
-import jakarta.persistence.Temporal;
-import jakarta.persistence.TemporalType;
-import org.hibernate.annotations.Immutable;
-import org.hibernate.annotations.Subselect;
-
-import java.io.Serializable;
-import java.util.Date;
-
-@SuppressWarnings("SqlDialectInspection")
-@Entity
-@Immutable
-// Ideally, all tables and columns should be in UPPERCASE if we ever choose to use a case-sensitive collation for MSSQL
-// and there's a risk that queries on lowercase database objects fail.
-@Subselect("SELECT h.PID as PID, "
- + " r.RES_ID as RES_ID, "
- + " h.RES_TYPE as RES_TYPE, "
- + " h.RES_VERSION as RES_VERSION, "
- // FHIR version
- + " h.RES_VER as RES_VER, "
- // resource version
- + " h.HAS_TAGS as HAS_TAGS, "
- + " h.RES_DELETED_AT as RES_DELETED_AT, "
- + " h.RES_PUBLISHED as RES_PUBLISHED, "
- + " h.RES_UPDATED as RES_UPDATED, "
- + " h.RES_TEXT as RES_TEXT, "
- + " h.RES_TEXT_VC as RES_TEXT_VC, "
- + " h.RES_ENCODING as RES_ENCODING, "
- + " h.PARTITION_ID as PARTITION_ID, "
- + " p.SOURCE_URI as PROV_SOURCE_URI,"
- + " p.REQUEST_ID as PROV_REQUEST_ID,"
- + " r.FHIR_ID as FHIR_ID "
- + "FROM HFJ_RESOURCE r "
- + " INNER JOIN HFJ_RES_VER h ON r.RES_ID = h.RES_ID and r.RES_VER = h.RES_VER"
- + " LEFT OUTER JOIN HFJ_RES_VER_PROV p ON p.RES_VER_PID = h.PID ")
-public class ResourceSearchView implements IBaseResourceEntity, Serializable {
-
- private static final long serialVersionUID = 1L;
-
- @Id
- @Column(name = "PID")
- private Long myId;
-
- @Column(name = "RES_ID")
- private Long myResourceId;
-
- @Column(name = "RES_TYPE", length = Constants.MAX_RESOURCE_NAME_LENGTH)
- private String myResourceType;
-
- @Column(name = "RES_VERSION")
- @Enumerated(EnumType.STRING)
- private FhirVersionEnum myFhirVersion;
-
- @Column(name = "RES_VER")
- private Long myResourceVersion;
-
- @Column(name = "PROV_REQUEST_ID", length = Constants.REQUEST_ID_LENGTH)
- private String myProvenanceRequestId;
-
- @Column(name = "PROV_SOURCE_URI", length = ResourceHistoryTable.SOURCE_URI_LENGTH)
- private String myProvenanceSourceUri;
-
- @Column(name = "HAS_TAGS")
- private boolean myHasTags;
-
- @Column(name = "RES_DELETED_AT")
- @Temporal(TemporalType.TIMESTAMP)
- private Date myDeleted;
-
- @Temporal(TemporalType.TIMESTAMP)
- @Column(name = "RES_PUBLISHED")
- private Date myPublished;
-
- @Temporal(TemporalType.TIMESTAMP)
- @Column(name = "RES_UPDATED")
- private Date myUpdated;
-
- @Column(name = "RES_TEXT")
- @Lob()
- private byte[] myResource;
-
- @Column(name = "RES_TEXT_VC")
- private String myResourceTextVc;
-
- @Column(name = "RES_ENCODING")
- @Enumerated(EnumType.STRING)
- private ResourceEncodingEnum myEncoding;
-
- @Column(name = "FHIR_ID", length = ResourceTable.MAX_FORCED_ID_LENGTH)
- private String myFhirId;
-
- @Column(name = "PARTITION_ID")
- private Integer myPartitionId;
-
- public ResourceSearchView() {
- // public constructor for Hibernate
- }
-
- public String getResourceTextVc() {
- return myResourceTextVc;
- }
-
- public String getProvenanceRequestId() {
- return myProvenanceRequestId;
- }
-
- public String getProvenanceSourceUri() {
- return myProvenanceSourceUri;
- }
-
- @Override
- public Date getDeleted() {
- return myDeleted;
- }
-
- public void setDeleted(Date theDate) {
- myDeleted = theDate;
- }
-
- @Override
- public FhirVersionEnum getFhirVersion() {
- return myFhirVersion;
- }
-
- public void setFhirVersion(FhirVersionEnum theFhirVersion) {
- myFhirVersion = theFhirVersion;
- }
-
- public String getFhirId() {
- return myFhirId;
- }
-
- @Override
- public Long getId() {
- return myResourceId;
- }
-
- @Override
- public IdDt getIdDt() {
- if (myFhirId == null) {
- Long id = myResourceId;
- return new IdDt(myResourceType + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
- } else {
- return new IdDt(getResourceType() + '/' + getFhirId() + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
- }
- }
-
- @Override
- public InstantDt getPublished() {
- if (myPublished != null) {
- return new InstantDt(myPublished);
- } else {
- return null;
- }
- }
-
- public void setPublished(Date thePublished) {
- myPublished = thePublished;
- }
-
- @Override
- public Long getResourceId() {
- return myResourceId;
- }
-
- @Override
- public String getResourceType() {
- return myResourceType;
- }
-
- @Override
- public InstantDt getUpdated() {
- return new InstantDt(myUpdated);
- }
-
- @Override
- public Date getUpdatedDate() {
- return myUpdated;
- }
-
- @Override
- public long getVersion() {
- return myResourceVersion;
- }
-
- @Override
- public boolean isHasTags() {
- return myHasTags;
- }
-
- @Override
- @Nullable
- public PartitionablePartitionId getPartitionId() {
- if (myPartitionId != null) {
- return new PartitionablePartitionId(myPartitionId, null);
- } else {
- return null;
- }
- }
-
- public byte[] getResource() {
- return myResource;
- }
-
- public ResourceEncodingEnum getEncoding() {
- return myEncoding;
- }
-}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java
index d20f61a7c6ba..5691eb52902c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java
@@ -29,16 +29,34 @@
public class JpaResourceLookup implements IResourceLookup {
private final String myResourceType;
- private final Long myResourcePid;
+ private final JpaPid myResourcePid;
private final Date myDeletedAt;
private final PartitionablePartitionId myPartitionablePartitionId;
+ private final String myFhirId;
public JpaResourceLookup(
String theResourceType,
+ String theFhirId,
Long theResourcePid,
Date theDeletedAt,
PartitionablePartitionId thePartitionablePartitionId) {
myResourceType = theResourceType;
+ myFhirId = theFhirId;
+ myDeletedAt = theDeletedAt;
+ myPartitionablePartitionId = thePartitionablePartitionId;
+
+ myResourcePid = JpaPid.fromId(theResourcePid);
+ myResourcePid.setPartitionablePartitionId(myPartitionablePartitionId);
+ }
+
+ public JpaResourceLookup(
+ String theResourceType,
+ String theFhirId,
+ JpaPid theResourcePid,
+ Date theDeletedAt,
+ PartitionablePartitionId thePartitionablePartitionId) {
+ myResourceType = theResourceType;
+ myFhirId = theFhirId;
myResourcePid = theResourcePid;
myDeletedAt = theDeletedAt;
myPartitionablePartitionId = thePartitionablePartitionId;
@@ -49,6 +67,11 @@ public String getResourceType() {
return myResourceType;
}
+ @Override
+ public String getFhirId() {
+ return myFhirId;
+ }
+
@Override
public Date getDeleted() {
return myDeletedAt;
@@ -56,10 +79,7 @@ public Date getDeleted() {
@Override
public JpaPid getPersistentId() {
- JpaPid jpaPid = JpaPid.fromId(myResourcePid);
- jpaPid.setPartitionablePartitionId(myPartitionablePartitionId);
-
- return jpaPid;
+ return myResourcePid;
}
@Override
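Illustrative sketch (not part of the patch itself): callers now supply the FHIR id up front and the JpaPid is built once in the constructor rather than on every getPersistentId() call. A hypothetical construction using invented values:

    JpaResourceLookup lookup = new JpaResourceLookup(
            "Patient",                               // resource type
            "example-patient",                       // FHIR id
            123L,                                    // resource pid
            null,                                    // not deleted
            PartitionablePartitionId.with(1, null)); // partition
    JpaPid pid = lookup.getPersistentId();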
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java
index 18f36ca0fb72..46fcec9ee224 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java
@@ -559,7 +559,7 @@ public NpmPackage installPackage(PackageInstallationSpec theInstallationSpec) th
}
@Override
- @Transactional
+ @Transactional(readOnly = true)
public IBaseResource loadPackageAssetByUrl(FhirVersionEnum theFhirVersion, String theCanonicalUrl) {
String canonicalUrl = theCanonicalUrl;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java
index 77f4f7e3b1ce..bc5eaf628783 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java
@@ -22,6 +22,7 @@
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.apache.commons.lang3.Validate;
@@ -36,17 +37,27 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc {
@Autowired
IPartitionLookupSvc myPartitionConfigSvc;
+ @Autowired
+ PartitionSettings myPartitionSettings;
+
public RequestPartitionHelperSvc() {}
@Override
public RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId) {
List names = null;
+ List partitionIds = null;
for (int i = 0; i < theRequestPartitionId.getPartitionIds().size(); i++) {
PartitionEntity partition;
Integer id = theRequestPartitionId.getPartitionIds().get(i);
if (id == null) {
partition = null;
+ if (myPartitionSettings.getDefaultPartitionId() != null) {
+ if (partitionIds == null) {
+ partitionIds = new ArrayList<>(theRequestPartitionId.getPartitionIds());
+ }
+ partitionIds.set(i, myPartitionSettings.getDefaultPartitionId());
+ }
} else {
try {
partition = myPartitionConfigSvc.getPartitionById(id);
@@ -88,8 +99,12 @@ public RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId th
}
if (names != null) {
+ List partitionIdsToUse = theRequestPartitionId.getPartitionIds();
+ if (partitionIds != null) {
+ partitionIdsToUse = partitionIds;
+ }
return RequestPartitionId.forPartitionIdsAndNames(
- names, theRequestPartitionId.getPartitionIds(), theRequestPartitionId.getPartitionDate());
+ names, partitionIdsToUse, theRequestPartitionId.getPartitionDate());
}
return theRequestPartitionId;
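Illustrative sketch (not part of the patch itself): the normalization added above substitutes a configured default partition id for null entries before partition names are resolved. A hedged sketch, assuming PartitionSettings exposes a setter mirroring getDefaultPartitionId():

    PartitionSettings partitionSettings = new PartitionSettings();
    partitionSettings.setDefaultPartitionId(0);
    // A request carrying partition ids [null, 2] would then be normalized to [0, 2].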
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java
index a03d51791dd1..24498b0fd26e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java
@@ -24,11 +24,13 @@
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.builder.tasks.SearchTask;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.HistorySearchStyleEnum;
+import jakarta.annotation.Nullable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
@@ -56,7 +58,7 @@ public PersistedJpaBundleProvider newInstance(RequestDetails theRequest, Search
public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(
RequestDetails theRequestDetails,
SearchTask theTask,
- ISearchBuilder theSearchBuilder,
+ ISearchBuilder theSearchBuilder,
RequestPartitionId theRequestPartitionId) {
return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(
JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER,
@@ -69,7 +71,7 @@ public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(
public IBundleProvider history(
RequestDetails theRequest,
String theResourceType,
- Long theResourcePid,
+ @Nullable JpaPid theResourcePid,
Date theRangeStartInclusive,
Date theRangeEndInclusive,
Integer theOffset,
@@ -88,7 +90,7 @@ public IBundleProvider history(
public IBundleProvider history(
RequestDetails theRequest,
String theResourceType,
- Long theResourcePid,
+ @Nullable JpaPid theResourcePid,
Date theRangeStartInclusive,
Date theRangeEndInclusive,
Integer theOffset,
@@ -103,7 +105,9 @@ public IBundleProvider history(
search.setLastUpdated(theRangeStartInclusive, theRangeEndInclusive);
search.setUuid(UUID.randomUUID().toString());
search.setResourceType(resourceName);
- search.setResourceId(theResourcePid);
+ if (theResourcePid != null) {
+ search.setResourceId(theResourcePid.getId());
+ }
search.setSearchType(SearchTypeEnum.HISTORY);
search.setStatus(SearchStatusEnum.FINISHED);
search.setHistorySearchStyle(searchParameterType);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java
index d4e336ab8705..3498e961a6f3 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java
@@ -23,6 +23,7 @@
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@@ -34,6 +35,7 @@
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
+import java.util.List;
/**
* This service ensures uniqueness of resources during create or create-on-update
@@ -80,8 +82,12 @@ public void deleteEntriesOlderThan(Date theCutoffDate) {
* Once a resource is updated or deleted, we can trust that future match checks will find the committed resource in the db.
* The use of the constraint table is done, and we can delete it to keep the table small.
*/
- public void deleteByResId(long theResId) {
- myResourceSearchUrlDao.deleteByResId(theResId);
+ public void deleteByResId(JpaPid theResId) {
+ myResourceSearchUrlDao.deleteByResId(theResId.getId());
+ }
+
+ public void deleteByResIds(List theResIds) {
+ myResourceSearchUrlDao.deleteByResIds(theResIds);
}
/**
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java
index 5ea034496cb4..452a7a11c2f3 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/ISearchQueryExecutor.java
@@ -19,10 +19,12 @@
*/
package ca.uhn.fhir.jpa.search.builder;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
+
import java.io.Closeable;
import java.util.Iterator;
-public interface ISearchQueryExecutor extends Iterator<Long>, Closeable {
+public interface ISearchQueryExecutor extends Iterator<JpaPid>, Closeable {
/**
* Narrow the signature - no IOException allowed.
*/
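
For illustration only (not part of the diff): a minimal sketch of an implementation of the reworked interface, iterating JpaPid values held in memory. The class name here is hypothetical; the in-tree ResolvedSearchQueryExecutor referenced later in this patch plays a similar role.

    // Hypothetical example class, not part of this change
    class ListBackedSearchQueryExecutor implements ISearchQueryExecutor {

        private final java.util.Iterator<JpaPid> myIterator;

        ListBackedSearchQueryExecutor(java.util.List<JpaPid> thePids) {
            myIterator = thePids.iterator();
        }

        @Override
        public boolean hasNext() {
            return myIterator.hasNext();
        }

        @Override
        public JpaPid next() {
            return myIterator.next();
        }

        @Override
        public void close() {
            // Nothing to release; note the narrowed signature - no checked IOException
        }
    }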
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/JpaPidRowMapper.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/JpaPidRowMapper.java
new file mode 100644
index 000000000000..b36c02bfeefe
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/JpaPidRowMapper.java
@@ -0,0 +1,47 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.search.builder;
+
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
+import org.springframework.jdbc.core.RowMapper;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+public class JpaPidRowMapper implements RowMapper<JpaPid> {
+
+ private final boolean mySelectPartitionId;
+
+ public JpaPidRowMapper(boolean theSelectPartitionId) {
+ mySelectPartitionId = theSelectPartitionId;
+ }
+
+ @Override
+ public JpaPid mapRow(ResultSet theResultSet, int theRowNum) throws SQLException {
+ if (mySelectPartitionId) {
+ Integer partitionId = theResultSet.getObject(1, Integer.class);
+ Long resourceId = theResultSet.getLong(2);
+ return JpaPid.fromId(resourceId, partitionId);
+ } else {
+ Long resourceId = theResultSet.getLong(1);
+ return JpaPid.fromId(resourceId);
+ }
+ }
+}
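
For illustration only (not part of the diff): the new mapper is intended to be handed to Spring's JdbcTemplate, as the SearchBuilder change further down does. A minimal usage sketch; the SQL, table, and column names are assumptions that simply mirror the mapper's contract (partition id first, resource id second, when partition ids are selected).

    // Hypothetical usage sketch; jdbcTemplate and partitionSettings are assumed to be in scope
    boolean selectPartitionId = partitionSettings.isPartitioningEnabled();
    String sql = selectPartitionId
            ? "SELECT partition_id, res_id FROM hfj_resource WHERE res_type = ?"
            : "SELECT res_id FROM hfj_resource WHERE res_type = ?";
    java.util.List<JpaPid> pids =
            jdbcTemplate.query(sql, new JpaPidRowMapper(selectPartitionId), "Patient");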
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java
index e7d688624d22..7f7c7dbbd29c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java
@@ -28,6 +28,7 @@
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
@@ -44,13 +45,13 @@
import ca.uhn.fhir.jpa.search.builder.predicate.CoordsPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ICanMakeMissingParamPredicate;
+import ca.uhn.fhir.jpa.search.builder.predicate.ISourcePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ParsedLocationParam;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder;
-import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder;
@@ -151,6 +152,7 @@ public class QueryStack {
private final PartitionSettings myPartitionSettings;
private final JpaStorageSettings myStorageSettings;
private final EnumSet myReusePredicateBuilderTypes;
+ private final RequestDetails myRequestDetails;
private Map myJoinMap;
private Map myParamNameToPredicateBuilderMap;
// used for _offset queries with sort, should be removed once the fix is applied to the async path too.
@@ -161,6 +163,7 @@ public class QueryStack {
* Constructor
*/
public QueryStack(
+ RequestDetails theRequestDetails,
SearchParameterMap theSearchParameters,
JpaStorageSettings theStorageSettings,
FhirContext theFhirContext,
@@ -168,6 +171,7 @@ public QueryStack(
ISearchParamRegistry theSearchParamRegistry,
PartitionSettings thePartitionSettings) {
this(
+ theRequestDetails,
theSearchParameters,
theStorageSettings,
theFhirContext,
@@ -181,6 +185,7 @@ public QueryStack(
* Constructor
*/
private QueryStack(
+ RequestDetails theRequestDetails,
SearchParameterMap theSearchParameters,
JpaStorageSettings theStorageSettings,
FhirContext theFhirContext,
@@ -188,6 +193,7 @@ private QueryStack(
ISearchParamRegistry theSearchParamRegistry,
PartitionSettings thePartitionSettings,
EnumSet theReusePredicateBuilderTypes) {
+ myRequestDetails = theRequestDetails;
myPartitionSettings = thePartitionSettings;
assert theSearchParameters != null;
assert theStorageSettings != null;
@@ -1035,7 +1041,6 @@ private Condition createPredicateFilter(
searchParam,
Collections.singletonList(new UriParam(theFilter.getValue())),
theFilter.getOperation(),
- theRequest,
theRequestPartitionId);
} else if (typeEnum == RestSearchParameterTypeEnum.STRING) {
return theQueryStack3.createPredicateString(
@@ -1220,7 +1225,6 @@ private Condition createPredicateHas(
ResourceLinkPredicateBuilder resourceLinkTableJoin =
mySqlBuilder.addReferencePredicateBuilderReversed(this, theSourceJoinColumn);
- Condition partitionPredicate = resourceLinkTableJoin.createPartitionIdPredicate(theRequestPartitionId);
List paths = resourceLinkTableJoin.createResourceLinkPaths(
targetResourceType, paramReference, new ArrayList<>());
@@ -1242,7 +1246,12 @@ private Condition createPredicateHas(
.setRequest(theRequest)
.setRequestPartitionId(theRequestPartitionId));
- andPredicates.add(toAndPredicate(partitionPredicate, pathPredicate, typePredicate, linkedPredicate));
+ if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) {
+ andPredicates.add(toAndPredicate(pathPredicate, typePredicate, linkedPredicate));
+ } else {
+ Condition partitionPredicate = resourceLinkTableJoin.createPartitionIdPredicate(theRequestPartitionId);
+ andPredicates.add(toAndPredicate(partitionPredicate, pathPredicate, typePredicate, linkedPredicate));
+ }
}
return toAndPredicate(andPredicates);
@@ -1889,7 +1898,6 @@ private Condition createIndexPredicate(
theParamDefinition,
theOrValues,
theOperation,
- theRequest,
theRequestPartitionId,
theSqlBuilder);
break;
@@ -1954,13 +1962,13 @@ private Condition createPredicateSource(
.findFirst();
if (isMissingSourceOptional.isPresent()) {
- SourcePredicateBuilder join =
+ ISourcePredicateBuilder join =
getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.LEFT_OUTER);
orPredicates.add(join.createPredicateMissingSourceUri());
return toOrPredicate(orPredicates);
}
// for all other cases we use "INNER JOIN" to match search parameters
- SourcePredicateBuilder join = getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.INNER);
+ ISourcePredicateBuilder join = getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.INNER);
for (IQueryParameterType nextParameter : theList) {
SourceParam sourceParameter = new SourceParam(nextParameter.getValueAsQueryToken(myFhirContext));
@@ -1980,13 +1988,22 @@ private Condition createPredicateSource(
return toOrPredicate(orPredicates);
}
- private SourcePredicateBuilder getSourcePredicateBuilder(
+ private ISourcePredicateBuilder getSourcePredicateBuilder(
@Nullable DbColumn[] theSourceJoinColumn, SelectQuery.JoinType theJoinType) {
+ if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
+ return createOrReusePredicateBuilder(
+ PredicateBuilderTypeEnum.SOURCE,
+ theSourceJoinColumn,
+ Constants.PARAM_SOURCE,
+ () -> mySqlBuilder.addResourceHistoryProvenancePredicateBuilder(
+ theSourceJoinColumn, theJoinType))
+ .getResult();
+ }
return createOrReusePredicateBuilder(
PredicateBuilderTypeEnum.SOURCE,
theSourceJoinColumn,
Constants.PARAM_SOURCE,
- () -> mySqlBuilder.addSourcePredicateBuilder(theSourceJoinColumn, theJoinType))
+ () -> mySqlBuilder.addResourceHistoryPredicateBuilder(theSourceJoinColumn, theJoinType))
.getResult();
}
@@ -2321,7 +2338,6 @@ public Condition createPredicateUri(
RuntimeSearchParam theSearchParam,
List<? extends IQueryParameterType> theList,
SearchFilterParser.CompareOperation theOperation,
- RequestDetails theRequestDetails,
RequestPartitionId theRequestPartitionId) {
return createPredicateUri(
theSourceJoinColumn,
@@ -2330,7 +2346,6 @@ public Condition createPredicateUri(
theSearchParam,
theList,
theOperation,
- theRequestDetails,
theRequestPartitionId,
mySqlBuilder);
}
@@ -2342,7 +2357,6 @@ public Condition createPredicateUri(
RuntimeSearchParam theSearchParam,
List<? extends IQueryParameterType> theList,
SearchFilterParser.CompareOperation theOperation,
- RequestDetails theRequestDetails,
RequestPartitionId theRequestPartitionId,
SearchQueryBuilder theSqlBuilder) {
@@ -2361,13 +2375,14 @@ public Condition createPredicateUri(
} else {
UriPredicateBuilder join = theSqlBuilder.addUriPredicateBuilder(theSourceJoinColumn);
- Condition predicate = join.addPredicate(theList, paramName, theOperation, theRequestDetails);
+ Condition predicate = join.addPredicate(theList, paramName, theOperation, myRequestDetails);
return join.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
}
}
public QueryStack newChildQueryFactoryWithFullBuilderReuse() {
return new QueryStack(
+ myRequestDetails,
mySearchParameters,
myStorageSettings,
myFhirContext,
@@ -2452,7 +2467,6 @@ public Condition searchForIdsWithAndOr(SearchForIdsParams theSearchForIdsParams)
*/
private Condition createPredicateResourcePID(
DbColumn[] theSourceJoinColumn, List> theAndOrParams) {
-
DbColumn pidColumn = getResourceIdColumn(theSourceJoinColumn);
if (pidColumn == null) {
@@ -2662,7 +2676,6 @@ private Condition createPredicateSearchParameter(
nextParamDef,
nextAnd,
SearchFilterParser.CompareOperation.eq,
- theRequest,
theRequestPartitionId));
}
break;
@@ -2871,12 +2884,13 @@ public void addPredicateCompositeNonUnique(List theIndexStrings, Request
// expand out the pids
public void addPredicateEverythingOperation(
- String theResourceName, List<String> theTypeSourceResourceNames, Long... theTargetPids) {
+ String theResourceName, List<String> theTypeSourceResourceNames, JpaPid... theTargetPids) {
ResourceLinkPredicateBuilder table = mySqlBuilder.addReferencePredicateBuilder(this, null);
Condition predicate =
table.createEverythingPredicate(theResourceName, theTypeSourceResourceNames, theTargetPids);
mySqlBuilder.addPredicate(predicate);
mySqlBuilder.getSelect().setIsDistinct(true);
+ addGrouping();
}
public IQueryParameterType newParameterInstance(
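
For context (not part of the diff): getSourcePredicateBuilder() above now picks its join target from a storage setting, so Resource.meta.source (_source) searches hit the provenance table only when that mode is enabled. A minimal configuration sketch, assuming the setter mirrors the isAccessMetaSourceInformationFromProvenanceTable() getter used in the patch.

    // Hypothetical configuration sketch; the setter name is assumed to match the getter
    JpaStorageSettings storageSettings = new JpaStorageSettings();
    // true  -> _source predicates join the legacy provenance table (HFJ_RES_VER_PROV)
    // false -> _source predicates join the resource history table instead
    storageSettings.setAccessMetaSourceInformationFromProvenanceTable(false);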
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
index 088703dbb828..790a8d5ddc4b 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
@@ -33,7 +33,6 @@
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
@@ -43,20 +42,23 @@
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
-import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException;
-import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
-import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
+import ca.uhn.fhir.jpa.model.entity.BaseTag;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
@@ -98,11 +100,14 @@
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
+import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.StringUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
+import com.google.common.collect.MultimapBuilder;
import com.healthmarketscience.sqlbuilder.Condition;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
@@ -113,6 +118,7 @@
import jakarta.persistence.Tuple;
import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaBuilder;
+import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.math.NumberUtils;
@@ -124,11 +130,8 @@
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
-import org.springframework.jdbc.core.RowMapper;
import org.springframework.transaction.support.TransactionSynchronizationManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -168,14 +171,19 @@ public class SearchBuilder implements ISearchBuilder {
public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;
public static final String RESOURCE_ID_ALIAS = "resource_id";
+ public static final String PARTITION_ID_ALIAS = "partition_id";
public static final String RESOURCE_VERSION_ALIAS = "resource_version";
private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
private static final JpaPid NO_MORE = JpaPid.fromId(-1L);
- private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid";
private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid";
- private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType";
+ private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue";
private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType";
+ private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid";
+ private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId";
+ private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType";
private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion";
+ public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0];
+ public static boolean myUseMaxPageSize50ForTest = false;
public static Integer myMaxPageSizeForTests = null;
protected final IInterceptorBroadcaster myInterceptorBroadcaster;
protected final IResourceTagDao myResourceTagDao;
@@ -187,7 +195,6 @@ public class SearchBuilder implements ISearchBuilder {
private final ISearchParamRegistry mySearchParamRegistry;
private final PartitionSettings myPartitionSettings;
private final DaoRegistry myDaoRegistry;
- private final IResourceSearchViewDao myResourceSearchViewDao;
private final FhirContext myContext;
private final IIdHelperService myIdHelperService;
private final JpaStorageSettings myStorageSettings;
@@ -213,6 +220,15 @@ public class SearchBuilder implements ISearchBuilder {
@Autowired
private IJpaStorageResourceParser myJpaStorageResourceParser;
+ @Autowired
+ private IResourceHistoryTableDao myResourceHistoryTableDao;
+
+ @Autowired
+ private IResourceHistoryTagDao myResourceHistoryTagDao;
+
+ @Autowired
+ private IRequestPartitionHelperSvc myPartitionHelperSvc;
+
/**
* Constructor
*/
@@ -228,7 +244,6 @@ public SearchBuilder(
IInterceptorBroadcaster theInterceptorBroadcaster,
IResourceTagDao theResourceTagDao,
DaoRegistry theDaoRegistry,
- IResourceSearchViewDao theResourceSearchViewDao,
FhirContext theContext,
IIdHelperService theIdHelperService,
Class<? extends IBaseResource> theResourceType) {
@@ -244,7 +259,6 @@ public SearchBuilder(
myInterceptorBroadcaster = theInterceptorBroadcaster;
myResourceTagDao = theResourceTagDao;
myDaoRegistry = theDaoRegistry;
- myResourceSearchViewDao = theResourceSearchViewDao;
myContext = theContext;
myIdHelperService = theIdHelperService;
}
@@ -339,7 +353,8 @@ public Long createCountQuery(
if (queries.isEmpty()) {
return 0L;
} else {
- return queries.get(0).next();
+ JpaPid jpaPid = queries.get(0).next();
+ return jpaPid.getId();
}
}
@@ -462,7 +477,7 @@ private List createQuery(
ourLog.trace("Query needs db after HSearch. Chunking.");
// Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc.
// We break the pids into chunks that fit in the 1k limit for jdbc bind params.
- new QueryChunker<Long>()
+ new QueryChunker<JpaPid>()
.chunk(
fulltextExecutor,
SearchBuilder.getMaximumPageSize(),
@@ -568,7 +583,7 @@ private List queryHibernateSearchForEverythingPids(RequestDetails theReq
private void doCreateChunkedQueries(
SearchParameterMap theParams,
- List<Long> thePids,
+ List<JpaPid> thePids,
Integer theOffset,
SortSpec sort,
boolean theCount,
@@ -584,7 +599,7 @@ private void doCreateChunkedQueries(
/**
* Combs through the params for any _id parameters and extracts the PIDs for them
*/
- private void extractTargetPidsFromIdParams(Set<Long> theTargetPids) {
+ private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) {
// get all the IQueryParameterType objects
// for _id -> these should all be StringParam values
HashSet ids = new HashSet<>();
@@ -621,8 +636,8 @@ private void extractTargetPidsFromIdParams(Set theTargetPids) {
ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled());
// add the pids to targetPids
- for (IResourceLookup pid : idToIdentity.values()) {
- theTargetPids.add((Long) pid.getPersistentId().getId());
+ for (IResourceLookup<JpaPid> pid : idToIdentity.values()) {
+ theTargetPids.add(pid.getPersistentId());
}
}
@@ -633,11 +648,17 @@ private void createChunkedQuery(
Integer theMaximumResults,
boolean theCountOnlyFlag,
RequestDetails theRequest,
- List<Long> thePidList,
+ List<JpaPid> thePidList,
List theSearchQueryExecutors) {
if (myParams.getEverythingMode() != null) {
createChunkedQueryForEverythingSearch(
- theParams, theOffset, theMaximumResults, theCountOnlyFlag, thePidList, theSearchQueryExecutors);
+ theRequest,
+ theParams,
+ theOffset,
+ theMaximumResults,
+ theCountOnlyFlag,
+ thePidList,
+ theSearchQueryExecutors);
} else {
createChunkedQueryNormalSearch(
theParams, sort, theOffset, theCountOnlyFlag, theRequest, thePidList, theSearchQueryExecutors);
@@ -650,7 +671,7 @@ private void createChunkedQueryNormalSearch(
Integer theOffset,
boolean theCountOnlyFlag,
RequestDetails theRequest,
- List<Long> thePidList,
+ List<JpaPid> thePidList,
List theSearchQueryExecutors) {
SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
myContext,
@@ -662,7 +683,13 @@ private void createChunkedQueryNormalSearch(
myDialectProvider,
theCountOnlyFlag);
QueryStack queryStack3 = new QueryStack(
- theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
+ theRequest,
+ theParams,
+ myStorageSettings,
+ myContext,
+ sqlBuilder,
+ mySearchParamRegistry,
+ myPartitionSettings);
if (theParams.keySet().size() > 1
|| theParams.getSort() != null
@@ -768,11 +795,12 @@ private void executeSearch(
}
private void createChunkedQueryForEverythingSearch(
+ RequestDetails theRequest,
SearchParameterMap theParams,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
- List<Long> thePidList,
+ List<JpaPid> thePidList,
List theSearchQueryExecutors) {
SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
@@ -786,11 +814,17 @@ private void createChunkedQueryForEverythingSearch(
theCountOnlyFlag);
QueryStack queryStack3 = new QueryStack(
- theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
+ theRequest,
+ theParams,
+ myStorageSettings,
+ myContext,
+ sqlBuilder,
+ mySearchParamRegistry,
+ myPartitionSettings);
JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theMaximumResults);
- Set<Long> targetPids = new HashSet<>();
+ Set<JpaPid> targetPids = new HashSet<>();
if (myParams.get(IAnyResource.SP_RES_ID) != null) {
extractTargetPidsFromIdParams(targetPids);
@@ -816,16 +850,8 @@ private void createChunkedQueryForEverythingSearch(
String sql = allTargetsSql.getSql();
Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);
- List<Long> output = jdbcTemplate.query(sql, args, new RowMapper<Long>() {
- @Override
- public Long mapRow(ResultSet rs, int rowNum) throws SQLException {
- if (myPartitionSettings.isPartitioningEnabled()) {
- return rs.getLong(2);
- } else {
- return rs.getLong(1);
- }
- }
- });
+ List<JpaPid> output =
+ jdbcTemplate.query(sql, args, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled()));
// we add a search executor to fetch unlinked patients first
theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output));
@@ -837,7 +863,7 @@ public Long mapRow(ResultSet rs, int rowNum) throws SQLException {
}
queryStack3.addPredicateEverythingOperation(
- myResourceName, typeSourceResources, targetPids.toArray(new Long[0]));
+ myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY));
// Add PID list predicate for full text search and/or lastn operation
addPidListPredicate(thePidList, sqlBuilder);
@@ -858,7 +884,7 @@ public Long mapRow(ResultSet rs, int rowNum) throws SQLException {
executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder);
}
- private void addPidListPredicate(List<Long> thePidList, SearchQueryBuilder theSqlBuilder) {
+ private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) {
if (thePidList != null && !thePidList.isEmpty()) {
theSqlBuilder.addResourceIdsPredicate(thePidList);
}
@@ -1142,31 +1168,55 @@ private void doLoadPids(
Collection theIncludedPids,
List theResourceListToPopulate,
boolean theForHistoryOperation,
- Map<JpaPid, Integer> thePosition) {
+ Map<Long, Integer> thePosition) {
- Map<Long, Long> resourcePidToVersion = null;
+ Map<JpaPid, Long> resourcePidToVersion = null;
for (JpaPid next : thePids) {
if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) {
if (resourcePidToVersion == null) {
resourcePidToVersion = new HashMap<>();
}
- resourcePidToVersion.put((next).getId(), next.getVersion());
+ resourcePidToVersion.put(next, next.getVersion());
}
}
- List<Long> versionlessPids = JpaPid.toLongList(thePids);
+ List<JpaPid> versionlessPids = new ArrayList<>(thePids);
if (versionlessPids.size() < getMaximumPageSize()) {
versionlessPids = normalizeIdListForInClause(versionlessPids);
}
- // -- get the resource from the searchView
- Collection<ResourceSearchView> resourceSearchViewList =
- myResourceSearchViewDao.findByResourceIds(versionlessPids);
+ // Load the resource bodies
+ List<ResourceHistoryTable> resourceSearchViewList =
+ myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable(
+ JpaPid.toLongList(versionlessPids));
+
+ /*
+ * If we have specific versions to load, replace the history entries with the
+ * correct ones
+ *
+ * TODO: this could definitely be made more efficient, probably by not loading the wrong
+ * version entity first, and by batching the fetches. But this is a fairly infrequently
+ * used feature, and loading history entities by PK is a very efficient query so it's
+ * not the end of the world
+ */
+ if (resourcePidToVersion != null) {
+ for (int i = 0; i < resourceSearchViewList.size(); i++) {
+ ResourceHistoryTable next = resourceSearchViewList.get(i);
+ JpaPid resourceId = next.getPersistentId();
+ Long version = resourcePidToVersion.get(resourceId);
+ resourceId.setVersion(version);
+ if (version != null && !version.equals(next.getVersion())) {
+ ResourceHistoryTable replacement =
+ myResourceHistoryTableDao.findForIdAndVersion(next.getResourceId(), version);
+ resourceSearchViewList.set(i, replacement);
+ }
+ }
+ }
// -- preload all tags with tag definition if any
- Map<Long, Collection<ResourceTag>> tagMap = getResourceTagMap(resourceSearchViewList);
+ Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList);
- for (IBaseResourceEntity next : resourceSearchViewList) {
+ for (ResourceHistoryTable next : resourceSearchViewList) {
if (next.getDeleted() != null) {
continue;
}
@@ -1174,29 +1224,17 @@ private void doLoadPids(
Class<? extends IBaseResource> resourceType =
myContext.getResourceDefinition(next.getResourceType()).getImplementingClass();
- JpaPid resourceId = JpaPid.fromId(next.getResourceId());
+ JpaPid resourceId = next.getPersistentId();
- /*
- * If a specific version is requested via an include, we'll replace the current version
- * with the specific desired version. This is not the most efficient thing, given that
- * we're loading the current version and then turning around and throwing it away again.
- * This could be optimized and probably should be, but it's not critical given that
- * this only applies to includes, which don't tend to be massive in numbers.
- */
if (resourcePidToVersion != null) {
- Long version = resourcePidToVersion.get(next.getResourceId());
+ Long version = resourcePidToVersion.get(resourceId);
resourceId.setVersion(version);
- if (version != null && !version.equals(next.getVersion())) {
- IFhirResourceDao<? extends IBaseResource> dao = myDaoRegistry.getResourceDao(resourceType);
- next = (IBaseResourceEntity)
- dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null);
- }
}
IBaseResource resource = null;
if (next != null) {
resource = myJpaStorageResourceParser.toResource(
- resourceType, next, tagMap.get(next.getId()), theForHistoryOperation);
+ resourceType, next, tagMap.get(JpaPid.fromId(next.getResourceId())), theForHistoryOperation);
}
if (resource == null) {
if (next != null) {
@@ -1211,7 +1249,7 @@ private void doLoadPids(
continue;
}
- Integer index = thePosition.get(resourceId);
+ Integer index = thePosition.get(resourceId.getId());
if (index == null) {
ourLog.warn("Got back unexpected resource PID {}", resourceId);
continue;
@@ -1227,40 +1265,93 @@ private void doLoadPids(
}
}
- private Map<Long, Collection<ResourceTag>> getResourceTagMap(
- Collection<? extends IBaseResourceEntity> theResourceSearchViewList) {
+ private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) {
- List idList = new ArrayList<>(theResourceSearchViewList.size());
+ switch (myStorageSettings.getTagStorageMode()) {
+ case VERSIONED:
+ return getPidToTagMapVersioned(theHistoryTables);
+ case NON_VERSIONED:
+ return getPidToTagMapUnversioned(theHistoryTables);
+ case INLINE:
+ default:
+ return Map.of();
+ }
+ }
+
+ @Nonnull
+ private Map> getPidToTagMapVersioned(
+ Collection theHistoryTables) {
+ List idList = new ArrayList<>(theHistoryTables.size());
// -- find all resource has tags
- for (IBaseResourceEntity resource : theResourceSearchViewList) {
- if (resource.isHasTags()) idList.add(resource.getId());
+ for (ResourceHistoryTable resource : theHistoryTables) {
+ if (resource.isHasTags()) {
+ idList.add(resource.getId());
+ }
+ }
+
+ Map> tagMap = new HashMap<>();
+
+ // -- no tags
+ if (idList.isEmpty()) {
+ return tagMap;
+ }
+
+ // -- get all tags for the idList
+ Collection tagList = myResourceHistoryTagDao.findByVersionIds(idList);
+
+ // -- build the map, key = resourceId, value = list of ResourceTag
+ JpaPid resourceId;
+ Collection tagCol;
+ for (ResourceHistoryTag tag : tagList) {
+
+ resourceId = JpaPid.fromId(tag.getResourceId());
+ tagCol = tagMap.get(resourceId);
+ if (tagCol == null) {
+ tagCol = new ArrayList<>();
+ tagCol.add(tag);
+ tagMap.put(resourceId, tagCol);
+ } else {
+ tagCol.add(tag);
+ }
}
- return getPidToTagMap(idList);
+ return tagMap;
}
@Nonnull
- private Map<Long, Collection<ResourceTag>> getPidToTagMap(List<Long> thePidList) {
- Map<Long, Collection<ResourceTag>> tagMap = new HashMap<>();
+ private Map> getPidToTagMapUnversioned(
+ Collection theHistoryTables) {
+ List idList = new ArrayList<>(theHistoryTables.size());
+
+ // -- find all resource has tags
+ for (ResourceHistoryTable resource : theHistoryTables) {
+ if (resource.isHasTags()) {
+ idList.add(JpaPid.fromId(resource.getResourceId()));
+ }
+ }
+
+ Map> tagMap = new HashMap<>();
// -- no tags
- if (thePidList.isEmpty()) return tagMap;
+ if (idList.isEmpty()) {
+ return tagMap;
+ }
// -- get all tags for the idList
- Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(thePidList);
+ Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(JpaPid.toLongList(idList));
// -- build the map, key = resourceId, value = list of ResourceTag
JpaPid resourceId;
- Collection<ResourceTag> tagCol;
+ Collection<BaseTag> tagCol;
for (ResourceTag tag : tagList) {
resourceId = JpaPid.fromId(tag.getResourceId());
- tagCol = tagMap.get(resourceId.getId());
+ tagCol = tagMap.get(resourceId);
if (tagCol == null) {
tagCol = new ArrayList<>();
tagCol.add(tag);
- tagMap.put(resourceId.getId(), tagCol);
+ tagMap.put(resourceId, tagCol);
} else {
tagCol.add(tag);
}
@@ -1284,9 +1375,9 @@ public void loadResourcesByPid(
// when running asserts
assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids;
- Map<JpaPid, Integer> position = new HashMap<>();
+ Map<Long, Integer> position = new HashMap<>();
for (JpaPid next : thePids) {
- position.put(next, theResourceListToPopulate.size());
+ position.put(next.getId(), theResourceListToPopulate.size());
theResourceListToPopulate.add(null);
}
@@ -1402,7 +1493,11 @@ public Set loadIncludes(SearchBuilderLoadIncludesParameters theP
return new HashSet<>();
}
String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID;
+ String searchPartitionIdFieldName =
+ reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID;
String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID;
+ String findPartitionIdFieldName =
+ reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID;
String findResourceTypeFieldName = reverseMode ? MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE;
String findVersionFieldName = null;
if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) {
@@ -1444,9 +1539,11 @@ public Set loadIncludes(SearchBuilderLoadIncludesParameters theP
if (matchAll) {
loadIncludesMatchAll(
findPidFieldName,
+ findPartitionIdFieldName,
findResourceTypeFieldName,
findVersionFieldName,
searchPidFieldName,
+ searchPartitionIdFieldName,
wantResourceType,
reverseMode,
hasDesiredResourceTypes,
@@ -1461,13 +1558,16 @@ public Set loadIncludes(SearchBuilderLoadIncludesParameters theP
nextInclude,
fhirContext,
findPidFieldName,
+ findPartitionIdFieldName,
findVersionFieldName,
searchPidFieldName,
+ searchPartitionIdFieldName,
reverseMode,
nextRoundMatches,
entityManager,
maxCount,
- pidsToInclude);
+ pidsToInclude,
+ request);
}
}
@@ -1533,13 +1633,16 @@ private void loadIncludesMatchSpecific(
Include nextInclude,
FhirContext fhirContext,
String findPidFieldName,
+ String findPartitionFieldName,
String findVersionFieldName,
String searchPidFieldName,
+ String searchPartitionFieldName,
boolean reverseMode,
List nextRoundMatches,
EntityManager entityManager,
Integer maxCount,
- HashSet<JpaPid> pidsToInclude) {
+ HashSet<JpaPid> pidsToInclude,
+ RequestDetails theRequest) {
List paths;
// Start replace
@@ -1578,6 +1681,13 @@ private void loadIncludesMatchSpecific(
if (findVersionFieldName != null) {
fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS;
}
+ if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) {
+ fieldsToLoad += ", r.";
+ fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID)
+ ? "partition_id"
+ : "target_res_partition_id";
+ fieldsToLoad += " as " + PARTITION_ID_ALIAS;
+ }
// Query for includes lookup has 2 cases
// Case 1: Where target_resource_id is available in hfj_res_link table for local references
@@ -1589,30 +1699,45 @@ private void loadIncludesMatchSpecific(
String searchPidFieldSqlColumn =
searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id";
- StringBuilder localReferenceQuery = new StringBuilder("SELECT " + fieldsToLoad + " FROM hfj_res_link r "
- + " WHERE r.src_path = :src_path AND "
- + " r.target_resource_id IS NOT NULL AND "
- + " r."
- + searchPidFieldSqlColumn + " IN (:target_pids) ");
+ StringBuilder localReferenceQuery = new StringBuilder();
+ localReferenceQuery.append("SELECT ").append(fieldsToLoad);
+ localReferenceQuery.append(" FROM hfj_res_link r ");
+ localReferenceQuery.append("WHERE r.src_path = :src_path");
+ if (!"target_resource_id".equals(searchPidFieldSqlColumn)) {
+ localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL");
+ }
+ localReferenceQuery
+ .append(" AND r.")
+ .append(searchPidFieldSqlColumn)
+ .append(" IN (:target_pids) ");
+ if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) {
+ String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID)
+ ? "target_res_partition_id"
+ : "partition_id";
+ localReferenceQuery
+ .append("AND r.")
+ .append(partitionFieldToSearch)
+ .append(" = :search_partition_id ");
+ }
localReferenceQueryParams.put("src_path", nextPath);
// we loop over target_pids later.
if (targetResourceTypes != null) {
if (targetResourceTypes.size() == 1) {
- localReferenceQuery.append(" AND r.target_resource_type = :target_resource_type ");
+ localReferenceQuery.append("AND r.target_resource_type = :target_resource_type ");
localReferenceQueryParams.put(
"target_resource_type",
targetResourceTypes.iterator().next());
} else {
- localReferenceQuery.append(" AND r.target_resource_type in (:target_resource_types) ");
+ localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) ");
localReferenceQueryParams.put("target_resource_types", targetResourceTypes);
}
}
// Case 2:
Pair> canonicalQuery =
- buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode);
+ buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest);
- String sql = localReferenceQuery + " UNION " + canonicalQuery.getLeft();
+ String sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft();
Map limitParams = new HashMap<>();
if (maxCount != null) {
@@ -1637,10 +1762,15 @@ private void loadIncludesMatchSpecific(
sql = sb.toString();
}
- List<Collection<JpaPid>> partitions = partition(nextRoundMatches, getMaximumPageSize());
+ List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize());
for (Collection<JpaPid> nextPartition : partitions) {
Query q = entityManager.createNativeQuery(sql, Tuple.class);
q.setParameter("target_pids", JpaPid.toLongList(nextPartition));
+ if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) {
+ q.setParameter(
+ "search_partition_id",
+ nextPartition.iterator().next().getPartitionId());
+ }
localReferenceQueryParams.forEach(q::setParameter);
canonicalQuery.getRight().forEach(q::setParameter);
limitParams.forEach(q::setParameter);
@@ -1655,7 +1785,14 @@ private void loadIncludesMatchSpecific(
resourceVersion =
NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS)));
}
- pidsToInclude.add(JpaPid.fromIdAndVersion(resourceId, resourceVersion));
+ Integer partitionId = null;
+ if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) {
+ partitionId = result.get(PARTITION_ID_ALIAS, Integer.class);
+ }
+
+ JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion);
+ pid.setPartitionId(partitionId);
+ pidsToInclude.add(pid);
}
}
}
@@ -1664,9 +1801,11 @@ private void loadIncludesMatchSpecific(
private void loadIncludesMatchAll(
String findPidFieldName,
+ String findPartitionFieldName,
String findResourceTypeFieldName,
String findVersionFieldName,
String searchPidFieldName,
+ String searchPartitionFieldName,
String wantResourceType,
boolean reverseMode,
boolean hasDesiredResourceTypes,
@@ -1683,10 +1822,17 @@ private void loadIncludesMatchAll(
if (findVersionFieldName != null) {
sqlBuilder.append(", r.").append(findVersionFieldName);
}
+ if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) {
+ sqlBuilder.append(", r.").append(findPartitionFieldName);
+ }
sqlBuilder.append(" FROM ResourceLink r WHERE ");
- sqlBuilder.append("r.");
- sqlBuilder.append(searchPidFieldName); // (rev mode) target_resource_id | source_resource_id
+ if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) {
+ sqlBuilder.append("r.").append(searchPartitionFieldName);
+ sqlBuilder.append(" = :target_partition_id AND ");
+ }
+
+ sqlBuilder.append("r.").append(searchPidFieldName);
sqlBuilder.append(" IN (:target_pids)");
/*
@@ -1726,10 +1872,14 @@ private void loadIncludesMatchAll(
}
String sql = sqlBuilder.toString();
- List<Collection<JpaPid>> partitions = partition(nextRoundMatches, getMaximumPageSize());
+ List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize());
for (Collection<JpaPid> nextPartition : partitions) {
TypedQuery<Object[]> q = entityManager.createQuery(sql, Object[].class);
q.setParameter("target_pids", JpaPid.toLongList(nextPartition));
+ if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) {
+ q.setParameter(
+ "target_partition_id", nextPartition.iterator().next().getPartitionId());
+ }
if (wantResourceType != null) {
q.setParameter("want_resource_type", wantResourceType);
}
@@ -1752,12 +1902,19 @@ private void loadIncludesMatchAll(
Long resourceId = (Long) ((Object[]) nextRow)[0];
String resourceType = (String) ((Object[]) nextRow)[1];
String resourceCanonicalUrl = (String) ((Object[]) nextRow)[2];
+ Integer partitionId = null;
+ int offset = 0;
if (findVersionFieldName != null) {
version = (Long) ((Object[]) nextRow)[3];
+ offset++;
+ }
+ if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) {
+ partitionId = ((Integer) ((Object[]) nextRow)[3 + offset]);
}
if (resourceId != null) {
JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType);
+ pid.setPartitionId(partitionId);
pidsToInclude.add(pid);
} else if (resourceCanonicalUrl != null) {
if (canonicalUrls == null) {
@@ -1771,23 +1928,30 @@ private void loadIncludesMatchAll(
String message =
"Search with _include=* can be inefficient when references using canonical URLs are detected. Use more specific _include values instead.";
firePerformanceWarning(request, message);
- loadCanonicalUrls(canonicalUrls, entityManager, pidsToInclude, reverseMode);
+ loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode);
}
}
}
private void loadCanonicalUrls(
+ RequestDetails theRequestDetails,
Set theCanonicalUrls,
EntityManager theEntityManager,
HashSet thePidsToInclude,
boolean theReverse) {
StringBuilder sqlBuilder;
- Set<Long> identityHashesForTypes = calculateIndexUriIdentityHashesForResourceTypes(null, theReverse);
- List<Collection<String>> canonicalUrlPartitions =
- partition(theCanonicalUrls, getMaximumPageSize() - identityHashesForTypes.size());
+ CanonicalUrlTargets canonicalUrlTargets =
+ calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse);
+ List<List<String>> canonicalUrlPartitions = ListUtils.partition(
+ List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.myHashIdentityValues.size());
sqlBuilder = new StringBuilder();
- sqlBuilder.append("SELECT i.myResourcePid ");
+ sqlBuilder.append("SELECT ");
+ if (myPartitionSettings.isPartitioningEnabled()) {
+ sqlBuilder.append("i.myPartitionIdValue, ");
+ }
+ sqlBuilder.append("i.myResourcePid ");
+
sqlBuilder.append("FROM ResourceIndexedSearchParamUri i ");
sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) ");
sqlBuilder.append("AND i.myUri IN (:uris)");
@@ -1795,13 +1959,23 @@ private void loadCanonicalUrls(
String canonicalResSql = sqlBuilder.toString();
for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) {
- TypedQuery<Long> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Long.class);
- canonicalResIdQuery.setParameter("hash_identity", identityHashesForTypes);
+ TypedQuery