diff --git a/service/src/main/java/bio/terra/tanagra/db/AnnotationDao.java b/service/src/main/java/bio/terra/tanagra/db/AnnotationDao.java
index c423e643d..b546758b0 100644
--- a/service/src/main/java/bio/terra/tanagra/db/AnnotationDao.java
+++ b/service/src/main/java/bio/terra/tanagra/db/AnnotationDao.java
@@ -123,7 +123,7 @@ public void deleteAnnotationKey(String cohortId, String annotationKeyId) {
   public List getAllAnnotationKeys(String cohortId, int offset, int limit) {
     String sql =
         ANNOTATION_KEY_SELECT_SQL
-            + " WHERE cohort_id = :cohort_id ORDER BY display_name OFFSET :offset LIMIT :limit";
+            + " WHERE cohort_id = :cohort_id ORDER BY display_name LIMIT :limit OFFSET :offset";
     LOGGER.debug("GET all annotation keys: {}", sql);
     MapSqlParameterSource params =
         new MapSqlParameterSource()
@@ -145,7 +145,7 @@ public List getAnnotationKeysMatchingList(
     }
     String sql =
         ANNOTATION_KEY_SELECT_SQL
-            + " WHERE cohort_id = :cohort_id AND id IN (:ids) ORDER BY display_name OFFSET :offset LIMIT :limit";
+            + " WHERE cohort_id = :cohort_id AND id IN (:ids) ORDER BY display_name LIMIT :limit OFFSET :offset";
     LOGGER.debug("GET matching annotation keys: {}", sql);
     MapSqlParameterSource params =
         new MapSqlParameterSource()
diff --git a/service/src/main/java/bio/terra/tanagra/db/CohortDao.java b/service/src/main/java/bio/terra/tanagra/db/CohortDao.java
index 6f0582391..fac295f1f 100644
--- a/service/src/main/java/bio/terra/tanagra/db/CohortDao.java
+++ b/service/src/main/java/bio/terra/tanagra/db/CohortDao.java
@@ -117,7 +117,7 @@ public class CohortDao {
   // SQL query and row mapper for reading a criteria tag.
   private static final String CRITERIA_TAG_SELECT_SQL =
-      "SELECT cohort_revision_id, criteria_group_section_id, criteria_group_id, criteria_id, key, value FROM criteria_tag";
+      "SELECT cohort_revision_id, criteria_group_section_id, criteria_group_id, criteria_id, criteria_key, criteria_value FROM criteria_tag";
   private static final RowMapper, Pair>> CRITERIA_TAG_ROW_MAPPER =
       (rs, rowNum) ->
           Pair.of(
@@ -126,7 +126,7 @@ public class CohortDao {
                   rs.getString("criteria_group_id"),
                   rs.getString("criteria_group_section_id"),
                   rs.getString("cohort_revision_id")),
-              Pair.of(rs.getString("key"), rs.getString("value")));
+              Pair.of(rs.getString("criteria_key"), rs.getString("criteria_value")));

   private final NamedParameterJdbcTemplate jdbcTemplate;

   @Autowired
@@ -138,7 +138,7 @@ public CohortDao(NamedParameterJdbcTemplate jdbcTemplate) {
   public List getAllCohorts(String studyId, int offset, int limit) {
     String sql =
         COHORT_SELECT_SQL
-            + " WHERE study_id = :study_id ORDER BY display_name OFFSET :offset LIMIT :limit";
+            + " WHERE study_id = :study_id ORDER BY display_name LIMIT :limit OFFSET :offset";
     LOGGER.debug("GET ALL cohorts: {}", sql);
     MapSqlParameterSource params =
         new MapSqlParameterSource()
@@ -153,7 +153,7 @@ public List getAllCohorts(String studyId, int offset, int limit) {
   @ReadTransaction
   public List getCohortsMatchingList(Set ids, int offset, int limit) {
     String sql =
-        COHORT_SELECT_SQL + " WHERE id IN (:ids) ORDER BY display_name OFFSET :offset LIMIT :limit";
+        COHORT_SELECT_SQL + " WHERE id IN (:ids) ORDER BY display_name LIMIT :limit OFFSET :offset";
     LOGGER.debug("GET MATCHING cohorts: {}", sql);
     MapSqlParameterSource params =
         new MapSqlParameterSource()
@@ -652,7 +652,7 @@ private void updateCriteriaHelper(
     LOGGER.debug("CREATE criteria rowsAffected = {}", rowsAffected);

     sql =
-        "INSERT INTO criteria_tag (cohort_revision_id, criteria_group_section_id, criteria_group_id, criteria_id, key, value) "
+        "INSERT INTO criteria_tag (cohort_revision_id, criteria_group_section_id, criteria_group_id, criteria_id, criteria_key, criteria_value) "
            + "VALUES (:cohort_revision_id, :criteria_group_section_id, :criteria_group_id, :criteria_id, :key, :value)";
     LOGGER.debug("CREATE criteria_tag: {}", sql);
     rowsAffected =
diff --git a/service/src/main/java/bio/terra/tanagra/db/ConceptSetDao.java b/service/src/main/java/bio/terra/tanagra/db/ConceptSetDao.java
index 2c198bc1a..340ee00e7 100644
--- a/service/src/main/java/bio/terra/tanagra/db/ConceptSetDao.java
+++ b/service/src/main/java/bio/terra/tanagra/db/ConceptSetDao.java
@@ -56,12 +56,12 @@ public class ConceptSetDao {
   // SQL query and row mapper for reading a criteria tag.
   private static final String CRITERIA_TAG_SELECT_SQL =
-      "SELECT criteria_id, concept_set_id, key, value FROM criteria_tag";
+      "SELECT criteria_id, concept_set_id, criteria_key, criteria_value FROM criteria_tag";
   private static final RowMapper, Pair>> CRITERIA_TAG_ROW_MAPPER =
       (rs, rowNum) ->
           Pair.of(
               List.of(rs.getString("criteria_id"), rs.getString("concept_set_id")),
-              Pair.of(rs.getString("key"), rs.getString("value")));
+              Pair.of(rs.getString("criteria_key"), rs.getString("criteria_value")));

   private final NamedParameterJdbcTemplate jdbcTemplate;

   @Autowired
@@ -73,7 +73,7 @@ public ConceptSetDao(NamedParameterJdbcTemplate jdbcTemplate) {
   public List getAllConceptSets(String studyId, int offset, int limit) {
     String sql =
         CONCEPT_SET_SELECT_SQL
-            + " WHERE study_id = :study_id ORDER BY display_name OFFSET :offset LIMIT :limit";
+            + " WHERE study_id = :study_id ORDER BY display_name LIMIT :limit OFFSET :offset";
     LOGGER.debug("GET ALL concept sets: {}", sql);
     MapSqlParameterSource params =
         new MapSqlParameterSource()
@@ -89,7 +89,7 @@ public List getAllConceptSets(String studyId, int offset, int limit)
   public List getConceptSetsMatchingList(Set ids, int offset, int limit) {
     String sql =
         CONCEPT_SET_SELECT_SQL
-            + " WHERE id IN (:ids) ORDER BY display_name OFFSET :offset LIMIT :limit";
+            + " WHERE id IN (:ids) ORDER BY display_name LIMIT :limit OFFSET :offset";
     LOGGER.debug("GET MATCHING concept sets: {}", sql);
     MapSqlParameterSource params =
         new MapSqlParameterSource()
@@ -287,7 +287,7 @@ private void updateCriteriaHelper(String conceptSetId, List criteria)

     // Write the criteria tags.
     sql =
-        "INSERT INTO criteria_tag (concept_set_id, criteria_id, key, value) VALUES (:concept_set_id, :criteria_id, :key, :value)";
+        "INSERT INTO criteria_tag (concept_set_id, criteria_id, criteria_key, criteria_value) VALUES (:concept_set_id, :criteria_id, :key, :value)";
     LOGGER.debug("CREATE criteria tag: {}", sql);
     List tagParamSets = new ArrayList<>();
     criteria.stream()
diff --git a/service/src/main/java/bio/terra/tanagra/db/ReviewDao.java b/service/src/main/java/bio/terra/tanagra/db/ReviewDao.java
index 418673110..56b96d7cc 100644
--- a/service/src/main/java/bio/terra/tanagra/db/ReviewDao.java
+++ b/service/src/main/java/bio/terra/tanagra/db/ReviewDao.java
@@ -85,7 +85,7 @@ public ReviewDao(NamedParameterJdbcTemplate jdbcTemplate, CohortDao cohortDao) {
   public List getAllReviews(String cohortId, int offset, int limit) {
     String sql =
         REVIEW_SELECT_SQL
-            + " WHERE cohort_id = :cohort_id ORDER BY display_name OFFSET :offset LIMIT :limit";
+            + " WHERE cohort_id = :cohort_id ORDER BY display_name LIMIT :limit OFFSET :offset";
     LOGGER.debug("GET ALL reviews: {}", sql);
     MapSqlParameterSource params =
         new MapSqlParameterSource()
@@ -100,7 +100,7 @@ public List getAllReviews(String cohortId, int offset, int limit) {
   @ReadTransaction
   public List getReviewsMatchingList(Set ids, int offset, int limit) {
     String sql =
-        REVIEW_SELECT_SQL + " WHERE id IN (:ids) ORDER BY display_name OFFSET :offset LIMIT :limit";
+        REVIEW_SELECT_SQL + " WHERE id IN (:ids) ORDER BY display_name LIMIT :limit OFFSET :offset";
     LOGGER.debug("GET MATCHING reviews: {}", sql);
     MapSqlParameterSource params =
         new MapSqlParameterSource()
diff --git a/service/src/main/java/bio/terra/tanagra/db/StudyDao.java b/service/src/main/java/bio/terra/tanagra/db/StudyDao.java
index 07b3fda99..749ce20bb 100644
--- a/service/src/main/java/bio/terra/tanagra/db/StudyDao.java
+++ b/service/src/main/java/bio/terra/tanagra/db/StudyDao.java
@@ -40,10 +40,12 @@ public class StudyDao {
   // SQL query and row mapper for reading a property.
   private static final String PROPERTY_SELECT_SQL =
-      "SELECT study_id, key, value FROM study_property";
+      "SELECT study_id, property_key, property_value FROM study_property";
   private static final RowMapper>> PROPERTY_ROW_MAPPER =
       (rs, rowNum) ->
-          Pair.of(rs.getString("study_id"), Pair.of(rs.getString("key"), rs.getString("value")));
+          Pair.of(
+              rs.getString("study_id"),
+              Pair.of(rs.getString("property_key"), rs.getString("property_value")));

   private final NamedParameterJdbcTemplate jdbcTemplate;

   @Autowired
@@ -116,7 +118,7 @@ public List getAllStudies(int offset, int limit, @Nullable Study.Builder
     String sql =
         STUDY_SELECT_SQL
             + (filterSql.isEmpty() ? "" : " WHERE " + filterSql)
-            + " ORDER BY display_name OFFSET :offset LIMIT :limit";
+            + " ORDER BY display_name LIMIT :limit OFFSET :offset";
     LOGGER.debug("GET all studies: {}", sql);
     List studies = getStudiesHelper(sql, params);
     LOGGER.debug("GET all studies numFound = {}", studies.size());
@@ -144,7 +146,7 @@ public List getStudiesMatchingList(
         STUDY_SELECT_SQL
             + " WHERE id IN (:ids) "
             + (filterSql.isEmpty() ? "" : "AND " + filterSql + " ")
-            + "ORDER BY display_name OFFSET :offset LIMIT :limit";
+            + "ORDER BY display_name LIMIT :limit OFFSET :offset";
     LOGGER.debug("GET matching studies: {}", sql);
     List studies = getStudiesHelper(sql, params);
     LOGGER.debug("GET matching studies numFound = {}", studies.size());
@@ -173,9 +175,9 @@ private String renderSqlForStudyFilter(Study.Builder studyFilter, MapSqlParamete
     int ctr = 0;
     for (Map.Entry entry : studyFilter.getProperties().entrySet()) {
       whereConditions.add(
-          "EXISTS (SELECT 1 FROM study_property WHERE study_id = id AND key = :key_"
+          "EXISTS (SELECT 1 FROM study_property WHERE study_id = id AND property_key = :key_"
              + ctr
-              + " AND value LIKE :value_like_"
+              + " AND property_value LIKE :value_like_"
              + ctr
              + ")");
       params.addValue("key_" + ctr, entry.getKey());
@@ -310,7 +312,8 @@ private void updatePropertiesHelper(String studyId, Map properti
     LOGGER.debug("DELETE study property rowsAffected = {}", rowsAffected);

     // Write the properties.
-    sql = "INSERT INTO study_property (study_id, key, value) VALUES (:study_id, :key, :value)";
+    sql =
+        "INSERT INTO study_property (study_id, property_key, property_value) VALUES (:study_id, :key, :value)";
     LOGGER.debug("CREATE study property: {}", sql);
     List propertyParamSets =
         properties.entrySet().stream()
diff --git a/service/src/main/resources/db/changelog.xml b/service/src/main/resources/db/changelog.xml
index 28b4ce572..0d718eb56 100644
--- a/service/src/main/resources/db/changelog.xml
+++ b/service/src/main/resources/db/changelog.xml
@@ -14,5 +14,6 @@
-
+
+
diff --git a/service/src/main/resources/db/changesets/20230602_postgres_data_migrate.yaml b/service/src/main/resources/db/changesets/20230602_postgres_data_migrate.yaml
new file mode 100644
index 000000000..12337ef1c
--- /dev/null
+++ b/service/src/main/resources/db/changesets/20230602_postgres_data_migrate.yaml
@@ -0,0 +1,22 @@
+databaseChangeLog:
+  - changeSet:
+      id: postgres_data_migrate
+      author: marikomedlock, chenchalsubraveti
+      dbms: postgresql
+      # TODO: Remove the jsonb and 2 text[] columns in a follow-on changeset, once we're sure this migration was successful.
+      # study.properties, criteria.tags, annotation_key.enumVals
+      changes:
+        - sql:
+            # Migrate the properties of existing studies from the JSON column to this table.
+            sql: |
+              INSERT INTO study_property (study_id, property_key, property_value)
+              SELECT id, key, properties->>key as value
+              FROM (SELECT id, jsonb_object_keys(properties) as key, properties FROM study) AS study_property_keys;
+
+        # No migration SQL because the criteria.tags text[] column is not yet being used by the UI.
+
+        - sql:
+            # Migrate the enum values of existing annotation keys from the text[] column to this table.
+            sql: |
+              INSERT INTO annotation_key_enum_value (cohort_id, annotation_key_id, enum)
+              SELECT cohort_id, id AS annotation_key_id, unnest(enum_vals) AS enum FROM annotation_key;
diff --git a/service/src/main/resources/db/changesets/20230530_avoid_postgres_specific_features.yaml b/service/src/main/resources/db/changesets/20230602_schema_extend.yaml
similarity index 74%
rename from service/src/main/resources/db/changesets/20230530_avoid_postgres_specific_features.yaml
rename to service/src/main/resources/db/changesets/20230602_schema_extend.yaml
index 81149467d..52f845082 100644
--- a/service/src/main/resources/db/changesets/20230530_avoid_postgres_specific_features.yaml
+++ b/service/src/main/resources/db/changesets/20230602_schema_extend.yaml
@@ -1,9 +1,7 @@
 databaseChangeLog:
   - changeSet:
-      id: avoid_postgres_specific_features
+      id: schema_extend
       author: marikomedlock, chenchalsubraveti
-      # TODO: Remove the jsonb and 2 text[] columns in a follow-on changeset, once we're sure this migration was successful.
-      # study.properties, criteria.tags, annotation_key.enumVals
       changes:
         - createTable:
             tableName: study_property
@@ -13,30 +11,24 @@ databaseChangeLog:
             - column:
                 name: study_id
                 type: ${id.type}
                 constraints:
                   references: study(id)
-                  foreignKeyName: fk_sp_s
+                  foreignKeyName: fk_sp_s_2
                   nullable: false
                   deleteCascade: true
                 remarks: Deleting a study will cascade to delete its properties
             - column:
-                name: key
+                name: property_key
                 type: ${id.type}
                 constraints:
                   nullable: false
             - column:
-                name: value
+                name: property_value
                 type: ${text.type}
                 constraints:
                   nullable: true
         - addUniqueConstraint:
-            constraintName: pk_sp
+            constraintName: pk_sp_2
             tableName: study_property
-            columnNames: study_id, key
-        - sql:
-            # Migrate the properties of existing studies from the JSON column to this table.
-            sql: |
-              INSERT INTO study_property (study_id, key, value)
-              SELECT id, key, properties->>key as value
-              FROM (SELECT id, jsonb_object_keys(properties) as key, properties FROM study) AS study_property_keys;
+            columnNames: study_id, property_key

         - createTable:
             tableName: criteria_tag
@@ -61,7 +53,7 @@
                 type: ${id.type}
                 constraints:
                   references: cohort_revision(id)
-                  foreignKeyName: ck_crit_cr
+                  foreignKeyName: ck_crit_cr_2
                   nullable: true
                   deleteCascade: true
                 remarks: Deleting a cohort revision will cascade to delete the criteria contained in it
@@ -70,26 +62,26 @@
                 type: ${id.type}
                 constraints:
                   references: concept_set(id)
-                  foreignKeyName: fk_crit_cs
+                  foreignKeyName: fk_crit_cs_2
                   nullable: true
                   deleteCascade: true
                 remarks: Deleting a concept set will cascade to delete the criteria contained in it
             - column:
-                name: key
+                name: criteria_key
                 type: ${id.type}
                 constraints:
                   nullable: false
             - column:
-                name: value
+                name: criteria_value
                 type: ${text.type}
                 constraints:
                   nullable: false
         - addUniqueConstraint:
-            constraintName: pk_ct
+            constraintName: pk_ct_2
             tableName: criteria_tag
-            columnNames: criteria_id, criteria_group_id, criteria_group_section_id, cohort_revision_id, concept_set_id, key
+            columnNames: criteria_id, criteria_group_id, criteria_group_section_id, cohort_revision_id, concept_set_id, criteria_key
         - addForeignKeyConstraint:
-            constraintName: fk_ct_c
+            constraintName: fk_ct_c_2
             baseColumnNames: criteria_id, criteria_group_id, criteria_group_section_id, cohort_revision_id, concept_set_id
             baseTableName: criteria_tag
             referencedColumnNames: id, criteria_group_id, criteria_group_section_id, cohort_revision_id, concept_set_id
@@ -97,6 +89,7 @@
         - dropNotNullConstraint:
             tableName: criteria
             columnName: tags
+            columnDataType: ${text.array.type}
         # No migration SQL because the criteria.tags text[] column is not yet being used by the UI.

         - createTable:
@@ -118,11 +111,11 @@
                 constraints:
                   nullable: false
         - addUniqueConstraint:
-            constraintName: pk_akev
+            constraintName: pk_akev_2
             tableName: annotation_key_enum_value
             columnNames: enum, annotation_key_id, cohort_id
         - addForeignKeyConstraint:
-            constraintName: fk_akev_ak
+            constraintName: fk_akev_ak_2
             baseColumnNames: annotation_key_id, cohort_id
             baseTableName: annotation_key_enum_value
             referencedColumnNames: id, cohort_id
@@ -132,8 +125,5 @@
         - dropNotNullConstraint:
             tableName: annotation_key
             columnName: enum_vals
-        - sql:
-            # Migrate the enum values of existing annotation keys from the text[] column to this table.
-            sql: |
-              INSERT INTO annotation_key_enum_value (cohort_id, annotation_key_id, enum)
-              SELECT cohort_id, id AS annotation_key_id, unnest(enum_vals) AS enum FROM annotation_key;
+            columnDataType: ${text.array.type}
+