Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add test for Postgres migration scripts #318

Merged
merged 3 commits into from
Sep 25, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 37 additions & 0 deletions .github/integration/postgres.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,39 @@ services:
volumes:
- ./scripts:/scripts

migration_helper:
container_name: helper
command:
- /usr/bin/tar
- -zxf
- /pgdata.tgz
- -C
- /pgdata/
image: ubuntu:jammy
user: 70:70
volumes:
- pgmigrate:/pgdata
- ./tests/postgres/pgdata.tgz:/pgdata.tgz

migrate:
# Boots the PR-built Postgres image on top of a pre-populated data directory:
# migration_helper first unpacks pgdata.tgz into the shared "pgmigrate" volume,
# which this service mounts as PGDATA — presumably a snapshot taken at an older
# schema version, so startup exercises the bundled migration scripts (TODO:
# confirm what schema version pgdata.tgz was captured at).
container_name: migrate
depends_on:
migration_helper:
# Only start once the tarball has been fully extracted.
condition: service_completed_successfully
environment:
- POSTGRES_PASSWORD=rootpasswd
healthcheck:
# pg_isready exits 0 once the server accepts connections.
test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 5s
timeout: 20s
retries: 3
# Image built from this PR; PR_NUMBER is substituted by the CI environment.
image: ghcr.io/neicnordic/sensitive-data-archive:PR${PR_NUMBER}-postgres
ports:
- "35432:5432"
restart: always
volumes:
- pgmigrate:/var/lib/postgresql/data

postgres:
build:
context: ../../postgresql
Expand Down Expand Up @@ -80,6 +113,8 @@ services:
condition: service_completed_successfully
postgres:
condition: service_healthy
migrate:
condition: service_healthy
environment:
- PGPASSWORD=rootpasswd
image: ubuntu:jammy
Expand All @@ -88,9 +123,11 @@ services:
volumes:
- ./tests:/tests
- client_certs:/certs
- ../../postgresql/migratedb.d:/migratedb.d

volumes:
certs:
client_certs:
pgdata:
pgmigrate:
pgtlsdata:
9 changes: 8 additions & 1 deletion .github/integration/tests/postgres/10_sanity_check.sh
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,16 @@ if [ "$status" -eq 0 ]; then
exit 1
fi

## verify that migrations worked
# Expected final schema version = highest-numbered migration script in
# /migratedb.d (e.g. "10" from 10.sql): list basenames, sort numerically,
# take the last, strip the ".sql" extension.
migratedb=$(find /migratedb.d/ -name "*.sql" -printf '%f\n' | sort -n | tail -1 | cut -d '.' -f1)
# Actual schema version reported by the migrated database on host "migrate"
# (-At: tuples-only, unaligned output so the value can be compared directly).
version=$(psql -U postgres -h migrate -d sda -At -c "select max(version) from sda.dbschema_version;")
# Fail the test run if the migrated database did not reach the latest version.
if [ "$version" -ne "$migratedb" ]; then
echo "Migration scripts failed"
exit 1
fi

## verify all users can connect
# Each service account's password equals its username in this test setup.
for u in download finalize inbox ingest mapper sync verify; do
export PGPASSWORD="$u"
# Output is discarded; a failed connection makes psql exit nonzero, which is
# what surfaces the failure (NOTE(review): assumes the script runs with
# `set -e` or the harness checks the exit status — confirm).
psql -U "$u" -h postgres -d sda -At -c "SELECT version();" 1>/dev/null
done

Binary file added .github/integration/tests/postgres/pgdata.tgz
Binary file not shown.
22 changes: 8 additions & 14 deletions postgresql/migratedb.d/08.sql
Original file line number Diff line number Diff line change
Expand Up @@ -14,40 +14,34 @@ BEGIN
INSERT INTO sda.dbschema_version VALUES(sourcever+1, now(), changes);

-- add new permissions
GRANT USAGE, SELECT ON sda.file_event_log TO finalize;
GRANT USAGE, SELECT ON sda.file_event_log TO ingest;
GRANT USAGE, SELECT ON sda.file_event_log TO verify;
GRANT SELECT ON sda.file_event_log TO finalize;
GRANT SELECT ON sda.file_event_log TO ingest;
GRANT SELECT ON sda.file_event_log TO verify;

-- New ingestion specific functions
CREATE FUNCTION sda.set_archived(file_uuid UUID, corr_id UUID, file_path TEXT, file_size BIGINT, inbox_checksum_value TEXT, inbox_checksum_type TEXT)
RETURNS void AS $set_archived$
DECLARE
fid UUID;
BEGIN
SELECT file_id from sda.file_event_log where correlation_id = corr_id INTO fid;

UPDATE sda.files SET archive_file_path = file_path, archive_file_size = file_size WHERE id = fid;
UPDATE sda.files SET archive_file_path = file_path, archive_file_size = file_size WHERE id = file_uuid;

INSERT INTO sda.checksums(file_id, checksum, type, source)
VALUES(fid, inbox_checksum_value, upper(inbox_checksum_type)::sda.checksum_algorithm, upper(UPLOADED)::sda.checksum_source);
VALUES(file_uuid, inbox_checksum_value, upper(inbox_checksum_type)::sda.checksum_algorithm, upper('UPLOADED')::sda.checksum_source);

INSERT INTO sda.file_event_log(file_id, event, correlation_id) VALUES(fid, 'archived' corr_id);
INSERT INTO sda.file_event_log(file_id, event, correlation_id) VALUES(file_uuid, 'archived', corr_id);
END;

$set_archived$ LANGUAGE plpgsql;

CREATE FUNCTION sda.set_verified(file_uuid UUID, corr_id UUID, archive_checksum TEXT, archive_checksum_type TEXT, decrypted_checksum TEXT, decrypted_checksum_type TEXT, descrypted_size BIGINT)
RETURNS void AS $set_verified$
DECLARE
fid UUID;
BEGIN
UPDATE sda.files SET decrypted_file_size = descrypted_size WHERE id = file_uuid;

INSERT INTO sda.checksums(file_id, checksum, type, source)
VALUES(fid, archive_checksum, upper(archive_checksum_type)::sda.checksum_algorithm, upper('ARCHIVED')::sda.checksum_source);
VALUES(file_uuid, archive_checksum, upper(archive_checksum_type)::sda.checksum_algorithm, upper('ARCHIVED')::sda.checksum_source);

INSERT INTO sda.checksums(file_id, checksum, type, source)
VALUES(fid, decrypted_checksum, upper(decrypted_checksum_type)::sda.checksum_algorithm, upper('UNENCRYPTED')::sda.checksum_source);
VALUES(file_uuid, decrypted_checksum, upper(decrypted_checksum_type)::sda.checksum_algorithm, upper('UNENCRYPTED')::sda.checksum_source);

INSERT INTO sda.file_event_log(file_id, event, correlation_id) VALUES(file_uuid, 'verified', corr_id);
END;
Expand Down
7 changes: 4 additions & 3 deletions postgresql/migratedb.d/09.sql
Original file line number Diff line number Diff line change
Expand Up @@ -6,23 +6,24 @@ DECLARE
sourcever INTEGER := 8;
changes VARCHAR := 'Add dataset event log';
BEGIN
SET search_path TO sda;
IF (select max(version) from sda.dbschema_version) = sourcever then
RAISE NOTICE 'Doing migration from schema version % to %', sourcever, sourcever+1;
RAISE NOTICE 'Changes: %', changes;
INSERT INTO sda.dbschema_version VALUES(sourcever+1, now(), changes);

CREATE TABLE dataset_events (
CREATE TABLE sda.dataset_events (
id SERIAL PRIMARY KEY,
title VARCHAR(64) UNIQUE, -- short name of the action
description TEXT
);

INSERT INTO dataset_events(id,title,description)
INSERT INTO sda.dataset_events(id,title,description)
VALUES (10, 'registered', 'Register a dataset to receive file accession IDs mappings.'),
(20, 'released' , 'The dataset is released on this date'),
(30, 'deprecated', 'The dataset is deprecated on this date');

CREATE TABLE dataset_event_log (
CREATE TABLE sda.dataset_event_log (
id SERIAL PRIMARY KEY,
dataset_id TEXT REFERENCES datasets(stable_id),
event TEXT REFERENCES dataset_events(title),
Expand Down
5 changes: 2 additions & 3 deletions postgresql/migratedb.d/10.sql
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,10 @@ DECLARE
BEGIN
-- No explicit transaction handling here, this all happens in a transaction
-- automatically
IF (select max(version) from local_ega.dbschema_version) = sourcever then
IF (select max(version) from sda.dbschema_version) = sourcever then
RAISE NOTICE 'Doing migration from schema version % to %', sourcever, sourcever+1;
RAISE NOTICE 'Changes: %', changes;
INSERT INTO local_ega.dbschema_version VALUES(sourcever+1, now(), changes);
INSERT INTO sda.dbschema_version VALUES(sourcever+1, now(), changes);

-- Temporary function for creating roles if they do not already exist.
CREATE FUNCTION create_role_if_not_exists(role_name NAME) RETURNS void AS $created$
Expand All @@ -32,7 +32,6 @@ BEGIN
$created$ LANGUAGE plpgsql;

PERFORM create_role_if_not_exists('inbox');
CREATE ROLE inbox;
GRANT USAGE ON SCHEMA sda TO inbox;
GRANT SELECT, INSERT, UPDATE ON sda.files TO inbox;
GRANT SELECT, INSERT ON sda.file_event_log TO inbox;
Expand Down
Loading