From 7f99af3be6b5cec7a8fb9da74cf439a946707dcc Mon Sep 17 00:00:00 2001
From: Rory Doak <138574807+RODO94@users.noreply.github.com>
Date: Mon, 13 Jan 2025 10:15:06 +0000
Subject: [PATCH] feat: add `has_send_component` to seed scripts (#4127)

---
 scripts/seed-database/container.sh              |  2 +-
 scripts/seed-database/write/published_flows.sql | 15 ++++++++++-----
 2 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/scripts/seed-database/container.sh b/scripts/seed-database/container.sh
index 03390220ad..ec50bee342 100755
--- a/scripts/seed-database/container.sh
+++ b/scripts/seed-database/container.sh
@@ -51,7 +51,7 @@ done
 psql --quiet ${REMOTE_PG} --command="\\copy (SELECT id, team_id, staging_bops_submission_url, staging_bops_secret, has_planning_data, staging_govpay_secret, staging_file_api_key, power_automate_webhook_url, staging_power_automate_api_key FROM team_integrations) TO '/tmp/team_integrations.csv' (FORMAT csv, DELIMITER ';');"
 echo team_integrations downloaded
 
-psql --quiet ${REMOTE_PG} --command="\\copy (SELECT DISTINCT ON (flow_id) id, data, flow_id, summary, publisher_id, created_at FROM published_flows ORDER BY flow_id, created_at DESC) TO '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');"
+psql --quiet ${REMOTE_PG} --command="\\copy (SELECT DISTINCT ON (flow_id) id, data, flow_id, summary, publisher_id, created_at, has_send_component FROM published_flows ORDER BY flow_id, created_at DESC) TO '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');"
 echo published_flows downloaded
 
 if [[ ${RESET} == "reset_flows" ]]; then
diff --git a/scripts/seed-database/write/published_flows.sql b/scripts/seed-database/write/published_flows.sql
index f54aba1adc..c066a34736 100644
--- a/scripts/seed-database/write/published_flows.sql
+++ b/scripts/seed-database/write/published_flows.sql
@@ -5,10 +5,12 @@ CREATE TEMPORARY TABLE sync_published_flows (
   flow_id uuid,
   summary text,
   publisher_id int,
-  created_at timestamptz
+  created_at timestamptz,
+  has_send_component boolean
 );
 
-\copy sync_published_flows (id, data, flow_id, summary, publisher_id, created_at) FROM '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');
+/* Ensure columns here are kept in sync with container.sh */
+\copy sync_published_flows (id, data, flow_id, summary, publisher_id, created_at, has_send_component) FROM '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');
 
 INSERT INTO published_flows (
   id,
@@ -16,7 +18,8 @@ INSERT INTO published_flows (
   flow_id,
   summary,
   publisher_id,
-  created_at
+  created_at,
+  has_send_component
 )
 SELECT
   id,
@@ -24,7 +27,8 @@ SELECT
   flow_id,
   summary,
   publisher_id,
-  created_at
+  created_at,
+  has_send_component
 FROM sync_published_flows
 ON CONFLICT (id) DO UPDATE
 SET
@@ -32,4 +36,5 @@ SET
   flow_id = EXCLUDED.flow_id,
   summary = EXCLUDED.summary,
   publisher_id = EXCLUDED.publisher_id,
-  created_at = EXCLUDED.created_at;
\ No newline at end of file
+  created_at = EXCLUDED.created_at,
+  has_send_component = EXCLUDED.has_send_component;
\ No newline at end of file