feat: add has_send_component to seed scripts (#4127)

RODO94 authored Jan 13, 2025
1 parent 5420338 commit 7f99af3
Showing 2 changed files with 11 additions and 6 deletions.
scripts/seed-database/container.sh (2 changes: 1 addition & 1 deletion)

@@ -51,7 +51,7 @@ done
 psql --quiet ${REMOTE_PG} --command="\\copy (SELECT id, team_id, staging_bops_submission_url, staging_bops_secret, has_planning_data, staging_govpay_secret, staging_file_api_key, power_automate_webhook_url, staging_power_automate_api_key FROM team_integrations) TO '/tmp/team_integrations.csv' (FORMAT csv, DELIMITER ';');"
 echo team_integrations downloaded

-psql --quiet ${REMOTE_PG} --command="\\copy (SELECT DISTINCT ON (flow_id) id, data, flow_id, summary, publisher_id, created_at FROM published_flows ORDER BY flow_id, created_at DESC) TO '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');"
+psql --quiet ${REMOTE_PG} --command="\\copy (SELECT DISTINCT ON (flow_id) id, data, flow_id, summary, publisher_id, created_at, has_send_component FROM published_flows ORDER BY flow_id, created_at DESC) TO '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');"
 echo published_flows downloaded

 if [[ ${RESET} == "reset_flows" ]]; then
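
A sanity check, sketched here only and not part of the commit, assuming ${REMOTE_PG} is set as in container.sh: before running the export, confirm the source table actually exposes the new column.

# Hypothetical check, not in the repository
psql --quiet ${REMOTE_PG} --command="SELECT count(*) FROM information_schema.columns WHERE table_name = 'published_flows' AND column_name = 'has_send_component';"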

scripts/seed-database/write/published_flows.sql (15 changes: 10 additions & 5 deletions)

@@ -5,31 +5,36 @@ CREATE TEMPORARY TABLE sync_published_flows (
   flow_id uuid,
   summary text,
   publisher_id int,
-  created_at timestamptz
+  created_at timestamptz,
+  has_send_component boolean
 );

-\copy sync_published_flows (id, data, flow_id, summary, publisher_id, created_at) FROM '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');
+/* Ensure columns here are kept in sync with container.sh */
+\copy sync_published_flows (id, data, flow_id, summary, publisher_id, created_at, has_send_component) FROM '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');

 INSERT INTO published_flows (
   id,
   data,
   flow_id,
   summary,
   publisher_id,
-  created_at
+  created_at,
+  has_send_component
 )
 SELECT
   id,
   data,
   flow_id,
   summary,
   publisher_id,
-  created_at
+  created_at,
+  has_send_component
 FROM sync_published_flows
 ON CONFLICT (id) DO UPDATE
 SET
   data = EXCLUDED.data,
   flow_id = EXCLUDED.flow_id,
   summary = EXCLUDED.summary,
   publisher_id = EXCLUDED.publisher_id,
-  created_at = EXCLUDED.created_at;
+  created_at = EXCLUDED.created_at,
+  has_send_component = EXCLUDED.has_send_component;
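
After reseeding, a minimal follow-up check (a sketch only; ${LOCAL_PG} is a hypothetical connection string for the freshly seeded database, not something defined in this diff) can confirm the new flag survived the upsert:

# Hypothetical verification, not in the repository
psql --quiet ${LOCAL_PG} --command="SELECT has_send_component, count(*) FROM published_flows GROUP BY has_send_component;"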
