From a1a94119e50e585b2440904d633bb9b675ac2e7c Mon Sep 17 00:00:00 2001
From: Rory Doak
Date: Thu, 9 Jan 2025 16:06:58 +0000
Subject: [PATCH 1/2] add in has_send_component

---
 scripts/seed-database/write/published_flows.sql | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/scripts/seed-database/write/published_flows.sql b/scripts/seed-database/write/published_flows.sql
index f54aba1adc..efc58b8db5 100644
--- a/scripts/seed-database/write/published_flows.sql
+++ b/scripts/seed-database/write/published_flows.sql
@@ -5,10 +5,11 @@ CREATE TEMPORARY TABLE sync_published_flows (
     flow_id uuid,
     summary text,
     publisher_id int,
-    created_at timestamptz
+    created_at timestamptz,
+    has_send_component boolean
 );
 
-\copy sync_published_flows (id, data, flow_id, summary, publisher_id, created_at) FROM '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');
+\copy sync_published_flows (id, data, flow_id, summary, publisher_id, created_at, has_send_component) FROM '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');
 
 INSERT INTO published_flows (
     id,
@@ -16,7 +17,8 @@ INSERT INTO published_flows (
     flow_id,
     summary,
     publisher_id,
-    created_at
+    created_at,
+    has_send_component
 )
 SELECT
     id,
@@ -24,7 +26,8 @@ SELECT
     flow_id,
     summary,
     publisher_id,
-    created_at
+    created_at,
+    has_send_component
 FROM sync_published_flows
 ON CONFLICT (id) DO UPDATE
 SET
@@ -32,4 +35,5 @@ SET
     flow_id = EXCLUDED.flow_id,
     summary = EXCLUDED.summary,
     publisher_id = EXCLUDED.publisher_id,
-    created_at = EXCLUDED.created_at;
\ No newline at end of file
+    created_at = EXCLUDED.created_at,
+    has_send_component = EXCLUDED.has_send_component;
\ No newline at end of file

From bc932d4afe510f0eb3fb37beed4d6b4a75ee01c7 Mon Sep 17 00:00:00 2001
From: Rory Doak
Date: Fri, 10 Jan 2025 11:05:08 +0000
Subject: [PATCH 2/2] add comment to sql script and update container.sh

---
 scripts/seed-database/container.sh              | 2 +-
 scripts/seed-database/write/published_flows.sql | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/scripts/seed-database/container.sh b/scripts/seed-database/container.sh
index 03390220ad..ec50bee342 100755
--- a/scripts/seed-database/container.sh
+++ b/scripts/seed-database/container.sh
@@ -51,7 +51,7 @@ done
 psql --quiet ${REMOTE_PG} --command="\\copy (SELECT id, team_id, staging_bops_submission_url, staging_bops_secret, has_planning_data, staging_govpay_secret, staging_file_api_key, power_automate_webhook_url, staging_power_automate_api_key FROM team_integrations) TO '/tmp/team_integrations.csv' (FORMAT csv, DELIMITER ';');"
 echo team_integrations downloaded
 
-psql --quiet ${REMOTE_PG} --command="\\copy (SELECT DISTINCT ON (flow_id) id, data, flow_id, summary, publisher_id, created_at FROM published_flows ORDER BY flow_id, created_at DESC) TO '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');"
+psql --quiet ${REMOTE_PG} --command="\\copy (SELECT DISTINCT ON (flow_id) id, data, flow_id, summary, publisher_id, created_at, has_send_component FROM published_flows ORDER BY flow_id, created_at DESC) TO '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');"
 echo published_flows downloaded
 
 if [[ ${RESET} == "reset_flows" ]]; then
diff --git a/scripts/seed-database/write/published_flows.sql b/scripts/seed-database/write/published_flows.sql
index efc58b8db5..c066a34736 100644
--- a/scripts/seed-database/write/published_flows.sql
+++ b/scripts/seed-database/write/published_flows.sql
@@ -9,6 +9,7 @@ CREATE TEMPORARY TABLE sync_published_flows (
     has_send_component boolean
 );
 
+/* Ensure columns here are kept in sync with container.sh */
 \copy sync_published_flows (id, data, flow_id, summary, publisher_id, created_at, has_send_component) FROM '/tmp/published_flows.csv' (FORMAT csv, DELIMITER ';');
 
 INSERT INTO published_flows (
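
A quick way to sanity-check the new column once the seed has run is to query it back out with psql, in the same style container.sh already uses. This is only a sketch, not part of the patches: ${LOCAL_PG} is a placeholder for whichever connection string points at the freshly seeded local database.

 # Sketch: confirm has_send_component round-tripped through the CSV into published_flows
 psql --quiet ${LOCAL_PG} --command="SELECT flow_id, has_send_component, created_at FROM published_flows ORDER BY created_at DESC LIMIT 5;"

If the \copy round trip worked, the column should come back as true/false (or NULL where the source value was empty) rather than failing with an undefined-column error.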