
feat(pizza): sync down users from production (#1370)
* break: sync from production

* chore: remove deprecated file

* chore: remove long-deprecated file

this script was used to migrate the database from Vultr to AWS
it had been committed Just Because we might need it sometime
we never needed it after that so better to remove the clutter

* fix: update staging sync to use new script
gunar authored Feb 24, 2023
1 parent bbd0e49 commit 7f9ef7c
Showing 14 changed files with 108 additions and 876 deletions.
28 changes: 17 additions & 11 deletions .github/workflows/sync-staging-db.yml
@@ -15,18 +15,24 @@ jobs:
       - name: Install pulumi
         uses: pulumi/setup-pulumi@v2
       - run: |
-          echo "HASURA_GRAPHQL_ADMIN_SECRET=$(pulumi config get hasura-admin-secret --stack staging)" >> $GITHUB_ENV
-        working-directory: infrastructure/application
+          echo "STAGING_PG_URL=$(pulumi stack output --stack staging --show-secrets dbRootUrl)" >> $GITHUB_ENV
+        working-directory: infrastructure/data
         env:
           PULUMI_ACCESS_TOKEN: ${{ secrets.PULUMI_ACCESS_TOKEN }}
-      - uses: actions/setup-node@v2
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v1
         with:
-          node-version: ${{ env.NODE_VERSION }}
-      - run: npm install -g pnpm
-        working-directory: scripts/seed-database
-      - run: pnpm install
-        working-directory: scripts/seed-database
-      - run: node ./upsert-production-flows.js --overwrite --limit 1
-        working-directory: scripts/seed-database
+          aws-access-key-id: ${{ secrets.PIZZA_AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.PIZZA_AWS_SECRET_ACCESS_KEY }}
+          aws-region: eu-west-2
+      - name: Copy .env files from Staging S3 to the current working directory with AWS CLI
+        run: ./scripts/pull-secrets.sh
+      - name: Check if .env files exist
+        id: check_files
+        uses: andstor/file-existence-action@v1
+        with:
+          files: ".env, .env.staging, api.planx.uk/.env.test, hasura.planx.uk/.env.test"
+          fail: true
+      - run: docker run --rm -v "./scripts/seed-database:/app" --workdir="/app" postgis/postgis:12-3.0-alpine "./container.sh ${STAGING_PG_URL} ${PRODUCTION_PG_URL_FOR_USER_GITHUB_ACTIONS}"
         env:
-          HASURA_GRAPHQL_URL: https://hasura.editor.planx.dev/v1/graphql
           PRODUCTION_PG_URL_FOR_USER_GITHUB_ACTIONS: ${{ secrets.PRODUCTION_PG_URL_FOR_USER_GITHUB_ACTIONS }}
14 changes: 5 additions & 9 deletions docker-compose.yml
@@ -96,20 +96,16 @@ services:
       retries: 3

   seed-database:
-    build:
-      context: ./scripts/seed-database
+    image: postgis/postgis:12-3.0-alpine
     volumes:
-      - "./hasura.planx.uk/:/hasura"
+      - "./scripts/seed-database:/app"
+    working_dir: "/app"
+    entrypoint: sh
+    command: -c "./container.sh postgres://${PG_USERNAME}:${PG_PASSWORD}@postgres/${PG_DATABASE} ${PRODUCTION_PG_URL_FOR_USER_GITHUB_ACTIONS}"
     restart: "no"
     depends_on:
       hasura-proxy:
         condition: service_healthy
-    environment:
-      HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET}
-      HASURA_GRAPHQL_URL: http://hasura-proxy:7000/v1/graphql
-      HASURA_GRAPHQL_ENDPOINT: http://hasura-proxy:7000/
-      PRODUCTION_GRAPHQL_URL: 'https://hasura.editor.planx.uk/v1/graphql'

   api:
     restart: unless-stopped
     build:
7 changes: 0 additions & 7 deletions scripts/pullrequest/create.sh
@@ -21,9 +21,6 @@ echo \
 apt-get update -y
 apt-get install docker-ce docker-ce-cli containerd.io docker-compose-plugin -y

-# install hasura cli
-curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | bash
-
 # set env for this shell
 set -o allexport
 source .env.pizza
@@ -35,7 +32,3 @@ docker compose \
   -f docker-compose.yml \
   -f docker-compose.pizza.yml \
   up --build --wait
-
-# insert hasura seeds
-cd hasura.planx.uk
-hasura seed apply --envfile .env
1 change: 0 additions & 1 deletion scripts/seed-database/.dockerignore

This file was deleted.

12 changes: 0 additions & 12 deletions scripts/seed-database/Dockerfile

This file was deleted.

5 changes: 5 additions & 0 deletions scripts/seed-database/README.md
@@ -0,0 +1,5 @@
# seed-database

This script uses a read-only database user to selectively sync data from a production database to a local development database.

This is useful for having production-grade data both locally and on ephemeral pizza links (i.e. Vultr servers that are spun up for each Pull Request).
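
A minimal sketch of a local run, using the seed-database service from the docker-compose.yml change above. It assumes PG_USERNAME, PG_PASSWORD, PG_DATABASE and a read-only PRODUCTION_PG_URL_FOR_USER_GITHUB_ACTIONS are exported in your shell; the URL value here is a placeholder, not a real connection string:

# sketch only: the connection string below is illustrative
export PRODUCTION_PG_URL_FOR_USER_GITHUB_ACTIONS="postgres://readonly:secret@production-host:5432/planx"
docker compose up seed-database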
26 changes: 26 additions & 0 deletions scripts/seed-database/container.sh
@@ -0,0 +1,26 @@
#!/usr/bin/env bash
# This is the script that runs inside the container
# Usage: container.sh <local_pg_url> <remote_pg_url>

# cd to this script's directory
cd "$(dirname "$0")" || exit

set -ex

LOCAL_PG="$1"
REMOTE_PG="$2"

# fetch users
psql --command="\\COPY (SELECT * FROM users) TO '/tmp/users.csv' (FORMAT CSV, DELIMITER ';')" "${REMOTE_PG}"

# fetch teams
psql --command="\\COPY (SELECT id, name, slug, theme, settings, domain FROM teams) TO '/tmp/teams.csv' (FORMAT CSV, DELIMITER ';')" "${REMOTE_PG}"

# fetch flows
psql --command="\\COPY (SELECT * FROM flows) TO '/tmp/flows.csv' (FORMAT CSV, DELIMITER ';')" "${REMOTE_PG}"

# fetch published_flows (the two most recent per flow)
psql --command="\\COPY (SELECT id, data, flow_id, summary, publisher_id FROM (SELECT id, data, flow_id, summary, publisher_id, ROW_NUMBER() OVER (PARTITION BY flow_id ORDER BY created_at DESC) as row_num FROM published_flows) as subquery WHERE row_num <= 2) TO '/tmp/published_flows.csv' (FORMAT CSV);" "${REMOTE_PG}"

# run container.sql
psql "${LOCAL_PG}" < container.sql
34 changes: 34 additions & 0 deletions scripts/seed-database/container.sql
@@ -0,0 +1,34 @@
BEGIN;

TRUNCATE TABLE users, teams CASCADE;

\COPY users FROM '/tmp/users.csv' (FORMAT CSV, DELIMITER ';');

\COPY teams (id, name, slug, theme, settings, domain) FROM '/tmp/teams.csv' (FORMAT CSV, DELIMITER ';')

\COPY flows FROM '/tmp/flows.csv' (FORMAT CSV, DELIMITER ';');
UPDATE flows SET version = 1;

-- insert an operation for each flow (to make sharedb happy)
INSERT INTO operations (flow_id, data, version)
SELECT
id
, json_build_object(
'm', json_build_object(
'ts', extract(epoch from now()) * 1000
, 'uId', '1'
)
, 'v', 0
, 'seq', 1
, 'src', '1'
, 'create', json_build_object(
'data', '{}'
, 'type', 'http://sharejs.org/types/JSONv0'
)
)
, 1
FROM flows;

\COPY published_flows (id, data, flow_id, summary, publisher_id) FROM '/tmp/published_flows.csv' (FORMAT CSV);

COMMIT;
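
For reference, the json_build_object call above yields one ShareDB JSONv0 create operation per flow; a representative data value (timestamp illustrative) looks like:

-- {"m": {"ts": 1677240000000, "uId": "1"}, "v": 0, "seq": 1, "src": "1",
--  "create": {"data": "{}", "type": "http://sharejs.org/types/JSONv0"}}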
22 changes: 0 additions & 22 deletions scripts/seed-database/package.json

This file was deleted.

