flow/cmd: avoid context.Background, pass contexts down to queries (#1… #3673
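The change in the title replaces ad-hoc context.Background() calls in the flow/cmd handlers with contexts threaded down from the caller to the catalog queries. A minimal sketch of that pattern in Go (fetchMirrorStatus and its body are hypothetical stand-ins, not the actual flow/cmd code):

package main

import (
	"context"
	"fmt"
	"time"
)

// fetchMirrorStatus is a hypothetical query helper. It accepts the caller's
// context instead of creating its own with context.Background(), so
// cancellation and deadlines propagate down to the underlying query.
func fetchMirrorStatus(ctx context.Context, name string) (string, error) {
	// Stand-in for a database call such as pool.QueryRow(ctx, ...).
	select {
	case <-time.After(10 * time.Millisecond):
		return "running", nil
	case <-ctx.Done():
		return "", ctx.Err()
	}
}

func main() {
	// The caller owns the context; handlers pass it down rather than
	// starting a fresh context.Background() at each layer.
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	status, err := fetchMirrorStatus(ctx, "example-mirror")
	if err != nil {
		fmt.Println("query failed:", err)
		return
	}
	fmt.Println("status:", status)
}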

Workflow file for this run

name: Flow build and test
on:
  pull_request:
    branches: [main]
  push:
    branches: [main]
jobs:
  flow_test:
    strategy:
      matrix:
        runner: [ubicloud-standard-16-ubuntu-2204-arm]
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 30
    services:
      pg_cdc:
        image: imresamu/postgis:15-3.4-alpine
        ports:
          - 7132:5432
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        options: >-
          --name pg_cdc
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: checkout sources
        uses: actions/checkout@v4
      - uses: bufbuild/[email protected]
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
      - name: setup protos
        run: |
          ./generate_protos.sh
      - uses: actions/setup-go@v5
        with:
          go-version: "1.21"
          cache-dependency-path: flow/go.sum
      - name: install gotestsum
        run: |
          go install gotest.tools/gotestsum@latest
      - name: install lib-geos
        run: |
          sudo apt-get update
          sudo apt-get install libgeos-dev
      - name: download go modules
        run: |
          go mod download
        working-directory: ./flow
      - name: setup gcp service account
        id: gcp-service-account
        uses: jsdaniell/[email protected]
        with:
          name: "bq_service_account.json"
          json: ${{ secrets.GCP_GH_CI_PKEY }}
      - name: setup snowflake credentials
        id: sf-credentials
        uses: jsdaniell/[email protected]
        with:
          name: "snowflake_creds.json"
          json: ${{ secrets.SNOWFLAKE_GH_CI_PKEY }}
      - name: setup S3 credentials
        id: s3-credentials
        uses: jsdaniell/[email protected]
        with:
          name: "s3_creds.json"
          json: ${{ secrets.S3_CREDS }}
      - name: setup GCS credentials
        id: gcs-credentials
        uses: jsdaniell/[email protected]
        with:
          name: "gcs_creds.json"
          json: ${{ secrets.GCS_CREDS }}
      - name: create hstore extension, increase logical replication limits, and setup catalog database
        run: >
          docker exec pg_cdc psql -h localhost -p 5432 -U postgres -c "CREATE EXTENSION hstore;"
          -c "ALTER SYSTEM SET wal_level=logical;"
          -c "ALTER SYSTEM SET max_replication_slots=192;"
          -c "ALTER SYSTEM SET max_wal_senders=256;"
          -c "ALTER SYSTEM SET max_connections=2048;" &&
          (cat ../nexus/catalog/migrations/V{?,??}__* | docker exec -i pg_cdc psql -h localhost -p 5432 -U postgres) &&
          docker restart pg_cdc
        working-directory: ./flow
        env:
          PG_CDC: empty
          PGPASSWORD: postgres
      - name: run tests
        run: |
          gotestsum --format testname -- -p 24 ./... -timeout 1200s
        working-directory: ./flow
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
          TEST_BQ_CREDS: ${{ github.workspace }}/bq_service_account.json
          TEST_SF_CREDS: ${{ github.workspace }}/snowflake_creds.json
          TEST_S3_CREDS: ${{ github.workspace }}/s3_creds.json
          TEST_GCS_CREDS: ${{ github.workspace }}/gcs_creds.json
          AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
          AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
          AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
          AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
          ENABLE_SQLSERVER_TESTS: true
          SQLSERVER_HOST: ${{ secrets.SQLSERVER_HOST }}
          SQLSERVER_PORT: ${{ secrets.SQLSERVER_PORT }}
          SQLSERVER_USER: ${{ secrets.SQLSERVER_USER }}
          SQLSERVER_PASSWORD: ${{ secrets.SQLSERVER_PASSWORD }}
          SQLSERVER_DB: ${{ secrets.SQLSERVER_DB }}
          PEERDB_CATALOG_HOST: localhost
          PEERDB_CATALOG_PORT: 7132
          PEERDB_CATALOG_USER: postgres
          PEERDB_CATALOG_PASSWORD: postgres
          PEERDB_CATALOG_DATABASE: postgres