support dbt-core between 1.6.0 and 1.7.8 #921

name: dbt-fal integration tests

on:
  pull_request:
    types: [assigned, opened, synchronize, reopened]
    paths:
      - "projects/adapter/**"
      - ".github/workflows/test_integration_adapter.yml"
  push:
    branches: [main]
    paths:
      - "projects/adapter/**"
  # schedule:
  #   # every monday
  #   - cron: "0 0 * * 1"
  workflow_dispatch:
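
# One matrix job runs per warehouse profile × Python version; the `include`
# entries under the matrix switch on the teleport/cloud extras for the
# profiles that support them.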
jobs:
  run:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        profile:
          - postgres
          - bigquery
          - snowflake
          # TODO: enable as 1.5 becomes available for the following adapters
          # - trino
          # - duckdb
          # - sqlserver
        dbt_version:
          - "1.7.0"
        python:
          - "3.8"
          - "3.9"
          - "3.10"
          # - "3.11"
        include:
          - profile: snowflake
            teleport: true
            cloud: true
          - profile: bigquery
            cloud: true
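    # Cancel a still-running job for the same branch/profile/Python
    # combination when a newer run starts.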
    concurrency:
      group: "${{ github.head_ref || github.run_id }}-${{ github.workflow }}-${{ matrix.profile }}-${{ matrix.python }}"
      cancel-in-progress: true
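    # The repository is checked out into ./fal, so every working-directory
    # below is prefixed with fal/.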
    steps:
      - uses: actions/checkout@v2
        with:
          path: "fal"
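
      # Postgres is the only profile backed by a local container; the cloud
      # profiles (bigquery, snowflake) use live services configured through
      # repository secrets.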
      - name: Start Docker database
        working-directory: fal/projects/adapter/cli_tests
        if: contains(fromJSON('["postgres"]'), matrix.profile)
        run: docker-compose up -d

      - name: Start trino
        working-directory: fal/projects/adapter/integration_tests/configs/trino
        if: contains(fromJSON('["trino"]'), matrix.profile)
        run: docker-compose up -d
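
      # dbt-sqlserver needs Microsoft's ODBC driver (msodbcsql18) on the
      # runner; this profile is currently disabled in the matrix above.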
      - name: Setup sqlserver dependencies
        if: contains(fromJSON('["sqlserver"]'), matrix.profile)
        run: |
          sudo apt install unixodbc-dev
          sudo curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
          curl https://packages.microsoft.com/config/ubuntu/$(lsb_release -rs)/prod.list > prod.list
          sudo cp prod.list /etc/apt/sources.list.d/mssql-release.list
          sudo apt update
          sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18

      - name: Start sqlserver
        working-directory: fal/projects/adapter/integration_tests/configs/sqlserver
        if: contains(fromJSON('["sqlserver"]'), matrix.profile)
        run: |
          docker-compose up -d
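
      # Install the warehouse adapter (pinned to matrix.dbt_version when set)
      # plus dbt-fal itself in editable mode with profile-specific extras.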
      - name: Setup latest dependencies
        working-directory: fal/projects/adapter/integration_tests
        run: |
          python -m venv .venv
          source .venv/bin/activate
          pip install --upgrade pip

          ADAPTER_PACKAGE="dbt-${{ matrix.profile }}"
          if [[ -n '${{ matrix.dbt_version }}' ]]
          then
            ADAPTER_PACKAGE="${ADAPTER_PACKAGE}==${{ matrix.dbt_version }}"
          fi

          pushd ..
          EXTRAS="${{ matrix.profile }}"
          if [[ '${{ matrix.teleport }}' == 'true' ]]
          then
            EXTRAS="$EXTRAS,teleport"
          fi
          if [[ '${{ matrix.cloud }}' == 'true' ]]
          then
            EXTRAS="$EXTRAS,cloud"
          fi
          DBT_FAL_PACKAGE=".[$EXTRAS]"

          echo "pip install $ADAPTER_PACKAGE -e $DBT_FAL_PACKAGE"
          pip install $ADAPTER_PACKAGE -e $DBT_FAL_PACKAGE
          popd
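
      # behave is the Python BDD framework that drives the integration
      # test scenarios.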
      - name: Setup behave
        working-directory: fal/projects/adapter/integration_tests
        run: pip install behave
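
      # Credentials for every warehouse are injected below; each profile only
      # reads the variables that belong to it.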
      - name: Run tests
        id: test_run
        working-directory: fal/projects/adapter/integration_tests
        env:
          FAL_STATS_ENABLED: false
          # Teleport
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          # Isolate Cloud
          FAL_HOST: ${{ secrets.FAL_HOST }}
          FAL_KEY_SECRET: ${{ secrets.FAL_KEY_SECRET }}
          FAL_KEY_ID: ${{ secrets.FAL_KEY_ID }}
          # BigQuery
          KEYFILE: ${{ secrets.GCP_SA_KEY }}
          GCLOUD_PROJECT: ${{ secrets.GCP_PROJECT_ID }}
          BQ_DATASET: ${{ secrets.BQ_DATASET }}
          # Snowflake
          SF_ACCOUNT: ${{ secrets.SF_ACCOUNT }}
          SF_USER: ${{ secrets.SF_USER }}
          SF_PASSWORD: ${{ secrets.SF_PASSWORD }}
          SF_ROLE: ${{ secrets.SF_ROLE }}
          SF_DATABASE: ${{ secrets.SF_DATABASE }}
          SF_WAREHOUSE: ${{ secrets.SF_WAREHOUSE }}
          SF_SCHEMA: ${{ secrets.SF_SCHEMA }}
          # Duckdb
          DB_PATH: ${{ github.workspace }}/duck.db
        run: |
          source .venv/bin/activate

          # Database and schema setup for sources
          if [[ '${{ matrix.profile }}' == "bigquery" ]]
          then
            export DBT_DATABASE="$GCLOUD_PROJECT" DBT_SCHEMA="$BQ_DATASET"
          fi
          if [[ '${{ matrix.profile }}' == "snowflake" ]]
          then
            export DBT_DATABASE="$SF_DATABASE" DBT_SCHEMA="$SF_SCHEMA"
          fi
          if [[ '${{ matrix.profile }}' == "duckdb" ]]
          then
            # TODO: which to use for sources? Example:
            # database: "{{ env_var('DBT_DATABASE', 'test') }}"
            # schema: "{{ env_var('DBT_SCHEMA', 'dbt_fal') }}"
            export DBT_DATABASE="" DBT_SCHEMA=""
          fi
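          # Split the GCP service-account keyfile (JSON) into the individual
          # env vars the BigQuery profile expects; escaped \n sequences in the
          # private key are converted back into real newlines.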
          if [[ '${{ matrix.profile }}' == "bigquery" ]]
          then
            export GCLOUD_PRIVATE_KEY_ID=$(echo $KEYFILE | jq '.private_key_id' | tr -d '"')
            export RAW_PRIVATE_KEY=$(echo $KEYFILE | jq '.private_key' | tr -d '"')
            export GCLOUD_PRIVATE_KEY="${RAW_PRIVATE_KEY//'\n'/$'\n'}"
            export GCLOUD_CLIENT_EMAIL=$(echo $KEYFILE | jq '.client_email' | tr -d '"')
            export GCLOUD_CLIENT_ID=$(echo $KEYFILE | jq '.client_id' | tr -d '"')
            export GCLOUD_X509_CERT_URL=$(echo $KEYFILE | jq '.client_x509_cert_url' | tr -d '"')
          fi
          # Could not get the real job_id easily from context
          UUID=$(uuidgen | head -c8)
          export DB_NAMESPACE="${{ github.run_id }}_${UUID}"
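          # Exclude scenarios tagged TODO for this profile, plus the
          # teleport/cloud scenarios when those features are off.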
          BEHAVE_TAGS="--tags=-TODO-${{ matrix.profile }}"
          if [[ '${{ matrix.teleport }}' != 'true' ]]
          then
            BEHAVE_TAGS="$BEHAVE_TAGS --tags=-teleport"
          fi
          if [[ '${{ matrix.cloud }}' != 'true' ]]
          then
            BEHAVE_TAGS="$BEHAVE_TAGS --tags=-cloud"
          fi
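          # On pull_request events GITHUB_HEAD_REF holds the source branch;
          # on push events it is empty and the branch comes from GITHUB_REF.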
          if [[ -z "${GITHUB_HEAD_REF}" ]]
          then
            export FAL_GITHUB_BRANCH=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}}
          else
            export FAL_GITHUB_BRANCH=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}
          fi

          behave $BEHAVE_TAGS -fplain -D profile=${{ matrix.profile }}
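
      # Failure notifications only fire for scheduled runs, and the schedule
      # trigger is currently commented out above.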
      - name: Send custom JSON data to Slack workflow
        if: (failure() || cancelled()) && github.event_name == 'schedule'
        id: slack
        # NOTE: the original pinned version of this action is unrecoverable
        # from the source; the v1 major tag is assumed here.
        uses: slackapi/slack-github-action@v1
        with:
          # For posting a rich message using Block Kit
          payload: |
            {
              "text": "Integration tests failed for dbt-${{ matrix.profile }}@${{ matrix.dbt_version }} (Python ${{ matrix.python }})\nhttps://github.com/fal-ai/fal/actions/runs/${{ github.run_id }}"
            }
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK