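# e2e.yml: end-to-end tests for the VS Code extension.
# A Linux job packages the extension as a darwin-x64 .vsix; a macOS job then
# restores that package from the cache, prepares a BigQuery dbt profile from a
# repository secret, and runs the e2e suite against the fivetran/analytics models.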
name: e2e
on:
workflow_dispatch:
inputs:
TARGET:
        description: 'dbt profile target'
required: true
default: 'dev'
type: choice
options:
- 'dev'
- 'prod'
DATASET:
description: 'BigQuery dataset name for "dev" profile'
required: true
default: 'z_dbt_wizard'
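  # Scheduled runs receive no workflow_dispatch inputs; the "prepare profile
  # config" step below falls back to the same defaults (dev / z_dbt_wizard).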
schedule:
    # Runs "At minute 0 past every 6th hour." (see https://crontab.guru/#0_*/6_*_*_*)
- cron: '0 */6 * * *'
jobs:
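  # Build the .vsix once on Linux; run-e2e-tests restores it from the cache.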
package:
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v4
- name: set up node
uses: actions/setup-node@v4
with:
node-version: '19.x'
- name: build and test
run: |
npm install
npm run build
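      # The .vsix file name embeds the commit SHA, so the cache key below is
      # unique per commit and both jobs can compute it independently.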
- name: set environment variable with file name
run: echo "FILE_NAME=${{ github.sha }}-darwin-x64.vsix" >> $GITHUB_ENV
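      # Trim the ZetaSQL native libraries down to the single remote_server
      # binary, presumably to shrink the darwin-x64 package.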
- name: delete libraries for other platforms
run: |
find server/node_modules/@fivetrandevelopers/zetasql/lib/zetasql -maxdepth 1 -type f ! -name 'remote_server' -exec rm -f {} \;
- name: package to vsix file
run: npx @vscode/vsce package -o ${FILE_NAME} --target darwin-x64 --githubBranch main
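      # actions/cache is used here as a cross-job handoff for the .vsix
      # (actions/upload-artifact would be the more conventional alternative).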
- name: create cache
uses: actions/cache@v4
with:
path: ./${{ env.FILE_NAME }}
key: ${{ env.FILE_NAME }}
run-e2e-tests:
needs: package
runs-on: macos-latest
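    # macOS runner, matching the darwin-x64 target the package was built for.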
steps:
- name: set environment variable with file name
run: echo "FILE_NAME=${{ github.sha }}-darwin-x64.vsix" >> $GITHUB_ENV
- name: checkout
uses: actions/checkout@v4
- name: checkout repository with models
uses: actions/checkout@v4
with:
repository: fivetran/analytics
path: analytics
token: ${{ secrets.ANALYTICS_REPO_ACCESS_TOKEN }}
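      # ANALYTICS_REPO_ACCESS_TOKEN is presumably a token with read access to
      # the private fivetran/analytics repository.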
- name: load vsix from cache
uses: actions/cache@v4
id: cache
with:
path: ./${{ env.FILE_NAME }}
key: ${{ env.FILE_NAME }}
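      # A cache miss means the package job's .vsix is unavailable, so fail fast
      # rather than run the tests against nothing.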
- name: fail if cache not hit
if: steps.cache.outputs.cache-hit != 'true'
run: exit 1
- uses: actions/setup-node@v4
with:
node-version: '19.x'
- name: build tests
run: | # ignore postinstall script and install dependencies for e2e
npm install --ignore-scripts
cd common
npx tsc --sourceMap false --project tsconfig.json
cd ../e2e
npm install
npx tsc --sourceMap false --project tsconfig.json
- uses: actions/setup-python@v5
id: setup-python
with:
python-version: '3.9.12'
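      # Write the service-account key file and a dbt profiles.yml with dev/prod
      # targets; individual fields are pulled out of the BQ_ANALYTICS_ACCOUNT
      # secret JSON with jq.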
- name: prepare profile config
run: |
mkdir -p ~/.dbt/
echo "$BQ_ANALYTICS_ACCOUNT" > ~/.dbt/analytics.json
user=$(whoami)
homedir=$(eval echo "~${user}")
keyFilePath=${homedir}/.dbt/analytics.json
echo ${keyFilePath}
private_key=$(echo ${BQ_ANALYTICS_ACCOUNT} | jq '.private_key')
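          # jq -r prints the requested fields one per line; the unquoted echo
          # re-joins them so read can word-split them into separate variables.
          # (Only project_id is referenced below; the key file written above
          # supplies the credentials.)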
read project_id private_key_id client_id client_email auth_uri token_uri auth_provider_x509_cert_url client_x509_cert_url < <(echo $(echo ${BQ_ANALYTICS_ACCOUNT} | jq -r '.project_id, .private_key_id, .client_id, .client_email, .auth_uri, .token_uri, .auth_provider_x509_cert_url, .client_x509_cert_url'))
if [[ -z "${TARGET}" ]]; then
target_value=dev
else
target_value="${TARGET}"
fi
if [[ -z "${DATASET}" ]]; then
dataset_value=z_dbt_wizard
else
dataset_value="${DATASET}"
fi
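          # Two-target BigQuery profile: "dev" writes to the dataset chosen
          # above, "prod" to the fixed "transforms" dataset.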
cat <<EOT > ~/.dbt/profiles.yml
fivetran:
outputs:
dev:
type: bigquery
method: service-account
project: ${project_id}
keyfile: ${keyFilePath}
dataset: ${dataset_value}
threads: 4
prod:
type: bigquery
method: service-account
project: ${project_id}
keyfile: ${keyFilePath}
dataset: transforms
threads: 4
target: ${target_value}
EOT
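          # VS Code workspace file pointing at the dbt project, pinning the
          # interpreter from the setup-python step.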
cat <<EOT > analytics/dbt_ft_prod.code-workspace
{
"folders": [
{
"path": "dbt_ft_prod"
}
],
"settings": {
"python.defaultInterpreterPath": "${PYTHON_INSTALL_LOC}"
}
}
env:
BQ_ANALYTICS_ACCOUNT: ${{ secrets.BQ_ANALYTICS_ACCOUNT }}
TARGET: ${{ inputs.TARGET }}
DATASET: ${{ inputs.DATASET }}
PYTHON_INSTALL_LOC: '${{ steps.setup-python.outputs.python-path }}'
- name: show prepared files
run: |
cd ~/.dbt/
pwd
ls -la
- name: install dbt
# https://github.com/fivetran/analytics/blob/a83516d1f662ef4db5750e4bff6735b56b823123/.buildkite/scripts/step_2_run_models_dry.sh#L11
run: ${PYTHON_INSTALL_LOC} -m pip install "dbt-bigquery>=1.5.0,<1.6.0"
env:
PYTHON_INSTALL_LOC: '${{ steps.setup-python.outputs.python-path }}'
- name: show info
run: |
which python
python --version
python -m pip --version
python -m pip list | grep dbt
which dbt
dbt --version
node -v
echo ${PYTHON_INSTALL_LOC}
${PYTHON_INSTALL_LOC} --version
env:
PYTHON_INSTALL_LOC: '${{ steps.setup-python.outputs.python-path }}'
      - name: install dbt packages
run: |
cd analytics/dbt_ft_prod
dbt deps
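      # A .vsix is a zip archive; unpack it so the test runner can point VS
      # Code at the unpacked extension directory.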
- run: unzip ${{ env.FILE_NAME }} -d e2e-tests
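      # runFtTest receives the absolute path of the unpacked extension; it is
      # assumed to launch VS Code with that extension installed.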
- name: run e2e tests
run: node e2e/out/runners/runFtTest $(pwd)/e2e-tests/extension
- name: upload logs
if: always()
uses: actions/upload-artifact@v4
with:
name: all_logs
path: |
./.vscode-test/user-data/logs/**/*Wizard for dbt Core (TM).log
./e2e/out/diagnostics.txt
./e2e/out/log.txt
./analytics/dbt_ft_prod/logs/*