Commit 4d8f4c0

Merge pull request #320 from dbt-labs/er/support-tox
Add tox support for testing redshift, bigquery and snowflake
2 parents 241cd44 + 495c454 commit 4d8f4c0

11 files changed, +203 -75 lines

.github/workflows/ci.yml (new file, +43)

@@ -0,0 +1,43 @@
+# **what?**
+# Run tests for dbt-external-tables against supported adapters
+
+# **why?**
+# To ensure that dbt-external-tables works as expected with all supported adapters
+
+# **when?**
+# On every PR, and every push to main and when manually triggered
+
+name: Package Integration Tests
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+  workflow_dispatch:
+
+jobs:
+  run-tests:
+    uses: dbt-labs/dbt-package-testing/.github/workflows/run_tox.yml@v1
+    with:
+      # redshift
+      REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }}
+      REDSHIFT_USER: ${{ vars.REDSHIFT_USER }}
+      REDSHIFT_PORT: ${{ vars.REDSHIFT_PORT }}
+      REDSHIFT_DATABASE: ${{ vars.REDSHIFT_DATABASE }}
+      REDSHIFT_SCHEMA: "integration_tests_redshift_${{ github.run_number }}"
+      # snowflake
+      SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
+      SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
+      SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
+      SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
+      SNOWFLAKE_SCHEMA: "integration_tests_snowflake_${{ github.run_number }}"
+      # bigquery
+      BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
+      BIGQUERY_SCHEMA: "integration_tests_bigquery_${{ github.run_number }}"
+
+    secrets:
+      DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.DBT_ENV_SECRET_REDSHIFT_PASS }}
+      SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }}
+      DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.DBT_ENV_SECRET_SNOWFLAKE_PASS }}
+      BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
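Since the new workflow also declares a workflow_dispatch trigger, the tox-based test run can be started by hand in addition to running on pushes and pull requests. A minimal sketch with the GitHub CLI (assumes you have permission to dispatch workflows in this repo; the ref is illustrative):

    # Trigger the package integration tests manually
    gh workflow run "Package Integration Tests" --repo dbt-labs/dbt-external-tables --ref main

    # List recent runs of that workflow to check the result
    gh run list --repo dbt-labs/dbt-external-tables --workflow "Package Integration Tests" --limit 5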

.github/workflows/integration_tests.yml (+23 -20)

@@ -10,8 +10,6 @@ jobs:
   build:

     runs-on: ubuntu-latest
-    environment:
-      name: ci_testing
     strategy:
       fail-fast: true
       max-parallel: 3
@@ -40,22 +38,27 @@
           dbt run-operation prep_external --target ${{ matrix.data-platform }}
           dbt -d run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target ${{ matrix.data-platform }}
           dbt run-operation dbt_external_tables.stage_external_sources --target ${{ matrix.data-platform }}
-          dbt -d test --target ${{ matrix.data-platform }}
-        env:
-          REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
-          REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
-          REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
-          REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
-          REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
+          dbt test --target ${{ matrix.data-platform }}
+        env:
+
+        env:
+          # redshift
+          REDSHIFT_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
+          REDSHIFT_USER: ${{ secrets.REDSHIFT_TEST_USER }}
+          DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
+          REDSHIFT_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
+          REDSHIFT_DATABASE: ${{ secrets.REDSHIFT_TEST_DBNAME }}
           REDSHIFT_SPECTRUM_IAM_ROLE: ${{ secrets.REDSHIFT_SPECTRUM_IAM_ROLE }}
-          SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
-          SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
-          SNOWFLAKE_TEST_PASS: ${{ secrets.SNOWFLAKE_TEST_PASS }}
-          SNOWFLAKE_TEST_WHNAME: ${{ secrets.SNOWFLAKE_TEST_WHNAME }}
-          SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
-          SNOWFLAKE_TEST_DBNAME: ${{ secrets.SNOWFLAKE_TEST_DBNAME }}
-          BIGQUERY_TEST_PROJECT: ${{ secrets.BIGQUERY_TEST_PROJECT }}
-          BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }}
-          BIGQUERY_PRIVATE_KEY_ID: ${{ secrets.BIGQUERY_PRIVATE_KEY_ID }}
-          BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }}
-          BIGQUERY_CLIENT_ID: ${{ secrets.BIGQUERY_CLIENT_ID }}
+          REDSHIFT_SCHEMA: "dbt_external_tables_integration_tests_redshift"
+          #snowflake
+          SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
+          SNOWFLAKE_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
+          DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.SNOWFLAKE_TEST_PASS }}
+          SNOWFLAKE_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WHNAME }}
+          SNOWFLAKE_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
+          SNOWFLAKE_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DBNAME }}
+          SNOWFLAKE_SCHEMA: "dbt_external_tables_integration_tests_snowflake"
+          # bigquery
+          BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
+          BIGQUERY_SCHEMA: "dbt_external_tables_integration_tests_bigquery"
+          BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
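The job now exports the same variable names that integration_tests/profiles.yml reads via env_var(), so a CI run can be approximated locally by exporting those names before calling dbt. A rough sketch, assuming a populated integration_tests/test.env copied from the sample file (set -a auto-exports every assignment the file makes):

    # Load the env file, then exercise one target end to end
    set -a; source integration_tests/test.env; set +a
    cd integration_tests
    dbt deps --target snowflake
    dbt seed --full-refresh --target snowflake
    dbt run --target snowflake
    dbt test --target snowflake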

.gitignore (+1)

@@ -6,3 +6,4 @@
 **/env/
 **/venv/
 **/test.env
+integration_tests/vars.env

integration_tests/dbt_project.yml (+4)

@@ -17,6 +17,10 @@ clean-targets:
   - "target"
   - "dbt_packages"

+flags:
+  send_anonymous_usage_stats: False
+  use_colors: True
+
 dispatch:
   - macro_namespace: dbt_external_tables
     search_order: ['dbt_external_tables_integration_tests', 'dbt_external_tables']

integration_tests/macros/plugins/redshift/prep_external.sql (+1 -1)

@@ -8,7 +8,7 @@
         {{ external_schema }}
         from data catalog
         database '{{ external_schema }}'
-        iam_role '{{ env_var("REDSHIFT_SPECTRUM_IAM_ROLE") }}'
+        iam_role 'arn:aws:iam::859831564954:role/RedshiftSpectrumTesting'
         create external database if not exists;

    {% endset %}

integration_tests/packages.yml (-1)

@@ -1,4 +1,3 @@
-
 packages:
   - local: ../
   - package: dbt-labs/dbt_utils

integration_tests/profiles.yml (+24 -36)

@@ -2,52 +2,40 @@
 # HEY! This file is used in the dbt-external-tables integrations tests with CircleCI.
 # You should __NEVER__ check credentials into version control. Thanks for reading :)

-config:
-  send_anonymous_usage_stats: False
-  use_colors: True
-
 integration_tests:
   target: postgres
   outputs:

     redshift:
-      type: redshift
-      host: "{{ env_var('REDSHIFT_TEST_HOST') }}"
-      user: "{{ env_var('REDSHIFT_TEST_USER') }}"
-      pass: "{{ env_var('REDSHIFT_TEST_PASS') }}"
-      dbname: "{{ env_var('REDSHIFT_TEST_DBNAME') }}"
-      port: "{{ env_var('REDSHIFT_TEST_PORT') | as_number }}"
-      schema: dbt_external_tables_integration_tests_redshift
-      threads: 1
+      type: "redshift"
+      host: "{{ env_var('REDSHIFT_HOST') }}"
+      user: "{{ env_var('REDSHIFT_USER') }}"
+      pass: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_PASS') }}"
+      dbname: "{{ env_var('REDSHIFT_DATABASE') }}"
+      port: "{{ env_var('REDSHIFT_PORT') | as_number }}"
+      schema: "{{ env_var('REDSHIFT_SCHEMA') }}"
+      threads: 5

     snowflake:
-      type: snowflake
-      account: "{{ env_var('SNOWFLAKE_TEST_ACCOUNT') }}"
-      user: "{{ env_var('SNOWFLAKE_TEST_USER') }}"
-      password: "{{ env_var('SNOWFLAKE_TEST_PASS') }}"
-      role: "{{ env_var('SNOWFLAKE_TEST_ROLE') }}"
-      database: "{{ env_var('SNOWFLAKE_TEST_DBNAME') }}"
-      warehouse: "{{ env_var('SNOWFLAKE_TEST_WHNAME') }}"
-      schema: dbt_external_tables_integration_tests_snowflake
-      threads: 1
+      type: "snowflake"
+      account: "{{ env_var('SNOWFLAKE_ACCOUNT') }}"
+      user: "{{ env_var('SNOWFLAKE_USER') }}"
+      password: "{{ env_var('DBT_ENV_SECRET_SNOWFLAKE_PASS') }}"
+      role: "{{ env_var('SNOWFLAKE_ROLE') }}"
+      database: "{{ env_var('SNOWFLAKE_DATABASE') }}"
+      warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE') }}"
+      schema: "{{ env_var('SNOWFLAKE_SCHEMA') }}"
+      threads: 10

     bigquery:
-      type: bigquery
-      method: service-account-json
+      type: "bigquery"
+      method: "service-account-json"
+      project: "{{ env_var('BIGQUERY_PROJECT') }}"
+      dataset: "{{ env_var('BIGQUERY_SCHEMA') }}"
+      threads: 10
       keyfile_json:
-        type: "service_account"
-        project_id: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
-        private_key: "{{ env_var('BIGQUERY_PRIVATE_KEY') }}"
-        private_key_id: "{{ env_var('BIGQUERY_PRIVATE_KEY_ID') }}"
-        client_email: "{{ env_var('BIGQUERY_CLIENT_EMAIL') }}"
-        client_id: "{{ env_var('BIGQUERY_CLIENT_ID') }}"
-        auth_uri: "https://accounts.google.com/o/oauth2/auth"
-        token_uri: "https://oauth2.googleapis.com/token"
-        auth_provider_x509_cert_url: "https://www.googleapis.com/oauth2/v1/certs"
-        client_x509_cert_url: https://www.googleapis.com/robot/v1/metadata/x509/{{ env_var('BIGQUERY_CLIENT_EMAIL') | urlencode }}"
-      project: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
-      schema: dbt_external_tables_integration_tests_bigquery
-      threads: 1
+        "{{ env_var('BIGQUERY_KEYFILE_JSON') | as_native}}"
+      job_retries: 3

     databricks:
       type: spark
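The BigQuery target now reads the whole service-account keyfile from a single BIGQUERY_KEYFILE_JSON value, which as_native turns back into a mapping, replacing the previous per-field secrets. One possible way to populate it, with an illustrative keyfile path:

    # Locally: hand the entire keyfile JSON to the env var the profile reads
    export BIGQUERY_KEYFILE_JSON="$(cat ~/keys/dbt-external-tables-sa.json)"

    # In GitHub: store the same JSON as the repository secret used by the workflows
    gh secret set BIGQUERY_KEYFILE_JSON < ~/keys/dbt-external-tables-sa.json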

integration_tests/test.env.sample (+17 -17)

@@ -1,28 +1,28 @@
 # gh secret set -f integration_tests/test.env -e ci_testing

 # redshift
-REDSHIFT_TEST_HOST=
-REDSHIFT_TEST_USER=
-REDSHIFT_TEST_PASS=
-REDSHIFT_TEST_DBNAME=
-REDSHIFT_TEST_PORT=
+REDSHIFT_HOST=
+REDSHIFT_USER=
+DBT_ENV_SECRET_REDSHIFT_PASS=
+REDSHIFT_PORT=
+REDSHIFT_DBNAME=
+REDSHIFT_SCHEMA=
 REDSHIFT_SPECTRUM_IAM_ROLE=

 # snowflake
-
-SNOWFLAKE_TEST_ACCOUNT=
-SNOWFLAKE_TEST_USER=
-SNOWFLAKE_TEST_PASS=
-SNOWFLAKE_TEST_ROLE=
-SNOWFLAKE_TEST_DBNAME=
-SNOWFLAKE_TEST_WHNAME=
+SNOWFLAKE_ACCOUNT=
+SNOWFLAKE_USER=
+DBT_ENV_SECRET_SNOWFLAKE_PASS=
+SNOWFLAKE_ROLE=
+SNOWFLAKE_DATABASE=
+SNOWFLAKE_SCHEMA=
+SNOWFLAKE_WAREHOUSE=

 # bigquery
-BIGQUERY_PRIVATE_KEY=
-BIGQUERY_PRIVATE_KEY_ID=
-BIGQUERY_CLIENT_EMAIL=
-BIGQUERY_CLIENT_ID=
-BIGQUERY_TEST_PROJECT=
+BIGQUERY_PROJECT=
+BIGQUERY_SCHEMA=
+BIGQUERY_KEYFILE_JSON=
+

 # databricks
 DATABRICKS_TEST_HOST=
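As the comment at the top of the sample suggests, a filled-in copy of this file can be pushed to GitHub in one shot; the copy step below is illustrative, and the ci_testing environment name comes from that comment:

    cp integration_tests/test.env.sample integration_tests/test.env
    # edit test.env with real values, then upload every entry as a secret
    gh secret set -f integration_tests/test.env -e ci_testing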

integration_tests/vars.env.sample (new file, +18)

@@ -0,0 +1,18 @@
+# gh variable set -f integration_tests/vars.env
+
+# redshift
+REDSHIFT_HOST=
+REDSHIFT_USER=
+REDSHIFT_DATABASE=
+REDSHIFT_PORT=
+REDSHIFT_SPECTRUM_IAM_ROLE=
+
+# snowflake
+SNOWFLAKE_ACCOUNT=
+SNOWFLAKE_USER=
+SNOWFLAKE_ROLE=
+SNOWFLAKE_DATABASE=
+SNOWFLAKE_WAREHOUSE=
+
+# bigquery
+BIGQUERY_PROJECT=
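The non-sensitive settings go into repository variables rather than secrets, so the companion file is uploaded with gh variable set instead:

    cp integration_tests/vars.env.sample integration_tests/vars.env
    # fill in values, then upload every entry as a repository variable
    gh variable set -f integration_tests/vars.env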

supported_adapters.env (new file, +1)

@@ -0,0 +1 @@
+SUPPORTED_ADAPTERS=snowflake,redshift,bigquery

tox.ini (new file, +71)

@@ -0,0 +1,71 @@
+[tox]
+skipsdist = True
+envlist = lint_all, testenv
+
+[testenv]
+passenv =
+    # redshift
+    REDSHIFT_HOST
+    REDSHIFT_USER
+    DBT_ENV_SECRET_REDSHIFT_PASS
+    REDSHIFT_DATABASE
+    REDSHIFT_SCHEMA
+    REDSHIFT_PORT
+    REDSHIFT_SPECTRUM_IAM_ROLE
+    # snowflake
+    SNOWFLAKE_ACCOUNT
+    SNOWFLAKE_USER
+    DBT_ENV_SECRET_SNOWFLAKE_PASS
+    SNOWFLAKE_ROLE
+    SNOWFLAKE_DATABASE
+    SNOWFLAKE_WAREHOUSE
+    SNOWFLAKE_SCHEMA
+    # bigquery
+    BIGQUERY_KEYFILE_JSON
+    BIGQUERY_PROJECT
+    BIGQUERY_SCHEMA
+
+# run dbt commands directly, assumes dbt is already installed in environment
+[testenv:dbt_integration_redshift]
+changedir = integration_tests
+allowlist_externals =
+    dbt
+skip_install = true
+commands =
+    dbt deps --target redshift
+    dbt seed --full-refresh --target redshift
+    dbt run --target redshift
+    dbt run-operation prep_external --target redshift
+    dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target redshift
+    dbt run-operation dbt_external_tables.stage_external_sources --target redshift
+    dbt test --target redshift
+
+# run dbt commands directly, assumes dbt is already installed in environment
+[testenv:dbt_integration_snowflake]
+changedir = integration_tests
+allowlist_externals =
+    dbt
+skip_install = true
+commands =
+    dbt deps --target snowflake
+    dbt seed --full-refresh --target snowflake
+    dbt run --target snowflake
+    dbt run-operation prep_external --target snowflake
+    dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target snowflake
+    dbt run-operation dbt_external_tables.stage_external_sources --target snowflake
+    dbt test --target snowflake
+
+# run dbt commands directly, assumes dbt is already installed in environment
+[testenv:dbt_integration_bigquery]
+changedir = integration_tests
+allowlist_externals =
+    dbt
+skip_install = true
+commands =
+    dbt deps --target bigquery
+    dbt seed --full-refresh --target bigquery
+    dbt run --target bigquery
+    dbt run-operation prep_external --target bigquery
+    dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target bigquery
+    dbt run-operation dbt_external_tables.stage_external_sources --target bigquery
+    dbt test --target bigquery
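Each testenv shells out to dbt directly, so the relevant adapter has to be installed in whatever environment tox runs from. A minimal local invocation might look like this (the choice of adapter is illustrative):

    # Install tox plus one adapter, then run that adapter's integration environment
    python -m pip install tox dbt-snowflake
    tox -e dbt_integration_snowflake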
