Integration tests #5
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: "Integration tests"

# Reusable workflow: runs a package's integration tests.
# Invoked from other workflows via `workflow_call`, or manually via
# `workflow_dispatch` (which offers curated choice lists for package/python).
on:
  workflow_call:
    inputs:
      package:
        description: "Choose the package to test"
        type: string
        required: true
      branch:
        description: "Choose the branch to test"
        type: string
        default: "main"
      repository:
        description: "Choose the repository to test, when using a fork"
        type: string
        default: "dbt-labs/dbt-adapters"
      os:
        description: "Choose the OS to test against"
        type: string
        default: "ubuntu-22.04"
      python-version:
        description: "Choose the Python version to test against"
        type: string
        default: "3.9"
      is-flaky-test-run:
        description: "What kind of tests to execute in this run (true for flaky tests, false for the rest)"
        type: boolean
        default: false
  workflow_dispatch:
    inputs:
      package:
        description: "Choose the package to test"
        type: choice
        options:
          - "dbt-athena"
          - "dbt-athena-community"
          - "dbt-bigquery"
          - "dbt-spark"
      branch:
        description: "Choose the branch to test"
        type: string
        default: "main"
      repository:
        description: "Choose the repository to test, when using a fork"
        type: string
        default: "dbt-labs/dbt-adapters"
      os:
        description: "Choose the OS to test against"
        type: string
        default: "ubuntu-22.04"
      python-version:
        description: "Choose the Python version to test against"
        type: choice
        # explicit default keeps manual runs consistent with the
        # workflow_call default above
        default: "3.9"
        options: ["3.9", "3.10", "3.11", "3.12"]
      is-flaky-test-run:
        description: "What kind of tests to execute in this run (true for flaky tests, false for the rest)"
        type: boolean
        default: false
permissions:
  id-token: write  # required for AWS OIDC role assumption (athena job)
  contents: read

# Shared environment: Datadog CI-visibility settings plus dbt test users
# used by the functional test suites.
env:
  DBT_INVOCATION_ENV: ${{ vars.DBT_INVOCATION_ENV }}
  DD_CIVISIBILITY_AGENTLESS_ENABLED: ${{ vars.DD_CIVISIBILITY_AGENTLESS_ENABLED }}
  DD_API_KEY: ${{ secrets.DD_API_KEY }}
  DD_SITE: ${{ vars.DD_SITE }}
  DD_ENV: ${{ vars.DD_ENV }}
  DD_SERVICE: ${{ github.event.repository.name }}  # this can change per run because of forks
  DBT_TEST_USER_1: ${{ vars.DBT_TEST_USER_1 }}
  DBT_TEST_USER_2: ${{ vars.DBT_TEST_USER_2 }}
  DBT_TEST_USER_3: ${{ vars.DBT_TEST_USER_3 }}
jobs:
  integration-tests-athena:
    # NOTE: GitHub Actions expressions only accept single-quoted string
    # literals; the previous double quotes here made the workflow file invalid.
    name: integration-tests-athena-${{ inputs.is-flaky-test-run && 'flaky' || 'not-flaky' }}
    if: ${{ inputs.package == 'dbt-athena' || inputs.package == 'dbt-athena-community' }}
    runs-on: ${{ inputs.os }}
    environment:
      name: "dbt-athena"
    env:
      DBT_TEST_ATHENA_DATABASE: ${{ vars.DBT_TEST_ATHENA_DATABASE }}
      DBT_TEST_ATHENA_NUM_RETRIES: ${{ vars.DBT_TEST_ATHENA_NUM_RETRIES }}
      DBT_TEST_ATHENA_POLL_INTERVAL: ${{ vars.DBT_TEST_ATHENA_POLL_INTERVAL }}
      DBT_TEST_ATHENA_REGION_NAME: ${{ vars.DBT_TEST_ATHENA_REGION_NAME }}
      DBT_TEST_ATHENA_S3_STAGING_DIR: ${{ vars.DBT_TEST_ATHENA_S3_STAGING_DIR }}
      DBT_TEST_ATHENA_S3_TMP_TABLE_DIR: ${{ vars.DBT_TEST_ATHENA_S3_TMP_TABLE_DIR }}
      DBT_TEST_ATHENA_SCHEMA: ${{ vars.DBT_TEST_ATHENA_SCHEMA }}
      DBT_TEST_ATHENA_THREADS: ${{ vars.DBT_TEST_ATHENA_THREADS }}
      DBT_TEST_ATHENA_WORK_GROUP: ${{ vars.DBT_TEST_ATHENA_WORK_GROUP }}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.branch }}
          repository: ${{ inputs.repository }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ inputs.python-version }}
      - uses: pypa/hatch@install
      - uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.ASSUMABLE_ROLE_NAME }}
          aws-region: ${{ vars.DBT_TEST_ATHENA_REGION_NAME }}
      # run flaky tests command if is-flaky-test-run flag is true. Otherwise, run the other tests
      # Both operands of the expression must be single-quoted string literals;
      # the fallback command was previously bare text inside the expression,
      # which is a syntax error.
      - run: ${{ inputs.is-flaky-test-run && 'hatch run integration-tests -m flaky -n1 --ddtrace' || 'hatch run integration-tests -m "not flaky"' }}
        working-directory: ./${{ inputs.package }}
integration-tests-bigquery: | ||
if: ${{ inputs.package == 'dbt-bigquery' }} | ||
runs-on: ${{ inputs.os }} | ||
environment: | ||
name: "dbt-bigquery" | ||
env: | ||
BIGQUERY_TEST_SERVICE_ACCOUNT_JSON: ${{ secrets.BIGQUERY_TEST_SERVICE_ACCOUNT_JSON }} | ||
BIGQUERY_TEST_ALT_DATABASE: ${{ vars.BIGQUERY_TEST_ALT_DATABASE }} | ||
BIGQUERY_TEST_NO_ACCESS_DATABASE: ${{ vars.BIGQUERY_TEST_NO_ACCESS_DATABASE }} | ||
DBT_TEST_USER_1: ${{ vars.DBT_TEST_USER_1 }} | ||
DBT_TEST_USER_2: ${{ vars.DBT_TEST_USER_2 }} | ||
DBT_TEST_USER_3: ${{ vars.DBT_TEST_USER_3 }} | ||
DATAPROC_REGION: ${{ vars.DATAPROC_REGION }} | ||
DATAPROC_CLUSTER_NAME: ${{ vars.DATAPROC_CLUSTER_NAME }} | ||
GCS_BUCKET: ${{ vars.GCS_BUCKET }} | ||
steps: | ||
- uses: actions/checkout@v4 | ||
with: | ||
ref: ${{ inputs.branch }} | ||
repository: ${{ inputs.repository }} | ||
- uses: actions/setup-python@v5 | ||
with: | ||
python-version: ${{ inputs.python-version }} | ||
- uses: pypa/hatch@install | ||
- run: hatch run integration-tests tests/functional -k "not TestPython" | ||
working-directory: ./${{ inputs.package }} | ||
- run: hatch run integration-tests tests/functional -n1 -k "TestPython" | ||
if: ${{ inputs.python-version == '3.9' }} # we only run this for one version to run in series | ||
working-directory: ./${{ inputs.package }} | ||
integration-tests-spark: | ||
if: ${{ inputs.package == 'dbt-spark' }} | ||
runs-on: ${{ inputs.os }} | ||
environment: | ||
name: "dbt-spark" | ||
env: | ||
DBT_DATABRICKS_CLUSTER_NAME: ${{ secrets.DBT_DATABRICKS_CLUSTER_NAME }} | ||
DBT_DATABRICKS_HOST_NAME: ${{ secrets.DBT_DATABRICKS_HOST_NAME }} | ||
DBT_DATABRICKS_ENDPOINT: ${{ secrets.DBT_DATABRICKS_ENDPOINT }} | ||
DBT_DATABRICKS_TOKEN: ${{ secrets.DBT_DATABRICKS_TOKEN }} | ||
DBT_DATABRICKS_USER: ${{ secrets.DBT_DATABRICKS_USER }} | ||
strategy: | ||
fail-fast: false | ||
matrix: | ||
profile: | ||
- "apache_spark" | ||
- "spark_session" | ||
- "spark_http_odbc" | ||
- "databricks_sql_endpoint" | ||
- "databricks_cluster" | ||
- "databricks_http_cluster" | ||
steps: | ||
- uses: actions/checkout@v4 | ||
with: | ||
ref: ${{ inputs.branch }} | ||
repository: ${{ inputs.repository }} | ||
- uses: actions/setup-python@v5 | ||
with: | ||
python-version: ${{ inputs.python-version }} | ||
- uses: pypa/hatch@install | ||
- run: hatch run pip install -r dagger/requirements.txt | ||
working-directory: ./${{ inputs.package }} | ||
- run: hatch run integration-tests --profile ${{ matrix.profile }} | ||
working-directory: ./${{ inputs.package }} |