# deploy-to-gcp — workflow file for run #75
# (Header below was copied from the GitHub web UI; preserved here as comments
#  so the file parses as YAML: "Skip to content" / "Workflow file for this run")
name: deploy-to-gcp

# Runs on every push to main (ignoring docs-only changes) and nightly at
# 04:00 UTC.
on:
  push:
    branches:
      - main
    paths-ignore:
      - 'docs/**'
  schedule:
    - cron: '0 4 * * *'

jobs:
  deploy:
    runs-on: ubuntu-latest
    env:
      # Refuse pip installs outside a virtualenv (quoted: env values are
      # strings; an unquoted `true` is a YAML boolean).
      PIP_REQUIRE_VIRTUALENV: "true"
      DBT_PYPI_EARLIEST_DOWNLOAD_DATE: ${{ vars.DBT_PYPI_EARLIEST_DOWNLOAD_DATE }}
      DBT_MAX_BYTES_BILLED: ${{ vars.DBT_MAX_BYTES_BILLED }}
    permissions:
      contents: read
      # id-token: write is required for keyless GCP auth via workload identity.
      id-token: write
      actions: read
      # pages: write is required by actions/deploy-pages below.
      pages: write
    steps:
      - uses: actions/checkout@v4
      - name: setup dbt
        uses: ./.github/actions/setup_dbt
      # Authenticate to GCP with workload identity federation (no key file).
      - uses: google-github-actions/auth@v2
        with:
          workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
          service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
      - uses: google-github-actions/setup-gcloud@v2
        with:
          version: '>= 363.0.0'
      - name: ensure prod dataset exists
        run: |
          source .venv/bin/activate
          source .envs/prod/.env
          dbt run-operation ensure_target_dataset_exists
          bq update --source .envs/prod/dataset_acl.json "${DBT_PROJECT}:${DBT_DATASET}"
      - name: load prod safety db
        run: |
          source .venv/bin/activate
          source .envs/prod/.env
          python etl/safety_db/load_missing_partitions.py
      - name: dbt prod build
        uses: ./.github/actions/dbt_build
        with:
          env: prod
      - name: dbt prod contracts
        uses: ./.github/actions/dbt_contract_test
        with:
          env: prod
      # Publish the dbt docs site (built into target/) to GitHub Pages.
      - name: upload prod pages
        uses: actions/upload-pages-artifact@v3
        with:
          path: target
      - name: deploy prod pages
        uses: actions/deploy-pages@v4