---
name: DBT_prod_scheduled_workflow

# Controls when the action will run.
# NOTE(review): the workflow name says "scheduled" but only a manual trigger is
# defined here — confirm whether a `schedule:` (cron) trigger was intended.
on:
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
    inputs:
      name:
        description: 'Reason for Manual Trigger'
        required: true
        default: 'Custom re-run'

# Environment variable values are retrieved from GitHub secrets
env:
  DBT_TARGET: PROD
  DBT_SNOWFLAKE_ACCOUNT: ${{ secrets.DBT_SNOWFLAKE_ACCOUNT }}
  DBT_USER: ${{ secrets.DBT_PROD_USER }}
  DBT_PASSWORD: ${{ secrets.DBT_PROD_PASSWORD }}
  DBT_ROLE: PROD_TRANSFORMER
  DBT_DB: ONEDB
  DBT_SCHEMA: EDM_AI
  DBT_WH: PROD_TRANSFORMER_WH

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "build"
  build:
    # The type of runner that the job will run on
    runs-on: self-hosted
    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      - name: Event info
        run: |
          echo "This workflow on ${{ github.repository }} was started by ${{ github.actor }} from the event ${{ github.event_name }} and action ${{ github.event.action }}"
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - name: Checkout branch
        uses: actions/checkout@v3
      - name: Install DBT
        run: |
          pip3 install dbt-snowflake==1.0.0
      - name: Install dbt dependency packages
        run: |
          dbt deps
      # Drop every seed table before re-seeding. The table list is iterated so a
      # new seed only needs one name appended to `tables` below, and the --args
      # payload is built inside one double-quoted string instead of the fragile
      # quote-concatenation ('{dbname: '${dbname}'...') used previously.
      - name: drop seed tables
        run: |
          dbname=ONEDB
          tables="NON_FLEX_CRM_PERSON NON_FLEX_CRM_PRODUCTS FLEX_LOB NON_FLEX_WC_POSITION WC_ORG_LKP_CATEGORY ONEDBGOLIVE THIRD_PARTY_PERMISSIONS THIRD_PARTY_READER_ACCOUNTS WC_ORG_D_SEED"
          for tablename in ${tables}; do
            dbt run-operation drop_seed_table --args "{dbname: ${dbname}, tablename: ${tablename}}" --profiles-dir ./dbt_profiles
          done
      - name: Deploy the seeds for this release
        run: |
          dbt seed --vars '{"batch_name":"github_batch"}' --profiles-dir ./dbt_profiles
      # Runs all models tagged `daily` (the previous step name claimed an
      # "example folder", which did not match the selector actually used).
      - name: Deploy the models tagged daily
        run: |
          dbt run --full-refresh --model tag:daily --vars '{"batch_name":"DailyBatch"}' --profiles-dir ./dbt_profiles