Example - Test_DBT_Execute.yml

 
#========================================================================================================================
# -- Description: This workflow performs the following operations:
#                 1- Clone the production database into a TESTDB
#                 2- Deploy the modified models into the TESTDB
#                 3- Test the deployed models in the TESTDB
#                 4- Drop the cloned TESTDB automatically when the pull request is closed
#========================================================================================================================
 
name: DBT_testing_workflow
 
# Controls when the workflow will run.
on:
  # Triggers the workflow on pull request events, but only for pull requests targeting the main branch
  pull_request:
    types: [opened, closed, synchronize]
    branches: [ main ]
 
 
# Environment variable values; sensitive ones are retrieved from GitHub secrets
# (see the profiles.yml sketch after the workflow for how dbt is assumed to read them).
env:
  DBT_TARGET:            PROD
  DBT_SNOWFLAKE_ACCOUNT: ${{ secrets.DBT_SNOWFLAKE_ACCOUNT }}
  DBT_USER:              ${{ secrets.DBT_PROD_USER }}
  DBT_PASSWORD:          ${{ secrets.DBT_PROD_PASSWORD }}
  DBT_ROLE:              PROD_TRANSFORMER
  DBT_DB:                ONEDB
  DBT_SCHEMA:            EDM_AI
  DBT_WH:                PROD_TRANSFORMER_WH
 
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains two jobs: one to clone the production database, deploy and test the modified
  # models, and one to drop the cloned test database when the pull request is closed.
  clonedeploytest:
    if: github.event.action == 'opened' || github.event.action == 'synchronize'
 
    # The type of runner that the job will run on
    runs-on: ubuntu-latest
 
    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      - name: Event info
        run: |
          echo "This workflow on ${{ github.repository }} was started by ${{ github.actor }} from the event ${{ github.event_name }} and action ${{ github.event.action }} for pull request #${{ github.event.pull_request.number }}"
 
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - name: Checkout
        uses: actions/checkout@v3
 
      - name: Checkout main to compare against
        uses: actions/checkout@v3
        with:
          repository: ONEDB/DataBuildTool
          ref: main
          path: main
 
      - name: Install Python 3.8
        uses: actions/setup-python@v4
        with:
          python-version: 3.8
 
      - name: Install dbt==1.6.0
        run: |
          python -m pip install --upgrade pip
          pip install dbt-snowflake==1.6.0
 
      - name: Install dbt dependency packages
        run: |
          dbt deps          
          
      - name: Create zero-copy clone of production
        run: |
          echo ${{ github.head_ref }}
          dbname=testdb_${{ github.head_ref }}
          # The --args string is parsed by dbt as YAML (see the expanded mapping sketch after the workflow)
          dbt run-operation clone_prod_to_test --args '{createordrop: create, dbname: '${dbname}', testrole: PROD_TRANSFORMER}' --profiles-dir ./dbt_profiles
          # Point the remaining dbt steps at the clone: uppercase the name and wrap it in double quotes
          echo "DBT_DB=$(echo '"'${dbname^^}'"')" >> $GITHUB_ENV
      
      - name: Compile dbt models and build the main branch manifest
        run: |
          dbt compile --profiles-dir ./dbt_profiles
          cd main
          dbt deps
          # We only compile a single model here to force dbt to produce the manifest file on main;
          # we don't need to build everything. From the docs: "manifest: produced by commands that read and understand your project".
          dbt compile --select curfl_Responsibility --profiles-dir ../dbt_profiles
          cd ..
 
      - name: Drop seed tables
        run: |
          dbname=testdb_${{github.head_ref}}
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: NON_FLEX_CRM_PERSON}' --profiles-dir ./dbt_profiles
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: NON_FLEX_CRM_PRODUCTS}' --profiles-dir ./dbt_profiles
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: LINE_OF_BUSINESS}' --profiles-dir ./dbt_profiles
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: NON_FLEX_WC_POSITION}' --profiles-dir ./dbt_profiles
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: WC_ORG_LKP_CATEGORY}' --profiles-dir ./dbt_profiles
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: LENDERGOLIVE}' --profiles-dir ./dbt_profiles
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: THIRD_PARTY_PERMISSIONS}' --profiles-dir ./dbt_profiles
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: THIRD_PARTY_READER_ACCOUNTS}' --profiles-dir ./dbt_profiles
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: WC_ORG_D_SEED}' --profiles-dir ./dbt_profiles
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: MAJOR_LENDERS}' --profiles-dir ./dbt_profiles
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: NON_FLEX_REV_DATA}' --profiles-dir ./dbt_profiles
          dbt run-operation drop_seed_table --args '{dbname: '${dbname}', tablename: NON_FLEX_PRODUCT_DATA}' --profiles-dir ./dbt_profiles            
 
      - name: Deploy the seeds for this release
        run: |
          dbt seed --vars '{"batch_name":"github_batch"}' --profiles-dir ./dbt_profiles  
 
      - name: Deploy the models for this release
        run: |
          dbt run --select state:modified+ --state ${{ github.workspace }}/main/target/ --vars '{"batch_name":"github_batch"}' --profiles-dir ./dbt_profiles  
      
      - name: Test the models for this release
        run: |
          dbt test --select state:modified+ --state ${{ github.workspace }}/main/target/ --profiles-dir ./dbt_profiles
 
  dropclonedb:
    if: github.event.action == 'closed' 
    # The type of runner that the job will run on
    runs-on: ubuntu-latest
 
    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      - name: Event info
        run: |
          echo "This workflow on ${{ github.repository }} was started by ${{ github.actor }} from the event ${{ github.event_name }} and action ${{ github.event.action }} for pull request #${{ github.event.pull_request.number }}"
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - name: Checkout branch
        uses: actions/checkout@v3
 
      - name: Install Python 3.8
        uses: actions/setup-python@v4
        with:
          python-version: 3.8
 
      - name: Install dbt==1.6.0
        run: |
          python -m pip install --upgrade pip
          pip install dbt-snowflake==1.6.0
 
      - name: Install dbt dependency packages
        run: |
          dbt deps          
          
      - name: Drop the cloned database
        run: |
          echo ${{ github.head_ref }}
          dbname=testdb_${{ github.head_ref }}
          dbt run-operation clone_prod_to_test --args '{createordrop: drop, dbname: '${dbname}', testrole: PROD_TRANSFORMER}' --profiles-dir ./dbt_profiles
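 
Notes on the profile configuration: the workflow passes --profiles-dir ./dbt_profiles and exports the DBT_* environment variables above, so the repository is assumed to ship a dbt_profiles/profiles.yml that reads them via env_var(). A minimal sketch of such a profile is shown below; the profile name (onedb_profile) and thread count are illustrative and must match whatever the repository's dbt_project.yml actually declares.
 
# dbt_profiles/profiles.yml -- illustrative sketch, not the repository's actual file
onedb_profile:                                # hypothetical profile name; must match dbt_project.yml
  target: "{{ env_var('DBT_TARGET') }}"       # resolves to PROD in this workflow
  outputs:
    PROD:
      type: snowflake
      account:   "{{ env_var('DBT_SNOWFLAKE_ACCOUNT') }}"
      user:      "{{ env_var('DBT_USER') }}"
      password:  "{{ env_var('DBT_PASSWORD') }}"
      role:      "{{ env_var('DBT_ROLE') }}"
      database:  "{{ env_var('DBT_DB') }}"    # switches to the TESTDB clone once the job rewrites DBT_DB in $GITHUB_ENV
      warehouse: "{{ env_var('DBT_WH') }}"
      schema:    "{{ env_var('DBT_SCHEMA') }}"
      threads: 4                              # illustrative value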
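 
Notes on the run-operation arguments: the --args value is a YAML string assembled in the shell, with ${dbname} spliced in between the single-quoted fragments. For a hypothetical branch named feature_x, the clone step above is equivalent to passing the following mapping to the clone_prod_to_test macro; the drop_seed_table calls and the final drop step follow the same pattern with different keys or with createordrop: drop.
 
# Equivalent --args payload for a branch named feature_x (hypothetical example)
createordrop: create
dbname: testdb_feature_x
testrole: PROD_TRANSFORMER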
          
      
 