|
1 | 1 | #!/usr/bin/env bash
|
2 | 2 | set -ex
|
3 | 3 |
|
4 |
| -# Creating python 3.8 virtual environment to run dbt warehouse-transform job |
5 |
| -PYTHON38_VENV="py38_venv" |
6 |
| -virtualenv --python=python3.8 --clear "${PYTHON38_VENV}" |
7 |
| -source "${PYTHON38_VENV}/bin/activate" |
| 4 | +# Create a Python 3.11 virtual environment to run the dbt warehouse-transform job |
| 5 | +PYTHON311_VENV="py311_venv" |
| 6 | +virtualenv --python=python3.11 --clear "${PYTHON311_VENV}" |
| 7 | +source "${PYTHON311_VENV}/bin/activate" |
8 | 8 |
|
9 | 9 | # Set up to run the python script that creates the snowflake schema
|
10 | 10 | cd $WORKSPACE/analytics-tools/snowflake
|
|
48 | 48 | # Schema_Name will be the GitHub Pull Request ID e.g. 1724, prefixed with 'PR_' and suffixed with the project name, e.g. PR_1724_reporting
|
49 | 49 | export CI_SCHEMA_NAME=PR_${ghprbPullId}_reporting
|
50 | 50 | # Schema is dynamically created against each PR. It is the PR number prefixed with 'PR_'.
|
51 |
| - # profiles.yml contains the name of Schema which is used to create output models when dbt runs. |
52 |
| - python create_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PATH --automation_user $USER --account $ACCOUNT --db_name $DB_NAME --schema_name $CI_SCHEMA_NAME |
| 51 | + # profiles.yml contains the name of Schema which is used to create output models when dbt runs. |
| 52 | + python create_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PATH --automation_user $USER --account $ACCOUNT --db_name $DB_NAME --schema_name $CI_SCHEMA_NAME |
53 | 53 | # The create_ci_schema python script does not just create the schema; it also drops the schema if it already exists. The reason is that if a dbt model changes tables that are
|
54 | 54 | # created in the seed job, the job would fail, so dropping those tables (or deleting the whole schema) is important to avoid such a failure. We noticed that while create_ci_schema is running,
|
55 | 55 | # the dbt commands below start running, since they use different sessions (warehouse and users); a sleep is added so the drop-and-create operation completes before dbt runs.
|
|
60 | 60 | DBT_RUN_EXCLUDE='' ## Add excluded models here if any
|
61 | 61 | # This is a Slim CI syntax used to "test" only modified and downstream models
|
62 | 62 | DBT_TEST_OPTIONS="-m state:modified+ --defer --state $WORKSPACE/manifest"
|
63 |
| - DBT_TEST_EXCLUDE='--exclude test_name:relationships' |
| 63 | + DBT_TEST_EXCLUDE='--exclude test_name:relationships' |
64 | 64 |
|
65 | 65 | source $WORKSPACE/jenkins-job-dsl/dataeng/resources/warehouse-transforms-ci-dbt.sh
|
66 | 66 |
|
67 | 67 | cd $WORKSPACE/analytics-tools/snowflake
|
68 |
| - python remove_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PATH --automation_user $USER --account $ACCOUNT --db_name $DB_NAME --schema_name $CI_SCHEMA_NAME |
| 68 | + python remove_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PATH --automation_user $USER --account $ACCOUNT --db_name $DB_NAME --schema_name $CI_SCHEMA_NAME |
69 | 69 |
|
70 | 70 | fi
|
71 | 71 |
|
|
88 | 88 | cd $WORKSPACE/analytics-tools/snowflake
|
89 | 89 | python remove_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PATH --automation_user $USER --account $ACCOUNT --db_name $DB_NAME --schema_name $CI_SCHEMA_NAME
|
90 | 90 |
|
91 |
| - |
| 91 | + |
92 | 92 | fi
|
93 | 93 |
|
94 | 94 | if [ "$isRawToSource" == "true" ]
|
|
0 commit comments