dbt_project.yml
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "snowflake_demo"
version: "1.0.0"
config-version: 2
# v1.0.0: The config source-paths has been deprecated in favor of model-paths
# v1.0.0: The clean-target dbt_modules has been deprecated in favor of dbt_packages
# v1.6.0: We are now utilizing the new support for dynamic_tables
# v1.7.0: There were several important bugs fixed for dynamic tables
require-dbt-version: ">=1.7.0"
# This setting configures which "profile" dbt uses for this project.
profile: "SNOWFLAKE"
# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
# found in the "models/" directory. You probably won't need to change these!
#model-paths: ["models"]
#analysis-paths: ["analysis"]
#test-paths: ["tests"]
#seed-paths: ["data"]
#macro-paths: ["macros"]
#snapshot-paths: ["snapshots"]
#target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
  - "target"
  - "dbt_packages" # Comment to not clean up dependencies
  - "logs"
# Global initialization and shutdown commands
# These are not required but will allow us to reduce consumption to the second
on-run-start:
- "{{ create_masking_policies() }}"
#on-run-end:
#- "{{ dbt_artifacts.upload_results(results) }}"
#- "ALTER WAREHOUSE DFLIPPO_DBT_XLARGE_DEMO_WH SUSPEND"
#- "{% if target.name == 'prod' %}{{ dbt_artifacts.upload_results(results) }}{% endif %}"
# Global variables
vars:
  # The number of days that we want to reprocess data when doing incremental loads
  prune_days: 2
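  # Purely illustrative usage (the model filter and column name below are hypothetical,
  # not taken from this project): an incremental model could prune its scan with this
  # variable, e.g.
  #   {% if is_incremental() %}
  #   where order_date >= dateadd('day', -{{ var('prune_days') }}, current_date)
  #   {% endif %}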
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models
# In this example config, we tell dbt by default to build all models as views.
# These settings can be overridden at lower levels and in the individual model files
# using the `{{ config(...) }}` macro.
models:
  # You can set custom properties on models that will be stored in the dbt logs
  +meta:
    owner: "Dan Flippo"
    owner_email: "[email protected]"
  pre-hook:
    - "{{ set_warehouse() }}"
  # Config indicated by + applies to all files at and below that level
  post-hook:
    - "{{ dbt_snow_mask.apply_masking_policy() }}"
  # By default grants are lost unless you add this configuration
  +copy_grants: true
  +snowflake_warehouse: "DFLIPPO_XSMALL_WH"
  +materialized: view
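  # Hedged sketch of how the dbt_snow_mask post-hook above is typically driven: the package
  # reads column-level meta from a schema .yml file (the column and policy names here are
  # hypothetical, not from this project), e.g.
  #   columns:
  #     - name: c_phone
  #       meta:
  #         masking_policy: phone_mask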
  snowflake_demo:
    # We will store table and column descriptions in Snowflake
    +persist_docs:
      relation: true
      columns: true
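    # For illustration only (model and column names are hypothetical): the persisted
    # descriptions come from ordinary schema .yml entries such as
    #   models:
    #     - name: dim_customer
    #       description: "One row per customer"
    #       columns:
    #         - name: customer_id
    #           description: "Surrogate key for the customer"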
    10_raw:
      +tags: "Raw_Layer"
    20_integration:
      +tags: "Integration_Layer"
      TPC_H_PERFORMANCE:
        +materialized: table
        +schema: TPC_H
        # This folder will use a specific WH
        #+snowflake_warehouse: "DFLIPPO_DBT_XLARGE_DEMO_WH"
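        # Note on +schema: with dbt's default generate_schema_name macro, a custom schema
        # is appended to the target schema (e.g. <target_schema>_TPC_H). A minimal override
        # sketch (not necessarily what this project does) delegates to dbt's built-in
        # generate_schema_name_for_env, which uses the custom name as-is on the prod target:
        #   {% macro generate_schema_name(custom_schema_name, node) -%}
        #       {{ generate_schema_name_for_env(custom_schema_name, node) }}
        #   {%- endmacro %}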
    30_presentation:
      +tags: "Presentation_Layer"
      +materialized: table
      # dbt supports many Snowflake specific features. Here we will make one folder all SECURE views
      sensitive:
        +materialized: view
        +secure: true
  # The dbt_artifacts configuration here is for logging your executions
  dbt_artifacts:
    +enabled: false
    +database: "{{ env_var('DBT_ARTIFACTS_DATABASE', target.database ) }}" # optional, default is your target database
    +schema: "{{ env_var('DBT_ARTIFACTS_SCHEMA', target.schema ) }}" # optional, default is your target schema
    sources:
      enabled: true
      materialized: incremental
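    # For illustration only (the database and schema names are hypothetical), the env vars
    # read above could be exported before invoking dbt:
    #   export DBT_ARTIFACTS_DATABASE=LOGGING_DB
    #   export DBT_ARTIFACTS_SCHEMA=DBT_ARTIFACTS
    #   dbt run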
snapshots:
  # The snowflake__snapshot_hash_arguments macro in this project removes MD5 from the dbt_scd_id
  # Below is an expensive one-time update macro that can fix any previously created snapshot records
  # This should be removed after it has executed at least once
  pre-hook: "{{ refresh_dbt_scd_id() }}"
  target_schema: "DBT_DEMO_SNAPSHOTS"
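  # Hedged sketch only, not necessarily this project's implementation: an override that
  # drops the md5() wrapper from dbt's default snapshot_hash_arguments could simply
  # concatenate the arguments, e.g.
  #   {% macro snowflake__snapshot_hash_arguments(args) -%}
  #       {%- for arg in args -%}
  #           coalesce(cast({{ arg }} as varchar), '')
  #           {%- if not loop.last %} || '|' || {% endif -%}
  #       {%- endfor -%}
  #   {%- endmacro %}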
  +meta:
    owner: "Dan Flippo"
    owner_email: "[email protected]"
  snowflake_demo:
    30_presentation:
      +tags: "Presentation_Layer"
tests:
  # You can set custom properties on tests that will be stored in the dbt logs
  +meta:
    owner: "Dan Flippo"
    owner_email: "[email protected]"
  +limit: 1000 # will only include the first 1000 failures
  # +store_failures: true # by default we will store failures
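  # Purely illustrative: when store_failures is enabled, failing rows are written to an
  # audit schema (by default suffixed with dbt_test__audit) and can be inspected with
  # ad hoc SQL such as
  #   select * from <target_database>.<target_schema>_dbt_test__audit.<test_name>;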