# variables.tf

variable "workspace_id" {
type = string
description = "Databricks Workspace ID"
}
variable "sp_client_id_secret_name" {
type = string
description = "The name of Azure Key Vault secret that contains ClientID of Service Principal to access in Azure Key Vault"
}
variable "sp_key_secret_name" {
type = string
description = "The name of Azure Key Vault secret that contains client secret of Service Principal to access in Azure Key Vault"
}
variable "tenant_id_secret_name" {
type = string
description = "The name of Azure Key Vault secret that contains tenant ID secret of Service Principal to access in Azure Key Vault"
}
variable "key_vault_id" {
type = string
description = "ID of the Key Vault instance where the Secret resides"
}
variable "pat_token_lifetime_seconds" {
type = number
description = "The lifetime of the token, in seconds. If no lifetime is specified, the token remains valid indefinitely"
default = 315569520
}
variable "users" {
type = list(string)
description = "List of users to access Databricks"
default = []
}
variable "permissions" {
type = list(map(string))
description = "Databricks Workspace permission maps"
default = [
{
object_id = null
role = null
}
]
}
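# A sketch of the expected shape (hypothetical object_id and role values; the
# valid role names depend on how this map is consumed downstream):
# permissions = [
#   {
#     object_id = "00000000-0000-0000-0000-000000000000"
#     role      = "Contributor"
#   }
# ]
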
# Shared autoscaling cluster config variables
variable "cluster_nodes_availability" {
  type        = string
  description = "Availability type used for all subsequent nodes past the first_on_demand ones: [SPOT_AZURE|SPOT_WITH_FALLBACK_AZURE|ON_DEMAND_AZURE]"
  default     = null
}

variable "first_on_demand" {
  type        = number
  description = "The first first_on_demand nodes of the cluster will be placed on on-demand instances"
  default     = 0
}

variable "spot_bid_max_price" {
  type        = number
  description = "The max price for Azure spot instances. Use -1 to specify the lowest price."
  default     = -1
}

variable "autotermination_minutes" {
  type        = number
  description = "Automatically terminate the cluster after it has been inactive for this many minutes. If not set, Databricks won't automatically terminate an inactive cluster. If specified, the threshold must be between 10 and 10000 minutes. You can also set this value to 0 to explicitly disable automatic termination."
  default     = 15
}

variable "min_workers" {
  type        = number
  description = "The minimum number of workers to which the cluster can scale down when underutilized. It is also the initial number of workers the cluster will have after creation."
  default     = 1
}

variable "max_workers" {
  type        = number
  description = "The maximum number of workers to which the cluster can scale up when overloaded. max_workers must be strictly greater than min_workers."
  default     = 2
}

variable "data_security_mode" {
type = string
description = "Security features of the cluster"
default = "NONE"
validation {
condition = contains(["SINGLE_USER", "USER_ISOLATION", "NONE"], var.data_security_mode)
error_message = "Catalog Access mode must be either 'SINGLE_USER', 'USER_ISOLATION' or 'NONE' value"
}
}
variable "single_user_name" {
type = string
description = "single user cluster mode"
default = null
}
variable "custom_default_cluster_name" {
type = string
description = "Databricks cluster name, which does not have to be unique"
default = null
}
variable "spark_version" {
type = string
description = "Runtime version"
default = "11.3.x-scala2.12"
}
variable "spark_conf" {
type = map(any)
description = "Map with key-value pairs to fine-tune Spark clusters, where you can provide custom Spark configuration properties in a cluster configuration."
default = {}
}
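# Illustrative example (any valid Spark configuration property can be used):
# spark_conf = {
#   "spark.sql.shuffle.partitions" = "200"
# }
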
variable "spark_env_vars" {
type = map(any)
description = "Map with environment variable key-value pairs to fine-tune Spark clusters. Key-value pairs of the form (X,Y) are exported (i.e., X='Y') while launching the driver and workers."
default = {}
}
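# Illustrative example (hypothetical variable name):
# spark_env_vars = {
#   "ENVIRONMENT" = "dev"
# }
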
variable "cluster_log_conf_destination" {
type = string
description = "Provide a dbfs location to push all cluster logs to certain location"
default = ""
validation {
condition = length(var.cluster_log_conf_destination) == 0 ? true : startswith(var.cluster_log_conf_destination, "dbfs:/")
error_message = "Provide valid path to dbfs logs folder, example: 'dbfs:/logs'"
}
}
variable "node_type" {
type = string
description = "Databricks_node_type id"
default = "Standard_D3_v2"
}
variable "mountpoints" {
type = map(object({
storage_account_name = string
container_name = string
}))
description = "Mountpoints for databricks"
default = {}
}
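# Illustrative example (hypothetical storage account and container names):
# mountpoints = {
#   raw = {
#     storage_account_name = "examplestorageaccount"
#     container_name       = "raw"
#   }
# }
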
# Secret Scope variables
variable "secret_scope" {
  type = list(object({
    scope_name = string
    acl = optional(list(object({
      principal  = string
      permission = string
    })))
    secrets = optional(list(object({
      key          = string
      string_value = string
    })))
  }))
  description = <<-EOT
    Provides the ability to create a custom Secret Scope, store secrets in it, and assign ACLs for access management
    scope_name - name of the Secret Scope to create;
    acl - list of objects, where 'principal' is a custom group name (this group is created in the 'Premium' module) and 'permission' is one of "READ", "WRITE", or "MANAGE";
    secrets - list of objects, where 'key' is the name of the secret to create and 'string_value' is its value;
  EOT
  default = [{
    scope_name = null
    acl        = null
    secrets    = null
  }]
}

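# Illustrative example (hypothetical scope, group, and secret names):
# secret_scope = [{
#   scope_name = "example-scope"
#   acl = [{
#     principal  = "data-engineers"
#     permission = "READ"
#   }]
#   secrets = [{
#     key          = "example-key"
#     string_value = "example-value"
#   }]
# }]
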
# In the near future, Azure will allow service principals to acquire AAD tokens,
# thus providing the ability to create an Azure-backed Secret Scope with Terraform
# https://github.com/databricks/terraform-provider-databricks/pull/1965
#variable "key_vault_secret_scope" {
#  type = object({
#    key_vault_id = string
#    dns_name     = string
#  })
#  description = "Object with Azure Key Vault parameters required for creation of an Azure-backed Databricks Secret Scope"
#  default = {
#    key_vault_id = null
#    dns_name     = null
#  }
#}