Commit

fix(Hadoop): Fix argument validation and name (#424)
* fix(hadoop): Fix admin_user_password validation

* fix(hadoop): Change login_key to login_key_name

* fix(hadoop): Allow dot character in bootstrap_script

* fix(hadoop): Fix admin_user_password regexp
youngmn authored Apr 24, 2024
1 parent b96ef7c commit 80ba996
Showing 7 changed files with 13 additions and 13 deletions.
2 changes: 1 addition & 1 deletion docs/data-sources/hadoop.md
@@ -52,7 +52,7 @@ This data source exports the following attributes in addition to the arguments a
* `version` - The version of Hadoop.
* `ambari_server_host` - The name of ambari host.
* `cluster_direct_access_account` - Account name with direct access to the cluster.
- * `login_key` - The login key name.
+ * `login_key_name` - The login key name.
* `bucket_name` - The name of object storage bucket.
* `use_kdc` - Whether to use Kerberos authentication configuration.
* `kdc_realm` - Realm information of kerberos authentication.
2 changes: 1 addition & 1 deletion docs/resources/hadoop.md
@@ -76,7 +76,7 @@ The following arguments are supported:
* `cluster_type_code` - (Required) Cluster type code to determine the cluster type to create. Options: CORE_HADOOP_WITH_SPARK
* `admin_user_name` - (Required) Admin user name of cluster to create. It is the administrator account required to access the Ambari management console. Can only be composed of English letters (lowercase), numbers, and dashes (-). Must start and end with an English letter (lowercase) or a number. Min: 3, Max: 15
* `admin_user_password` - (Required) Admin user password of cluster to create. Must include at least 1 alphabetical character (capital letter), special character, and number. Special characters, such as single quotations ('), double quotations ("), the KRW symbol (₩), slashes (/), ampersands (&), back quotes (`), and spaces cannot be included. Min: 8, Max: 20
- * `login_key` - (Required) Login key name to set the SSH authentication key required when connecting directly to the node.
+ * `login_key_name` - (Required) Login key name to set the SSH authentication key required when connecting directly to the node.
* `edge_node_subnet_no` - (Required) The Subnet ID of edge node. Can select a subnet that will locate the edge node. Edge nodes are located in private/public subnets.
* `master_node_subnet_no` - (Required) The Subnet ID of master node. Can select a subnet that will locate the master node. Master nodes are located in private/public subnets
* `worker_node_subnet_no` - (Required) The Subnet ID of worker node. Must be located in Private Subnet.
2 changes: 1 addition & 1 deletion examples/hadoop/main.tf
@@ -50,7 +50,7 @@ resource "ncloud_hadoop" "hadoop" {
cluster_type_code = "CORE_HADOOP_WITH_SPARK"
admin_user_name = var.admin_user_name
admin_user_password = var.admin_user_password
- login_key = ncloud_login_key.login_key.key_name
+ login_key_name = ncloud_login_key.login_key.key_name
edge_node_subnet_no = ncloud_subnet.edge_subnet.subnet_no
master_node_subnet_no = ncloud_subnet.master_subnet.subnet_no
worker_node_subnet_no = ncloud_subnet.worker_subnet.subnet_no
12 changes: 6 additions & 6 deletions internal/service/hadoop/hadoop.go
@@ -109,13 +109,13 @@ func (r *hadoopResource) Schema(_ context.Context, req resource.SchemaRequest, r
stringvalidator.LengthBetween(8, 20),
stringvalidator.RegexMatches(regexp.MustCompile(`[A-Z]+`), "Must have at least one uppercase alphabet"),
stringvalidator.RegexMatches(regexp.MustCompile(`\d+`), "Must have at least one number"),
- stringvalidator.RegexMatches(regexp.MustCompile(`[~!@#$%^*()\-_=\[\]\{\};:,.<>?]+`), "Must have at least one special character"),
- stringvalidator.RegexMatches(regexp.MustCompile(`^[^&+\\"'/\s`+"`"+`]*$`), "Must not have ` & + \\ \" ' / and white space."),
+ stringvalidator.RegexMatches(regexp.MustCompile(`[\W_]+`), "Must have at least one special character"),
+ stringvalidator.RegexMatches(regexp.MustCompile(`^[^&\\"'/\s`+"`"+`]*$`), "Must not have ` & \\ \" ' / and white space."),
),
},
Sensitive: true,
},
"login_key": schema.StringAttribute{
"login_key_name": schema.StringAttribute{
Required: true,
PlanModifiers: []planmodifier.String{
stringplanmodifier.RequiresReplace(),
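The removed `admin_user_password` patterns required the special character to come from a fixed set and blocked `+`; the replacements accept any non-alphanumeric character (or underscore) as the special character while still rejecting the characters named in the error message. Below is a minimal standalone Go sketch of how the updated chain combines, using only the standard `regexp` package; the `validPassword` helper and the non-test sample inputs are illustrative, not part of the provider.

```go
package main

import (
	"fmt"
	"regexp"
)

// The patterns from the updated validator chain above.
var (
	hasUpper    = regexp.MustCompile(`[A-Z]+`)
	hasDigit    = regexp.MustCompile(`\d+`)
	hasSpecial  = regexp.MustCompile(`[\W_]+`)
	noForbidden = regexp.MustCompile(`^[^&\\"'/\s` + "`" + `]*$`) // rejects ` & \ " ' / and whitespace
)

// validPassword mirrors the combined checks: LengthBetween(8, 20) plus the regexes.
// Byte length is used here, which equals the character count for ASCII-only passwords.
func validPassword(p string) bool {
	if len(p) < 8 || len(p) > 20 {
		return false
	}
	return hasUpper.MatchString(p) &&
		hasDigit.MatchString(p) &&
		hasSpecial.MatchString(p) &&
		noForbidden.MatchString(p)
}

func main() {
	// "Admin!2Admin" is the password used in the acceptance tests below;
	// the remaining samples are made up to show each rejection case.
	samples := []string{"Admin!2Admin", "admin12345!", "ADMIN12345", "Admin 2Admin", "Admin&2Admin"}
	for _, p := range samples {
		fmt.Printf("%-16q %v\n", p, validPassword(p))
	}
}
```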
@@ -304,8 +304,8 @@ func (r *hadoopResource) Schema(_ context.Context, req resource.SchemaRequest, r
}...),
stringvalidator.LengthAtMost(1024),
stringvalidator.RegexMatches(
- regexp.MustCompile(`^[a-zA-Z]+$`),
- "Composed of alphabets.",
+ regexp.MustCompile(`^[a-zA-Z.]+$`),
+ "Only English and dot characters are supported. Maximum length only up to 1024 Bytes",
),
},
},
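The relaxed `bootstrap_script` pattern only adds `.` to the accepted character class, so a name like `bootstrap.sh` now passes, while digits, slashes, and underscores are still rejected. A quick illustrative comparison (the sample names are made up):

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Old and new bootstrap_script patterns from the diff above.
	oldPattern := regexp.MustCompile(`^[a-zA-Z]+$`)
	newPattern := regexp.MustCompile(`^[a-zA-Z.]+$`)

	// Made-up script names for illustration.
	for _, name := range []string{"bootstrap", "bootstrap.sh", "scripts/bootstrap.sh", "bootstrap_v2.sh"} {
		fmt.Printf("%-24q old=%-5v new=%v\n", name, oldPattern.MatchString(name), newPattern.MatchString(name))
	}
}
```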
@@ -852,7 +852,7 @@ type hadoopResourceModel struct {
ClusterTypeCode types.String `tfsdk:"cluster_type_code"`
AdminUserName types.String `tfsdk:"admin_user_name"`
AdminUserPassword types.String `tfsdk:"admin_user_password"`
- LoginKey types.String `tfsdk:"login_key"`
+ LoginKey types.String `tfsdk:"login_key_name"`
EdgeNodeSubnetNo types.String `tfsdk:"edge_node_subnet_no"`
MasterNodeSubnetNo types.String `tfsdk:"master_node_subnet_no"`
WorkerNodeSubnetNo types.String `tfsdk:"worker_node_subnet_no"`
4 changes: 2 additions & 2 deletions internal/service/hadoop/hadoop_data_source.go
@@ -108,7 +108,7 @@ func (d *hadoopDataSource) Schema(ctx context.Context, req datasource.SchemaRequ
"cluster_direct_access_account": schema.StringAttribute{
Computed: true,
},
"login_key": schema.StringAttribute{
"login_key_name": schema.StringAttribute{
Computed: true,
},
"bucket_name": schema.StringAttribute{
@@ -284,7 +284,7 @@ type hadoopDataSourceModel struct {
Version types.String `tfsdk:"version"`
AmbariServerHost types.String `tfsdk:"ambari_server_host"`
ClusterDirectAccessAccount types.String `tfsdk:"cluster_direct_access_account"`
- LoginKey types.String `tfsdk:"login_key"`
+ LoginKey types.String `tfsdk:"login_key_name"`
BucketName types.String `tfsdk:"bucket_name"`
UseKdc types.Bool `tfsdk:"use_kdc"`
KdcRealm types.String `tfsdk:"kdc_realm"`
2 changes: 1 addition & 1 deletion internal/service/hadoop/hadoop_data_source_test.go
@@ -82,7 +82,7 @@ resource "ncloud_hadoop" "hadoop" {
cluster_type_code = "CORE_HADOOP_WITH_SPARK"
admin_user_name = "admin-test"
admin_user_password = "Admin!2Admin"
- login_key = ncloud_login_key.login_key.key_name
+ login_key_name = ncloud_login_key.login_key.key_name
edge_node_subnet_no = ncloud_subnet.edge_subnet.subnet_no
master_node_subnet_no = ncloud_subnet.master_subnet.subnet_no
worker_node_subnet_no = ncloud_subnet.worker_subnet.subnet_no
2 changes: 1 addition & 1 deletion internal/service/hadoop/hadoop_test.go
@@ -161,7 +161,7 @@ resource "ncloud_hadoop" "hadoop" {
cluster_type_code = "CORE_HADOOP_WITH_SPARK"
admin_user_name = "admin-test"
admin_user_password = "Admin!2Admin"
- login_key = ncloud_login_key.login_key.key_name
+ login_key_name = ncloud_login_key.login_key.key_name
edge_node_subnet_no = ncloud_subnet.edge_subnet.subnet_no
master_node_subnet_no = ncloud_subnet.master_subnet.subnet_no
worker_node_subnet_no = ncloud_subnet.worker_subnet.subnet_no